gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* $Id$
*/
/*
Copyright (c) 2014-2015 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.ws.content;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.net.URL;
import java.util.Properties;
import javax.xml.namespace.QName;
import javax.xml.ws.Service;
import org.lockss.account.AccountManager;
import org.lockss.account.UserAccount;
import org.lockss.config.ConfigManager;
import org.lockss.config.Configuration;
import org.lockss.daemon.Crawler;
import org.lockss.plugin.PluginManager;
import org.lockss.plugin.PluginTestUtil;
import org.lockss.plugin.simulated.SimulatedArchivalUnit;
import org.lockss.plugin.simulated.SimulatedContentGenerator;
import org.lockss.plugin.simulated.SimulatedPlugin;
import org.lockss.protocol.MockIdentityManager;
import org.lockss.servlet.AdminServletManager;
import org.lockss.servlet.LockssServlet;
import org.lockss.servlet.ServletManager;
import org.lockss.test.ConfigurationUtil;
import org.lockss.test.LockssTestCase;
import org.lockss.test.MockAuState;
import org.lockss.test.MockLockssDaemon;
import org.lockss.test.NoCrawlEndActionsFollowLinkCrawler;
import org.lockss.test.TcpTestUtil;
import org.lockss.ws.cxf.AuthorizationInterceptor;
import org.lockss.ws.entities.ContentResult;
import org.lockss.ws.entities.LockssWebServicesFault;
/**
* Functional test class for org.lockss.ws.content.ContentService.
*
* @author Fernando Garcia-Loygorri
*/
public class FuncContentService extends LockssTestCase {
  private static final String USER_NAME = "lockss-u";
  private static final String PASSWORD = "lockss-p";
  private static final String PASSWORD_SHA1 =
      "SHA1:ac4fc8fa9930a24c8d002d541c37ca993e1bc40f";
  private static final String TARGET_NAMESPACE =
      "http://content.ws.lockss.org/";
  private static final String SERVICE_NAME = "ContentServiceImplService";

  private String tempDirPath;
  private PluginManager pluginMgr;
  private AccountManager accountManager;
  private ContentService proxy;
  private SimulatedArchivalUnit sau;

  /**
   * Starts a mock daemon with a crawled simulated Archival Unit and creates a
   * JAX-WS client proxy bound to the ContentService endpoint on a free port.
   *
   * @throws Exception if daemon or web service setup fails.
   */
  public void setUp() throws Exception {
    super.setUp();
    tempDirPath = setUpDiskSpace();
    int port = TcpTestUtil.findUnboundTcpPort();
    ConfigurationUtil.addFromArgs(AdminServletManager.PARAM_PORT, "" + port,
        ServletManager.PARAM_PLATFORM_USERNAME, USER_NAME,
        ServletManager.PARAM_PLATFORM_PASSWORD, PASSWORD_SHA1);
    MockLockssDaemon theDaemon = getMockLockssDaemon();
    accountManager = theDaemon.getAccountManager();
    accountManager.startService();
    MockIdentityManager idMgr = new MockIdentityManager();
    theDaemon.setIdentityManager(idMgr);
    idMgr.initService(theDaemon);
    pluginMgr = theDaemon.getPluginManager();
    pluginMgr.setLoadablePluginsReady(true);
    theDaemon.setDaemonInited(true);
    theDaemon.getRemoteApi().startService();
    theDaemon.getServletManager().startService();
    pluginMgr.startService();
    // Create and crawl the simulated content tree so fetches have content.
    sau = PluginTestUtil.createAndStartSimAu(simAuConfig(tempDirPath));
    sau.generateContentTree();
    Crawler crawler =
        new NoCrawlEndActionsFollowLinkCrawler(sau, new MockAuState());
    crawler.doCrawl();
    theDaemon.setAusStarted(true);
    // The client authentication.
    Authenticator.setDefault(new Authenticator() {
      @Override
      protected PasswordAuthentication getPasswordAuthentication() {
        return new PasswordAuthentication(USER_NAME, PASSWORD.toCharArray());
      }
    });
    String addressLocation =
        "http://localhost:" + port + "/ws/ContentService?wsdl";
    Service service = Service.create(new URL(addressLocation), new QName(
        TARGET_NAMESPACE, SERVICE_NAME));
    proxy = service.getPort(ContentService.class);
  }

  /**
   * Tests the fetching of a file, covering role-based authorization (only
   * userAdmin and contentAccess roles may fetch) and the error cases of a bad
   * AU identifier, a bad URL and a non-existent version.
   */
  public void testFetchFile() throws Exception {
    String auId = sau.getAuId();
    UserAccount userAccount = accountManager.getUser(USER_NAME);
    // User "userAdminRole" should succeed.
    userAccount.setRoles(LockssServlet.ROLE_USER_ADMIN);
    String url = "http://www.example.com/001file.bin";
    String outputFileSpec = tempDirPath + "/001file.bin";
    ContentResult result = proxy.fetchFile(url, null);
    assertEquals(256, result.writeContentToFile(outputFileSpec).length());
    Properties properties = result.getProperties();
    assertEquals("1", properties.get("org.lockss.version.number"));
    assertEquals("text/plain", properties.get("x-lockss-content-type"));
    assertEquals("http://www.example.com/001file.bin",
        properties.get("x-lockss-node-url"));
    result = proxy.fetchVersionedFile(url, auId, null);
    assertEquals(256, result.writeContentToFile(outputFileSpec).length());
    properties = result.getProperties();
    assertEquals("1", properties.get("org.lockss.version.number"));
    assertEquals("text/plain", properties.get("x-lockss-content-type"));
    assertEquals("http://www.example.com/001file.bin",
        properties.get("x-lockss-node-url"));
    // User "contentAdminRole" should fail.
    userAccount.setRoles(LockssServlet.ROLE_CONTENT_ADMIN);
    try {
      result = proxy.fetchVersionedFile(url, null, null);
      fail("Test should have failed for role "
          + LockssServlet.ROLE_CONTENT_ADMIN);
    } catch (LockssWebServicesFault lwsf) {
      // Expected authorization failure.
      assertEquals(AuthorizationInterceptor.NO_REQUIRED_ROLE,
          lwsf.getMessage());
    }
    // User "auAdminRole" should fail.
    userAccount.setRoles(LockssServlet.ROLE_AU_ADMIN);
    try {
      result = proxy.fetchFile(url, null);
      // Fixed copy/paste bug: the message used to name ROLE_CONTENT_ADMIN,
      // but this branch exercises ROLE_AU_ADMIN.
      fail("Test should have failed for role "
          + LockssServlet.ROLE_AU_ADMIN);
    } catch (LockssWebServicesFault lwsf) {
      // Expected authorization failure.
      assertEquals(AuthorizationInterceptor.NO_REQUIRED_ROLE,
          lwsf.getMessage());
    }
    // User "accessContentRole" should succeed.
    userAccount.setRoles(LockssServlet.ROLE_CONTENT_ACCESS);
    url = "http://www.example.com/branch1/branch1/index.html";
    outputFileSpec = tempDirPath + "/index.html";
    result = proxy.fetchVersionedFile(url, null, null);
    assertEquals(398, result.writeContentToFile(outputFileSpec).length());
    properties = result.getProperties();
    assertEquals("1", properties.get("org.lockss.version.number"));
    assertEquals("text/html", properties.get("x-lockss-content-type"));
    assertEquals("http://www.example.com/branch1/branch1/index.html",
        properties.get("x-lockss-node-url"));
    result = proxy.fetchFile(url, auId);
    assertEquals(398, result.writeContentToFile(outputFileSpec).length());
    properties = result.getProperties();
    assertEquals("1", properties.get("org.lockss.version.number"));
    assertEquals("text/html", properties.get("x-lockss-content-type"));
    assertEquals("http://www.example.com/branch1/branch1/index.html",
        properties.get("x-lockss-node-url"));
    result = proxy.fetchVersionedFile(url, auId, 1);
    assertEquals(398, result.writeContentToFile(outputFileSpec).length());
    properties = result.getProperties();
    assertEquals("1", properties.get("org.lockss.version.number"));
    assertEquals("text/html", properties.get("x-lockss-content-type"));
    assertEquals("http://www.example.com/branch1/branch1/index.html",
        properties.get("x-lockss-node-url"));
    // Once more with a bad AU identifier.
    userAccount.setRoles(LockssServlet.ROLE_USER_ADMIN);
    try {
      auId = "wrongAuId";
      result = proxy.fetchFile(url, auId);
      fail("Test should have failed for auId '" + auId + "'");
    } catch (LockssWebServicesFault lwsf) {
      // Expected missing-AU failure (not an authorization failure).
      assertEquals("org.lockss.ws.entities.LockssWebServicesFault: "
          + "Missing AU with auid 'wrongAuId'", lwsf.getMessage());
    }
    // Once more with a bad URL.
    userAccount.setRoles(LockssServlet.ROLE_CONTENT_ACCESS);
    try {
      url = "wrongURL";
      result = proxy.fetchVersionedFile(url, null, null);
      fail("Test should have failed for url '" + url + "'");
    } catch (LockssWebServicesFault lwsf) {
      // Expected missing-content failure (not an authorization failure).
      assertEquals("org.lockss.ws.entities.LockssWebServicesFault: "
          + "Missing CachedUrl for url 'wrongURL'", lwsf.getMessage());
    }
    // Non-existent version.
    try {
      url = "http://www.example.com/branch1/branch1/index.html";
      auId = sau.getAuId();
      result = proxy.fetchVersionedFile(url, auId, 2);
      fail("Test should have failed for version " + 2);
    } catch (LockssWebServicesFault lwsf) {
      // Expected missing-version failure (not an authorization failure).
      assertTrue(lwsf.getMessage().startsWith("java.lang.Exception: Version 2 "
          + "of http://www.example.com/branch1/branch1/index.html for the "
          + "requested Archival Unit "
          + "'org|lockss|plugin|simulated|SimulatedPlugin&"));
      assertTrue(lwsf.getMessage().endsWith(" has no content"));
    }
  }

  /**
   * Builds the configuration of the simulated Archival Unit: depth 2, two
   * branches, three binary files per node, odd-branch content enabled.
   *
   * @param rootPath the root directory of the simulated content tree.
   * @return the simulated AU configuration.
   */
  private Configuration simAuConfig(String rootPath) {
    Configuration conf = ConfigManager.newConfiguration();
    conf.put("root", rootPath);
    conf.put("depth", "2");
    conf.put("branch", "2");
    conf.put("numFiles", "3");
    conf.put("fileTypes", "" + SimulatedContentGenerator.FILE_TYPE_BIN);
    conf.put(SimulatedPlugin.AU_PARAM_ODD_BRANCH_CONTENT, "true");
    return conf;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf
package org.apache.drill.exec.proto.beans;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.List;
import com.dyuproject.protostuff.GraphIOUtil;
import com.dyuproject.protostuff.Input;
import com.dyuproject.protostuff.Message;
import com.dyuproject.protostuff.Output;
import com.dyuproject.protostuff.Schema;
/**
 * Protostuff-generated bean for the {@code GetTablesReq} protobuf message: a
 * request for table metadata with optional LIKE filters on catalog, schema and
 * table name, plus a repeated table-type filter.  Serves as both the message
 * and its own {@link Schema}.  Generated code -- do not hand-edit the logic.
 */
public final class GetTablesReq implements Externalizable, Message<GetTablesReq>, Schema<GetTablesReq>
{
public static Schema<GetTablesReq> getSchema()
{
return DEFAULT_INSTANCE;
}
public static GetTablesReq getDefaultInstance()
{
return DEFAULT_INSTANCE;
}
static final GetTablesReq DEFAULT_INSTANCE = new GetTablesReq();
// Optional message fields 1-3; field 4 is a repeated string, lazily allocated.
private LikeFilter catalogNameFilter;
private LikeFilter schemaNameFilter;
private LikeFilter tableNameFilter;
private List<String> tableTypeFilter;
public GetTablesReq()
{
}
// getters and setters
// catalogNameFilter
public LikeFilter getCatalogNameFilter()
{
return catalogNameFilter;
}
public GetTablesReq setCatalogNameFilter(LikeFilter catalogNameFilter)
{
this.catalogNameFilter = catalogNameFilter;
return this;
}
// schemaNameFilter
public LikeFilter getSchemaNameFilter()
{
return schemaNameFilter;
}
public GetTablesReq setSchemaNameFilter(LikeFilter schemaNameFilter)
{
this.schemaNameFilter = schemaNameFilter;
return this;
}
// tableNameFilter
public LikeFilter getTableNameFilter()
{
return tableNameFilter;
}
public GetTablesReq setTableNameFilter(LikeFilter tableNameFilter)
{
this.tableNameFilter = tableNameFilter;
return this;
}
// tableTypeFilter
public List<String> getTableTypeFilterList()
{
return tableTypeFilter;
}
public GetTablesReq setTableTypeFilterList(List<String> tableTypeFilter)
{
this.tableTypeFilter = tableTypeFilter;
return this;
}
// java serialization
// Java serialization delegates to protostuff's delimited graph format.
public void readExternal(ObjectInput in) throws IOException
{
GraphIOUtil.mergeDelimitedFrom(in, this, this);
}
public void writeExternal(ObjectOutput out) throws IOException
{
GraphIOUtil.writeDelimitedTo(out, this, this);
}
// message method
public Schema<GetTablesReq> cachedSchema()
{
return DEFAULT_INSTANCE;
}
// schema methods
public GetTablesReq newMessage()
{
return new GetTablesReq();
}
public Class<GetTablesReq> typeClass()
{
return GetTablesReq.class;
}
public String messageName()
{
return GetTablesReq.class.getSimpleName();
}
public String messageFullName()
{
return GetTablesReq.class.getName();
}
public boolean isInitialized(GetTablesReq message)
{
// All fields are optional, so any instance is valid.
return true;
}
// Reads fields by tag number until field 0 (end of message) is reached;
// unknown field numbers are skipped via handleUnknownField.
public void mergeFrom(Input input, GetTablesReq message) throws IOException
{
for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
{
switch(number)
{
case 0:
return;
case 1:
message.catalogNameFilter = input.mergeObject(message.catalogNameFilter, LikeFilter.getSchema());
break;
case 2:
message.schemaNameFilter = input.mergeObject(message.schemaNameFilter, LikeFilter.getSchema());
break;
case 3:
message.tableNameFilter = input.mergeObject(message.tableNameFilter, LikeFilter.getSchema());
break;
case 4:
if(message.tableTypeFilter == null)
message.tableTypeFilter = new ArrayList<String>();
message.tableTypeFilter.add(input.readString());
break;
default:
input.handleUnknownField(number, this);
}
}
}
// Writes only the fields that are set; field 4 is emitted once per element.
public void writeTo(Output output, GetTablesReq message) throws IOException
{
if(message.catalogNameFilter != null)
output.writeObject(1, message.catalogNameFilter, LikeFilter.getSchema(), false);
if(message.schemaNameFilter != null)
output.writeObject(2, message.schemaNameFilter, LikeFilter.getSchema(), false);
if(message.tableNameFilter != null)
output.writeObject(3, message.tableNameFilter, LikeFilter.getSchema(), false);
if(message.tableTypeFilter != null)
{
for(String tableTypeFilter : message.tableTypeFilter)
{
if(tableTypeFilter != null)
output.writeString(4, tableTypeFilter, true);
}
}
}
public String getFieldName(int number)
{
switch(number)
{
case 1: return "catalogNameFilter";
case 2: return "schemaNameFilter";
case 3: return "tableNameFilter";
case 4: return "tableTypeFilter";
default: return null;
}
}
public int getFieldNumber(String name)
{
final Integer number = __fieldMap.get(name);
return number == null ? 0 : number.intValue();
}
private static final java.util.HashMap<String,Integer> __fieldMap = new java.util.HashMap<String,Integer>();
static
{
__fieldMap.put("catalogNameFilter", 1);
__fieldMap.put("schemaNameFilter", 2);
__fieldMap.put("tableNameFilter", 3);
__fieldMap.put("tableTypeFilter", 4);
}
}
| |
package com.epam.dlab.backendapi.service.impl;
import com.epam.dlab.auth.UserInfo;
import com.epam.dlab.backendapi.dao.ComputationalDAO;
import com.epam.dlab.backendapi.dao.ExploratoryDAO;
import com.epam.dlab.backendapi.dao.KeyDAO;
import com.epam.dlab.backendapi.domain.RequestId;
import com.epam.dlab.backendapi.service.ExploratoryService;
import com.epam.dlab.backendapi.service.UserResourceService;
import com.epam.dlab.backendapi.util.RequestBuilder;
import com.epam.dlab.dto.UserInstanceDTO;
import com.epam.dlab.dto.UserInstanceStatus;
import com.epam.dlab.dto.aws.edge.EdgeInfoAws;
import com.epam.dlab.dto.base.DataEngineType;
import com.epam.dlab.dto.base.edge.EdgeInfo;
import com.epam.dlab.dto.reuploadkey.ReuploadKeyCallbackDTO;
import com.epam.dlab.dto.reuploadkey.ReuploadKeyDTO;
import com.epam.dlab.dto.reuploadkey.ReuploadKeyStatus;
import com.epam.dlab.dto.reuploadkey.ReuploadKeyStatusDTO;
import com.epam.dlab.exceptions.DlabException;
import com.epam.dlab.model.ResourceData;
import com.epam.dlab.model.ResourceType;
import com.epam.dlab.rest.client.RESTService;
import com.mongodb.client.result.UpdateResult;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.*;
import static com.epam.dlab.dto.UserInstanceStatus.REUPLOADING_KEY;
import static com.epam.dlab.dto.UserInstanceStatus.RUNNING;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.class)
public class ReuploadKeyServiceImplTest {
private final String USER = "test";
private final String TOKEN = "token";
private final String EXPLORATORY_NAME = "explName";
private UserInfo userInfo;
@Mock
private KeyDAO keyDAO;
@Mock
private RESTService provisioningService;
@Mock
private RequestBuilder requestBuilder;
@Mock
private RequestId requestId;
@Mock
private ExploratoryService exploratoryService;
@Mock
private ComputationalDAO computationalDAO;
@Mock
private ExploratoryDAO exploratoryDAO;
@Mock
private UserResourceService userResourceService;
@InjectMocks
private ReuploadKeyServiceImpl reuploadKeyService;
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Before
public void setUp() {
// Fresh UserInfo fixture for every test (helper defined later in this class).
userInfo = getUserInfo();
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKey() {
// Happy path: one running exploratory plus a running edge node.  The edge is
// appended to the resource list by the service, so 2 resources are posted.
doNothing().when(userResourceService).updateReuploadKeyFlagForUserResources(anyString(), anyBoolean());
List<UserInstanceDTO> instances = Collections.singletonList(getUserInstance());
when(exploratoryService.getInstancesWithStatuses(anyString(), any(UserInstanceStatus.class),
any(UserInstanceStatus.class))).thenReturn(instances);
List<ResourceData> resourceList = new ArrayList<>();
resourceList.add(new ResourceData(ResourceType.EXPLORATORY, "someId", EXPLORATORY_NAME, null));
when(userResourceService.convertToResourceData(any(List.class))).thenReturn(resourceList);
// Edge node exists and is running, so it takes part in the key reupload.
Optional<EdgeInfoAws> edgeInfo = Optional.of(new EdgeInfoAws());
Mockito.<Optional<? extends EdgeInfo>>when(keyDAO.getEdgeInfoWhereStatusIn(anyString(), anyVararg()))
.thenReturn(edgeInfo);
doNothing().when(keyDAO).updateEdgeStatus(anyString(), anyString());
doNothing().when(exploratoryDAO).updateStatusForExploratories(any(UserInstanceStatus.class), anyString(),
any(UserInstanceStatus.class));
doNothing().when(computationalDAO).updateStatusForComputationalResources(any(UserInstanceStatus.class),
anyString(), any(List.class), any(List.class), any(UserInstanceStatus.class));
ReuploadKeyDTO reuploadFile = mock(ReuploadKeyDTO.class);
when(requestBuilder.newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class)))
.thenReturn(reuploadFile);
String expectedUuid = "someUuid";
when(provisioningService.post(anyString(), anyString(), any(ReuploadKeyDTO.class), any()))
.thenReturn(expectedUuid);
String keyContent = "keyContent";
// Exercise the service and check the returned request UUID.
String actualUuid = reuploadKeyService.reuploadKey(userInfo, keyContent);
assertNotNull(actualUuid);
assertEquals(expectedUuid, actualUuid);
// The edge resource was added to the original single-element list.
assertEquals(2, resourceList.size());
verify(userResourceService).updateReuploadKeyFlagForUserResources(USER, true);
verify(exploratoryService).getInstancesWithStatuses(USER, RUNNING, RUNNING);
verify(userResourceService).convertToResourceData(instances);
verify(keyDAO).getEdgeInfoWhereStatusIn(USER, RUNNING);
verify(keyDAO).updateEdgeStatus(USER, "reuploading key");
verify(exploratoryDAO).updateStatusForExploratories(REUPLOADING_KEY, USER, RUNNING);
verify(computationalDAO).updateStatusForComputationalResources(REUPLOADING_KEY, USER,
Arrays.asList(RUNNING, REUPLOADING_KEY), Arrays.asList(DataEngineType.SPARK_STANDALONE,
DataEngineType.CLOUD_SERVICE), RUNNING);
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(keyContent), any(List.class));
verify(provisioningService).post("/reupload_key", TOKEN, reuploadFile, String.class);
verifyNoMoreInteractions(userResourceService, exploratoryService, keyDAO, exploratoryDAO, computationalDAO,
requestBuilder, provisioningService);
verifyZeroInteractions(requestId);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyWithoutEdge() {
// Same flow as reuploadKey(), but no edge node is found: the edge status is
// never updated and the resource list keeps its single exploratory entry.
doNothing().when(userResourceService).updateReuploadKeyFlagForUserResources(anyString(), anyBoolean());
List<UserInstanceDTO> instances = Collections.singletonList(getUserInstance());
when(exploratoryService.getInstancesWithStatuses(anyString(), any(UserInstanceStatus.class),
any(UserInstanceStatus.class))).thenReturn(instances);
List<ResourceData> resourceList = new ArrayList<>();
resourceList.add(new ResourceData(ResourceType.EXPLORATORY, "someId", EXPLORATORY_NAME, null));
when(userResourceService.convertToResourceData(any(List.class))).thenReturn(resourceList);
// No running edge for this user.
when(keyDAO.getEdgeInfoWhereStatusIn(anyString(), anyVararg())).thenReturn(Optional.empty());
doNothing().when(exploratoryDAO).updateStatusForExploratories(any(UserInstanceStatus.class), anyString(),
any(UserInstanceStatus.class));
doNothing().when(computationalDAO).updateStatusForComputationalResources(any(UserInstanceStatus.class),
anyString(), any(List.class), any(List.class), any(UserInstanceStatus.class));
ReuploadKeyDTO reuploadFile = mock(ReuploadKeyDTO.class);
when(requestBuilder.newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class)))
.thenReturn(reuploadFile);
String expectedUuid = "someUuid";
when(provisioningService.post(anyString(), anyString(), any(ReuploadKeyDTO.class), any()))
.thenReturn(expectedUuid);
String keyContent = "keyContent";
String actualUuid = reuploadKeyService.reuploadKey(userInfo, keyContent);
assertNotNull(actualUuid);
assertEquals(expectedUuid, actualUuid);
// No edge resource appended this time.
assertEquals(1, resourceList.size());
verify(userResourceService).updateReuploadKeyFlagForUserResources(USER, true);
verify(exploratoryService).getInstancesWithStatuses(USER, RUNNING, RUNNING);
verify(userResourceService).convertToResourceData(instances);
verify(keyDAO).getEdgeInfoWhereStatusIn(USER, RUNNING);
verify(exploratoryDAO).updateStatusForExploratories(REUPLOADING_KEY, USER, RUNNING);
verify(computationalDAO).updateStatusForComputationalResources(REUPLOADING_KEY, USER,
Arrays.asList(RUNNING, REUPLOADING_KEY), Arrays.asList(DataEngineType.SPARK_STANDALONE,
DataEngineType.CLOUD_SERVICE), RUNNING);
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(keyContent), any(List.class));
verify(provisioningService).post("/reupload_key", TOKEN, reuploadFile, String.class);
verifyNoMoreInteractions(userResourceService, exploratoryService, keyDAO, exploratoryDAO, computationalDAO,
requestBuilder, provisioningService);
verifyZeroInteractions(requestId);
}
@Test
public void updateResourceDataForEdgeWhenStatusCompleted() {
// COMPLETED callback for an EDGE resource: edge returns to "running" and its
// reupload-key flag is cleared for all instance statuses.
ResourceData resource = new ResourceData(ResourceType.EDGE, "someId", null, null);
doNothing().when(keyDAO).updateEdgeStatus(anyString(), anyString());
doNothing().when(keyDAO).updateEdgeReuploadKey(anyString(), anyBoolean(), anyVararg());
ReuploadKeyStatusDTO dto = getReuploadKeyStatusDTO(resource, ReuploadKeyStatus.COMPLETED);
reuploadKeyService.updateResourceData(dto);
verify(keyDAO).updateEdgeStatus(USER, "running");
verify(keyDAO).updateEdgeReuploadKey(USER, false, UserInstanceStatus.values());
verifyNoMoreInteractions(keyDAO);
verifyZeroInteractions(exploratoryDAO, computationalDAO);
}
@Test
public void updateResourceDataForEdgeWhenStatusFailed() {
// FAILED callback for an EDGE resource: only the status is reset to
// "running"; the reupload-key flag is left untouched.
ResourceData resource = new ResourceData(ResourceType.EDGE, "someId", null, null);
doNothing().when(keyDAO).updateEdgeStatus(anyString(), anyString());
ReuploadKeyStatusDTO dto = getReuploadKeyStatusDTO(resource, ReuploadKeyStatus.FAILED);
reuploadKeyService.updateResourceData(dto);
verify(keyDAO).updateEdgeStatus(USER, "running");
verifyNoMoreInteractions(keyDAO);
verifyZeroInteractions(exploratoryDAO, computationalDAO);
}
@Test
public void updateResourceDataForExploratoryWhenStatusCompleted() {
// COMPLETED callback for an EXPLORATORY resource: status back to RUNNING and
// the exploratory's reupload-key flag is cleared.
ResourceData resource = new ResourceData(ResourceType.EXPLORATORY, "someId", EXPLORATORY_NAME, null);
when(exploratoryDAO.updateStatusForExploratory(anyString(), anyString(),
any(UserInstanceStatus.class))).thenReturn(mock(UpdateResult.class));
doNothing().when(exploratoryDAO).updateReuploadKeyForExploratory(anyString(), anyString(), anyBoolean());
ReuploadKeyStatusDTO dto = getReuploadKeyStatusDTO(resource, ReuploadKeyStatus.COMPLETED);
reuploadKeyService.updateResourceData(dto);
verify(exploratoryDAO).updateStatusForExploratory(USER, EXPLORATORY_NAME, RUNNING);
verify(exploratoryDAO).updateReuploadKeyForExploratory(USER, EXPLORATORY_NAME, false);
verifyNoMoreInteractions(exploratoryDAO);
verifyZeroInteractions(keyDAO, computationalDAO);
}
@Test
public void updateResourceDataForExploratoryWhenStatusFailed() {
// FAILED callback for an EXPLORATORY resource: only the status is reset;
// the reupload-key flag stays set.
ResourceData resource = new ResourceData(ResourceType.EXPLORATORY, "someId", EXPLORATORY_NAME, null);
when(exploratoryDAO.updateStatusForExploratory(anyString(), anyString(),
any(UserInstanceStatus.class))).thenReturn(mock(UpdateResult.class));
ReuploadKeyStatusDTO dto = getReuploadKeyStatusDTO(resource, ReuploadKeyStatus.FAILED);
reuploadKeyService.updateResourceData(dto);
verify(exploratoryDAO).updateStatusForExploratory(USER, EXPLORATORY_NAME, RUNNING);
verifyNoMoreInteractions(exploratoryDAO);
verifyZeroInteractions(keyDAO, computationalDAO);
}
@Test
public void updateResourceDataForClusterWhenStatusCompleted() {
// COMPLETED callback for a COMPUTATIONAL resource: cluster status back to
// RUNNING and its reupload-key flag cleared.
ResourceData resource = new ResourceData(ResourceType.COMPUTATIONAL, "someId", EXPLORATORY_NAME, "compName");
doNothing().when(computationalDAO).updateStatusForComputationalResource(anyString(), anyString(), anyString(),
any(UserInstanceStatus.class));
doNothing().when(computationalDAO).updateReuploadKeyFlagForComputationalResource(anyString(), anyString(),
anyString(), anyBoolean());
ReuploadKeyStatusDTO dto = getReuploadKeyStatusDTO(resource, ReuploadKeyStatus.COMPLETED);
reuploadKeyService.updateResourceData(dto);
verify(computationalDAO).updateStatusForComputationalResource(USER, EXPLORATORY_NAME, "compName", RUNNING);
verify(computationalDAO).updateReuploadKeyFlagForComputationalResource(USER, EXPLORATORY_NAME, "compName",
false);
verifyNoMoreInteractions(computationalDAO);
verifyZeroInteractions(exploratoryDAO, keyDAO);
}
@Test
public void updateResourceDataForClusterWhenStatusFailed() {
// FAILED callback for a COMPUTATIONAL resource: only the status is reset;
// the reupload-key flag stays set.
ResourceData resource = new ResourceData(ResourceType.COMPUTATIONAL, "someId", EXPLORATORY_NAME, "compName");
doNothing().when(computationalDAO).updateStatusForComputationalResource(anyString(), anyString(), anyString(),
any(UserInstanceStatus.class));
ReuploadKeyStatusDTO dto = getReuploadKeyStatusDTO(resource, ReuploadKeyStatus.FAILED);
reuploadKeyService.updateResourceData(dto);
verify(computationalDAO).updateStatusForComputationalResource(USER, EXPLORATORY_NAME, "compName", RUNNING);
verifyNoMoreInteractions(computationalDAO);
verifyZeroInteractions(exploratoryDAO, keyDAO);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyActionForEdge() {
// Single-resource reupload for an EDGE: the edge is marked "reuploading key",
// the request is posted with is_primary_reuploading=false, and the returned
// UUID is registered with the request tracker.
doNothing().when(keyDAO).updateEdgeStatus(anyString(), anyString());
ReuploadKeyDTO reuploadFile = mock(ReuploadKeyDTO.class);
when(requestBuilder.newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class)))
.thenReturn(reuploadFile);
String expectedUuid = "someUuid";
when(provisioningService.post(anyString(), anyString(), any(ReuploadKeyDTO.class), any(), any(Map.class)))
.thenReturn(expectedUuid);
when(requestId.put(anyString(), anyString())).thenReturn(expectedUuid);
ResourceData resource = new ResourceData(ResourceType.EDGE, "someId", null, null);
reuploadKeyService.reuploadKeyAction(userInfo, resource);
verify(keyDAO).updateEdgeStatus(USER, "reuploading key");
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(""), any(List.class));
verify(provisioningService).post("/reupload_key", TOKEN, reuploadFile, String.class,
Collections.singletonMap("is_primary_reuploading", false));
verify(requestId).put(USER, expectedUuid);
verifyNoMoreInteractions(keyDAO, requestBuilder, provisioningService, requestId);
verifyZeroInteractions(exploratoryDAO, computationalDAO);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyActionForEdgeWithException() {
// Failure path: building the reupload request throws, so the edge status is
// first set to "reuploading key" and then rolled back to "running".
doNothing().when(keyDAO).updateEdgeStatus(anyString(), eq("reuploading key"));
doThrow(new DlabException("Couldn't reupload key to edge"))
.when(requestBuilder).newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class));
doNothing().when(keyDAO).updateEdgeStatus(anyString(), eq("running"));
ResourceData resource = new ResourceData(ResourceType.EDGE, "someId", null, null);
try {
reuploadKeyService.reuploadKeyAction(userInfo, resource);
// Fix: without this fail() the test silently passed when no exception
// was thrown, because the catch-block assertion never ran.
fail("DlabException expected when the reupload request cannot be built");
} catch (DlabException e) {
assertEquals("Couldn't reupload key to edge_node for user test:\tCouldn't reupload key to edge",
e.getMessage());
}
verify(keyDAO).updateEdgeStatus(USER, "reuploading key");
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(""), any(List.class));
verify(keyDAO).updateEdgeStatus(USER, "running");
verifyNoMoreInteractions(keyDAO, requestBuilder);
verifyZeroInteractions(exploratoryDAO, computationalDAO, provisioningService, requestId);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyActionForExploratory() {
// Single-resource reupload for an EXPLORATORY: its status is switched to
// REUPLOADING_KEY, the request is posted, and the UUID is tracked.
when(exploratoryDAO.updateStatusForExploratory(anyString(), anyString(),
any(UserInstanceStatus.class))).thenReturn(mock(UpdateResult.class));
ReuploadKeyDTO reuploadFile = mock(ReuploadKeyDTO.class);
when(requestBuilder.newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class)))
.thenReturn(reuploadFile);
String expectedUuid = "someUuid";
when(provisioningService.post(anyString(), anyString(), any(ReuploadKeyDTO.class), any(), any(Map.class)))
.thenReturn(expectedUuid);
when(requestId.put(anyString(), anyString())).thenReturn(expectedUuid);
ResourceData resource = new ResourceData(ResourceType.EXPLORATORY, "someId", EXPLORATORY_NAME, null);
reuploadKeyService.reuploadKeyAction(userInfo, resource);
verify(exploratoryDAO).updateStatusForExploratory(USER, EXPLORATORY_NAME, REUPLOADING_KEY);
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(""), any(List.class));
verify(provisioningService).post("/reupload_key", TOKEN, reuploadFile, String.class,
Collections.singletonMap("is_primary_reuploading", false));
verify(requestId).put(USER, expectedUuid);
verifyNoMoreInteractions(exploratoryDAO, requestBuilder, provisioningService, requestId);
verifyZeroInteractions(keyDAO, computationalDAO);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyActionForExploratoryWithException() {
// Failure path: building the reupload request throws, so the exploratory is
// moved to REUPLOADING_KEY and then rolled back to RUNNING.
when(exploratoryDAO.updateStatusForExploratory(anyString(), anyString(),
eq(REUPLOADING_KEY))).thenReturn(mock(UpdateResult.class));
doThrow(new DlabException("Couldn't reupload key to exploratory"))
.when(requestBuilder).newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class));
when(exploratoryDAO.updateStatusForExploratory(anyString(), anyString(),
eq(RUNNING))).thenReturn(mock(UpdateResult.class));
ResourceData resource = new ResourceData(ResourceType.EXPLORATORY, "someId", EXPLORATORY_NAME, null);
try {
reuploadKeyService.reuploadKeyAction(userInfo, resource);
// Fix: without this fail() the test silently passed when no exception
// was thrown, because the catch-block assertion never ran.
fail("DlabException expected when the reupload request cannot be built");
} catch (DlabException e) {
assertEquals("Couldn't reupload key to exploratory explName for user test:\tCouldn't reupload key to " +
"exploratory", e.getMessage());
}
verify(exploratoryDAO).updateStatusForExploratory(USER, EXPLORATORY_NAME, REUPLOADING_KEY);
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(""), any(List.class));
verify(exploratoryDAO).updateStatusForExploratory(USER, EXPLORATORY_NAME, RUNNING);
verifyNoMoreInteractions(exploratoryDAO, requestBuilder);
verifyZeroInteractions(keyDAO, computationalDAO, provisioningService, requestId);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyActionForCluster() {
// Happy path for a COMPUTATIONAL resource: the cluster status is switched to
// REUPLOADING_KEY, the reupload request is posted to the provisioning
// service, and the returned request UUID is registered for the user.
doNothing().when(computationalDAO).updateStatusForComputationalResource(anyString(), anyString(), anyString(),
any(UserInstanceStatus.class));
ReuploadKeyDTO reuploadFile = mock(ReuploadKeyDTO.class);
when(requestBuilder.newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class)))
.thenReturn(reuploadFile);
String expectedUuid = "someUuid";
when(provisioningService.post(anyString(), anyString(), any(ReuploadKeyDTO.class), any(), any(Map.class)))
.thenReturn(expectedUuid);
when(requestId.put(anyString(), anyString())).thenReturn(expectedUuid);
ResourceData resource = new ResourceData(ResourceType.COMPUTATIONAL, "someId", EXPLORATORY_NAME,
"compName");
reuploadKeyService.reuploadKeyAction(userInfo, resource);
// Verify the exact collaborator calls, and that the exploratory/key DAOs
// were never touched for a computational resource.
verify(computationalDAO).updateStatusForComputationalResource(USER, EXPLORATORY_NAME,
"compName", REUPLOADING_KEY);
verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(""), any(List.class));
verify(provisioningService).post("/reupload_key", TOKEN, reuploadFile, String.class,
Collections.singletonMap("is_primary_reuploading", false));
verify(requestId).put(USER, expectedUuid);
verifyNoMoreInteractions(computationalDAO, requestBuilder, provisioningService, requestId);
verifyZeroInteractions(keyDAO, exploratoryDAO);
}
@Test
@SuppressWarnings("unchecked")
public void reuploadKeyActionForClusterWithException() {
	// Failure path for a COMPUTATIONAL resource: the request builder throws,
	// so the cluster status must be rolled back to RUNNING and nothing may be
	// posted to the provisioning service.
	doNothing().when(computationalDAO).updateStatusForComputationalResource(anyString(), anyString(), anyString(),
			eq(REUPLOADING_KEY));
	doThrow(new DlabException("Couldn't reupload key to cluster"))
			.when(requestBuilder).newKeyReupload(any(UserInfo.class), anyString(), anyString(), any(List.class));
	doNothing().when(computationalDAO).updateStatusForComputationalResource(anyString(), anyString(), anyString(),
			eq(RUNNING));
	ResourceData resource = new ResourceData(ResourceType.COMPUTATIONAL, "someId", EXPLORATORY_NAME,
			"compName");
	try {
		reuploadKeyService.reuploadKeyAction(userInfo, resource);
		// Bug fix: previously the test passed silently when no exception was
		// thrown, skipping the message assertion. AssertionError is not a
		// DlabException, so it propagates past the catch below.
		throw new AssertionError("Expected DlabException to be thrown");
	} catch (DlabException e) {
		assertEquals("Couldn't reupload key to computational_resource compName affiliated with exploratory " +
				"explName for user test:\tCouldn't reupload key to cluster", e.getMessage());
	}
	verify(computationalDAO).updateStatusForComputationalResource(USER, EXPLORATORY_NAME,
			"compName", REUPLOADING_KEY);
	verify(requestBuilder).newKeyReupload(refEq(userInfo), anyString(), eq(""), any(List.class));
	verify(computationalDAO).updateStatusForComputationalResource(USER, EXPLORATORY_NAME,
			"compName", RUNNING);
	verifyNoMoreInteractions(computationalDAO, requestBuilder);
	verifyZeroInteractions(keyDAO, exploratoryDAO, provisioningService, requestId);
}
// Builds the security principal shared by the tests in this class.
private UserInfo getUserInfo() {
return new UserInfo(USER, TOKEN);
}
// Builds a user instance bound to the shared USER/EXPLORATORY_NAME fixtures.
private UserInstanceDTO getUserInstance() {
return new UserInstanceDTO().withUser(USER).withExploratoryName(EXPLORATORY_NAME);
}
// Wraps the given resource and status into the callback DTO consumed by the
// reupload-key status handler.
private ReuploadKeyStatusDTO getReuploadKeyStatusDTO(ResourceData resource, ReuploadKeyStatus status) {
return new ReuploadKeyStatusDTO().withReuploadKeyCallbackDto(
new ReuploadKeyCallbackDTO().withResource(resource)).withReuploadKeyStatus(status).withUser(USER);
}
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.testutil.MoreAsserts.assertThrows;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.packages.PackageFactory.GlobPatternExtractor;
import com.google.devtools.build.lib.packages.util.PackageFactoryApparatus;
import com.google.devtools.build.lib.packages.util.PackageFactoryTestBase;
import com.google.devtools.build.lib.syntax.ParserInput;
import com.google.devtools.build.lib.syntax.StarlarkFile;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.RootedPath;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Unit tests for {@code PackageFactory}.
*/
@RunWith(JUnit4.class)
public class PackageFactoryTest extends PackageFactoryTestBase {
@Test
public void testCreatePackage() throws Exception {
	// A comment-only BUILD file yields a package with the right name and no rules.
	Path emptyBuild = scratch.file("/pkgname/BUILD", "# empty build file ");
	Package result = packages.createPackage("pkgname", RootedPath.toRootedPath(root, emptyBuild));
	assertThat(result.getName()).isEqualTo("pkgname");
	assertThat(Sets.newHashSet(result.getTargets(Rule.class))).isEmpty();
}
@Test
public void testCreatePackageIsolatedFromOuterErrors() throws Exception {
	// Runs a package parse concurrently with an unrelated error reporter and
	// checks (via a log handler) that parsing still completes.
	ExecutorService e = Executors.newCachedThreadPool();
	final Semaphore beforeError = new Semaphore(0);
	final Semaphore afterError = new Semaphore(0);
	Reporter reporter = new Reporter(new EventBus());
	ParsingTracker parser = new ParsingTracker(beforeError, afterError, reporter);
	final Logger log = Logger.getLogger(PackageFactory.class.getName());
	log.addHandler(parser);
	Level originalLevel = log.getLevel();
	log.setLevel(Level.FINE);
	try {
		e.execute(new ErrorReporter(reporter, beforeError, afterError));
		e.execute(parser);
		// wait for all to finish
		e.shutdown();
		assertThat(e.awaitTermination(TestUtils.WAIT_TIMEOUT_MILLISECONDS, TimeUnit.MILLISECONDS))
				.isTrue();
	} finally {
		// Fix: restore the shared JUL logger even when the assertion above
		// fails; otherwise the FINE level and the extra handler leak into
		// every later test in the JVM.
		log.removeHandler(parser);
		log.setLevel(originalLevel);
	}
	assertThat(parser.hasParsed()).isTrue();
}
@Test
public void testBadRuleName() throws Exception {
// A non-string 'name' attribute is reported as an error but still yields a
// package (marked as containing errors) rather than throwing.
events.setFailFast(false);
Path buildFile = scratch.file("/badrulename/BUILD", "cc_library(name = 3)");
Package pkg = packages.createPackage("badrulename", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError("cc_library 'name' attribute must be a string");
assertThat(pkg.containsErrors()).isTrue();
}
@Test
public void testNoRuleName() throws Exception {
// Omitting 'name' entirely is likewise a recoverable package-level error.
events.setFailFast(false);
Path buildFile = scratch.file("/badrulename/BUILD", "cc_library()");
Package pkg = packages.createPackage("badrulename", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError("cc_library rule has no 'name' attribute");
assertThat(pkg.containsErrors()).isTrue();
}
@Test
public void testBadPackageName() throws Exception {
// An illegal package name is a hard failure: createPackage throws
// NoSuchPackageException instead of returning an errant package.
NoSuchPackageException e =
assertThrows(
NoSuchPackageException.class,
() ->
packages.createPackage(
"not even a legal/.../label",
RootedPath.toRootedPath(root, emptyBuildFile("not even a legal/.../label"))));
assertThat(e)
.hasMessageThat()
.contains(
"no such package 'not even a legal/.../label': "
+ "illegal package name: 'not even a legal/.../label' ");
}
@Test
public void testColonInExportsFilesTargetName() throws Exception {
// ':' is forbidden in target names; the offending exports_files entry is
// dropped and the package is marked errant.
events.setFailFast(false);
Path path =
scratch.file(
"/googledata/cafe/BUILD",
"exports_files(['houseads/house_ads:ca-aol_parenting_html'])");
Package pkg = packages.createPackage("googledata/cafe", RootedPath.toRootedPath(root, path));
events.assertContainsError("target names may not contain ':'");
assertThat(pkg.getTargets(FileTarget.class).toString())
.doesNotContain("houseads/house_ads:ca-aol_parenting_html");
assertThat(pkg.containsErrors()).isTrue();
}
@Test
public void testExportsFilesVisibilityMustBeSequence() throws Exception {
// exports_files rejects a depset where a sequence is expected for visibility.
expectEvalError(
"expected value of type 'sequence or NoneType' for parameter 'visibility', "
+ "for call to method exports_files",
"exports_files(srcs=[], visibility=depset(['notice']))");
}
@Test
public void testExportsFilesLicensesMustBeSequence() throws Exception {
// Same type check for the 'licenses' parameter.
expectEvalError(
"expected value of type 'sequence of strings or NoneType' for parameter 'licenses', "
+ "for call to method exports_files",
"exports_files(srcs=[], licenses=depset(['notice']))");
}
@Test
public void testPackageNameWithPROTECTEDIsOk() throws Exception {
// 'PROTECTED' is an ordinary path segment and must not be rejected.
events.setFailFast(false);
// One "PROTECTED":
assertThat(isValidPackageName("foo/PROTECTED/bar")).isTrue();
// Multiple "PROTECTED"s:
assertThat(isValidPackageName("foo/PROTECTED/bar/PROTECTED/wiz")).isTrue();
}
@Test
public void testDuplicatedDependencies() throws Exception {
// A label listed twice in 'deps' is an error that taints the whole package,
// but both rules are still created and retrievable.
events.setFailFast(false);
Path buildFile =
scratch.file(
"/has_dupe/BUILD",
"cc_library(name='dep')",
"cc_library(name='has_dupe', deps=[':dep', ':dep'])");
Package pkg = packages.createPackage("has_dupe", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError(
"Label '//has_dupe:dep' is duplicated in the 'deps' " + "attribute of rule 'has_dupe'");
assertThat(pkg.containsErrors()).isTrue();
assertThat(pkg.getRule("has_dupe")).isNotNull();
assertThat(pkg.getRule("dep")).isNotNull();
assertThat(pkg.getRule("has_dupe").containsErrors()).isTrue();
assertThat(pkg.getRule("dep").containsErrors()).isTrue(); // because all rules in an
// errant package are
// themselves errant.
}
// The next four tests check that one output file being a path prefix of
// another is reported as a conflict, both within a single rule and across
// rules, regardless of declaration order.
@Test
public void testPrefixWithinSameRule1() throws Exception {
events.setFailFast(false);
Path buildFile =
scratch.file(
"/fruit/orange/BUILD",
"genrule(name='orange', srcs=[], outs=['a', 'a/b'], cmd='')");
packages.createPackage("fruit/orange", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError("rule 'orange' has conflicting output files 'a/b' and 'a");
}
@Test
public void testPrefixWithinSameRule2() throws Exception {
events.setFailFast(false);
Path buildFile =
scratch.file(
"/fruit/orange/BUILD",
"genrule(name='orange', srcs=[], outs=['a/b', 'a'], cmd='')");
packages.createPackage("fruit/orange", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError("rule 'orange' has conflicting output files 'a' and 'a/b");
}
@Test
public void testPrefixBetweenRules1() throws Exception {
events.setFailFast(false);
Path buildFile =
scratch.file(
"/fruit/kiwi/BUILD",
"genrule(name='kiwi1', srcs=[], outs=['a'], cmd='')",
"genrule(name='kiwi2', srcs=[], outs=['a/b'], cmd='')");
packages.createPackage("fruit/kiwi", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError(
"output file 'a/b' of rule 'kiwi2' conflicts " + "with output file 'a' of rule 'kiwi1'");
}
@Test
public void testPrefixBetweenRules2() throws Exception {
events.setFailFast(false);
Path buildFile =
scratch.file(
"/fruit/kiwi/BUILD",
"genrule(name='kiwi1', srcs=[], outs=['a/b'], cmd='')",
"genrule(name='kiwi2', srcs=[], outs=['a'], cmd='')");
packages.createPackage("fruit/kiwi", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError(
"output file 'a' of rule 'kiwi2' conflicts " + "with output file 'a/b' of rule 'kiwi1'");
}
@Test
public void testPackageConstantIsForbidden() throws Exception {
// The legacy PACKAGE_NAME constant was removed in favor of package_name().
events.setFailFast(false);
Path buildFile = scratch.file("/pina/BUILD", "cc_library(name=PACKAGE_NAME + '-colada')");
packages.createPackage("pina", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError("The value 'PACKAGE_NAME' has been removed");
}
@Test
public void testPackageNameFunction() throws Exception {
// package_name() is the supported replacement and evaluates to "pina" here.
Path buildFile = scratch.file("/pina/BUILD", "cc_library(name=package_name() + '-colada')");
Package pkg = packages.createPackage("pina", RootedPath.toRootedPath(root, buildFile));
events.assertNoWarningsOrErrors();
assertThat(pkg.containsErrors()).isFalse();
assertThat(pkg.getRule("pina-colada")).isNotNull();
assertThat(pkg.getRule("pina-colada").containsErrors()).isFalse();
assertThat(Sets.newHashSet(pkg.getTargets(Rule.class)).size()).isSameInstanceAs(1);
}
@Test
public void testPackageConstantInExternalRepositoryIsForbidden() throws Exception {
// Likewise, the legacy REPOSITORY_NAME constant was removed.
events.setFailFast(false);
Path buildFile =
scratch.file(
"/external/a/b/BUILD", "genrule(name='c', srcs=[], outs=['ao'], cmd=REPOSITORY_NAME)");
packages.createPackage(
PackageIdentifier.create("@a", PathFragment.create("b")),
RootedPath.toRootedPath(root, buildFile),
events.reporter());
events.assertContainsError("The value 'REPOSITORY_NAME' has been removed");
}
@Test
public void testPackageFunctionInExternalRepository() throws Exception {
// repository_name()/package_name() work inside an external repository and
// resolve to the repo name ("@a") and package path ("b").
Path buildFile =
scratch.file(
"/external/a/b/BUILD",
"genrule(name='c', srcs=[], outs=['o'], cmd=repository_name() + ' ' + package_name())");
Package pkg =
packages.createPackage(
PackageIdentifier.create("@a", PathFragment.create("b")),
RootedPath.toRootedPath(root, buildFile),
events.reporter());
Rule c = pkg.getRule("c");
assertThat(AggregatingAttributeMapper.of(c).get("cmd", Type.STRING)).isEqualTo("@a b");
}
@Test
public void testDuplicateRuleName() throws Exception {
// A name clash between two rules stops BUILD file evaluation at the clash:
// the later "1//0" must never be reached.
events.setFailFast(false);
Path buildFile =
scratch.file(
"/duplicaterulename/BUILD",
"proto_library(name = 'spellcheck_proto',",
"              srcs = ['spellcheck.proto'],",
"              cc_api_version = 2)",
"cc_library(name = 'spellcheck_proto')", // conflict error stops execution
"x = 1//0"); // not reached
Package pkg =
packages.createPackage("duplicaterulename", RootedPath.toRootedPath(root, buildFile));
events.assertContainsError(
"cc_library rule 'spellcheck_proto' in package 'duplicaterulename' conflicts with existing"
+ " proto_library rule");
events.assertDoesNotContainEvent("division by zero");
assertThat(pkg.containsErrors()).isTrue();
}
@Test
public void testBuildFileTargetExists() throws Exception {
	// Every package exposes its own BUILD file as a target.
	Path build = scratch.file("/foo/BUILD", "");
	Package fooPkg = packages.createPackage("foo", RootedPath.toRootedPath(root, build));
	Target buildTarget = fooPkg.getTarget("BUILD");
	assertThat(buildTarget.getName()).isEqualTo("BUILD");
	// Repeated lookups must return the memoized instance, not a fresh one.
	assertThat(fooPkg.getTarget("BUILD")).isSameInstanceAs(buildTarget);
}
@Test
public void testCreationOfInputFiles() throws Exception {
// Checks target classification: exported/implicit sources become InputFile
// targets, rule names become Rule targets, and unknown names throw.
Path buildFile =
scratch.file(
"/foo/BUILD",
"exports_files(['Z'])",
"cc_library(name='W', deps=['X', 'Y'])",
"cc_library(name='X', srcs=['X'])",
"cc_library(name='Y')");
Package pkg = packages.createPackage("foo", RootedPath.toRootedPath(root, buildFile));
assertThat(pkg.containsErrors()).isFalse();
// X is a rule with a circular self-dependency.
assertThat(pkg.getTarget("X").getClass()).isSameInstanceAs(Rule.class);
// Y is a rule
assertThat(pkg.getTarget("Y").getClass()).isSameInstanceAs(Rule.class);
// Z is a file
assertThat(pkg.getTarget("Z").getClass()).isSameInstanceAs(InputFile.class);
// A is nothing
NoSuchTargetException e = assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("A"));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"no such target '//foo:A': "
+ "target 'A' not declared in package 'foo' defined by /foo/BUILD");
// These are the only input files: BUILD, Z
Set<String> inputFiles = Sets.newTreeSet();
for (InputFile inputFile : pkg.getTargets(InputFile.class)) {
inputFiles.add(inputFile.getName());
}
assertThat(Lists.newArrayList(inputFiles)).containsExactly("BUILD", "Z").inOrder();
}
@Test
public void testDuplicateRuleIsNotAddedToPackage() throws Exception {
// On a name clash the first rule "wins": the loser is discarded and the
// winner's outputs remain members of the package.
events.setFailFast(false);
Path path =
scratch.file(
"/dup/BUILD",
"proto_library(name = 'dup_proto',",
"              srcs  = ['dup.proto'],",
"              cc_api_version = 2)",
"",
"cc_library(name = 'dup_proto',",
"           srcs = ['dup.pb.cc', 'dup.pb.h'])");
Package pkg = packages.createPackage("dup", RootedPath.toRootedPath(root, path));
events.assertContainsError(
"cc_library rule 'dup_proto' in package 'dup' "
+ "conflicts with existing proto_library rule");
assertThat(pkg.containsErrors()).isTrue();
Rule dupProto = pkg.getRule("dup_proto");
// Check that the first rule of the given name "wins", and that each of the
// "winning" rule's outputs is a member of the package.
assertThat(dupProto.getRuleClass()).isEqualTo("proto_library");
for (OutputFile out : dupProto.getOutputFiles()) {
assertThat(pkg.getTargets(FileTarget.class)).contains(out);
}
}
@Test
public void testConflictingRuleDoesNotUpdatePackage() throws Exception {
events.setFailFast(false);
// In this test, rule2's outputs conflict with rule1, so rule2 is rejected.
// However, we must check that neither rule2, nor any of its inputs or
// outputs is a member of the package, and that the conflicting output file
// "out2" still has rule1 as its getGeneratingRule().
Path path =
scratch.file(
"/conflict/BUILD",
"genrule(name = 'rule1',",
"        cmd = '',",
"        srcs = ['in1', 'in2'],",
"        outs = ['out1', 'out2'])",
"genrule(name = 'rule2',",
"        cmd = '',",
"        srcs = ['in3', 'in4'],",
"        outs = ['out3', 'out2'])");
Package pkg = packages.createPackage("conflict", RootedPath.toRootedPath(root, path));
events.assertContainsError(
"generated file 'out2' in rule 'rule2' "
+ "conflicts with existing generated file from rule 'rule1'");
assertThat(pkg.containsErrors()).isTrue();
assertThat(pkg.getRule("rule2")).isNull();
// Ensure that rule2's "out2" didn't overwrite rule1's:
assertThat(((OutputFile) pkg.getTarget("out2")).getGeneratingRule())
.isSameInstanceAs(pkg.getRule("rule1"));
// None of rule2, its inputs, or its outputs should belong to pkg:
List<Target> found = new ArrayList<>();
for (String targetName : ImmutableList.of("rule2", "in3", "in4", "out3")) {
try {
found.add(pkg.getTarget(targetName));
// No fail() here: if there's no exception, we add the name to a list
// and we check below that it's empty.
} catch (NoSuchTargetException e) {
/* good! */
}
}
assertThat(found).isEmpty();
}
// Was: Regression test for bug "Rules declared after an error in
// a package should be considered 'in error'".
// Then: Regression test for bug "Why aren't ERRORS considered
// fatal?*"
// Now: Regression test for: execution should stop at the first EvalException;
// all rules created prior to the exception error are marked in error.
@Test
public void testAllRulesInErrantPackageAreInError() throws Exception {
events.setFailFast(false);
Path path =
scratch.file(
"/error/BUILD",
"genrule(name = 'rule1',",
"        cmd = ':',",
"        outs = ['out.1'])",
"list = ['bad']",
"x = 1//0", // dynamic error
"genrule(name = 'rule2',",
"        cmd = ':',",
"        outs = list)");
Package pkg = packages.createPackage("error", RootedPath.toRootedPath(root, path));
events.assertContainsError("division by zero");
assertThat(pkg.containsErrors()).isTrue();
// rule1 would be fine but is still marked as in error:
assertThat(pkg.getRule("rule1").containsErrors()).isTrue();
// rule2's genrule is never executed.
Rule rule2 = pkg.getRule("rule2");
assertThat(rule2).isNull();
}
@Test
public void testHelpfulErrorForMissingExportsFiles() throws Exception {
// Unknown targets get progressively more helpful messages: an existing but
// undeclared source file suggests exports_files, a near-miss name suggests
// the closest declared target, and an existing directory suggests a
// filegroup.
Path path = scratch.file("/x/BUILD", "cc_library(name='x', srcs=['x.cc'])");
scratch.file("/x/x.cc");
scratch.file("/x/y.cc");
scratch.file("/x/dir/dummy");
Package pkg = packages.createPackage("x", RootedPath.toRootedPath(root, path));
assertThat(pkg.getTarget("x.cc")).isNotNull(); // existing and mentioned.
NoSuchTargetException e =
assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("y.cc"));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"no such target '//x:y.cc': "
+ "target 'y.cc' not declared in package 'x'; "
+ "however, a source file of this name exists.  "
+ "(Perhaps add 'exports_files([\"y.cc\"])' to x/BUILD?) "
+ "defined by /x/BUILD");
e = assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("z.cc"));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"no such target '//x:z.cc': "
+ "target 'z.cc' not declared in package 'x' (did you mean 'x.cc'?) "
+ "defined by /x/BUILD");
e = assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("dir"));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"no such target '//x:dir': target 'dir' not declared in package 'x'; "
+ "however, a source directory of this name exists.  "
+ "(Perhaps add 'exports_files([\"dir\"])' to x/BUILD, "
+ "or define a filegroup?) defined by /x/BUILD");
}
@Test
public void testTestSuitesImplicitlyDependOnAllRulesInPackage() throws Exception {
// A test_suite with no explicit 'tests' picks up every test rule in the
// package via the hidden $implicit_tests attribute.
Path path =
scratch.file(
"/x/BUILD",
"java_test(name='j')",
"test_suite(name='t1')",
"test_suite(name='t2', tests=['//foo'])",
"test_suite(name='t3', tests=['//foo'])",
"cc_test(name='c')");
Package pkg = packages.createPackage("x", RootedPath.toRootedPath(root, path));
// Things to note:
// - the t1 refers to both :j and :c, even though :c is a forward reference.
// - $implicit_tests is empty unless tests=[]
assertThat(attributes(pkg.getRule("t1")).get("$implicit_tests", BuildType.LABEL_LIST))
.containsExactlyElementsIn(
Sets.newHashSet(
Label.parseAbsolute("//x:c", ImmutableMap.of()),
Label.parseAbsolute("//x:j", ImmutableMap.of())));
assertThat(attributes(pkg.getRule("t2")).get("$implicit_tests", BuildType.LABEL_LIST))
.isEmpty();
assertThat(attributes(pkg.getRule("t3")).get("$implicit_tests", BuildType.LABEL_LIST))
.isEmpty();
}
@Test
public void testGlobDirectoryExclusion() throws Exception {
// glob() excludes directories by default; exclude_directories=0 keeps them
// (here the 'berry' directory appears only in the 'no' rule's srcs).
emptyFile("/fruit/data/apple");
emptyFile("/fruit/data/pear");
emptyFile("/fruit/data/berry/black");
emptyFile("/fruit/data/berry/blue");
Path file =
scratch.file(
"/fruit/BUILD",
"cc_library(name = 'yes', srcs = glob(['data/*']))",
"cc_library(name = 'no',  srcs = glob(['data/*'], exclude_directories=0))");
Package pkg = packages.eval("fruit", RootedPath.toRootedPath(root, file));
events.assertNoWarningsOrErrors();
List<Label> yesFiles = attributes(pkg.getRule("yes")).get("srcs", BuildType.LABEL_LIST);
List<Label> noFiles = attributes(pkg.getRule("no")).get("srcs", BuildType.LABEL_LIST);
assertThat(yesFiles)
.containsExactly(
Label.parseAbsolute("@//fruit:data/apple", ImmutableMap.of()),
Label.parseAbsolute("@//fruit:data/pear", ImmutableMap.of()));
assertThat(noFiles)
.containsExactly(
Label.parseAbsolute("@//fruit:data/apple", ImmutableMap.of()),
Label.parseAbsolute("@//fruit:data/pear", ImmutableMap.of()),
Label.parseAbsolute("@//fruit:data/berry", ImmutableMap.of()));
}
// TODO(bazel-team): This is really a test for GlobCache.
@Test
public void testRecursiveGlob() throws Exception {
// Exercises '**' recursion plus include/exclude interaction over a small
// fixed directory tree; assertEvaluates checks the raw glob result.
emptyFile("/rg/a.cc");
emptyFile("/rg/foo/bar.cc");
emptyFile("/rg/foo/foo.cc");
emptyFile("/rg/foo/wiz/bam.cc");
emptyFile("/rg/foo/wiz/bum.cc");
emptyFile("/rg/foo/wiz/quid/gav.cc");
Path file =
scratch.file(
"/rg/BUILD",
"cc_library(name = 'ri', srcs = glob(['**/*.cc']))",
"cc_library(name = 're', srcs = glob(['*.cc'], exclude=['**/*.c']))");
Package pkg = packages.eval("rg", RootedPath.toRootedPath(root, file));
events.assertNoWarningsOrErrors();
// '**' alone matches files and directories at every depth.
assertEvaluates(
pkg,
ImmutableList.of(
"BUILD",
"a.cc",
"foo",
"foo/bar.cc",
"foo/foo.cc",
"foo/wiz",
"foo/wiz/bam.cc",
"foo/wiz/bum.cc",
"foo/wiz/quid",
"foo/wiz/quid/gav.cc"),
"**");
assertEvaluates(
pkg,
ImmutableList.of(
"a.cc",
"foo/bar.cc",
"foo/foo.cc",
"foo/wiz/bam.cc",
"foo/wiz/bum.cc",
"foo/wiz/quid/gav.cc"),
"**/*.cc");
assertEvaluates(
pkg, ImmutableList.of("foo/bar.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc"), "**/b*.cc");
assertEvaluates(
pkg,
ImmutableList.of(
"foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"),
"**/*/*.cc");
assertEvaluates(pkg, ImmutableList.of("foo/wiz/quid/gav.cc"), "foo/**/quid/*.cc");
// Excludes can cancel out the includes entirely...
assertEvaluates(
pkg,
Collections.<String>emptyList(),
ImmutableList.of("*.cc", "*/*.cc", "*/*/*.cc"),
ImmutableList.of("**/*.cc"));
assertEvaluates(
pkg,
Collections.<String>emptyList(),
ImmutableList.of("**/*.cc"),
ImmutableList.of("**/*.cc"));
assertEvaluates(
pkg,
Collections.<String>emptyList(),
ImmutableList.of("**/*.cc"),
ImmutableList.of("*.cc", "*/*.cc", "*/*/*.cc", "*/*/*/*.cc"));
assertEvaluates(
pkg,
Collections.<String>emptyList(),
ImmutableList.of("**"),
ImmutableList.of("*", "*/*", "*/*/*", "*/*/*/*"));
// ...or carve out just one depth level.
assertEvaluates(
pkg,
ImmutableList.of(
"foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"),
ImmutableList.of("**/*.cc"),
ImmutableList.of("*.cc"));
assertEvaluates(
pkg,
ImmutableList.of("a.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"),
ImmutableList.of("**/*.cc"),
ImmutableList.of("*/*.cc"));
assertEvaluates(
pkg,
ImmutableList.of("a.cc", "foo/bar.cc", "foo/foo.cc", "foo/wiz/quid/gav.cc"),
ImmutableList.of("**/*.cc"),
ImmutableList.of("**/wiz/*.cc"));
}
@Test
public void testTooManyArgumentsGlobErrors() throws Exception {
// glob() accepts at most 4 positional arguments.
events.setFailFast(false);
assertGlobFails(
"glob(['incl'],['excl'],3,True,'extraarg')",
"expected no more than 4 positional arguments, but got 5, for call to method glob");
}
@Test
public void testGlobEnforcesListArgument() throws Exception {
// Both 'include' and 'exclude' must be sequences of strings, not scalars.
events.setFailFast(false);
assertGlobFails(
"glob(1, exclude=2)",
"expected value of type 'sequence of strings' for parameter 'include', "
+ "for call to method glob");
}
@Test
public void testGlobEnforcesListOfStringsArguments() throws Exception {
// Element types inside the lists are checked too.
events.setFailFast(false);
assertGlobFails(
"glob(['a', 'b'], exclude=['c', 42])",
"expected value of type 'string' for element 1 of 'glob' argument, but got 42 (int)");
}
@Test
public void testGlobNegativeTest() throws Exception {
// Negative test that assertGlob does throw an error when asserting against the wrong values.
IllegalArgumentException e =
assertThrows(
IllegalArgumentException.class,
() ->
assertGlobMatches(
/*result=*/ ImmutableList.of("Wombat1.java", "This_file_doesn_t_exist.java"),
/*includes=*/ ImmutableList.of("W*", "subdir"),
/*excludes=*/ ImmutableList.<String>of(),
/* excludeDirs= */ true));
assertThat(e).hasMessageThat().isEqualTo("ERROR /globs/BUILD:2:73: incorrect glob result");
}
// The following glob tests all run against the fixture tree set up by
// assertGlobMatches in the base class (Wombat1.java, Wombat2.java,
// subdir/Wombat3.java).
@Test
public void testGlobExcludeDirectories() throws Exception {
// excludeDirs=true drops 'subdir' even though the pattern matches it.
assertGlobMatches(
/*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java"),
/*includes=*/ ImmutableList.of("W*", "subdir"),
/*excludes=*/ ImmutableList.<String>of(),
/* excludeDirs= */ true);
}
@Test
public void testGlobDoesNotExcludeDirectories() throws Exception {
// excludeDirs=false keeps the matching directory in the result.
assertGlobMatches(
/*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java", "subdir"),
/*includes=*/ ImmutableList.of("W*", "subdir"),
/*excludes=*/ ImmutableList.<String>of(),
/* excludeDirs= */ false);
}
@Test
public void testGlobWithEmptyExcludedList() throws Exception {
// An empty exclude list is a no-op.
assertGlobMatches(
/*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java"),
/*includes=*/ ImmutableList.of("W*"),
/*excludes=*/ Collections.<String>emptyList(),
/* excludeDirs= */ false);
}
@Test
public void testGlobWithQuestionMarkProducesError() throws Exception {
// '?' is not a supported glob wildcard.
assertGlobProducesError("Wombat?.java", true);
}
@Test
public void testGlobWithoutQuestionMarkDoesntProduceError() throws Exception {
assertGlobProducesError("Wombat*.java", false);
}
@Test
public void testGlobWithNonMatchingExcludedList() throws Exception {
// Excludes filter matched names ('*2*' removes Wombat2.java).
assertGlobMatches(
/*result=*/ ImmutableList.of("Wombat1.java"),
/*includes=*/ ImmutableList.of("W*"),
/*excludes=*/ ImmutableList.of("*2*"),
/* excludeDirs= */ false);
}
@Test
public void testGlobWithTwoMatchingGlobExpressionsAndNonmatchingExclusion() throws Exception {
// Excludes apply across all include patterns.
assertGlobMatches(
/*result=*/ ImmutableList.of("Wombat1.java", "subdir/Wombat3.java"),
/*includes=*/ ImmutableList.of("W*", "subdir/W*"),
/*excludes=*/ ImmutableList.of("*2*"),
/* excludeDirs= */ false);
}
@Test
public void testGlobWithSubdirMatchAndExclusion() throws Exception {
// A top-level exclude does not touch matches inside subdirectories.
assertGlobMatches(
/*result=*/ ImmutableList.of("subdir/Wombat3.java"),
/*includes=*/ ImmutableList.of("W*", "subdir/W*"),
/*excludes=*/ ImmutableList.of("Wombat*.java"),
/* excludeDirs= */ false);
}
@Test
public void testBadCharacterInGlob() throws Exception {
events.setFailFast(false);
assertGlobFails("glob(['?'])", "glob pattern '?' contains forbidden '?' wildcard");
}
/**
 * Tests that a glob evaluation that encounters an I/O error throws instead of constructing a
 * package.
 */
@Test
public void testGlobWithIOErrors() throws Exception {
events.setFailFast(false);
scratch.dir("/pkg");
scratch.dir("/pkg/globs");
// An unreadable subdirectory forces the glob traversal to fail with an
// I/O error rather than silently producing a partial result.
Path unreadableSubdir = scratch.resolve("/pkg/globs/unreadable_subdir");
unreadableSubdir.createDirectory();
unreadableSubdir.setReadable(false);
Path file = scratch.file("/pkg/BUILD", "cc_library(name = 'c', srcs = glob(['globs/**']))");
assertThrows(
NoSuchPackageException.class,
() -> packages.eval("pkg", RootedPath.toRootedPath(root, file)));
events.assertContainsError("Directory is not readable");
}
@Test
public void testNativeModuleIsDisabled() throws Exception {
// The 'native' module is not available in BUILD files; using it taints
// the package with errors.
events.setFailFast(false);
Path buildFile = scratch.file("/pkg/BUILD", "native.cc_library(name='bar')");
Package pkg = packages.createPackage("pkg", RootedPath.toRootedPath(root, buildFile));
assertThat(pkg.containsErrors()).isTrue();
}
// package_group() syntax checks: various legal and illegal forms.
@Test
public void testPackageGroupSpecMinimal() throws Exception {
expectEvalSuccess("package_group(name='skin', packages=[])");
}
@Test
public void testPackageGroupSpecSimple() throws Exception {
expectEvalSuccess("package_group(name='skin', packages=['//group/abelian'])");
}
@Test
public void testPackageGroupSpecEmpty() throws Exception {
// 'packages' is optional.
expectEvalSuccess("package_group(name='seed')");
}
@Test
public void testPackageGroupSpecIncludes() throws Exception {
expectEvalSuccess(
"package_group(name='wine',",
"              includes=['//wine:cabernet_sauvignon',",
"                        '//wine:pinot_noir'])");
}
@Test
public void testPackageGroupSpecBad() throws Exception {
expectEvalError("invalid package name", "package_group(name='skin', packages=['--25:17--'])");
}
@Test
public void testPackageGroupsWithSameName() throws Exception {
// package_group names share the package namespace and may not collide.
expectEvalError(
"conflicts with existing package group",
"package_group(name='skin', packages=[])",
"package_group(name='skin', packages=[])");
}
@Test
public void testPackageGroupNamedArguments() throws Exception {
// package_group takes keyword arguments only.
expectEvalError(
"expected no more than 0 positional arguments, but got 1,",
"package_group('skin', name = 'x')");
}
// package() function checks: default_visibility forms and usage constraints.
@Test
public void testPackageSpecMinimal() throws Exception {
Package pkg = expectEvalSuccess("package(default_visibility=[])");
assertThat(pkg.getDefaultVisibility()).isNotNull();
}
@Test
public void testPackageSpecSimple() throws Exception {
expectEvalSuccess("package(default_visibility=['//group:lie'])");
}
@Test
public void testPackageSpecBad() throws Exception {
expectEvalError("invalid target name", "package(default_visibility=[':::'])");
}
@Test
public void testDoublePackageSpecification() throws Exception {
// package() may appear at most once per BUILD file.
expectEvalError(
"can only be used once",
"package(default_visibility=[])",
"package(default_visibility=[])");
}
@Test
public void testEmptyPackageSpecification() throws Exception {
// A bare package() with no arguments is rejected.
expectEvalError("at least one argument must be given to the 'package' function", "package()");
}
@Test
public void testDefaultTestonly() throws Exception {
// package(default_testonly=...) is recorded on the Package.
Package pkg = expectEvalSuccess("package(default_testonly = 1)");
assertThat(pkg.getDefaultTestOnly()).isTrue();
}
@Test
public void testDefaultDeprecation() throws Exception {
// package(default_deprecation=...) is recorded verbatim.
String testMessage = "OMG PONIES!";
Package pkg = expectEvalSuccess("package(default_deprecation = \"" + testMessage + "\")");
assertThat(pkg.getDefaultDeprecation()).isEqualTo(testMessage);
}
@Test
public void testExportsBuildFile() throws Exception {
// Exporting 'BUILD' maps to the package's own build-file target.
Package pkg =
expectEvalSuccess("exports_files(['BUILD'], visibility=['//visibility:private'])");
assertThat(pkg.getTarget("BUILD")).isEqualTo(pkg.getBuildFile());
}
@Test
public void testDefaultDeprecationPropagation() throws Exception {
String msg = "I am completely operational, and all my circuits are functioning perfectly.";
Path file =
scratch.file(
"/foo/BUILD",
"package(default_deprecation = \"" + msg + "\")",
"sh_library(name = 'bar', srcs=['b'])");
Package pkg = packages.eval("foo", RootedPath.toRootedPath(root, file));
Rule fooRule = (Rule) pkg.getTarget("bar");
String deprAttr =
attributes(fooRule).get("deprecation", com.google.devtools.build.lib.packages.Type.STRING);
assertThat(deprAttr).isEqualTo(msg);
}
@Test
public void testDefaultTestonlyPropagation() throws Exception {
  // default_testonly applies to rules that omit 'testonly' but an explicit
  // testonly = 0 on a rule overrides the package default.
  Path buildFile =
      scratch.file(
          "/foo/BUILD",
          "package(default_testonly = 1)",
          "sh_library(name = 'foo', srcs=['b'])",
          "sh_library(name = 'bar', srcs=['b'], testonly = 0)");
  Package evaluated = packages.eval("foo", RootedPath.toRootedPath(root, buildFile));
  Rule inheriting = (Rule) evaluated.getTarget("foo");
  assertThat(
          attributes(inheriting)
              .get("testonly", com.google.devtools.build.lib.packages.Type.BOOLEAN))
      .isTrue();
  Rule overriding = (Rule) evaluated.getTarget("bar");
  assertThat(
          attributes(overriding)
              .get("testonly", com.google.devtools.build.lib.packages.Type.BOOLEAN))
      .isFalse();
}
@Test
public void testDefaultDeprecationOverriding() throws Exception {
  // A rule-level 'deprecation' attribute wins over the package default.
  String ruleMessage =
      "I am completely operational, and all my circuits are functioning perfectly.";
  String packageDefault = "OMG PONIES!";
  Path buildFile =
      scratch.file(
          "/foo/BUILD",
          "package(default_deprecation = \"" + packageDefault + "\")",
          "sh_library(name = 'bar', srcs=['b'], deprecation = \"" + ruleMessage + "\")");
  Package evaluated = packages.eval("foo", RootedPath.toRootedPath(root, buildFile));
  Rule barRule = (Rule) evaluated.getTarget("bar");
  String actual =
      attributes(barRule).get("deprecation", com.google.devtools.build.lib.packages.Type.STRING);
  assertThat(actual).isEqualTo(ruleMessage);
}
@Test
public void testPackageFeatures() throws Exception {
  // package(features=...) applies to the whole package regardless of where the
  // package() call appears relative to rule declarations.
  Path buildFile =
      scratch.file(
          "/a/BUILD",
          "sh_library(name='before')",
          "package(features=['b', 'c'])",
          "sh_library(name='after')");
  Package evaluated = packages.eval("a", RootedPath.toRootedPath(root, buildFile));
  assertThat(evaluated.getFeatures()).containsExactly("b", "c");
}
// A transient I/O failure during glob evaluation must not poison the package:
// after the failure clears, re-creating the package succeeds with no errors.
@Test
public void testTransientErrorsInGlobbing() throws Exception {
  events.setFailFast(false);
  Path buildFile =
      scratch.file("/e/BUILD", "sh_library(name = 'e', data = glob(['*.txt']))");
  Path parentDir = buildFile.getParentDirectory();
  scratch.file("/e/data.txt");
  // Simulate a transient filesystem error while globbing reads the directory.
  throwOnReaddir = parentDir;
  assertThrows(
      NoSuchPackageException.class,
      () -> packages.createPackage("e", RootedPath.toRootedPath(root, buildFile)));
  events.setFailFast(true);
  // Clear the injected fault; the second attempt must succeed cleanly.
  throwOnReaddir = null;
  Package pkg = packages.createPackage("e", RootedPath.toRootedPath(root, buildFile));
  assertThat(pkg.containsErrors()).isFalse();
  assertThat(pkg.getRule("e")).isNotNull();
  // Use a wildcard cast instead of the raw-type cast '(List)' to avoid an
  // unchecked/raw-type warning; the element type is not needed for the assertion.
  List<?> globList = (List<?>) pkg.getRule("e").getAttributeContainer().getAttr("data");
  assertThat(globList).containsExactly(Label.parseAbsolute("//e:data.txt", ImmutableMap.of()));
}
// Re-exporting a file with the identical (public) visibility is tolerated;
// see testExportTwiceFail for the conflicting-visibility case.
@Test
public void testExportTwicePublicOK() throws Exception {
  // In theory, this could be an error, but too many existing files rely on it
  // and it is okay.
  expectEvalSuccess(
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:public\" ])",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:public\" ])");
}
// Same as testExportTwicePublicOK but with matching private visibility.
@Test
public void testExportTwicePublicOK2() throws Exception {
  expectEvalSuccess(
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:private\" ])",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:private\" ])");
}
// Exporting the same file twice with DIFFERENT visibility (private then public)
// is a conflict and must be rejected.
@Test
public void testExportTwiceFail() throws Exception {
  expectEvalError(
      "visibility for exported file 'a.cc' declared twice",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:private\" ])",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:public\" ])");
}
// Mirror of testExportTwiceFail: public then private also conflicts.
@Test
public void testExportTwiceFail2() throws Exception {
  expectEvalError(
      "visibility for exported file 'a.cc' declared twice",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:public\" ])",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:private\" ])");
}
// Declaring licenses for the same exported file twice is an error, even when
// the license lists are identical.
@Test
public void testExportLicenseTwice() throws Exception {
  expectEvalError(
      "licenses for exported file 'a.cc' declared twice",
      "exports_files([\"a.cc\"], licenses = [\"notice\"])",
      "exports_files([\"a.cc\"], licenses = [\"notice\"])");
}
// A genrule output may not shadow a file that was already exported as a source.
@Test
public void testExportGenruleConflict() throws Exception {
  expectEvalError(
      "generated file 'a.cc' in rule 'foo' conflicts with existing source file",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:public\" ])",
      "genrule(name = 'foo',",
      " outs = ['a.cc'],",
      " cmd = '')");
}
// Reverse order of testExportGenruleConflict: exporting a file that a genrule
// already generates is also a conflict.
@Test
public void testGenruleExportConflict() throws Exception {
  expectEvalError(
      "generated label '//pkg:a.cc' conflicts with existing generated file",
      "genrule(name = 'foo',",
      " outs = ['a.cc'],",
      " cmd = '')",
      "exports_files([\"a.cc\"],",
      " visibility = [ \"//visibility:public\" ])");
}
// Happy path: an environment_group whose environments and defaults both
// reference a declared environment in the same package evaluates cleanly.
@Test
public void testValidEnvironmentGroup() throws Exception {
  expectEvalSuccess(
      "environment(name = 'foo')",
      "environment_group(name='group', environments = [':foo'], defaults = [':foo'])");
}
// 'defaults' is a mandatory parameter of environment_group.
@Test
public void testIncompleteEnvironmentGroup() throws Exception {
  expectEvalError(
      "parameter 'defaults' has no default value, for call to function environment_group",
      "environment(name = 'foo')",
      "environment_group(name='group', environments = [':foo'])");
}
// Referencing an environment target that was never declared is an error.
@Test
public void testEnvironmentGroupMissingTarget() throws Exception {
  expectEvalError(
      "environment //pkg:foo does not exist",
      "environment_group(name='group', environments = [':foo'], defaults = [':foo'])");
}
// environment_group members must be environment() targets, not arbitrary rules.
@Test
public void testEnvironmentGroupWrongTargetType() throws Exception {
  expectEvalError(
      "//pkg:foo is not a valid environment",
      "cc_library(name = 'foo')",
      "environment_group(name='group', environments = [':foo'], defaults = [':foo'])");
}
// All environments in a group must live in the group's own package.
@Test
public void testEnvironmentGroupWrongPackage() throws Exception {
  expectEvalError(
      "//foo:foo is not in the same package as group //pkg:group",
      "environment_group(name='group', environments = ['//foo'], defaults = ['//foo'])");
}
// A default must be one of the group's declared environments.
@Test
public void testEnvironmentGroupInvalidDefault() throws Exception {
  expectEvalError(
      "default //pkg:bar is not a declared environment for group //pkg:group",
      "environment(name = 'foo')",
      "environment(name = 'bar')",
      "environment_group(name='group', environments = [':foo'], defaults = [':bar'])");
}
// Duplicate labels in the 'environments' list are rejected.
@Test
public void testEnvironmentGroupDuplicateEnvironments() throws Exception {
  expectEvalError(
      "label '//pkg:foo' is duplicated in the 'environments' list of 'group'",
      "environment(name = 'foo')",
      "environment_group(name='group', environments = [':foo', ':foo'], defaults = [':foo'])");
}
// Duplicate labels in the 'defaults' list are rejected.
@Test
public void testEnvironmentGroupDuplicateDefaults() throws Exception {
  expectEvalError(
      "label '//pkg:foo' is duplicated in the 'defaults' list of 'group'",
      "environment(name = 'foo')",
      "environment_group(name='group', environments = [':foo'], defaults = [':foo', ':foo'])");
}
// Two groups with disjoint environment memberships can coexist in one package.
@Test
public void testMultipleEnvironmentGroupsValidMembership() throws Exception {
  expectEvalSuccess(
      "environment(name = 'foo')",
      "environment(name = 'bar')",
      "environment_group(name='foo_group', environments = [':foo'], defaults = [':foo'])",
      "environment_group(name='bar_group', environments = [':bar'], defaults = [':bar'])");
}
// An environment may belong to at most one environment_group.
@Test
public void testMultipleEnvironmentGroupsConflictingMembership() throws Exception {
  expectEvalError(
      "environment //pkg:foo belongs to both //pkg:bar_group and //pkg:foo_group",
      "environment(name = 'foo')",
      "environment(name = 'bar')",
      "environment_group(name='foo_group', environments = [':foo'], defaults = [':foo'])",
      "environment_group(name='bar_group', environments = [':foo'], defaults = [':foo'])");
}
// The 'fulfills' attribute must point at environment() targets only.
@Test
public void testFulfillsReferencesWrongTargetType() throws Exception {
  expectEvalError(
      "in \"fulfills\" attribute of //pkg:foo: //pkg:bar is not a valid environment",
      "environment(name = 'foo', fulfills = [':bar'])",
      "cc_library(name = 'bar')",
      "environment_group(name='foo_group', environments = [':foo'], defaults = [])");
}
// A fulfilled environment must be a member of the same group as the fulfiller.
@Test
public void testFulfillsNotInEnvironmentGroup() throws Exception {
  expectEvalError(
      "in \"fulfills\" attribute of //pkg:foo: //pkg:bar is not a member of this group",
      "environment(name = 'foo', fulfills = [':bar'])",
      "environment(name = 'bar')",
      "environment_group(name='foo_group', environments = [':foo'], defaults = [])");
}
@Test
public void testPackageDefaultEnvironments() throws Exception {
  // default_compatible_with / default_restricted_to labels are recorded on the package.
  Package evaluated =
      expectEvalSuccess(
          "package(",
          " default_compatible_with=['//foo'],",
          " default_restricted_to=['//bar'],",
          ")");
  assertThat(evaluated.getDefaultCompatibleWith())
      .containsExactly(Label.parseAbsolute("//foo", ImmutableMap.of()));
  assertThat(evaluated.getDefaultRestrictedTo())
      .containsExactly(Label.parseAbsolute("//bar", ImmutableMap.of()));
}
// Duplicate labels in default_compatible_with are rejected.
@Test
public void testPackageDefaultCompatibilityDuplicates() throws Exception {
  expectEvalError(
      "'//foo:foo' is duplicated in the 'default_compatible_with' list",
      "package(default_compatible_with=['//foo', '//bar', '//foo'])");
}
// Duplicate labels in default_restricted_to are rejected.
@Test
public void testPackageDefaultRestrictionDuplicates() throws Exception {
  expectEvalError(
      "'//foo:foo' is duplicated in the 'default_restricted_to' list",
      "package(default_restricted_to=['//foo', '//bar', '//foo'])");
}
// Supplies the test-harness apparatus wired to this suite's event reporter.
@Override
protected PackageFactoryApparatus createPackageFactoryApparatus() {
  return new PackageFactoryApparatus(events.reporter());
}
// No path prefix is applied for this suite's scratch files.
@Override
protected String getPathPrefix() {
  return "";
}
@Test
public void testGlobPatternExtractor() {
  // The extractor walks a parsed BUILD file and collects glob patterns it can
  // resolve statically: literals, constant concatenations, and keyword forms.
  // Patterns bound to variables it cannot resolve (e.g. 'pattern') are skipped.
  GlobPatternExtractor extractor = new GlobPatternExtractor();
  extractor.visit(
      StarlarkFile.parse(
          ParserInput.fromLines(
              "pattern = '*'",
              "some_variable = glob([",
              " '**/*',",
              " 'a' + 'b',",
              " pattern,",
              "])",
              "other_variable = glob(include = ['a'], exclude = ['b'])",
              "third_variable = glob(['c'], exclude_directories = 0)")));
  assertThat(extractor.getExcludeDirectoriesPatterns())
      .containsExactly("ab", "a", "**/*");
  assertThat(extractor.getIncludeDirectoriesPatterns()).containsExactly("c");
}
}
| |
/*
* Copyright 2014-2019 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.driver;
import io.aeron.driver.media.ReceiveChannelEndpoint;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import io.aeron.logbuffer.LogBufferDescriptor;
import io.aeron.protocol.DataHeaderFlyweight;
import io.aeron.protocol.SetupFlyweight;
import org.agrona.concurrent.UnsafeBuffer;
import java.net.InetSocketAddress;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link DataPacketDispatcher}: routing of data packets and setup
 * frames to subscriptions and publication images, setup-message elicitation,
 * image lifecycle (activation, removal, cool-down), and session-specific
 * subscriptions. All collaborators are Mockito mocks; no real network I/O occurs.
 * Several tests rely on exact interaction counts and {@link InOrder} sequences,
 * so statement order here is significant.
 */
public class DataPacketDispatcherTest
{
    private static final long CORRELATION_ID_1 = 101;
    private static final long CORRELATION_ID_2 = 102;
    private static final int STREAM_ID = 10;
    private static final int INITIAL_TERM_ID = 3;
    private static final int ACTIVE_TERM_ID = 3;
    private static final int SESSION_ID = 1;
    private static final int TERM_OFFSET = 0;
    private static final int LENGTH = DataHeaderFlyweight.HEADER_LENGTH + 100;
    private static final int MTU_LENGTH = 1024;
    private static final int TERM_LENGTH = LogBufferDescriptor.TERM_MIN_LENGTH;
    private static final InetSocketAddress SRC_ADDRESS = new InetSocketAddress("localhost", 4510);

    // The dispatcher under test is the only real object; everything else is a mock.
    private final DriverConductorProxy mockConductorProxy = mock(DriverConductorProxy.class);
    private final Receiver mockReceiver = mock(Receiver.class);
    private final DataPacketDispatcher dispatcher = new DataPacketDispatcher(mockConductorProxy, mockReceiver);
    private final DataHeaderFlyweight mockHeader = mock(DataHeaderFlyweight.class);
    private final SetupFlyweight mockSetupHeader = mock(SetupFlyweight.class);
    private final UnsafeBuffer mockBuffer = mock(UnsafeBuffer.class);
    private final PublicationImage mockImage = mock(PublicationImage.class);
    private final ReceiveChannelEndpoint mockChannelEndpoint = mock(ReceiveChannelEndpoint.class);

    /**
     * Stubs the data header, image, and setup header so they all describe the
     * same session/stream/term, matching the constants above.
     */
    @Before
    public void setUp()
    {
        when(mockHeader.sessionId()).thenReturn(SESSION_ID);
        when(mockHeader.streamId()).thenReturn(STREAM_ID);
        when(mockHeader.termId()).thenReturn(ACTIVE_TERM_ID);
        when(mockHeader.termOffset()).thenReturn(TERM_OFFSET);
        when(mockImage.sessionId()).thenReturn(SESSION_ID);
        when(mockImage.streamId()).thenReturn(STREAM_ID);
        when(mockSetupHeader.sessionId()).thenReturn(SESSION_ID);
        when(mockSetupHeader.streamId()).thenReturn(STREAM_ID);
        when(mockSetupHeader.activeTermId()).thenReturn(ACTIVE_TERM_ID);
        when(mockSetupHeader.initialTermId()).thenReturn(INITIAL_TERM_ID);
        when(mockSetupHeader.termOffset()).thenReturn(TERM_OFFSET);
        when(mockSetupHeader.mtuLength()).thenReturn(MTU_LENGTH);
        when(mockSetupHeader.termLength()).thenReturn(TERM_LENGTH);
    }

    // Data arriving for a subscribed stream with no image yet should trigger a
    // setup-eliciting status message and a pending-setup registration.
    @Test
    public void shouldElicitSetupMessageWhenDataArrivesForSubscriptionWithoutImage()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        verify(mockImage, never()).insertPacket(anyInt(), anyInt(), any(), anyInt(), anyInt(), any());
        verify(mockChannelEndpoint).sendSetupElicitingStatusMessage(0, SRC_ADDRESS, SESSION_ID, STREAM_ID);
        verify(mockReceiver).addPendingSetupMessage(SESSION_ID, STREAM_ID, 0, mockChannelEndpoint, false, SRC_ADDRESS);
    }

    // Repeated data packets while a setup is already pending must not re-elicit.
    @Test
    public void shouldOnlyElicitSetupMessageOnceWhenDataArrivesForSubscriptionWithoutImage()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        verify(mockImage, never()).insertPacket(anyInt(), anyInt(), any(), anyInt(), anyInt(), any());
        verify(mockChannelEndpoint).sendSetupElicitingStatusMessage(0, SRC_ADDRESS, SESSION_ID, STREAM_ID);
        verify(mockReceiver).addPendingSetupMessage(SESSION_ID, STREAM_ID, 0, mockChannelEndpoint, false, SRC_ADDRESS);
    }

    // After the pending setup is removed, new data should elicit a fresh setup.
    @Test
    public void shouldElicitSetupMessageAgainWhenDataArrivesForSubscriptionWithoutImageAfterRemovePendingSetup()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.removePendingSetup(SESSION_ID, STREAM_ID);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        verify(mockImage, never()).insertPacket(anyInt(), anyInt(), any(), anyInt(), anyInt(), any());
        verify(mockChannelEndpoint, times(2)).sendSetupElicitingStatusMessage(0, SRC_ADDRESS, SESSION_ID, STREAM_ID);
        verify(mockReceiver, times(2))
            .addPendingSetupMessage(SESSION_ID, STREAM_ID, 0, mockChannelEndpoint, false, SRC_ADDRESS);
    }

    // A setup frame for a subscribed stream should ask the conductor for an image.
    @Test
    public void shouldRequestCreateImageUponReceivingSetup()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        verify(mockConductorProxy).createPublicationImage(
            SESSION_ID, STREAM_ID, INITIAL_TERM_ID, ACTIVE_TERM_ID, TERM_OFFSET, TERM_LENGTH,
            MTU_LENGTH, 0, SRC_ADDRESS, SRC_ADDRESS, mockChannelEndpoint);
    }

    // Duplicate setup frames must result in a single image-creation request.
    @Test
    public void shouldOnlyRequestCreateImageOnceUponReceivingSetup()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        verify(mockConductorProxy).createPublicationImage(
            SESSION_ID, STREAM_ID, INITIAL_TERM_ID, ACTIVE_TERM_ID, TERM_OFFSET, TERM_LENGTH,
            MTU_LENGTH, 0, SRC_ADDRESS, SRC_ADDRESS, mockChannelEndpoint);
    }

    // Once an image exists, a setup frame should not trigger creation again.
    // NOTE(review): verifyZeroInteractions is deprecated in Mockito 3+; kept here
    // for compatibility with the Mockito version this codebase targets.
    @Test
    public void shouldNotRequestCreateImageOnceUponReceivingSetupAfterImageAdded()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        verifyZeroInteractions(mockConductorProxy);
    }

    // Removing the subscription should deactivate its image.
    @Test
    public void shouldSetImageInactiveOnRemoveSubscription()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage);
        dispatcher.removeSubscription(STREAM_ID);
        verify(mockImage).ifActiveGoInactive();
    }

    // Removing the image directly should also deactivate it.
    @Test
    public void shouldSetImageInactiveOnRemoveImage()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage);
        dispatcher.removePublicationImage(mockImage);
        verify(mockImage).ifActiveGoInactive();
    }

    // While an image is in cool-down after removal, data and setup are ignored.
    @Test
    public void shouldIgnoreDataAndSetupAfterImageRemoved()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage);
        dispatcher.removePublicationImage(mockImage);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        verifyZeroInteractions(mockConductorProxy);
        verifyZeroInteractions(mockReceiver);
    }

    // After the cool-down is cleared, the elicit/pending/create sequence should
    // run again, in that exact order.
    @Test
    public void shouldNotIgnoreDataAndSetupAfterImageRemovedAndCoolDownRemoved()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage);
        dispatcher.removePublicationImage(mockImage);
        dispatcher.removeCoolDown(SESSION_ID, STREAM_ID);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        dispatcher.onSetupMessage(mockChannelEndpoint, mockSetupHeader, SRC_ADDRESS, 0);
        verify(mockImage, never()).insertPacket(anyInt(), anyInt(), any(), anyInt(), anyInt(), any());
        final InOrder inOrder = inOrder(mockChannelEndpoint, mockReceiver, mockConductorProxy);
        inOrder.verify(mockChannelEndpoint).sendSetupElicitingStatusMessage(0, SRC_ADDRESS, SESSION_ID, STREAM_ID);
        inOrder.verify(mockReceiver)
            .addPendingSetupMessage(SESSION_ID, STREAM_ID, 0, mockChannelEndpoint, false, SRC_ADDRESS);
        inOrder.verify(mockConductorProxy).createPublicationImage(
            SESSION_ID, STREAM_ID, INITIAL_TERM_ID, ACTIVE_TERM_ID, TERM_OFFSET, TERM_LENGTH,
            MTU_LENGTH, 0, SRC_ADDRESS, SRC_ADDRESS, mockChannelEndpoint);
    }

    // A data packet for a known image should activate it and insert the packet.
    @Test
    public void shouldDispatchDataToCorrectImage()
    {
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        verify(mockImage).activate();
        verify(mockImage).insertPacket(ACTIVE_TERM_ID, TERM_OFFSET, mockBuffer, LENGTH, 0, SRC_ADDRESS);
    }

    // Removing a stale image (same session/stream, older correlation id) must not
    // evict the newer image registered after re-subscription.
    @Test
    public void shouldNotRemoveNewPublicationImageFromOldRemovePublicationImageAfterRemoveSubscription()
    {
        final PublicationImage mockImage1 = mock(PublicationImage.class);
        final PublicationImage mockImage2 = mock(PublicationImage.class);
        when(mockImage1.sessionId()).thenReturn(SESSION_ID);
        when(mockImage1.streamId()).thenReturn(STREAM_ID);
        when(mockImage1.correlationId()).thenReturn(CORRELATION_ID_1);
        when(mockImage2.sessionId()).thenReturn(SESSION_ID);
        when(mockImage2.streamId()).thenReturn(STREAM_ID);
        when(mockImage2.correlationId()).thenReturn(CORRELATION_ID_2);
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage1);
        dispatcher.removeSubscription(STREAM_ID);
        dispatcher.addSubscription(STREAM_ID);
        dispatcher.addPublicationImage(mockImage2);
        dispatcher.removePublicationImage(mockImage1);
        dispatcher.onDataPacket(mockChannelEndpoint, mockHeader, mockBuffer, LENGTH, SRC_ADDRESS, 0);
        verify(mockImage1, never()).insertPacket(anyInt(), anyInt(), any(), anyInt(), anyInt(), any());
        verify(mockImage2).insertPacket(ACTIVE_TERM_ID, TERM_OFFSET, mockBuffer, LENGTH, 0, SRC_ADDRESS);
    }

    // Add/remove of a session-specific subscription must not throw even when no
    // wildcard (any-session) subscription exists for the stream.
    @Test
    public void shouldRemoveSessionSpecificSubscriptionWithoutAny()
    {
        dispatcher.addSubscription(STREAM_ID, SESSION_ID);
        dispatcher.removeSubscription(STREAM_ID, SESSION_ID);
    }
}
| |
/**
* Copyright (C) 2011 Smithsonian Astrophysical Observatory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/***********************************************************************
*
* File: TestSpectrumIO.java
*
* Author: mdittmar Created: 2010-12-23
*
* Virtual Astrophysical Observatory; contributed by Center for Astrophysics
*
* Update History:
* 2010-12-23: MCD Create
*
***********************************************************************/
package cfa.vo.sedlib;
import cfa.vo.sedlib.common.SedInconsistentException;
import junit.framework.Test;
import junit.framework.TestSuite;
import cfa.vo.sedlib.io.SedFormat;
/**
Tests Sedlib ability to create, read and write the Spectrum objects
*/
public class TestSpectrumIO extends SedTestBase
{
    Sed m_sed = null;      // SED document under test; reset between tests
    String m_fname = null; // current input/output file name; reset between tests
    boolean keep = true;   // when true, generated output files are never cleaned up
    int rc = 1;            // return code from write/diff helpers; 0 means success

    public TestSpectrumIO( String name )
    {
        super(name);
    }

    /**
     * Builds the JUnit 3 suite from all public test* methods of this class.
     */
    public static Test suite()
    {
        TestSuite suite = new TestSuite( TestSpectrumIO.class );
        return suite;
    }

    /**
     * Preparatory steps to be executed before each test.
     */
    protected void setUp()
    {
    }

    /**
     * Cleanup steps to be executed after each test.
     */
    protected void tearDown()
    {
        m_sed = null;
        m_fname = null;
        rc = 1;
    }

    /**
     * Tests ability to create and serialize Spectrum objects
     * <p>
     * Create a new Spectrum object which is fully defined
     * and populated with values. Write this object out in
     * each of the supported formats.
     */
    public void testSpectrum_new()
    {
        String testName = "testSpectrum_new";
        System.out.println(" run "+testName+"()");

        /* Create SED with one (empty) segment */
        m_sed = EmptyObjects.createSED( 0 );

        /* Set Spectrum namespace
        m_sed.setNamespace( "spec:" );
        */

        /* Get the Segment */
        Spectrum segment = CompleteObjects.createSpectrum();
        assertNotNull( "Failed to create SEGMENT: " , segment );

        /* Assign to SED */
        try
        {
            m_sed.addSegment(segment);
        }
        catch (Exception exp)
        {
            throw new RuntimeException (exp.getMessage (), exp);
        }

        /* Write SED with Spectrum to file in each of the supported formats */
        for (SedFormat format : SedFormat.values())
        {
            // XML round-tripping is exercised (when enabled) by the copyXML test below.
            if (format == SedFormat.XML)
                continue;
            m_fname = "Spectrum." + format.exten();
            rc = writeSED( format , tu.mkOutFileName( m_fname ), m_sed );
            assertEquals( testName + ": Failed to write " + m_fname, 0, rc );

            /* Compare output file with baseline */
            // FITS output needs a structure-aware diff; other formats diff as text.
            if ( format == SedFormat.FITS )
                rc = tu.diffFits( m_fname );
            else
                rc = tu.DIFFIT( m_fname );
            assertEquals( testName + ": Diff failed - " + m_fname, 0, rc );
            // NOTE(review): 'keep' is hard-coded true above, so this cleanup
            // branch never executes — presumably intentional; confirm.
            if ( (rc == 0 ) && (!keep) )
                tu.cleanupFiles( m_fname );
        }
    }

    /**
     * Tests ability to load Spectrum objects from VOT format
     * <p>
     * Load file containing a fully defined Spectrum object
     * and write the object back out in the same format.
     * If all information is extracted properly, the output file
     * should identically match the input.
     */
    public void testSpectrum_copyVOT()
    {
        String testName = "testSpectrum_copyVOT";
        System.out.println(" run "+testName+"()");

        SedFormat format = SedFormat.VOT;
        String outFileName = "Spectrum2."+format.exten ();
        m_fname = "Spectrum." + format.exten();

        /* Read input file */
        m_sed = readSED( format, tu.mkInFileName( m_fname ) );
        assertNotNull( testName + ": Document load failed - " + m_fname, m_sed );

        /* Write back out.. same format */
        rc = writeSED( format , tu.mkOutFileName( outFileName ), m_sed );
        assertEquals( testName + ": Failed to write " + outFileName, 0, rc );

        rc = tu.DIFFIT( outFileName );
        assertEquals( testName + ": Diff failed - " + outFileName, 0, rc );
        if ( (rc == 0 ) && (!keep) )
            tu.cleanupFiles( outFileName );
    }

    /**
     * Tests ability to load Spectrum objects from FITS format
     * <p>
     * Load file containing a fully defined Spectrum object
     * and write the object back out in the same format.
     * If all information is extracted properly, the output file
     * should identically match the input.
     */
    public void testSpectrum_copyFITS()
    {
        String testName = "testSpectrum_copyFITS";
        System.out.println(" run "+testName+"()");

        SedFormat format = SedFormat.FITS;
        m_fname = "Spectrum." + format.exten();

        /* Read input file */
        m_sed = readSED( format, tu.mkInFileName( m_fname ) );
        assertNotNull( testName + ": Document load failed - " + m_fname, m_sed );

        /* Write back out.. same format */
        m_fname = "Spectrum_copy." + format.exten();
        rc = writeSED( format , tu.mkOutFileName( m_fname ), m_sed );
        assertEquals( testName + ": Failed to write " + m_fname, 0, rc );

        rc = tu.diffFits( m_fname );
        assertEquals( testName + ": Diff failed - " + m_fname, 0, rc );
        if ( (rc == 0 ) && (!keep) )
            tu.cleanupFiles( m_fname );
    }

    /**
     * Tests ability to load Spectrum objects from XML format
     * <p>
     * Load file containing a fully defined Spectrum object
     * and write the object back out in the same format.
     * If all information is extracted properly, the output file
     * should identically match the input.
     */
    // NOTE(review): the missing 't' in 'estSpectrum_copyXML' means JUnit 3 will
    // not pick this method up as a test (only 'test'-prefixed methods run), so
    // it is effectively disabled — looks deliberate; confirm before "fixing".
    public void estSpectrum_copyXML()
    {
        String testName = "testSpectrum_copyXML";
        System.out.println(" run "+testName+"()");

        SedFormat format = SedFormat.XML;
        m_fname = "Spectrum." + format.exten();

        /* Read input file */
        m_sed = readSED( format, tu.mkInFileName( m_fname ) );
        assertNotNull( testName + ": Document load failed - " + m_fname, m_sed );

        /* Write back out.. same format */
        rc = writeSED( format , tu.mkOutFileName( m_fname ), m_sed );
        assertEquals( testName + ": Failed to write " + m_fname, 0, rc );

        rc = tu.DIFFIT( m_fname );
        assertEquals( testName + ": Diff failed - " + m_fname, 0, rc );
        if ( (rc == 0 ) && (!keep) )
            tu.cleanupFiles( m_fname );
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authc.saml;
import joptsimple.OptionSet;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.KeyStoreWrapper;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.xpack.core.security.authc.RealmSettings;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.saml2.metadata.EntityDescriptor;
import org.opensaml.saml.saml2.metadata.RequestedAttribute;
import org.opensaml.saml.saml2.metadata.SPSSODescriptor;
import org.opensaml.saml.security.impl.SAMLSignatureProfileValidator;
import org.opensaml.security.credential.Credential;
import org.opensaml.security.credential.UsageType;
import org.opensaml.security.x509.BasicX509Credential;
import org.opensaml.xmlsec.keyinfo.KeyInfoSupport;
import org.opensaml.xmlsec.signature.Signature;
import org.opensaml.xmlsec.signature.X509Certificate;
import org.opensaml.xmlsec.signature.X509Data;
import org.opensaml.xmlsec.signature.support.SignatureValidator;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SamlMetadataCommandTests extends SamlTestCase {
// Mock keystore without a password; see setup() for stubbing.
private KeyStoreWrapper keyStore;
// Mock keystore that only decrypts with "keystore-password"; see setup().
private KeyStoreWrapper passwordProtectedKeystore;
/**
 * Initializes OpenSAML and wires two keystore mocks: one unprotected, and one
 * password-protected that accepts "keystore-password" and throws a
 * SecurityException for "wrong-password".
 */
@Before
public void setup() throws Exception {
    SamlUtils.initialize(logger);
    this.keyStore = mock(KeyStoreWrapper.class);
    when(keyStore.isLoaded()).thenReturn(true);
    this.passwordProtectedKeystore = mock(KeyStoreWrapper.class);
    when(passwordProtectedKeystore.isLoaded()).thenReturn(true);
    when(passwordProtectedKeystore.hasPassword()).thenReturn(true);
    doNothing().when(passwordProtectedKeystore).decrypt("keystore-password".toCharArray());
    doThrow(new SecurityException("Provided keystore password was incorrect", new IOException()))
        .when(passwordProtectedKeystore).decrypt("wrong-password".toCharArray());
}
public void testDefaultOptions() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[0]);
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout")
.put(RealmSettings.PREFIX + "saml.my_saml.attributes.principal", "urn:oid:0.9.2342.19200300.100.1.1");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = new MockTerminal();
if (usedKeyStore.hasPassword()) {
terminal.addSecretInput("keystore-password");
}
// What is the friendly name for "principal" attribute "urn:oid:0.9.2342.19200300.100.1.1" [default: principal]
terminal.addTextInput("");
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
assertThat(descriptor, notNullValue());
assertThat(descriptor.getEntityID(), equalTo("https://kibana.my.corp/"));
assertThat(descriptor.getRoleDescriptors(), iterableWithSize(1));
assertThat(descriptor.getRoleDescriptors().get(0), instanceOf(SPSSODescriptor.class));
final SPSSODescriptor spDescriptor = (SPSSODescriptor) descriptor.getRoleDescriptors().get(0);
assertThat(spDescriptor.getAssertionConsumerServices(), iterableWithSize(1));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getLocation(), equalTo("https://kibana.my.corp/saml/login"));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).isDefault(), equalTo(true));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getIndex(), equalTo(1));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getBinding(), equalTo(SAMLConstants.SAML2_POST_BINDING_URI));
assertThat(spDescriptor.getAttributeConsumingServices(), iterableWithSize(1));
assertThat(spDescriptor.getAttributeConsumingServices().get(0).isDefault(), equalTo(true));
assertThat(spDescriptor.getAttributeConsumingServices().get(0).getIndex(), equalTo(1));
assertThat(spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes(), iterableWithSize(1));
final RequestedAttribute uidAttribute = spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes().get(0);
assertThat(uidAttribute.getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.1"));
assertThat(uidAttribute.getFriendlyName(), equalTo("principal"));
assertThat(spDescriptor.getSingleLogoutServices(), iterableWithSize(1));
assertThat(spDescriptor.getSingleLogoutServices().get(0).getLocation(), equalTo("https://kibana.my.corp/saml/logout"));
assertThat(spDescriptor.getSingleLogoutServices().get(0).getBinding(), equalTo(SAMLConstants.SAML2_REDIRECT_BINDING_URI));
assertThat(spDescriptor.isAuthnRequestsSigned(), equalTo(useSigningCredentials));
assertThat(spDescriptor.getWantAssertionsSigned(), equalTo(true));
if (useSigningCredentials) {
assertThat(spDescriptor.getKeyDescriptors(), iterableWithSize(1));
assertThat(spDescriptor.getKeyDescriptors().get(0).getUse(), equalTo(UsageType.SIGNING));
assertThat(spDescriptor.getKeyDescriptors().get(0).getKeyInfo().getPGPDatas(), iterableWithSize(0));
assertThat(spDescriptor.getKeyDescriptors().get(0).getKeyInfo().getMgmtDatas(), iterableWithSize(0));
assertThat(spDescriptor.getKeyDescriptors().get(0).getKeyInfo().getSPKIDatas(), iterableWithSize(0));
final List<X509Data> x509 = spDescriptor.getKeyDescriptors().get(0).getKeyInfo().getX509Datas();
assertThat(x509, iterableWithSize(1));
assertThat(x509.get(0).getX509Certificates(), iterableWithSize(1));
final X509Certificate xmlCert = x509.get(0).getX509Certificates().get(0);
assertThat(xmlCert.getValue(), startsWith("MIIDWDCCAkCgAwIBAgIVANRTZaFrK+Pz19O8TZsb3HSJmAWpMA0GCSqGSIb3DQEB"));
// Verify that OpenSAML thinks the XML representation is the same as our input
final java.security.cert.X509Certificate javaCert = KeyInfoSupport.getCertificate(xmlCert);
assertThat(CertParsingUtils.readCertificates(Collections.singletonList(certPath)), arrayContaining(javaCert));
} else {
assertThat(spDescriptor.getKeyDescriptors(), iterableWithSize(0));
}
}
/**
 * When more than one SAML realm is configured and no "-realm" option is given,
 * the command cannot choose a realm and must fail with a UserException whose
 * terminal output names every candidate realm and the disambiguating option.
 */
public void testFailIfMultipleRealmsExist() throws Exception {
    final KeyStoreWrapper nodeKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
    final Settings.Builder settingsBuilder = Settings.builder().put("path.home", createTempDir());
    // Configure two structurally identical SAML realms: saml_a and saml_b.
    for (String realmName : new String[] { "saml_a", "saml_b" }) {
        final String prefix = RealmSettings.PREFIX + "saml." + realmName + ".";
        final String baseUrl = "https://" + realmName.replace('_', '.') + "/";
        settingsBuilder.put(prefix + "type", "saml");
        settingsBuilder.put(prefix + "sp.entity_id", baseUrl);
        settingsBuilder.put(prefix + "sp.acs", baseUrl);
    }
    final Environment environment = TestEnvironment.newEnvironment(settingsBuilder.build());
    final SamlMetadataCommand metadataCommand = new SamlMetadataCommand((e) -> nodeKeyStore);
    final OptionSet noOptions = metadataCommand.getParser().parse(new String[0]);
    final MockTerminal terminal = getTerminalPossiblyWithPassword(nodeKeyStore);
    final UserException failure = expectThrows(UserException.class,
            () -> metadataCommand.buildEntityDescriptor(terminal, noOptions, environment));
    assertThat(failure.getMessage(), containsString("multiple SAML realms"));
    assertThat(terminal.getErrorOutput(), containsString("saml_a"));
    assertThat(terminal.getErrorOutput(), containsString("saml_b"));
    assertThat(terminal.getErrorOutput(), containsString("Use the -realm option"));
}
/**
 * With two SAML realms configured, passing "-realm saml_b" selects the second
 * realm, and the generated entity descriptor must carry that realm's SP
 * entity id and assertion-consumer-service URL.
 */
public void testSpecifyRealmNameAsParameter() throws Exception {
    final KeyStoreWrapper nodeKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
    final Settings.Builder settingsBuilder = Settings.builder().put("path.home", createTempDir());
    // Configure realms saml_a (order 1) and saml_b (order 2) with parallel settings.
    int realmOrder = 1;
    for (String realmName : new String[] { "saml_a", "saml_b" }) {
        final String prefix = RealmSettings.PREFIX + "saml." + realmName + ".";
        final String baseUrl = "https://" + realmName.replace('_', '.') + "/";
        settingsBuilder.put(prefix + "order", realmOrder++);
        settingsBuilder.put(prefix + "type", "saml");
        settingsBuilder.put(prefix + "sp.entity_id", baseUrl);
        settingsBuilder.put(prefix + "sp.acs", baseUrl + "acs");
    }
    final Environment environment = TestEnvironment.newEnvironment(settingsBuilder.build());
    final SamlMetadataCommand metadataCommand = new SamlMetadataCommand((e) -> nodeKeyStore);
    final OptionSet options = metadataCommand.getParser().parse(new String[] { "-realm", "saml_b" });
    final MockTerminal terminal = getTerminalPossiblyWithPassword(nodeKeyStore);
    final EntityDescriptor descriptor = metadataCommand.buildEntityDescriptor(terminal, options, environment);
    assertThat(descriptor, notNullValue());
    assertThat(descriptor.getEntityID(), equalTo("https://saml.b/"));
    assertThat(descriptor.getRoleDescriptors(), iterableWithSize(1));
    assertThat(descriptor.getRoleDescriptors().get(0), instanceOf(SPSSODescriptor.class));
    final SPSSODescriptor serviceProvider = (SPSSODescriptor) descriptor.getRoleDescriptors().get(0);
    assertThat(serviceProvider.getAssertionConsumerServices(), iterableWithSize(1));
    assertThat(serviceProvider.getAssertionConsumerServices().get(0).getLocation(), equalTo("https://saml.b/acs"));
}
/**
 * Requested attributes come from two sources: "-attribute" command-line options
 * and the realm's attribute mappings. For each attribute the command prompts
 * interactively for whichever half of the (urn name, friendly name) pair it
 * does not already know. The addTextInput() calls below are consumed strictly
 * in prompt order, so their sequence must match the command's questioning
 * order exactly — do not reorder them.
 */
public void testHandleAttributes() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Settings settings = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.saml1.order", 1)
.put(RealmSettings.PREFIX + "saml.saml1.type", "saml")
.put(RealmSettings.PREFIX + "saml.saml1.sp.entity_id", "https://saml.example.com/")
.put(RealmSettings.PREFIX + "saml.saml1.sp.acs", "https://saml.example.com/")
.put(RealmSettings.PREFIX + "saml.saml1.attributes.principal", "urn:oid:0.9.2342.19200300.100.1.1")
.put(RealmSettings.PREFIX + "saml.saml1.attributes.name", "displayName")
.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
// One attribute given by urn (needs a friendly name) and one by friendly name (needs a urn).
final OptionSet options = command.getParser().parse(new String[] {
"-attribute", "urn:oid:0.9.2342.19200300.100.1.3",
"-attribute", "groups"
});
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
// What is the friendly name for command line attribute "urn:oid:0.9.2342.19200300.100.1.3" [default: none]
terminal.addTextInput("mail");
// What is the standard (urn) name for attribute "groups" (required)
terminal.addTextInput("urn:oid:1.3.6.1.4.1.5923.1.5.1.1");
// What is the standard (urn) name for "name" attribute "displayName" (required)
terminal.addTextInput("urn:oid:2.16.840.1.113730.3.1.241");
// What is the friendly name for "principal" "urn:oid:0.9.2342.19200300.100.1.1" [default: principal]
terminal.addTextInput("uid");
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
assertThat(descriptor, notNullValue());
assertThat(descriptor.getEntityID(), equalTo("https://saml.example.com/"));
assertThat(descriptor.getRoleDescriptors(), iterableWithSize(1));
assertThat(descriptor.getRoleDescriptors().get(0), instanceOf(SPSSODescriptor.class));
final SPSSODescriptor spDescriptor = (SPSSODescriptor) descriptor.getRoleDescriptors().get(0);
assertThat(spDescriptor.getAttributeConsumingServices(), iterableWithSize(1));
final List<RequestedAttribute> attributes = spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes();
// Expect all four attributes (two from the command line, two from realm mappings),
// each with the urn/friendly-name pair completed via the terminal answers above.
assertThat(attributes, iterableWithSize(4));
assertThat(attributes.get(0).getFriendlyName(), equalTo("mail"));
assertThat(attributes.get(0).getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.3"));
assertThat(attributes.get(1).getFriendlyName(), equalTo("groups"));
assertThat(attributes.get(1).getName(), equalTo("urn:oid:1.3.6.1.4.1.5923.1.5.1.1"));
assertThat(attributes.get(2).getFriendlyName(), equalTo("displayName"));
assertThat(attributes.get(2).getName(), equalTo("urn:oid:2.16.840.1.113730.3.1.241"));
assertThat(attributes.get(3).getFriendlyName(), equalTo("uid"));
assertThat(attributes.get(3).getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.1"));
}
/**
 * In "-batch" mode the command must not prompt at all: a command-line
 * attribute given only by urn ends up with no friendly name, and the
 * realm-mapped "principal" attribute keeps its mapping key as the
 * friendly name.
 */
public void testHandleAttributesInBatchMode() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Settings settings = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.saml1.order", 1)
.put(RealmSettings.PREFIX + "saml.saml1.type", "saml")
.put(RealmSettings.PREFIX + "saml.saml1.sp.entity_id", "https://saml.example.com/")
.put(RealmSettings.PREFIX + "saml.saml1.sp.acs", "https://saml.example.com/")
.put(RealmSettings.PREFIX + "saml.saml1.attributes.principal", "urn:oid:0.9.2342.19200300.100.1.1")
.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[] {
"-attribute", "urn:oid:0.9.2342.19200300.100.1.3",
"-batch"
});
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
// No addTextInput() calls here: batch mode must succeed without interaction.
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
assertThat(descriptor, notNullValue());
assertThat(descriptor.getEntityID(), equalTo("https://saml.example.com/"));
assertThat(descriptor.getRoleDescriptors(), iterableWithSize(1));
assertThat(descriptor.getRoleDescriptors().get(0), instanceOf(SPSSODescriptor.class));
final SPSSODescriptor spDescriptor = (SPSSODescriptor) descriptor.getRoleDescriptors().get(0);
assertThat(spDescriptor.getAttributeConsumingServices(), iterableWithSize(1));
final List<RequestedAttribute> attributes = spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes();
assertThat(attributes, iterableWithSize(2));
// Command-line attribute: no prompt, so no friendly name.
assertThat(attributes.get(0).getFriendlyName(), nullValue());
assertThat(attributes.get(0).getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.3"));
// Realm-mapped attribute: friendly name defaults to the mapping key.
assertThat(attributes.get(1).getFriendlyName(), equalTo("principal"));
assertThat(attributes.get(1).getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.1"));
}
/**
 * Signs the generated metadata with a PKCS#12 bundle given via
 * "-signing-bundle", then verifies the signature and re-checks the metadata
 * content to prove signing did not alter the document. The terminal inputs
 * are consumed in prompt order and must not be reordered.
 */
public void testSigningMetadataWithPfx() throws Exception {
assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm());
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
final Path p12Path = getDataPath("saml.p12");
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[]{
"-signing-bundle", p12Path.toString()
});
// Randomly also configure realm signing credentials, which toggles AuthnRequestsSigned.
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout")
.put(RealmSettings.PREFIX + "saml.my_saml.attributes.principal", "urn:oid:0.9.2342.19200300.100.1.1");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
// What is the friendly name for "principal" attribute "urn:oid:0.9.2342.19200300.100.1.1" [default: principal]
terminal.addTextInput("");
// Empty secret input — presumably the password prompt for saml.p12 (bundle has no password); verify against the command's prompt order
terminal.addSecretInput("");
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
command.possiblySignDescriptor(terminal, options, descriptor, env);
assertThat(descriptor, notNullValue());
// Verify generated signature
assertThat(descriptor.getSignature(), notNullValue());
assertThat(validateSignature(descriptor.getSignature()), equalTo(true));
// Make sure that Signing didn't mangle the XML at all and we can still read metadata
assertThat(descriptor.getEntityID(), equalTo("https://kibana.my.corp/"));
assertThat(descriptor.getRoleDescriptors(), iterableWithSize(1));
assertThat(descriptor.getRoleDescriptors().get(0), instanceOf(SPSSODescriptor.class));
final SPSSODescriptor spDescriptor = (SPSSODescriptor) descriptor.getRoleDescriptors().get(0);
assertThat(spDescriptor.getAssertionConsumerServices(), iterableWithSize(1));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getLocation(), equalTo("https://kibana.my.corp/saml/login"));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).isDefault(), equalTo(true));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getIndex(), equalTo(1));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getBinding(), equalTo(SAMLConstants.SAML2_POST_BINDING_URI));
final RequestedAttribute uidAttribute = spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes().get(0);
assertThat(uidAttribute.getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.1"));
assertThat(uidAttribute.getFriendlyName(), equalTo("principal"));
assertThat(spDescriptor.isAuthnRequestsSigned(), equalTo(useSigningCredentials));
assertThat(spDescriptor.getWantAssertionsSigned(), equalTo(true));
}
/**
 * Signs the metadata with a password-protected PKCS#12 bundle, supplying the
 * correct password via "-signing-key-password" so no interactive password
 * prompt is needed, then verifies the resulting signature.
 */
public void testSigningMetadataWithPasswordProtectedPfx() throws Exception {
assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm());
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
final Path p12Path = getDataPath("saml_with_password.p12");
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[]{
"-signing-bundle", p12Path.toString(),
"-signing-key-password", "saml"
});
// Randomly also configure realm signing credentials (certPath/keyPath are only used here).
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
command.possiblySignDescriptor(terminal, options, descriptor, env);
assertThat(descriptor, notNullValue());
// Verify generated signature
assertThat(descriptor.getSignature(), notNullValue());
assertThat(validateSignature(descriptor.getSignature()), equalTo(true));
}
/**
 * Passing the wrong "-signing-key-password" for a password-protected PEM
 * signing key must make possiblySignDescriptor() fail with a UserException,
 * and the terminal error output must explain that the private key could not
 * be parsed.
 */
public void testErrorSigningMetadataWithWrongPassword() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
final Path signingKeyPath = getDataPath("saml_with_password.key");
// Fix: wrap the same randomly-chosen keystore the terminal is prepared for.
// Previously this used the unprotected `keyStore` unconditionally, so the
// password-protected branch of `usedKeyStore` was never exercised and the
// terminal's queued keystore password did not match the keystore in use.
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[]{
"-signing-cert", certPath.toString(),
"-signing-key", signingKeyPath.toString(),
"-signing-key-password", "wrongpassword"
});
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
// Building the descriptor succeeds; only the signing step should fail.
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
final UserException userException = expectThrows(UserException.class, () -> command.possiblySignDescriptor(terminal, options,
descriptor, env));
assertThat(userException.getMessage(), containsString("Unable to create metadata document"));
assertThat(terminal.getErrorOutput(), containsString("Error parsing Private Key from"));
}
/**
 * Signs the generated metadata with an unprotected PEM certificate/key pair
 * given via "-signing-cert"/"-signing-key", then verifies the signature.
 */
public void testSigningMetadataWithPem() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
//Use this keypair for signing the metadata also
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
// Fix: wrap the same randomly-chosen keystore the terminal is prepared for.
// Previously this used the unprotected `keyStore` unconditionally, so the
// password-protected branch of `usedKeyStore` was never exercised and the
// terminal's queued keystore password did not match the keystore in use.
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[]{
"-signing-cert", certPath.toString(),
"-signing-key", keyPath.toString()
});
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
command.possiblySignDescriptor(terminal, options, descriptor, env);
assertThat(descriptor, notNullValue());
// Verify generated signature
assertThat(descriptor.getSignature(), notNullValue());
assertThat(validateSignature(descriptor.getSignature()), equalTo(true));
}
/**
 * Signs the metadata with a password-protected PEM key, supplying the correct
 * passphrase via "-signing-key-password" so no interactive prompt is needed,
 * then verifies the resulting signature.
 */
public void testSigningMetadataWithPasswordProtectedPem() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
//Use same keypair for signing the metadata
final Path signingKeyPath = getDataPath("saml_with_password.key");
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[]{
"-signing-cert", certPath.toString(),
"-signing-key", signingKeyPath.toString(),
"-signing-key-password", "saml"
});
// Randomly also configure realm signing credentials (certPath/keyPath are only used here).
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
command.possiblySignDescriptor(terminal, options, descriptor, env);
assertThat(descriptor, notNullValue());
// Verify generated signature
assertThat(descriptor.getSignature(), notNullValue());
assertThat(validateSignature(descriptor.getSignature()), equalTo(true));
}
/**
 * Like testSigningMetadataWithPasswordProtectedPem, but the PEM key
 * passphrase is NOT given on the command line; the command must prompt for
 * it and read it from the terminal instead.
 */
public void testSigningMetadataWithPasswordProtectedPemInTerminal() throws Exception {
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
//Use same keypair for signing the metadata
final Path signingKeyPath = getDataPath("saml_with_password.key");
final Path certPath = getDataPath("saml.crt");
final Path keyPath = getDataPath("saml.key");
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
// Note: no "-signing-key-password" option — the passphrase comes via the terminal below.
final OptionSet options = command.getParser().parse(new String[]{
"-signing-cert", certPath.toString(),
"-signing-key", signingKeyPath.toString()
});
final boolean useSigningCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder()
.put("path.home", createTempDir())
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout");
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.certificate", certPath.toString())
.put(RealmSettings.PREFIX + "saml.my_saml.signing.key", keyPath.toString());
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
// Passphrase for saml_with_password.key, consumed when the signing key is read.
terminal.addSecretInput("saml");
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
command.possiblySignDescriptor(terminal, options, descriptor, env);
assertThat(descriptor, notNullValue());
// Verify generated signature
assertThat(descriptor.getSignature(), notNullValue());
assertThat(validateSignature(descriptor.getSignature()), equalTo(true));
}
/**
 * Builds metadata for a realm that may have a signing keystore (one key) and
 * an encryption keystore (two keys), both PKCS#12 and created on the fly.
 * Verifies that the generated SP descriptor exposes exactly one SIGNING
 * KeyDescriptor and/or two ENCRYPTION KeyDescriptors, matching the
 * certificates placed in those keystores.
 */
public void testDefaultOptionsWithSigningAndMultipleEncryptionKeys() throws Exception {
assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm());
final KeyStoreWrapper usedKeyStore = randomFrom(keyStore, passwordProtectedKeystore);
final Path dir = createTempDir();
// Build a PKCS#12 encryption keystore holding two RSA key pairs.
final Path ksEncryptionFile = dir.resolve("saml-encryption.p12");
final Tuple<java.security.cert.X509Certificate, PrivateKey> certEncKeyPair1 = readKeyPair("RSA_2048");
final Tuple<java.security.cert.X509Certificate, PrivateKey> certEncKeyPair2 = readKeyPair("RSA_4096");
final KeyStore ksEncrypt = KeyStore.getInstance("PKCS12");
ksEncrypt.load(null);
ksEncrypt.setKeyEntry(getAliasName(certEncKeyPair1), certEncKeyPair1.v2(), "key-password".toCharArray(),
new Certificate[] { certEncKeyPair1.v1() });
ksEncrypt.setKeyEntry(getAliasName(certEncKeyPair2), certEncKeyPair2.v2(), "key-password".toCharArray(),
new Certificate[] { certEncKeyPair2.v1() });
try (OutputStream out = Files.newOutputStream(ksEncryptionFile)) {
ksEncrypt.store(out, "ks-password".toCharArray());
}
// Build a PKCS#12 signing keystore holding a single RSA key pair.
final Path ksSigningFile = dir.resolve("saml-signing.p12");
final Tuple<java.security.cert.X509Certificate, PrivateKey> certKeyPairSign = readRandomKeyPair("RSA");
final KeyStore ksSign = KeyStore.getInstance("PKCS12");
ksSign.load(null);
ksSign.setKeyEntry(getAliasName(certKeyPairSign), certKeyPairSign.v2(), "key-password".toCharArray(),
new Certificate[] { certKeyPairSign.v1() });
try (OutputStream out = Files.newOutputStream(ksSigningFile)) {
ksSign.store(out, "ks-password".toCharArray());
}
// Keystore and per-key passwords are provided through secure settings.
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(RealmSettings.PREFIX + "saml.my_saml.signing.keystore.secure_password", "ks-password");
secureSettings.setString(RealmSettings.PREFIX + "saml.my_saml.signing.keystore.secure_key_password", "key-password");
secureSettings.setString(RealmSettings.PREFIX + "saml.my_saml.encryption.keystore.secure_password", "ks-password");
secureSettings.setString(RealmSettings.PREFIX + "saml.my_saml.encryption.keystore.secure_key_password", "key-password");
final SamlMetadataCommand command = new SamlMetadataCommand((e) -> usedKeyStore);
final OptionSet options = command.getParser().parse(new String[0]);
// Independently toggle signing and encryption configuration to cover all four combinations.
final boolean useSigningCredentials = randomBoolean();
final boolean useEncryptionCredentials = randomBoolean();
final Settings.Builder settingsBuilder = Settings.builder().put("path.home", dir)
.put(RealmSettings.PREFIX + "saml.my_saml.type", "saml")
.put(RealmSettings.PREFIX + "saml.my_saml.order", 1)
.put(RealmSettings.PREFIX + "saml.my_saml.idp.entity_id", "https://okta.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.entity_id", "https://kibana.my.corp/")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.acs", "https://kibana.my.corp/saml/login")
.put(RealmSettings.PREFIX + "saml.my_saml.sp.logout", "https://kibana.my.corp/saml/logout")
.put(RealmSettings.PREFIX + "saml.my_saml.attributes.principal", "urn:oid:0.9.2342.19200300.100.1.1");
settingsBuilder.setSecureSettings(secureSettings);
if (useSigningCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.keystore.path", ksSigningFile.toString());
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.signing.keystore.type", "PKCS12");
}
if (useEncryptionCredentials) {
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.encryption.keystore.path", ksEncryptionFile.toString());
settingsBuilder.put(RealmSettings.PREFIX + "saml.my_saml.encryption.keystore.type", "PKCS12");
}
final Settings settings = settingsBuilder.build();
final Environment env = TestEnvironment.newEnvironment(settings);
final MockTerminal terminal = getTerminalPossiblyWithPassword(usedKeyStore);
// What is the friendly name for "principal" attribute
// "urn:oid:0.9.2342.19200300.100.1.1" [default: principal]
terminal.addTextInput("");
final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env);
assertThat(descriptor, notNullValue());
assertThat(descriptor.getEntityID(), equalTo("https://kibana.my.corp/"));
assertThat(descriptor.getRoleDescriptors(), iterableWithSize(1));
assertThat(descriptor.getRoleDescriptors().get(0), instanceOf(SPSSODescriptor.class));
final SPSSODescriptor spDescriptor = (SPSSODescriptor) descriptor.getRoleDescriptors().get(0);
assertThat(spDescriptor.getAssertionConsumerServices(), iterableWithSize(1));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getLocation(), equalTo("https://kibana.my.corp/saml/login"));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).isDefault(), equalTo(true));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getIndex(), equalTo(1));
assertThat(spDescriptor.getAssertionConsumerServices().get(0).getBinding(), equalTo(SAMLConstants.SAML2_POST_BINDING_URI));
assertThat(spDescriptor.getAttributeConsumingServices(), iterableWithSize(1));
assertThat(spDescriptor.getAttributeConsumingServices().get(0).isDefault(), equalTo(true));
assertThat(spDescriptor.getAttributeConsumingServices().get(0).getIndex(), equalTo(1));
assertThat(spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes(), iterableWithSize(1));
final RequestedAttribute uidAttribute = spDescriptor.getAttributeConsumingServices().get(0).getRequestAttributes().get(0);
assertThat(uidAttribute.getName(), equalTo("urn:oid:0.9.2342.19200300.100.1.1"));
assertThat(uidAttribute.getFriendlyName(), equalTo("principal"));
assertThat(spDescriptor.getSingleLogoutServices(), iterableWithSize(1));
assertThat(spDescriptor.getSingleLogoutServices().get(0).getLocation(), equalTo("https://kibana.my.corp/saml/logout"));
assertThat(spDescriptor.getSingleLogoutServices().get(0).getBinding(), equalTo(SAMLConstants.SAML2_REDIRECT_BINDING_URI));
assertThat(spDescriptor.isAuthnRequestsSigned(), equalTo(useSigningCredentials));
assertThat(spDescriptor.getWantAssertionsSigned(), equalTo(true));
// 1 signing key + 2 encryption keys, depending on which were configured.
int expectedKeyDescriptorSize = (useSigningCredentials) ? 1 : 0;
expectedKeyDescriptorSize = (useEncryptionCredentials) ? expectedKeyDescriptorSize + 2 : expectedKeyDescriptorSize;
assertThat(spDescriptor.getKeyDescriptors(), iterableWithSize(expectedKeyDescriptorSize));
if (expectedKeyDescriptorSize > 0) {
// Track which encryption certificates we have not yet seen in the metadata.
final Set<java.security.cert.X509Certificate> encryptionCertificatesToMatch = new HashSet<>();
if (useEncryptionCredentials) {
encryptionCertificatesToMatch.add(certEncKeyPair1.v1());
encryptionCertificatesToMatch.add(certEncKeyPair2.v1());
}
spDescriptor.getKeyDescriptors().stream().forEach((keyDesc) -> {
UsageType usageType = keyDesc.getUse();
final List<X509Data> x509 = keyDesc.getKeyInfo().getX509Datas();
assertThat(x509, iterableWithSize(1));
assertThat(x509.get(0).getX509Certificates(), iterableWithSize(1));
final X509Certificate xmlCert = x509.get(0).getX509Certificates().get(0);
final java.security.cert.X509Certificate javaCert;
try {
// Verify that OpenSAML thinks the XML representation is the same as our input
javaCert = KeyInfoSupport.getCertificate(xmlCert);
} catch (CertificateException ce) {
throw ExceptionsHelper.convertToRuntime(ce);
}
if (usageType == UsageType.SIGNING) {
assertTrue("Found UsageType as SIGNING in SP metadata when not testing for signing credentials", useSigningCredentials);
assertEquals("Signing Certificate from SP metadata does not match", certKeyPairSign.v1(), javaCert);
} else if (usageType == UsageType.ENCRYPTION) {
assertTrue(useEncryptionCredentials);
assertTrue("Encryption Certificate was not found in encryption certificates",
encryptionCertificatesToMatch.remove(javaCert));
} else {
fail("Usage type should have been either SIGNING or ENCRYPTION");
}
});
if (useEncryptionCredentials) {
assertTrue("Did not find all encryption certificates in exported SP metadata", encryptionCertificatesToMatch.isEmpty());
}
}
}
/**
 * Entering an incorrect password for the password-protected node keystore
 * must surface as a UserException with a clear message, not as an opaque
 * cryptographic failure.
 */
public void testWrongKeystorePassword() {
    final Path certificatePath = getDataPath("saml.crt");
    final Path privateKeyPath = getDataPath("saml.key");
    final SamlMetadataCommand metadataCommand = new SamlMetadataCommand((e) -> passwordProtectedKeystore);
    final OptionSet options = metadataCommand.getParser().parse(
            new String[] { "-signing-cert", certificatePath.toString(), "-signing-key", privateKeyPath.toString() });
    final Environment environment =
            TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
    final MockTerminal terminal = new MockTerminal();
    terminal.addSecretInput("wrong-password");
    final UserException failure = expectThrows(UserException.class,
            () -> metadataCommand.buildEntityDescriptor(terminal, options, environment));
    assertThat(failure.getMessage(), CoreMatchers.containsString("Provided keystore password was incorrect"));
}
/**
 * Builds a unique keystore alias for a certificate/key pair.
 * The pre-generated test keys share the same subject name, so the
 * certificate's serial number is mixed in to stop later entries from
 * overwriting earlier ones in the keystore.
 */
private String getAliasName(final Tuple<java.security.cert.X509Certificate, PrivateKey> certKeyPair) {
    final java.security.cert.X509Certificate certificate = certKeyPair.v1();
    final String subjectName = certificate.getSubjectX500Principal().getName().toLowerCase(Locale.US);
    return subjectName + "-" + certificate.getSerialNumber() + "-alias";
}
/**
 * Verifies {@code signature} against the well-known test credentials
 * ({@code saml.crt} / {@code saml.key}): first the SAML signature-profile
 * check, then the cryptographic validation.
 *
 * @return {@code true} if the signature validates; {@code false} on any
 *         failure — this is a test helper, so every exception (parse error,
 *         profile violation, bad signature) simply means "not valid".
 */
private boolean validateSignature(Signature signature) {
try {
Certificate[] certificates = CertParsingUtils.
readCertificates(Collections.singletonList(getDataPath("saml.crt").toString()), newEnvironment());
PrivateKey key = PemUtils.readPrivateKey(getDataPath("saml.key"),
""::toCharArray);
Credential verificationCredential = new BasicX509Credential((java.security.cert.X509Certificate) certificates[0], key);
SAMLSignatureProfileValidator profileValidator = new SAMLSignatureProfileValidator();
profileValidator.validate(signature);
SignatureValidator.validate(signature, verificationCredential);
return true;
} catch (Exception e) {
// Deliberate broad catch: any failure mode counts as an invalid signature.
return false;
}
}
/**
 * Creates a MockTerminal pre-loaded with the node keystore password when
 * {@code keyStore} is password-protected, so the command's password prompt
 * can be satisfied non-interactively.
 */
private MockTerminal getTerminalPossiblyWithPassword(KeyStoreWrapper keyStore) {
    final MockTerminal mockTerminal = new MockTerminal();
    final boolean requiresPassword = keyStore.hasPassword();
    if (requiresPassword) {
        mockTerminal.addSecretInput("keystore-password");
    }
    return mockTerminal;
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.esb.form.editors.article.rcp;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.FormColors;
import org.eclipse.ui.forms.IManagedForm;
import org.eclipse.ui.forms.editor.FormEditor;
import org.eclipse.ui.forms.events.ExpansionAdapter;
import org.eclipse.ui.forms.events.ExpansionEvent;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBArtifact;
import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBProjectArtifact;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.esb.forgm.editors.article.FormArticlePlugin;
import org.wso2.developerstudio.esb.form.editors.Activator;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.CustomStore;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.IMessageStore;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.InMemory;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.JDBC;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.JMS;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.RabbitMQ;
import org.wso2.developerstudio.esb.form.editors.article.rcp.message.stores.WSO2MB;
/**
*
* To change the template for this generated type comment go to Window -
* Preferences - Java - Code Generation - Code and Comments
*/
/**
 * Eclipse form page for creating and editing ESB Message Store artifacts.
 *
 * <p>The page contains a basic section (store name and type), connection and
 * parameter sections contributed by each {@link IMessageStore} implementation,
 * and a guaranteed-delivery section that is shown only for store types that
 * support producer guaranteed delivery.
 */
public class MessageStoreFormPage extends AbstractEsbFormPage {

    private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);

    /** Combo item labels; order must match the insertion order of {@link #storeMap}. */
    private String[] messageStoreTypes = {"In-Memory Message Store", "JMS Message Store", "WSO2 MB Message Store", "RabbitMQ Message Store", "JDBC Message Store", "Custom Message Store"};

    /** Store implementation backing the currently selected combo entry. */
    @SuppressWarnings("unused")
    private IMessageStore currentMessageStore = null;

    public Text storeName;
    public Combo storeType;
    ScrolledForm form;
    FormToolkit toolkit;

    // Fully-qualified Synapse message store class names, used as storeMap keys.
    private static final String IN_MEMORY_MS_FQN = "org.apache.synapse.message.store.impl.memory.InMemoryStore";
    private static final String JMS_MS_FQN = "org.apache.synapse.message.store.impl.jms.JmsStore";
    private static final String WSO2MB = "wso2mb";
    private static final String RABBITMQ_MS_FQN = "org.apache.synapse.message.store.impl.rabbitmq.RabbitMQStore";
    private static final String JDBC_MS_FQN = "org.apache.synapse.message.store.impl.jdbc.JDBCMessageStore";
    private static final String customStore = "customStore";

    Section guaranteedDeliverySection;
    public Combo guaranteedDeliveryEnable;
    public Combo failoverMessageStore;

    /** Insertion-ordered store map; iteration order must match {@link #messageStoreTypes}. */
    Map<String, IMessageStore> storeMap;

    /**
     * Background colour allocated for the store-name field. SWT colours created
     * with {@code new Color(...)} are operating-system resources that the
     * toolkit does not manage, so the colour is kept here and released in
     * {@link #dispose()} to avoid a resource leak.
     */
    private Color storeNameBackground;

    /**
     * @param editor parent multi-page form editor hosting this page
     */
    public MessageStoreFormPage(FormEditor editor) {
        super(editor, "messageStoreForm", Messages.getString("MessageStorePage.sectionMainTitle"));
    }

    /**
     * Builds the complete form UI: basic, connection, guaranteed-delivery and
     * parameter sections, in that order.
     */
    protected void createFormContent(IManagedForm managedForm) {
        form = managedForm.getForm();
        toolkit = managedForm.getToolkit();
        form.setText(Messages.getString("MessageStorePage.sectionMainTitle"));
        form.setBackgroundImage(FormArticlePlugin.getDefault().getImage(FormArticlePlugin.IMG_FORM_BG));
        GridLayout layout = new GridLayout();
        layout.marginLeft = 20;
        layout.marginRight = 20;
        layout.marginTop = 10;
        layout.numColumns = 1;
        layout.makeColumnsEqualWidth = true;
        form.getBody().setLayout(layout);
        GridData formGridData = new GridData();
        formGridData.horizontalSpan = 6;
        formGridData.grabExcessHorizontalSpace = true;
        form.setLayoutData(formGridData);
        // LinkedHashMap keeps the stores in the same order as messageStoreTypes,
        // which the type-combo selection handler and refreshStoreSettings() rely on.
        storeMap = new LinkedHashMap<>();
        storeMap.put(IN_MEMORY_MS_FQN, new InMemory(form, toolkit, this));
        storeMap.put(JMS_MS_FQN, new JMS(form, toolkit, this));
        storeMap.put(WSO2MB, new WSO2MB(form, toolkit, this));
        storeMap.put(RABBITMQ_MS_FQN, new RabbitMQ(form, toolkit, this));
        storeMap.put(JDBC_MS_FQN, new JDBC(form, toolkit, this));
        storeMap.put(customStore, new CustomStore(form, toolkit, this));
        createFormBasicSection(form, toolkit);
        createFormConnectionSection(form, toolkit);
        createFormGuaranteedDeliverySection(form, toolkit);
        createFormParameterSection(form, toolkit);
    }

    /**
     * Creates the "basic" section containing the store name text field and the
     * store type combo.
     */
    @SuppressWarnings("deprecation")
    private void createFormBasicSection(final ScrolledForm form, FormToolkit toolkit) {
        Section basicSection = toolkit.createSection(form.getBody(), Section.TWISTIE | Section.EXPANDED);
        basicSection.setActiveToggleColor(toolkit.getHyperlinkGroup().getActiveForeground());
        basicSection.setToggleColor(toolkit.getColors().getColor(FormColors.SEPARATOR));
        toolkit.createCompositeSeparator(basicSection);
        basicSection.addExpansionListener(new ExpansionAdapter() {
            public void expansionStateChanged(ExpansionEvent e) {
                form.reflow(false);
            }
        });
        basicSection.setText(Messages.getString("MessageStorePage.section.basic"));
        GridData samplegridData = new GridData();
        samplegridData.horizontalSpan = 3;
        samplegridData.horizontalAlignment = SWT.FILL;
        samplegridData.grabExcessHorizontalSpace = true;
        basicSection.setLayoutData(samplegridData);
        Composite basicSectionClient = toolkit.createComposite(basicSection);
        basicSectionClient.setLayout(new GridLayout());
        basicSection.setClient(basicSectionClient);
        toolkit.createLabel(basicSectionClient, "Message Store Name *");
        storeName = toolkit.createText(basicSectionClient, "");
        // Allocate the colour once and keep the reference so dispose() can
        // release it (previously leaked on every page creation).
        storeNameBackground = new Color(null, 229, 236, 253);
        storeName.setBackground(storeNameBackground);
        GridData storeNameGridData = new GridData();
        storeNameGridData.horizontalSpan = 3;
        storeNameGridData.horizontalAlignment = GridData.FILL;
        storeNameGridData.grabExcessHorizontalSpace = true;
        storeName.setLayoutData(storeNameGridData);
        storeName.addModifyListener(new ModifyListener() {
            @Override
            public void modifyText(ModifyEvent e) {
                setSave(true);
                updateDirtyState();
            }
        });
        toolkit.createLabel(basicSectionClient, "Message Store Type *");
        storeType = new Combo(basicSectionClient, SWT.DROP_DOWN);
        GridData storeTypeGridData = new GridData();
        storeTypeGridData.horizontalSpan = 3;
        storeTypeGridData.horizontalAlignment = GridData.FILL;
        storeTypeGridData.grabExcessHorizontalSpace = true;
        storeType.setLayoutData(storeTypeGridData);
        storeType.setItems(messageStoreTypes);
        storeType.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                // storeMap iterates in insertion order, which matches the combo
                // items, so the selection index identifies the store directly
                // (same assumption refreshStoreSettings() already makes).
                int selection = storeType.getSelectionIndex();
                int index = 0;
                for (IMessageStore store : storeMap.values()) {
                    if (index == selection) {
                        currentMessageStore = store;
                        break;
                    }
                    index++;
                }
                refreshStoreSettings();
                setSave(true);
                updateDirtyState();
            }
        });
    }

    /** Delegates creation of connection-section fields to every registered store. */
    private void createFormConnectionSection(final ScrolledForm form, FormToolkit toolkit) {
        for (IMessageStore aStore : storeMap.values()) {
            aStore.createConnectionSectionFields();
        }
    }

    /**
     * Creates the guaranteed-delivery section with the enable combo and the
     * failover message store combo (populated from the workspace's existing
     * message store artifacts).
     */
    private void createFormGuaranteedDeliverySection(final ScrolledForm form, FormToolkit toolkit) {
        guaranteedDeliverySection = this.createSection(form, toolkit, Messages.getString("MessageStorePage.section.guaranteedDelivery"));
        GridData samplegridData = new GridData();
        samplegridData.horizontalSpan = 3;
        samplegridData.horizontalAlignment = SWT.FILL;
        samplegridData.grabExcessHorizontalSpace = true;
        guaranteedDeliverySection.setLayoutData(samplegridData);
        Composite guaranteedDeliverySectionClient = toolkit.createComposite(guaranteedDeliverySection);
        guaranteedDeliverySectionClient.setLayout(new GridLayout());
        guaranteedDeliverySection.setClient(guaranteedDeliverySectionClient);
        guaranteedDeliverySection.setExpanded(false);
        toolkit.createLabel(guaranteedDeliverySectionClient, "Enable Producer Guaranteed Delivery");
        guaranteedDeliveryEnable = new Combo(guaranteedDeliverySectionClient, SWT.DROP_DOWN);
        GridData guaranteedDeliveryEnableGridData = new GridData();
        guaranteedDeliveryEnableGridData.horizontalSpan = 3;
        guaranteedDeliveryEnableGridData.horizontalAlignment = GridData.FILL;
        guaranteedDeliveryEnableGridData.grabExcessHorizontalSpace = true;
        guaranteedDeliveryEnable.setLayoutData(guaranteedDeliveryEnableGridData);
        String[] states = {"True", "False"};
        guaranteedDeliveryEnable.setItems(states);
        guaranteedDeliveryEnable.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                setSave(true);
                updateDirtyState();
                super.widgetSelected(e);
            }
        });
        toolkit.createLabel(guaranteedDeliverySectionClient, "Failover Message Store");
        failoverMessageStore = new Combo(guaranteedDeliverySectionClient, SWT.DROP_DOWN);
        ArrayList<String> availableMSList = getAvailableMessageStores();
        failoverMessageStore.setItems(availableMSList.toArray(new String[availableMSList.size()]));
        GridData failoverMessageStoreGridData = new GridData();
        failoverMessageStoreGridData.horizontalSpan = 3;
        failoverMessageStoreGridData.horizontalAlignment = GridData.FILL;
        failoverMessageStoreGridData.grabExcessHorizontalSpace = true;
        failoverMessageStore.setLayoutData(failoverMessageStoreGridData);
        failoverMessageStore.addSelectionListener(new SelectionAdapter() {
            public void widgetSelected(SelectionEvent e) {
                setSave(true);
                updateDirtyState();
            }
        });
    }

    /**
     * Scans all open ESB projects in the workspace and collects the names of
     * their message store artifacts (artifact file names without the
     * {@code .xml} extension).
     *
     * @return names of all message store artifacts found; possibly empty
     */
    private ArrayList<String> getAvailableMessageStores() {
        ArrayList<String> availableMS = new ArrayList<String>();
        Shell shell = Display.getDefault().getActiveShell();
        IProject[] projects = ResourcesPlugin.getWorkspace().getRoot().getProjects();
        for (IProject activeProject : projects) {
            if (activeProject == null) {
                continue;
            }
            try {
                if (activeProject.isOpen()
                        && activeProject.hasNature("org.wso2.developerstudio.eclipse.esb.project.nature")) {
                    ESBProjectArtifact esbProjectArtifact = new ESBProjectArtifact();
                    File projectPath = activeProject.getLocation().toFile();
                    try {
                        esbProjectArtifact.fromFile(activeProject.getFile("artifact.xml").getLocation().toFile());
                        for (ESBArtifact esbArtifact : esbProjectArtifact.getAllESBArtifacts()) {
                            if ("synapse/message-store".equals(esbArtifact.getType())) {
                                File artifact = new File(projectPath, esbArtifact.getFile());
                                availableMS.add(artifact.getName().replaceAll("[.]xml$", ""));
                            }
                        }
                    } catch (Exception e) {
                        log.error("Error occurred while scanning the project for artifacts", e);
                        ErrorDialog.openError(shell, "Error occurred while scanning the project for artifacts",
                                e.getMessage(), null);
                    }
                }
            } catch (CoreException e) {
                log.error("Error occurred while scanning the project", e);
                ErrorDialog.openError(shell, "Error occurred while scanning the project", e.getMessage(), null);
            }
        }
        return availableMS;
    }

    /** Delegates creation of parameter-section fields to every registered store. */
    private void createFormParameterSection(final ScrolledForm form, FormToolkit toolkit) {
        for (IMessageStore aStore : storeMap.values()) {
            aStore.createParameterSectionFields();
        }
    }

    /**
     * Creates a collapsible, initially-expanded section with the standard
     * toggle colours, a separator, and the given heading.
     */
    @SuppressWarnings("deprecation")
    private Section createSection(final ScrolledForm form, FormToolkit toolkit, final String heading) {
        Section section = toolkit.createSection(form.getBody(), Section.TWISTIE | Section.EXPANDED);
        section.setActiveToggleColor(toolkit.getHyperlinkGroup().getActiveForeground());
        section.setToggleColor(toolkit.getColors().getColor(FormColors.SEPARATOR));
        toolkit.createCompositeSeparator(section);
        section.addExpansionListener(new ExpansionAdapter() {
            public void expansionStateChanged(ExpansionEvent e) {
                form.reflow(false);
            }
        });
        section.setText(heading);
        return section;
    }

    /**
     * @param storeClass storeMap key (Synapse store FQN, or the custom-store key)
     * @return the registered store implementation, or {@code null} if unknown
     */
    public IMessageStore getStoreImpl(String storeClass) {
        return storeMap.get(storeClass);
    }

    /**
     * Shows the connection/parameter sections of the currently selected store
     * and hides those of every other store. The guaranteed-delivery section is
     * made visible only when the selected store supports guaranteed delivery.
     */
    public void refreshStoreSettings() {
        int selectedIndex = storeType.getSelectionIndex();
        int index = 0;
        boolean guaranteedDeliveryFlag = false;
        for (IMessageStore aStore : storeMap.values()) {
            if (index == selectedIndex) {
                aStore.showParametersSection();
                aStore.showConnectionSection();
                if (aStore.hasGuaranteedDelivery() && !guaranteedDeliveryFlag) {
                    this.guaranteedDeliverySection.setVisible(true);
                    guaranteedDeliveryFlag = true;
                }
            } else {
                aStore.hideParametersSection();
                aStore.hideConnectionSection();
                // Hide only while the selected store has not already claimed the section.
                if (aStore.hasGuaranteedDelivery() && !guaranteedDeliveryFlag) {
                    this.guaranteedDeliverySection.setVisible(false);
                }
            }
            index++;
        }
    }

    /**
     * Releases the SWT colour allocated for the store-name field, then lets the
     * superclass dispose the rest of the page.
     */
    @Override
    public void dispose() {
        if (storeNameBackground != null && !storeNameBackground.isDisposed()) {
            storeNameBackground.dispose();
        }
        super.dispose();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.transforms;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.PriorityQueue;
import javax.annotation.Nullable;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.CustomCoder;
import org.apache.beam.sdk.coders.ListCoder;
import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn;
import org.apache.beam.sdk.transforms.Combine.AccumulatingCombineFn.Accumulator;
import org.apache.beam.sdk.transforms.Combine.PerKey;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
import org.apache.beam.sdk.util.NameUtils;
import org.apache.beam.sdk.util.NameUtils.NameOverride;
import org.apache.beam.sdk.util.common.ElementByteSizeObserver;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
/**
* {@code PTransform}s for finding the largest (or smallest) set
* of elements in a {@code PCollection}, or the largest (or smallest)
* set of values associated with each key in a {@code PCollection} of
* {@code KV}s.
*/
public class Top {
private Top() {
// Non-instantiable utility class; use the static factory methods instead.
}
/**
* Returns a {@code PTransform} that takes an input
* {@code PCollection<T>} and returns a {@code PCollection<List<T>>} with a
* single element containing the largest {@code count} elements of the input
* {@code PCollection<T>}, in decreasing order, sorted using the
* given {@code Comparator<T>}. The {@code Comparator<T>} must also
* be {@code Serializable}.
*
* <p>If {@code count} {@code >} the number of elements in the
* input {@code PCollection}, then all the elements of the input
* {@code PCollection} will be in the resulting
* {@code List}, albeit in sorted order.
*
* <p>All the elements of the result's {@code List}
* must fit into the memory of a single machine.
*
* <p>Example of use:
* <pre> {@code
* PCollection<Student> students = ...;
* PCollection<List<Student>> top10Students =
* students.apply(Top.of(10, new CompareStudentsByAvgGrade()));
* } </pre>
*
* <p>By default, the {@code Coder} of the output {@code PCollection}
* is a {@code ListCoder} of the {@code Coder} of the elements of
* the input {@code PCollection}.
*
* <p>If the input {@code PCollection} is windowed into {@link GlobalWindows},
* an empty {@code List<T>} in the {@link GlobalWindow} will be output if the input
* {@code PCollection} is empty. To use this with inputs with other windowing,
* either {@link Combine.Globally#withoutDefaults withoutDefaults} or
* {@link Combine.Globally#asSingletonView asSingletonView} must be called.
*
* <p>See also {@link #smallest} and {@link #largest}, which sort
* {@code Comparable} elements using their natural ordering.
*
* <p>See also {@link #perKey}, {@link #smallestPerKey}, and
* {@link #largestPerKey}, which take a {@code PCollection} of
* {@code KV}s and return the top values associated with each key.
*/
public static <T, ComparatorT extends Comparator<T> & Serializable>
    Combine.Globally<T, List<T>> of(int count, ComparatorT compareFn) {
  // Global combine over a bounded heap that keeps the count largest elements.
  TopCombineFn<T, ComparatorT> topFn = new TopCombineFn<>(count, compareFn);
  return Combine.globally(topFn);
}
/**
* Returns a {@code PTransform} that takes an input
* {@code PCollection<T>} and returns a {@code PCollection<List<T>>} with a
* single element containing the smallest {@code count} elements of the input
* {@code PCollection<T>}, in increasing order, sorted according to
* their natural order.
*
* <p>If {@code count} {@code >} the number of elements in the
* input {@code PCollection}, then all the elements of the input
* {@code PCollection} will be in the resulting {@code PCollection}'s
* {@code List}, albeit in sorted order.
*
* <p>All the elements of the result {@code List}
* must fit into the memory of a single machine.
*
* <p>Example of use:
* <pre> {@code
* PCollection<Integer> values = ...;
* PCollection<List<Integer>> smallest10Values = values.apply(Top.smallest(10));
* } </pre>
*
* <p>By default, the {@code Coder} of the output {@code PCollection}
* is a {@code ListCoder} of the {@code Coder} of the elements of
* the input {@code PCollection}.
*
* <p>If the input {@code PCollection} is windowed into {@link GlobalWindows},
* an empty {@code List<T>} in the {@link GlobalWindow} will be output if the input
* {@code PCollection} is empty. To use this with inputs with other windowing,
* either {@link Combine.Globally#withoutDefaults withoutDefaults} or
* {@link Combine.Globally#asSingletonView asSingletonView} must be called.
*
* <p>See also {@link #largest}.
*
* <p>See also {@link #of}, which sorts using a user-specified
* {@code Comparator} function.
*
* <p>See also {@link #perKey}, {@link #smallestPerKey}, and
* {@link #largestPerKey}, which take a {@code PCollection} of
* {@code KV}s and return the top values associated with each key.
*/
public static <T extends Comparable<T>> Combine.Globally<T, List<T>> smallest(int count) {
  // Under the Reversed comparator the "largest" elements kept by
  // TopCombineFn are the naturally smallest values.
  return Combine.globally(new TopCombineFn<T, Reversed<T>>(count, new Reversed<T>()));
}
/**
* Returns a {@code PTransform} that takes an input
* {@code PCollection<T>} and returns a {@code PCollection<List<T>>} with a
* single element containing the largest {@code count} elements of the input
* {@code PCollection<T>}, in decreasing order, sorted according to
* their natural order.
*
* <p>If {@code count} {@code >} the number of elements in the
* input {@code PCollection}, then all the elements of the input
* {@code PCollection} will be in the resulting {@code PCollection}'s
* {@code List}, albeit in sorted order.
*
* <p>All the elements of the result's {@code List}
* must fit into the memory of a single machine.
*
* <p>Example of use:
* <pre> {@code
* PCollection<Integer> values = ...;
* PCollection<List<Integer>> largest10Values = values.apply(Top.largest(10));
* } </pre>
*
* <p>By default, the {@code Coder} of the output {@code PCollection}
* is a {@code ListCoder} of the {@code Coder} of the elements of
* the input {@code PCollection}.
*
* <p>If the input {@code PCollection} is windowed into {@link GlobalWindows},
* an empty {@code List<T>} in the {@link GlobalWindow} will be output if the input
* {@code PCollection} is empty. To use this with inputs with other windowing,
* either {@link Combine.Globally#withoutDefaults withoutDefaults} or
* {@link Combine.Globally#asSingletonView asSingletonView} must be called.
*
* <p>See also {@link #smallest}.
*
* <p>See also {@link #of}, which sorts using a user-specified
* {@code Comparator} function.
*
* <p>See also {@link #perKey}, {@link #smallestPerKey}, and
* {@link #largestPerKey}, which take a {@code PCollection} of
* {@code KV}s and return the top values associated with each key.
*/
public static <T extends Comparable<T>> Combine.Globally<T, List<T>> largest(int count) {
  // Natural ordering: TopCombineFn keeps the naturally largest values.
  return Combine.globally(new TopCombineFn<T, Natural<T>>(count, new Natural<T>()));
}
/**
* Returns a {@code PTransform} that takes an input
* {@code PCollection<KV<K, V>>} and returns a
* {@code PCollection<KV<K, List<V>>>} that contains an output
* element mapping each distinct key in the input
* {@code PCollection} to the largest {@code count} values
* associated with that key in the input
* {@code PCollection<KV<K, V>>}, in decreasing order, sorted using
* the given {@code Comparator<V>}. The
* {@code Comparator<V>} must also be {@code Serializable}.
*
* <p>If there are fewer than {@code count} values associated with
* a particular key, then all those values will be in the result
* mapping for that key, albeit in sorted order.
*
* <p>All the values associated with a single key must fit into the
* memory of a single machine, but there can be many more
* {@code KV}s in the resulting {@code PCollection} than can fit
* into the memory of a single machine.
*
* <p>Example of use:
* <pre> {@code
* PCollection<KV<School, Student>> studentsBySchool = ...;
* PCollection<KV<School, List<Student>>> top10StudentsBySchool =
* studentsBySchool.apply(
* Top.perKey(10, new CompareStudentsByAvgGrade()));
* } </pre>
*
* <p>By default, the {@code Coder} of the keys of the output
* {@code PCollection} is the same as that of the keys of the input
* {@code PCollection}, and the {@code Coder} of the values of the
* output {@code PCollection} is a {@code ListCoder} of the
* {@code Coder} of the values of the input {@code PCollection}.
*
* <p>See also {@link #smallestPerKey} and {@link #largestPerKey}, which
* sort {@code Comparable<V>} values using their natural
* ordering.
*
* <p>See also {@link #of}, {@link #smallest}, and {@link #largest}, which
* take a {@code PCollection} and return the top elements.
*/
public static <K, V, ComparatorT extends Comparator<V> & Serializable>
    PTransform<PCollection<KV<K, V>>, PCollection<KV<K, List<V>>>>
    perKey(int count, ComparatorT compareFn) {
  // Per-key combine over a bounded heap that keeps the count largest values.
  TopCombineFn<V, ComparatorT> topFn = new TopCombineFn<>(count, compareFn);
  return Combine.perKey(topFn);
}
/**
* Returns a {@code PTransform} that takes an input
* {@code PCollection<KV<K, V>>} and returns a
* {@code PCollection<KV<K, List<V>>>} that contains an output
* element mapping each distinct key in the input
* {@code PCollection} to the smallest {@code count} values
* associated with that key in the input
* {@code PCollection<KV<K, V>>}, in increasing order, sorted
* according to their natural order.
*
* <p>If there are fewer than {@code count} values associated with
* a particular key, then all those values will be in the result
* mapping for that key, albeit in sorted order.
*
* <p>All the values associated with a single key must fit into the
* memory of a single machine, but there can be many more
* {@code KV}s in the resulting {@code PCollection} than can fit
* into the memory of a single machine.
*
* <p>Example of use:
* <pre> {@code
* PCollection<KV<String, Integer>> keyedValues = ...;
* PCollection<KV<String, List<Integer>>> smallest10ValuesPerKey =
* keyedValues.apply(Top.smallestPerKey(10));
* } </pre>
*
* <p>By default, the {@code Coder} of the keys of the output
* {@code PCollection} is the same as that of the keys of the input
* {@code PCollection}, and the {@code Coder} of the values of the
* output {@code PCollection} is a {@code ListCoder} of the
* {@code Coder} of the values of the input {@code PCollection}.
*
* <p>See also {@link #largestPerKey}.
*
* <p>See also {@link #perKey}, which sorts values using a user-specified
* {@code Comparator} function.
*
* <p>See also {@link #of}, {@link #smallest}, and {@link #largest}, which
* take a {@code PCollection} and return the top elements.
*/
public static <K, V extends Comparable<V>>
    PTransform<PCollection<KV<K, V>>, PCollection<KV<K, List<V>>>>
    smallestPerKey(int count) {
  // Reversed natural ordering keeps the naturally smallest values per key.
  return Combine.perKey(new TopCombineFn<V, Reversed<V>>(count, new Reversed<V>()));
}
/**
* Returns a {@code PTransform} that takes an input
* {@code PCollection<KV<K, V>>} and returns a
* {@code PCollection<KV<K, List<V>>>} that contains an output
* element mapping each distinct key in the input
* {@code PCollection} to the largest {@code count} values
* associated with that key in the input
* {@code PCollection<KV<K, V>>}, in decreasing order, sorted
* according to their natural order.
*
* <p>If there are fewer than {@code count} values associated with
* a particular key, then all those values will be in the result
* mapping for that key, albeit in sorted order.
*
* <p>All the values associated with a single key must fit into the
* memory of a single machine, but there can be many more
* {@code KV}s in the resulting {@code PCollection} than can fit
* into the memory of a single machine.
*
* <p>Example of use:
* <pre> {@code
* PCollection<KV<String, Integer>> keyedValues = ...;
* PCollection<KV<String, List<Integer>>> largest10ValuesPerKey =
* keyedValues.apply(Top.largestPerKey(10));
* } </pre>
*
* <p>By default, the {@code Coder} of the keys of the output
* {@code PCollection} is the same as that of the keys of the input
* {@code PCollection}, and the {@code Coder} of the values of the
* output {@code PCollection} is a {@code ListCoder} of the
* {@code Coder} of the values of the input {@code PCollection}.
*
* <p>See also {@link #smallestPerKey}.
*
* <p>See also {@link #perKey}, which sorts values using a user-specified
* {@code Comparator} function.
*
* <p>See also {@link #of}, {@link #smallest}, and {@link #largest}, which
* take a {@code PCollection} and return the top elements.
*/
public static <K, V extends Comparable<V>>
    PerKey<K, V, List<V>>
    largestPerKey(int count) {
  // Natural ordering keeps the naturally largest values per key.
  return Combine.perKey(new TopCombineFn<V, Natural<V>>(count, new Natural<V>()));
}
/**
* @deprecated use {@link Natural} instead
*/
@Deprecated
public static class Largest<T extends Comparable<? super T>>
    implements Comparator<T>, Serializable {
  @Override
  public int compare(T left, T right) {
    // Ascending natural order; behaviorally identical to Natural, retained
    // for backward compatibility.
    return left.compareTo(right);
  }
}
/**
* A {@code Serializable} {@code Comparator} that uses the compared elements' natural
* ordering.
*/
public static class Natural<T extends Comparable<? super T>>
    implements Comparator<T>, Serializable {
  @Override
  public int compare(T left, T right) {
    // Delegate directly to the elements' natural ordering.
    return left.compareTo(right);
  }
}
/**
* @deprecated use {@link Reversed} instead
*/
@Deprecated
public static class Smallest<T extends Comparable<? super T>>
    implements Comparator<T>, Serializable {
  @Override
  public int compare(T left, T right) {
    // Descending natural order; behaviorally identical to Reversed, retained
    // for backward compatibility.
    return right.compareTo(left);
  }
}
/**
* {@code Serializable} {@code Comparator} that uses the reverse of the compared elements'
* natural ordering.
*/
public static class Reversed<T extends Comparable<? super T>>
    implements Comparator<T>, Serializable {
  @Override
  public int compare(T left, T right) {
    // Invert the natural ordering by comparing in the opposite direction.
    return right.compareTo(left);
  }
}
////////////////////////////////////////////////////////////////////////////
/**
* {@code CombineFn} for {@code Top} transforms that combines a
* bunch of {@code T}s into a single {@code count}-long
* {@code List<T>}, using {@code compareFn} to choose the largest
* {@code T}s.
*
* @param <T> type of element being compared
*/
public static class TopCombineFn<T, ComparatorT extends Comparator<T> & Serializable>
    extends AccumulatingCombineFn<T, BoundedHeap<T, ComparatorT>, List<T>>
    implements NameOverride {

  /** Number of top elements to retain; always non-negative. */
  private final int count;

  /** Serializable ordering used to rank elements; the largest are kept. */
  private final ComparatorT compareFn;

  public TopCombineFn(int count, ComparatorT compareFn) {
    checkArgument(count >= 0, "count must be >= 0 (not %s)", count);
    this.compareFn = compareFn;
    this.count = count;
  }

  @Override
  public String getNameOverride() {
    return String.format("Top(%s)", NameUtils.approximateSimpleName(compareFn));
  }

  @Override
  public BoundedHeap<T, ComparatorT> createAccumulator() {
    // Start from an empty, mutable list; the heap builds its queue lazily.
    return new BoundedHeap<>(count, compareFn, new ArrayList<T>());
  }

  @Override
  public Coder<BoundedHeap<T, ComparatorT>> getAccumulatorCoder(
      CoderRegistry registry, Coder<T> inputCoder) {
    return new BoundedHeapCoder<>(count, compareFn, inputCoder);
  }

  @Override
  public void populateDisplayData(DisplayData.Builder builder) {
    super.populateDisplayData(builder);
    builder.add(DisplayData.item("count", count).withLabel("Top Count"));
    builder.add(DisplayData.item("comparer", compareFn.getClass()).withLabel("Record Comparer"));
  }

  @Override
  public String getIncompatibleGlobalWindowErrorMessage() {
    return "Default values are not supported in Top.[of, smallest, largest]() if the output "
        + "PCollection is not windowed by GlobalWindows. Instead, use "
        + "Top.[of, smallest, largest]().withoutDefaults() to output an empty PCollection if the"
        + " input PCollection is empty, or Top.[of, smallest, largest]().asSingletonView() to "
        + "get a PCollection containing the empty list if the input PCollection is empty.";
  }
}
/**
* A heap that stores only a finite number of top elements according to its provided
* {@code Comparator}. Implemented as an {@link Accumulator} to facilitate implementation of
* {@link Top}.
*
* <p>This class is <i>not</i> safe for multithreaded use, except read-only.
*/
static class BoundedHeap<T, ComparatorT extends Comparator<T> & Serializable>
implements Accumulator<T, BoundedHeap<T, ComparatorT>, List<T>> {
/**
* A queue with smallest at the head, for quick adds.
*
* <p>Only one of asList and asQueue may be non-null; the accumulator lazily
* switches between the two representations as needed.
*/
@Nullable private PriorityQueue<T> asQueue;
/**
* A list with the largest element first, the form of extractOutput().
*
* <p>Only one of asList and asQueue may be non-null.
*/
@Nullable private List<T> asList;
/** The user-provided Comparator. */
private final ComparatorT compareFn;
/** The maximum size of the heap. */
private final int maximumSize;
/**
* Creates a new heap with the provided size, comparator, and initial elements.
* The heap takes ownership of {@code asList}; callers must not mutate it afterwards.
*/
private BoundedHeap(int maximumSize, ComparatorT compareFn, List<T> asList) {
this.maximumSize = maximumSize;
this.asList = asList;
this.compareFn = compareFn;
}
@Override
public void addInput(T value) {
maybeAddInput(value);
}
/**
* Adds {@code value} to this heap if it is larger than any of the current elements.
* Returns {@code true} if {@code value} was added.
*/
private boolean maybeAddInput(T value) {
if (maximumSize == 0) {
// Don't add anything; a zero-size heap accepts no elements.
return false;
}
// If asQueue == null, then this is the first add after the latest call to the
// constructor or asList(); rebuild the queue from the list representation.
if (asQueue == null) {
asQueue = new PriorityQueue<>(maximumSize, compareFn);
for (T item : asList) {
asQueue.add(item);
}
asList = null;
}
if (asQueue.size() < maximumSize) {
// Below capacity: always accept the value.
asQueue.add(value);
return true;
} else if (compareFn.compare(value, asQueue.peek()) > 0) {
// At capacity: the queue head is the smallest retained element; evict it
// when the new value ranks higher.
asQueue.poll();
asQueue.add(value);
return true;
} else {
return false;
}
}
@Override
public void mergeAccumulator(BoundedHeap<T, ComparatorT> accumulator) {
// accumulator.asList() is sorted largest-first, so elements arrive in
// decreasing order (note: this also flips the other accumulator's internal
// representation from queue to list).
for (T value : accumulator.asList()) {
if (!maybeAddInput(value)) {
// If this element of accumulator does not make the top N, neither
// will the rest, which are all smaller.
break;
}
}
}
@Override
public List<T> extractOutput() {
return asList();
}
/**
* Returns the contents of this Heap as a List sorted largest-to-smallest.
* Also switches the internal representation from queue to list.
*/
private List<T> asList() {
if (asList == null) {
List<T> smallestFirstList = Lists.newArrayListWithCapacity(asQueue.size());
while (!asQueue.isEmpty()) {
smallestFirstList.add(asQueue.poll());
}
// Draining the queue yields smallest-first order; keep a reversed view for
// the required largest-first order.
asList = Lists.reverse(smallestFirstList);
asQueue = null;
}
return asList;
}
}
/**
 * A {@link Coder} for {@link BoundedHeap}, using Java serialization via {@link CustomCoder}.
 * A heap is serialized as the largest-first list of its contents, encoded with
 * a {@link ListCoder} over the element coder.
 */
private static class BoundedHeapCoder<T, ComparatorT extends Comparator<T> & Serializable>
    extends CustomCoder<BoundedHeap<T, ComparatorT>> {
  private final Coder<List<T>> listCoder;
  private final ComparatorT compareFn;
  private final int maximumSize;

  public BoundedHeapCoder(int maximumSize, ComparatorT compareFn, Coder<T> elementCoder) {
    listCoder = ListCoder.of(elementCoder);
    this.compareFn = compareFn;
    this.maximumSize = maximumSize;
  }

  @Override
  public void encode(BoundedHeap<T, ComparatorT> value, OutputStream outStream)
      throws CoderException, IOException {
    // asList() is the canonical wire form; it converts the heap if necessary.
    List<T> contents = value.asList();
    listCoder.encode(contents, outStream);
  }

  @Override
  public BoundedHeap<T, ComparatorT> decode(InputStream inStream)
      throws CoderException, IOException {
    List<T> contents = listCoder.decode(inStream);
    return new BoundedHeap<>(maximumSize, compareFn, contents);
  }

  @Override
  public void verifyDeterministic() throws NonDeterministicException {
    verifyDeterministic(this, "HeapCoder requires a deterministic list coder", listCoder);
  }

  @Override
  public boolean isRegisterByteSizeObserverCheap(BoundedHeap<T, ComparatorT> value) {
    return listCoder.isRegisterByteSizeObserverCheap(value.asList());
  }

  @Override
  public void registerByteSizeObserver(
      BoundedHeap<T, ComparatorT> value, ElementByteSizeObserver observer) throws Exception {
    listCoder.registerByteSizeObserver(value.asList(), observer);
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof BoundedHeapCoder)) {
      return false;
    }
    BoundedHeapCoder<?, ?> that = (BoundedHeapCoder<?, ?>) other;
    return this.maximumSize == that.maximumSize
        && Objects.equals(this.compareFn, that.compareFn)
        && Objects.equals(this.listCoder, that.listCoder);
  }

  @Override
  public int hashCode() {
    return Objects.hash(compareFn, listCoder, maximumSize);
  }
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.pgpencryptstream;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.pgpencryptstream.PGPEncryptStreamMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
 * Step dialog for the "PGP encrypt stream" transformation step. Lets the user
 * configure the GPG executable location, the key name (either a literal value
 * or taken from an incoming field), the stream field to encrypt, and the
 * result field name, then writes the settings back into the
 * {@link PGPEncryptStreamMeta} on OK.
 */
public class PGPEncryptStreamDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = PGPEncryptStreamMeta.class; // for i18n purposes, needed by Translator2!!

  // Guards get() so the previous step's field list is fetched only once per dialog session.
  private boolean gotPreviousFields = false;

  // GPG executable location widgets.
  private Label wlGPGLocation;
  private TextVar wGPGLocation;
  private FormData fdlGPGLocation, fdGPGLocation;

  // Literal key-name widgets (enabled when the key name is NOT read from a field).
  private Label wlKeyName;
  private TextVar wKeyName;
  private FormData fdlKeyName, fdKeyName;

  // Combo selecting which incoming field holds the data to encrypt.
  private Label wlStreamFieldName;
  private CCombo wStreamFieldName;
  private FormData fdlStreamFieldName, fdStreamFieldName;

  // Result field name widgets.
  private TextVar wResult;
  private FormData fdResult, fdlResult;
  private Label wlResult;

  // Browse button for locating the GPG executable.
  private Button wbbGpgExe;
  private FormData fdbbGpgExe;

  private PGPEncryptStreamMeta input;

  // "Key name from field" checkbox and the field combo it enables.
  private Button wKeyNameFromField;
  private FormData fdKeyNameFromField, fdlKeyNameFromField;
  private Label wlKeyNameFromField;
  private Label wlKeyNameFieldName;
  private CCombo wKeyNameFieldName;
  private FormData fdlKeyNameFieldName, fdKeyNameFieldName;

  // Group box holding all GPG-related widgets.
  private Group wGPGGroup;
  private FormData fdGPGGroup;

  private static final String[] FILETYPES = new String[] { BaseMessages.getString(
    PKG, "PGPEncryptStreamDialog.Filetype.All" ) };

  /**
   * @param parent the parent shell
   * @param in the step metadata, expected to be a PGPEncryptStreamMeta
   * @param transMeta the transformation metadata
   * @param sname the step name
   */
  public PGPEncryptStreamDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
    super( parent, (BaseStepMeta) in, transMeta, sname );
    input = (PGPEncryptStreamMeta) in;
  }

  /**
   * Builds the dialog, populates it from the step metadata, and runs the SWT
   * event loop until the shell is disposed.
   *
   * @return the (possibly changed) step name, or null when the user cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();
    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    setShellImage( shell, input );
    // Any edit in any widget marks the step metadata as changed.
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    // Remember the original changed-flag so cancel() can restore it.
    changed = input.hasChanged();
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;
    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.Shell.Title" ) );
    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;
    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.right = new FormAttachment( middle, -margin );
    fdlStepname.top = new FormAttachment( 0, margin );
    wlStepname.setLayoutData( fdlStepname );
    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );
    // ///////////////////////////////
    // START OF GPG Fields GROUP //
    // ///////////////////////////////
    wGPGGroup = new Group( shell, SWT.SHADOW_NONE );
    props.setLook( wGPGGroup );
    wGPGGroup.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.GPGGroup.Label" ) );
    FormLayout GPGGroupgroupLayout = new FormLayout();
    GPGGroupgroupLayout.marginWidth = 10;
    GPGGroupgroupLayout.marginHeight = 10;
    wGPGGroup.setLayout( GPGGroupgroupLayout );
    // GPGLocation fieldname ...
    wlGPGLocation = new Label( wGPGGroup, SWT.RIGHT );
    wlGPGLocation.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.GPGLocationField.Label" ) );
    props.setLook( wlGPGLocation );
    fdlGPGLocation = new FormData();
    fdlGPGLocation.left = new FormAttachment( 0, 0 );
    fdlGPGLocation.right = new FormAttachment( middle, -margin );
    fdlGPGLocation.top = new FormAttachment( wStepname, margin * 2 );
    wlGPGLocation.setLayoutData( fdlGPGLocation );
    // Browse Source files button ...
    wbbGpgExe = new Button( wGPGGroup, SWT.PUSH | SWT.CENTER );
    props.setLook( wbbGpgExe );
    wbbGpgExe.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.BrowseFiles.Label" ) );
    fdbbGpgExe = new FormData();
    fdbbGpgExe.right = new FormAttachment( 100, -margin );
    fdbbGpgExe.top = new FormAttachment( wStepname, margin );
    wbbGpgExe.setLayoutData( fdbbGpgExe );
    // Opens a file chooser pre-seeded with the (variable-substituted) current location.
    wbbGpgExe.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        FileDialog dialog = new FileDialog( shell, SWT.OPEN );
        dialog.setFilterExtensions( new String[] { "*" } );
        if ( wGPGLocation.getText() != null ) {
          dialog.setFileName( transMeta.environmentSubstitute( wGPGLocation.getText() ) );
        }
        dialog.setFilterNames( FILETYPES );
        if ( dialog.open() != null ) {
          wGPGLocation.setText( dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName() );
        }
      }
    } );
    wGPGLocation = new TextVar( transMeta, wGPGGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wGPGLocation.setToolTipText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.GPGLocationField.Tooltip" ) );
    props.setLook( wGPGLocation );
    wGPGLocation.addModifyListener( lsMod );
    fdGPGLocation = new FormData();
    fdGPGLocation.left = new FormAttachment( middle, 0 );
    fdGPGLocation.top = new FormAttachment( wStepname, margin * 2 );
    fdGPGLocation.right = new FormAttachment( wbbGpgExe, -margin );
    wGPGLocation.setLayoutData( fdGPGLocation );
    // KeyName fieldname ...
    wlKeyName = new Label( wGPGGroup, SWT.RIGHT );
    wlKeyName.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.KeyNameField.Label" ) );
    props.setLook( wlKeyName );
    fdlKeyName = new FormData();
    fdlKeyName.left = new FormAttachment( 0, 0 );
    fdlKeyName.right = new FormAttachment( middle, -margin );
    fdlKeyName.top = new FormAttachment( wGPGLocation, margin );
    wlKeyName.setLayoutData( fdlKeyName );
    wKeyName = new TextVar( transMeta, wGPGGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wKeyName.setToolTipText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.KeyNameField.Tooltip" ) );
    props.setLook( wKeyName );
    wKeyName.addModifyListener( lsMod );
    fdKeyName = new FormData();
    fdKeyName.left = new FormAttachment( middle, 0 );
    fdKeyName.top = new FormAttachment( wGPGLocation, margin );
    fdKeyName.right = new FormAttachment( 100, 0 );
    wKeyName.setLayoutData( fdKeyName );
    wlKeyNameFromField = new Label( wGPGGroup, SWT.RIGHT );
    wlKeyNameFromField.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.KeyNameFromField.Label" ) );
    props.setLook( wlKeyNameFromField );
    fdlKeyNameFromField = new FormData();
    fdlKeyNameFromField.left = new FormAttachment( 0, 0 );
    fdlKeyNameFromField.top = new FormAttachment( wKeyName, margin );
    fdlKeyNameFromField.right = new FormAttachment( middle, -margin );
    wlKeyNameFromField.setLayoutData( fdlKeyNameFromField );
    wKeyNameFromField = new Button( wGPGGroup, SWT.CHECK );
    props.setLook( wKeyNameFromField );
    wKeyNameFromField.setToolTipText( BaseMessages.getString(
      PKG, "PGPEncryptStreamDialog.KeyNameFromField.Tooltip" ) );
    fdKeyNameFromField = new FormData();
    fdKeyNameFromField.left = new FormAttachment( middle, 0 );
    fdKeyNameFromField.top = new FormAttachment( wKeyName, margin );
    wKeyNameFromField.setLayoutData( fdKeyNameFromField );
    // Toggling the checkbox flips which key-name widgets are enabled.
    wKeyNameFromField.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        keyNameFromField();
      }
    } );
    // Stream field
    wlKeyNameFieldName = new Label( wGPGGroup, SWT.RIGHT );
    wlKeyNameFieldName.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.KeyNameFieldName.Label" ) );
    props.setLook( wlKeyNameFieldName );
    fdlKeyNameFieldName = new FormData();
    fdlKeyNameFieldName.left = new FormAttachment( 0, 0 );
    fdlKeyNameFieldName.right = new FormAttachment( middle, -margin );
    fdlKeyNameFieldName.top = new FormAttachment( wKeyNameFromField, margin );
    wlKeyNameFieldName.setLayoutData( fdlKeyNameFieldName );
    wKeyNameFieldName = new CCombo( wGPGGroup, SWT.BORDER | SWT.READ_ONLY );
    props.setLook( wKeyNameFieldName );
    wKeyNameFieldName.addModifyListener( lsMod );
    fdKeyNameFieldName = new FormData();
    fdKeyNameFieldName.left = new FormAttachment( middle, 0 );
    fdKeyNameFieldName.top = new FormAttachment( wKeyNameFromField, margin );
    fdKeyNameFieldName.right = new FormAttachment( 100, -margin );
    wKeyNameFieldName.setLayoutData( fdKeyNameFieldName );
    // Lazily load the incoming field names (with a busy cursor) on first focus.
    wKeyNameFieldName.addFocusListener( new FocusListener() {
      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
      }
      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
        Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT );
        shell.setCursor( busy );
        get();
        shell.setCursor( null );
        busy.dispose();
      }
    } );
    fdGPGGroup = new FormData();
    fdGPGGroup.left = new FormAttachment( 0, margin );
    fdGPGGroup.top = new FormAttachment( wStepname, margin );
    fdGPGGroup.right = new FormAttachment( 100, -margin );
    wGPGGroup.setLayoutData( fdGPGGroup );
    // ///////////////////////////////
    // END OF GPG GROUP //
    // ///////////////////////////////
    // Stream field
    wlStreamFieldName = new Label( shell, SWT.RIGHT );
    wlStreamFieldName.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.StreamFieldName.Label" ) );
    props.setLook( wlStreamFieldName );
    fdlStreamFieldName = new FormData();
    fdlStreamFieldName.left = new FormAttachment( 0, 0 );
    fdlStreamFieldName.right = new FormAttachment( middle, -margin );
    fdlStreamFieldName.top = new FormAttachment( wGPGGroup, 2 * margin );
    wlStreamFieldName.setLayoutData( fdlStreamFieldName );
    wStreamFieldName = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
    props.setLook( wStreamFieldName );
    wStreamFieldName.addModifyListener( lsMod );
    fdStreamFieldName = new FormData();
    fdStreamFieldName.left = new FormAttachment( middle, 0 );
    fdStreamFieldName.top = new FormAttachment( wGPGGroup, 2 * margin );
    fdStreamFieldName.right = new FormAttachment( 100, -margin );
    wStreamFieldName.setLayoutData( fdStreamFieldName );
    // Same lazy field-name loading as for the key-name combo.
    wStreamFieldName.addFocusListener( new FocusListener() {
      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
      }
      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
        Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT );
        shell.setCursor( busy );
        get();
        shell.setCursor( null );
        busy.dispose();
      }
    } );
    // Result fieldname ...
    wlResult = new Label( shell, SWT.RIGHT );
    wlResult.setText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.ResultField.Label" ) );
    props.setLook( wlResult );
    fdlResult = new FormData();
    fdlResult.left = new FormAttachment( 0, 0 );
    fdlResult.right = new FormAttachment( middle, -margin );
    fdlResult.top = new FormAttachment( wStreamFieldName, margin * 2 );
    wlResult.setLayoutData( fdlResult );
    wResult = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wResult.setToolTipText( BaseMessages.getString( PKG, "PGPEncryptStreamDialog.ResultField.Tooltip" ) );
    props.setLook( wResult );
    wResult.addModifyListener( lsMod );
    fdResult = new FormData();
    fdResult.left = new FormAttachment( middle, 0 );
    fdResult.top = new FormAttachment( wStreamFieldName, margin * 2 );
    fdResult.right = new FormAttachment( 100, 0 );
    wResult.setLayoutData( fdResult );
    // THE BUTTONS
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
    setButtonPositions( new Button[] { wOK, wCancel }, margin, wResult );
    // Add listeners
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };
    wOK.addListener( SWT.Selection, lsOK );
    wCancel.addListener( SWT.Selection, lsCancel );
    // Pressing Enter in the step-name field behaves like OK.
    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };
    wStepname.addSelectionListener( lsDef );
    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );
    // Set the shell size, based upon previous time...
    setSize();
    getData();
    // Apply the initial enabled/disabled state of the key-name widgets.
    keyNameFromField();
    input.setChanged( changed );
    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }

  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    if ( input.getGPGLocation() != null ) {
      wGPGLocation.setText( input.getGPGLocation() );
    }
    if ( input.getStreamField() != null ) {
      wStreamFieldName.setText( input.getStreamField() );
    }
    if ( input.getResultFieldName() != null ) {
      wResult.setText( input.getResultFieldName() );
    }
    if ( input.getKeyName() != null ) {
      wKeyName.setText( input.getKeyName() );
    }
    wKeyNameFromField.setSelection( input.isKeynameInField() );
    if ( input.getKeynameFieldName() != null ) {
      wKeyNameFieldName.setText( input.getKeynameFieldName() );
    }
    wStepname.selectAll();
    wStepname.setFocus();
  }

  /** Discards edits: restores the original changed-flag and closes the dialog. */
  private void cancel() {
    stepname = null;
    input.setChanged( changed );
    dispose();
  }

  /**
   * Validates the step name, copies every dialog value back into the step
   * metadata, and closes the dialog.
   */
  private void ok() {
    if ( Utils.isEmpty( wStepname.getText() ) ) {
      return;
    }
    input.setStreamField( wStreamFieldName.getText() );
    // NOTE(review): "setGPGPLocation" spelling follows the PGPEncryptStreamMeta API.
    input.setGPGPLocation( wGPGLocation.getText() );
    input.setKeyName( wKeyName.getText() );
    input.setResultfieldname( wResult.getText() );
    input.setKeynameInField( wKeyNameFromField.getSelection() );
    input.setKeynameFieldName( wKeyNameFieldName.getText() );
    stepname = wStepname.getText(); // return value
    dispose();
  }

  /**
   * Enables either the literal key-name text box or the key-name field combo,
   * depending on the "key name from field" checkbox.
   */
  private void keyNameFromField() {
    wlKeyName.setEnabled( !wKeyNameFromField.getSelection() );
    wKeyName.setEnabled( !wKeyNameFromField.getSelection() );
    wlKeyNameFieldName.setEnabled( wKeyNameFromField.getSelection() );
    wKeyNameFieldName.setEnabled( wKeyNameFromField.getSelection() );
  }

  /**
   * Populates both field combos with the previous step's field names, keeping
   * the current selections. Runs only once (gotPreviousFields) per dialog.
   */
  private void get() {
    if ( !gotPreviousFields ) {
      try {
        String fieldvalue = wStreamFieldName.getText();
        wStreamFieldName.removeAll();
        String Keyfieldvalue = wKeyNameFieldName.getText();
        wKeyNameFieldName.removeAll();
        RowMetaInterface r = transMeta.getPrevStepFields( stepname );
        if ( r != null ) {
          wStreamFieldName.setItems( r.getFieldNames() );
          wKeyNameFieldName.setItems( r.getFieldNames() );
        }
        // Restore whatever was selected before the combos were repopulated.
        if ( fieldvalue != null ) {
          wStreamFieldName.setText( fieldvalue );
        }
        if ( Keyfieldvalue != null ) {
          wKeyNameFieldName.setText( Keyfieldvalue );
        }
        gotPreviousFields = true;
      } catch ( KettleException ke ) {
        new ErrorDialog( shell,
          BaseMessages.getString( PKG, "PGPEncryptStreamDialog.FailedToGetFields.DialogTitle" ),
          BaseMessages.getString( PKG, "PGPEncryptStreamDialog.FailedToGetFields.DialogMessage" ), ke );
      }
    }
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotsitewise.model;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Contains error details for the requested associate project asset action.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotsitewise-2019-12-02/AssetErrorDetails" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AssetErrorDetails implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * The ID of the asset.
     * </p>
     */
    private String assetId;
    /**
     * <p>
     * The error code.
     * </p>
     */
    private String code;
    /**
     * <p>
     * The error message.
     * </p>
     */
    private String message;

    /**
     * <p>
     * The ID of the asset.
     * </p>
     *
     * @param assetId
     *        The ID of the asset.
     */
    public void setAssetId(String assetId) {
        this.assetId = assetId;
    }

    /**
     * <p>
     * The ID of the asset.
     * </p>
     *
     * @return The ID of the asset.
     */
    public String getAssetId() {
        return this.assetId;
    }

    /**
     * <p>
     * The ID of the asset.
     * </p>
     *
     * @param assetId
     *        The ID of the asset.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AssetErrorDetails withAssetId(String assetId) {
        setAssetId(assetId);
        return this;
    }

    /**
     * <p>
     * The error code.
     * </p>
     *
     * @param code
     *        The error code.
     * @see AssetErrorCode
     */
    public void setCode(String code) {
        this.code = code;
    }

    /**
     * <p>
     * The error code.
     * </p>
     *
     * @return The error code.
     * @see AssetErrorCode
     */
    public String getCode() {
        return this.code;
    }

    /**
     * <p>
     * The error code.
     * </p>
     *
     * @param code
     *        The error code.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see AssetErrorCode
     */
    public AssetErrorDetails withCode(String code) {
        setCode(code);
        return this;
    }

    /**
     * <p>
     * The error code.
     * </p>
     *
     * @param code
     *        The error code.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see AssetErrorCode
     */
    public AssetErrorDetails withCode(AssetErrorCode code) {
        this.code = code.toString();
        return this;
    }

    /**
     * <p>
     * The error message.
     * </p>
     *
     * @param message
     *        The error message.
     */
    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * <p>
     * The error message.
     * </p>
     *
     * @return The error message.
     */
    public String getMessage() {
        return this.message;
    }

    /**
     * <p>
     * The error message.
     * </p>
     *
     * @param message
     *        The error message.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AssetErrorDetails withMessage(String message) {
        setMessage(message);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAssetId() != null)
            sb.append("AssetId: ").append(getAssetId()).append(",");
        if (getCode() != null)
            sb.append("Code: ").append(getCode()).append(",");
        if (getMessage() != null)
            sb.append("Message: ").append(getMessage());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (obj instanceof AssetErrorDetails == false)
            return false;
        AssetErrorDetails other = (AssetErrorDetails) obj;
        // Objects.equals collapses the generated null-xor/equals pairs with
        // identical semantics for each field.
        return Objects.equals(this.getAssetId(), other.getAssetId())
                && Objects.equals(this.getCode(), other.getCode())
                && Objects.equals(this.getMessage(), other.getMessage());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based fold (initial 1, null -> 0)
        // as the generated loop, so hash values are unchanged.
        return Objects.hash(getAssetId(), getCode(), getMessage());
    }

    @Override
    public AssetErrorDetails clone() {
        try {
            return (AssetErrorDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.iotsitewise.model.transform.AssetErrorDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.examples.manners;
import org.drools.core.WorkingMemory;
import org.drools.core.base.ClassFieldAccessorCache;
import org.drools.core.base.ClassFieldAccessorStore;
import org.drools.core.base.ClassObjectType;
import org.drools.core.base.field.BooleanFieldImpl;
import org.drools.core.base.field.LongFieldImpl;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.drools.core.definitions.impl.KnowledgePackageImpl;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.GroupElement;
import org.drools.core.rule.GroupElementFactory;
import org.drools.core.rule.InvalidRuleException;
import org.drools.core.rule.MvelConstraintTestUtil;
import org.drools.core.rule.Pattern;
import org.drools.core.spi.AlphaNodeFieldConstraint;
import org.drools.core.spi.BetaNodeFieldConstraint;
import org.drools.core.spi.Consequence;
import org.drools.core.spi.ConsequenceException;
import org.drools.core.spi.FieldValue;
import org.drools.core.spi.InternalReadAccessor;
import org.drools.core.spi.KnowledgeHelper;
import org.drools.core.spi.Tuple;
import org.junit.Before;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.StringTokenizer;
public abstract class BaseMannersTest {
/** Number of guests at the dinner (default: 16). */
private final int numGuests = 16;
/** Number of seats at the table (default: 16). */
private final int numSeats = 16;
/** Minimum number of hobbies each guest should have (default: 2). */
private final int minHobbies = 2;
/** Maximum number of hobbies each guest should have (default: 3). */
private final int maxHobbies = 3;
protected InternalKnowledgePackage pkg;
private ClassObjectType contextType;
private ClassObjectType guestType;
private ClassObjectType seatingType;
private ClassObjectType lastSeatType;
private ClassObjectType countType;
private ClassObjectType pathType;
private ClassObjectType chosenType;
ClassFieldAccessorStore store;
/**
 * Builds the in-memory "manners" knowledge package: caches a ClassObjectType
 * for each fact class, creates the package with an eagerly-wired field
 * accessor store, and registers the seven rules of the benchmark.
 */
@Before
public void setUp() throws Exception {
  //Class shadow = ShadowProxyFactory.getProxy( Context.class );
  this.contextType = new ClassObjectType( Context.class );
  //shadow = ShadowProxyFactory.getProxy( Guest.class );
  this.guestType = new ClassObjectType( Guest.class );
  //shadow = ShadowProxyFactory.getProxy( Seating.class );
  this.seatingType = new ClassObjectType( Seating.class );
  //shadow = ShadowProxyFactory.getProxy( LastSeat.class );
  this.lastSeatType = new ClassObjectType( LastSeat.class );
  //shadow = ShadowProxyFactory.getProxy( Count.class );
  this.countType = new ClassObjectType( Count.class );
  //shadow = ShadowProxyFactory.getProxy( Path.class );
  this.pathType = new ClassObjectType( Path.class );
  //shadow = ShadowProxyFactory.getProxy( Chosen.class );
  this.chosenType = new ClassObjectType( Chosen.class );
  this.pkg = new KnowledgePackageImpl( "org.drools.examples.manners" );
  // Field accessors are resolved against the test thread's context class loader.
  this.pkg.setClassFieldAccessorCache( new ClassFieldAccessorCache( Thread.currentThread().getContextClassLoader() ) );
  store = this.pkg.getClassFieldAccessorStore();
  // Eager wiring: accessors are generated immediately rather than lazily.
  store.setEagerWire( true );
  // Register the benchmark's rules; each helper assembles one rule.
  this.pkg.addRule( getAssignFirstSeatRule() );
  this.pkg.addRule( getFindSeating() );
  this.pkg.addRule( getMakePath() );
  this.pkg.addRule( getPathDone() );
  this.pkg.addRule( getContinueProcessing() );
  this.pkg.addRule( getAreWeDone() );
  this.pkg.addRule( getAllDone() );
}
/**
* <pre>
* rule assignFirstSeat() {
* Context context;
* Guest guest;
* Count count;
* when {
* context : Context( state == Context.START_UP )
* guest : Guest()
* count : Count()
* } then {
* String guestName = guest.getName();
* drools.assert( new Seating( count.getValue(), 1, true, 1, guestName, 1, guestName) );
* drools.assert( new Path( count.getValue(), 1, guestName ) );
* count.setCount( count.getValue() + 1 );
*
*         System.err.println( "seat 1 " + guest.getName() );
*
* context.setPath( Context.ASSIGN_SEATS );
* }
* }
* </pre>
*
*
* @return
* @throws IntrospectionException
* @throws InvalidRuleException
*/
/**
 * Builds the <code>assignFirstSeat</code> rule. When the Context is in
 * START_UP and a Guest and the Count are present, the consequence seats that
 * guest at seat 1, inserts the corresponding Seating and Path facts,
 * increments the shared Count, and moves the Context to ASSIGN_SEATS.
 *
 * @return the assembled rule
 * @throws IntrospectionException if a field accessor cannot be introspected
 * @throws InvalidRuleException if the rule definition is structurally invalid
 */
private RuleImpl getAssignFirstSeatRule() throws IntrospectionException,
                                         InvalidRuleException {
  final RuleImpl rule = new RuleImpl( "assignFirstSeat" );

  // -----------
  // context : Context( state == Context.START_UP )
  // -----------
  final Pattern contextPattern = new Pattern( 0,
                                              this.contextType,
                                              "context" );
  contextPattern.addConstraint( getLiteralConstraint( contextPattern,
                                                      "state",
                                                      Context.START_UP ) );
  rule.addPattern( contextPattern );
  final Declaration contextDeclaration = rule.getDeclaration( "context" );

  // -----------
  // guest: Guest()
  // -----------
  final Pattern guestPattern = new Pattern( 1,
                                            this.guestType,
                                            "guest" );
  rule.addPattern( guestPattern );
  final Declaration guestDeclaration = rule.getDeclaration( "guest" );

  // ------------
  // count : Count()
  // ------------
  final Pattern countPattern = new Pattern( 2,
                                            this.countType,
                                            "count" );
  rule.addPattern( countPattern );
  final Declaration countDeclaration = rule.getDeclaration( "count" );

  final Consequence consequence = new Consequence() {
    public void evaluate(KnowledgeHelper drools,
                         WorkingMemory workingMemory) throws ConsequenceException {
      try {
        RuleImpl rule = drools.getRule();
        Tuple tuple = drools.getTuple();
        Guest guest = (Guest) drools.get( guestDeclaration );
        Context context = (Context) drools.get( contextDeclaration );
        Count count = (Count) drools.get( countDeclaration );
        String guestName = guest.getName();
        // Seat the first guest: seat 1 on both sides, path done.
        Seating seating = new Seating( count.getValue(),
                                       0,
                                       true,
                                       1,
                                       guestName,
                                       1,
                                       guestName );
        drools.insert( seating );
        Path path = new Path( count.getValue(),
                              1,
                              guestName );
        drools.insert( path );
        // BUGFIX: advance the counter. The previous code was
        // count.setValue( count.getValue() ) — a no-op reassignment that
        // left the counter stuck, contradicting the rule's documented
        // behavior of count.getValue() + 1.
        count.setValue( count.getValue() + 1 );
        drools.update( tuple.get( countDeclaration ),
                       count );
        context.setState( Context.ASSIGN_SEATS );
        // drools.update( tuple.get( contextDeclaration ),
        // context );
        drools.update( tuple.get( contextDeclaration ) );
        // System.err.println( "assign first seat : " + seating + " : " + path );
      } catch ( Exception e ) {
        e.printStackTrace();
        throw new ConsequenceException( e );
      }
    }

    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
    }

    public void writeExternal(ObjectOutput out) throws IOException {
    }

    public String getName() {
      return "default";
    }
  };

  rule.setConsequence( consequence );
  return rule;
}
/**
 * Builds the "findSeating" rule: extends an existing completed seating
 * arrangement by one guest whose sex differs from, and whose hobby matches,
 * the current right-most guest.
 *
 * <pre>
 * rule findSeating() {
 *    Context context;
 *    int seatingId, seatingPid;
 *    String seatingRightGuestName, leftGuestName;
 *    Sex rightGuestSex;
 *    Hobby rightGuestHobby;
 *    Count count;
 *
 *    when {
 *        context : Context( state == Context.ASSIGN_SEATS )
 *        Seating( seatingId:id, seatingPid:pid, pathDone == true,
 *                 seatingRightSeat:rightSeat, seatingRightGuestName:rightGuestName )
 *        Guest( name == seatingRightGuestName, rightGuestSex:sex, rightGuestHobby:hobby )
 *        Guest( leftGuestName:name, sex != rightGuestSex, hobby == rightGuestHobby )
 *
 *        count : Count()
 *
 *        not ( Path( id == seatingId, guestName == leftGuestName ) )
 *        not ( Chosen( id == seatingId, guestName == leftGuestName, hobby == rightGuestHobby ) )
 *    } then {
 *        int newSeat = rightSeat + 1;
 *        drools.assert( new Seating( count.getValue(), rightSeat, rightSeatName, leftGuestName, newSeat, countValue, id, false ) );
 *        drools.assert( new Path( countValue, leftGuestName, newSeat ) );
 *        drools.assert( new Chosen( id, leftGuestName, rightGuestHobby ) );
 *
 *        System.err.println( "seat " + rightSeat + " " + rightSeatName + " " + leftGuestName );
 *
 *        count.setCount( countValue + 1 );
 *        context.setPath( Context.MAKE_PATH );
 *    }
 * }
 * </pre>
 *
 * @return the assembled findSeating rule
 * @throws IntrospectionException if a field accessor cannot be built
 * @throws InvalidRuleException if the rule is structurally invalid
 */
private RuleImpl getFindSeating() throws IntrospectionException,
                                 InvalidRuleException {
    final RuleImpl rule = new RuleImpl( "findSeating" );
    // ---------------
    // context : Context( state == Context.ASSIGN_SEATS )
    // ---------------
    final Pattern contextPattern = new Pattern( 0,
                                                this.contextType,
                                                "context" );
    contextPattern.addConstraint( getLiteralConstraint( contextPattern,
                                                        "state",
                                                        Context.ASSIGN_SEATS ) );
    rule.addPattern( contextPattern );
    final Declaration contextDeclaration = rule.getDeclaration( "context" );
    // -------------------------------
    // Seating( seatingId:id, seatingPid:pid, pathDone == true,
    //          seatingRightSeat:rightSeat, seatingRightGuestName:rightGuestName )
    // -------------------------------
    final Pattern seatingPattern = new Pattern( 1,
                                                this.seatingType );
    setFieldDeclaration( seatingPattern,
                         "id",
                         "seatingId" );
    setFieldDeclaration( seatingPattern,
                         "pid",
                         "seatingPid" );
    seatingPattern.addConstraint( getLiteralConstraint( seatingPattern,
                                                        "pathDone",
                                                        true ) );
    setFieldDeclaration( seatingPattern,
                         "rightSeat",
                         "seatingRightSeat" );
    setFieldDeclaration( seatingPattern,
                         "rightGuestName",
                         "seatingRightGuestName" );
    rule.addPattern( seatingPattern );
    final Declaration seatingIdDeclaration = rule.getDeclaration( "seatingId" );
    final Declaration seatingPidDeclaration = rule.getDeclaration( "seatingPid" );
    final Declaration seatingRightGuestNameDeclaration = rule.getDeclaration( "seatingRightGuestName" );
    final Declaration seatingRightSeatDeclaration = rule.getDeclaration( "seatingRightSeat" );
    // --------------
    // Guest( name == seatingRightGuestName, rightGuestSex:sex, rightGuestHobby:hobby )
    // ---------------
    final Pattern rightGuestPattern = new Pattern( 2,
                                                   this.guestType );
    rightGuestPattern.addConstraint( getBoundVariableConstraint( rightGuestPattern,
                                                                 "name",
                                                                 seatingRightGuestNameDeclaration,
                                                                 "==" ) );
    setFieldDeclaration( rightGuestPattern,
                         "sex",
                         "rightGuestSex" );
    setFieldDeclaration( rightGuestPattern,
                         "hobby",
                         "rightGuestHobby" );
    rule.addPattern( rightGuestPattern );
    final Declaration rightGuestSexDeclaration = rule.getDeclaration( "rightGuestSex" );
    final Declaration rightGuestHobbyDeclaration = rule.getDeclaration( "rightGuestHobby" );
    // ----------------
    // Guest( leftGuestName:name, sex != rightGuestSex, hobby == rightGuestHobby )
    // ----------------
    final Pattern leftGuestPattern = new Pattern( 3,
                                                  this.guestType );
    setFieldDeclaration( leftGuestPattern,
                         "name",
                         "leftGuestName" );
    // FIX: the hobby constraint is attached to leftGuestPattern, so it must be
    // built against leftGuestPattern as well. The original passed
    // rightGuestPattern here, which only worked by accident because both
    // patterns wrap the same object type (guestType).
    leftGuestPattern.addConstraint( getBoundVariableConstraint( leftGuestPattern,
                                                                "hobby",
                                                                rightGuestHobbyDeclaration,
                                                                "==" ) );
    leftGuestPattern.addConstraint( getBoundVariableConstraint( leftGuestPattern,
                                                                "sex",
                                                                rightGuestSexDeclaration,
                                                                "!=" ) );
    rule.addPattern( leftGuestPattern );
    final Declaration leftGuestNameDeclaration = rule.getDeclaration( "leftGuestName" );
    // ---------------
    // count : Count()
    // ---------------
    final Pattern count = new Pattern( 4,
                                       this.countType,
                                       "count" );
    rule.addPattern( count );
    final Declaration countDeclaration = rule.getDeclaration( "count" );
    // --------------
    // not ( Path( id == seatingId, guestName == leftGuestName ) )
    // --------------
    final Pattern notPathPattern = new Pattern( 5,
                                                this.pathType );
    notPathPattern.addConstraint( getBoundVariableConstraint( notPathPattern,
                                                              "id",
                                                              seatingIdDeclaration,
                                                              "==" ) );
    notPathPattern.addConstraint( getBoundVariableConstraint( notPathPattern,
                                                              "guestName",
                                                              leftGuestNameDeclaration,
                                                              "==" ) );
    final GroupElement notPath = GroupElementFactory.newNotInstance();
    notPath.addChild( notPathPattern );
    rule.addPattern( notPath );
    // ------------
    // not ( Chosen( id == seatingId, guestName == leftGuestName, hobby == rightGuestHobby ) )
    // ------------
    final Pattern notChosenPattern = new Pattern( 6,
                                                  this.chosenType );
    notChosenPattern.addConstraint( getBoundVariableConstraint( notChosenPattern,
                                                                "id",
                                                                seatingIdDeclaration,
                                                                "==" ) );
    notChosenPattern.addConstraint( getBoundVariableConstraint( notChosenPattern,
                                                                "guestName",
                                                                leftGuestNameDeclaration,
                                                                "==" ) );
    notChosenPattern.addConstraint( getBoundVariableConstraint( notChosenPattern,
                                                                "hobby",
                                                                rightGuestHobbyDeclaration,
                                                                "==" ) );
    final GroupElement notChosen = GroupElementFactory.newNotInstance();
    notChosen.addChild( notChosenPattern );
    rule.addPattern( notChosen );
    // ------------
    // then: insert the extended Seating plus its Path and Chosen facts,
    // bump the counter, and flip the context back to MAKE_PATH.
    // ------------
    final Consequence consequence = new Consequence() {
        public void evaluate(KnowledgeHelper drools,
                             WorkingMemory workingMemory) throws ConsequenceException {
            try {
                Tuple tuple = drools.getTuple();
                Context context = (Context) drools.get( contextDeclaration );
                Count count = (Count) drools.get( countDeclaration );
                // Primitive fields are read via the declarations' extractors to
                // avoid boxing through drools.get().
                int seatId = seatingIdDeclaration.getExtractor().getIntValue( (InternalWorkingMemory) workingMemory,
                                                                              tuple.get( seatingIdDeclaration ).getObject() );
                int seatingRightSeat = seatingRightSeatDeclaration.getExtractor().getIntValue( (InternalWorkingMemory) workingMemory,
                                                                                               tuple.get( seatingRightSeatDeclaration ).getObject() );
                String leftGuestName = (String) drools.get( leftGuestNameDeclaration );
                String rightGuestName = (String) drools.get( seatingRightGuestNameDeclaration );
                Hobby rightGuestHobby = (Hobby) drools.get( rightGuestHobbyDeclaration );
                Seating seating = new Seating( count.getValue(),
                                               seatId,
                                               false,
                                               seatingRightSeat,
                                               rightGuestName,
                                               seatingRightSeat + 1,
                                               leftGuestName );
                drools.insert( seating );
                Path path = new Path( count.getValue(),
                                      seatingRightSeat + 1,
                                      leftGuestName );
                drools.insert( path );
                Chosen chosen = new Chosen( seatId,
                                            leftGuestName,
                                            rightGuestHobby );
                drools.insert( chosen );
                count.setValue( count.getValue() + 1 );
                drools.update( tuple.get( countDeclaration ),
                               count );
                context.setState( Context.MAKE_PATH );
                drools.update( tuple.get( contextDeclaration ),
                               context );
                System.err.println( "find seating : " + seating + " : " + path + " : " + chosen );
            } catch ( Exception e ) {
                e.printStackTrace();
                throw new ConsequenceException( e );
            }
        }
        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
        }
        public void writeExternal(ObjectOutput out) throws IOException {
        }
        public String getName() {
            return "default";
        }
    };
    rule.setConsequence( consequence );
    return rule;
}
/**
 * Builds the "makePath" rule: copies each Path entry of the parent seating
 * (seatingPid) onto the new, not-yet-done seating (seatingId), skipping
 * entries already present.
 *
 * <pre>
 * rule makePath() {
 *    Context context;
 *    int seatingId, seatingPid, pathSeat;
 *    String pathGuestName;
 *
 *    when {
 *        Context( state == Context.MAKE_PATH )
 *        Seating( seatingId:id, seatingPid:pid, pathDone == false )
 *        Path( id == seatingPid, pathGuestName:guest, pathSeat:seat )
 *        not ( Path( id == seatingId, guestName == pathGuestName ) )
 *    } then {
 *        drools.assert( new Path( seatingId, pathSeat, pathGuestName ) );
 *    }
 * }
 * </pre>
 *
 * @return the assembled makePath rule
 * @throws IntrospectionException if a field accessor cannot be built
 * @throws InvalidRuleException if the rule is structurally invalid
 */
private RuleImpl getMakePath() throws IntrospectionException,
                              InvalidRuleException {
    final RuleImpl rule = new RuleImpl( "makePath" );
    // -----------
    // Context( state == Context.MAKE_PATH )
    // (no binding needed; the consequence never touches the context)
    // -----------
    final Pattern contextPattern = new Pattern( 0,
                                                this.contextType );
    contextPattern.addConstraint( getLiteralConstraint( contextPattern,
                                                        "state",
                                                        Context.MAKE_PATH ) );
    rule.addPattern( contextPattern );
    // ---------------
    // Seating( seatingId:id, seatingPid:pid, pathDone == false )
    // ---------------
    final Pattern seatingPattern = new Pattern( 1,
                                                this.seatingType );
    setFieldDeclaration( seatingPattern,
                         "id",
                         "seatingId" );
    setFieldDeclaration( seatingPattern,
                         "pid",
                         "seatingPid" );
    seatingPattern.addConstraint( getLiteralConstraint( seatingPattern,
                                                        "pathDone",
                                                        false ) );
    rule.addPattern( seatingPattern );
    final Declaration seatingIdDeclaration = rule.getDeclaration( "seatingId" );
    final Declaration seatingPidDeclaration = rule.getDeclaration( "seatingPid" );
    // -----------
    // Path( id == seatingPid, pathGuestName:guestName, pathSeat:seat )
    // -----------
    final Pattern pathPattern = new Pattern( 2,
                                             this.pathType );
    pathPattern.addConstraint( getBoundVariableConstraint( pathPattern,
                                                           "id",
                                                           seatingPidDeclaration,
                                                           "==" ) );
    setFieldDeclaration( pathPattern,
                         "guestName",
                         "pathGuestName" );
    setFieldDeclaration( pathPattern,
                         "seat",
                         "pathSeat" );
    rule.addPattern( pathPattern );
    final Declaration pathGuestNameDeclaration = rule.getDeclaration( "pathGuestName" );
    final Declaration pathSeatDeclaration = rule.getDeclaration( "pathSeat" );
    // -------------
    // not ( Path( id == seatingId, guestName == pathGuestName ) )
    // i.e. the guest has not already been copied onto the new seating
    // -------------
    final Pattern notPathPattern = new Pattern( 3,
                                                this.pathType );
    notPathPattern.addConstraint( getBoundVariableConstraint( notPathPattern,
                                                              "id",
                                                              seatingIdDeclaration,
                                                              "==" ) );
    notPathPattern.addConstraint( getBoundVariableConstraint( notPathPattern,
                                                              "guestName",
                                                              pathGuestNameDeclaration,
                                                              "==" ) );
    final GroupElement not = GroupElementFactory.newNotInstance();
    not.addChild( notPathPattern );
    rule.addPattern( not );
    // ------------
    // then: drools.assert( new Path( seatingId, pathSeat, pathGuestName ) );
    // ------------
    final Consequence consequence = new Consequence() {
        public void evaluate(KnowledgeHelper drools,
                             WorkingMemory workingMemory) throws ConsequenceException {
            try {
                RuleImpl rule = drools.getRule();
                Tuple tuple = drools.getTuple();
                // int fields are read via the extractors to avoid boxing.
                int id = seatingIdDeclaration.getExtractor().getIntValue( (InternalWorkingMemory) workingMemory,
                                                                          tuple.get( seatingIdDeclaration ).getObject() );
                int seat = pathSeatDeclaration.getExtractor().getIntValue( (InternalWorkingMemory) workingMemory,
                                                                           tuple.get( pathSeatDeclaration ).getObject() );
                String guestName = (String) drools.get( pathGuestNameDeclaration );
                Path path = new Path( id,
                                      seat,
                                      guestName );
                drools.insert( path );
                //System.err.println( "make path : " + path );
            } catch ( Exception e ) {
                e.printStackTrace();
                throw new ConsequenceException( e );
            }
        }
        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
        }
        public void writeExternal(ObjectOutput out) throws IOException {
        }
        public String getName() {
            return "default";
        }
    };
    rule.setConsequence( consequence );
    return rule;
}
/**
 * Builds the "pathDone" rule: once all makePath work for a seating has been
 * exhausted, marks the seating's path as complete and moves the context to
 * CHECK_DONE.
 *
 * <pre>
 * rule pathDone() {
 *     Context context; Seating seating;
 *     when {
 *         context : Context( state == Context.MAKE_PATH )
 *         seating : Seating( pathDone == false )
 *     } then {
 *         seating.setPathDone( true );
 *         context.setName( Context.CHECK_DONE );
 *     }
 * }
 * </pre>
 *
 * @return the assembled pathDone rule
 * @throws IntrospectionException if a field accessor cannot be built
 * @throws InvalidRuleException if the rule is structurally invalid
 */
private RuleImpl getPathDone() throws IntrospectionException,
                              InvalidRuleException {
    final RuleImpl rule = new RuleImpl( "pathDone" );
    // -----------
    // context : Context( state == Context.MAKE_PATH )
    // -----------
    final Pattern contextPattern = new Pattern( 0,
                                                this.contextType,
                                                "context" );
    contextPattern.addConstraint( getLiteralConstraint( contextPattern,
                                                        "state",
                                                        Context.MAKE_PATH ) );
    rule.addPattern( contextPattern );
    final Declaration contextDeclaration = rule.getDeclaration( "context" );
    // ---------------
    // seating : Seating( pathDone == false )
    // ---------------
    final Pattern seatingPattern = new Pattern( 1,
                                                this.seatingType,
                                                "seating" );
    seatingPattern.addConstraint( getLiteralConstraint( seatingPattern,
                                                        "pathDone",
                                                        false ) );
    rule.addPattern( seatingPattern );
    final Declaration seatingDeclaration = rule.getDeclaration( "seating" );
    // ------------
    // then: seating.setPathDone( true ); context.setState( Context.CHECK_DONE );
    // ------------
    final Consequence consequence = new Consequence() {
        public void evaluate(KnowledgeHelper drools,
                             WorkingMemory workingMemory) throws ConsequenceException {
            try {
                RuleImpl rule = drools.getRule();
                Tuple tuple = drools.getTuple();
                Context context = (Context) drools.get( contextDeclaration );
                Seating seating = (Seating) drools.get( seatingDeclaration );
                seating.setPathDone( true );
                // if ( seating.getId() == 6 ) {
                //     System.err.println( "pause" );
                // }
                // NOTE(review): single-argument update here vs. the two-argument
                // form below — presumably both signal modification of the fact
                // behind the handle; confirm against the KnowledgeHelper API.
                drools.update( tuple.get( seatingDeclaration ) );
                context.setState( Context.CHECK_DONE );
                drools.update( tuple.get( contextDeclaration ),
                               context );
                //System.err.println( "path done" + seating );
            } catch ( Exception e ) {
                e.printStackTrace();
                throw new ConsequenceException( e );
            }
        }
        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
        }
        public void writeExternal(ObjectOutput out) throws IOException {
        }
        public String getName() {
            return "default";
        }
    };
    rule.setConsequence( consequence );
    return rule;
}
/**
 * Builds the "areWeDone" rule: fires when some seating's right seat has
 * reached the last seat at the table, switching the context to
 * PRINT_RESULTS.
 *
 * <pre>
 * rule areWeDone() {
 *     Context context; LastSeat lastSeat;
 *     when {
 *         context : Context( state == Context.CHECK_DONE )
 *         LastSeat( lastSeat: seat )
 *         Seating( rightSeat == lastSeat )
 *     } then {
 *         context.setState( Context.PRINT_RESULTS );
 *     }
 * }
 * </pre>
 *
 * @return the assembled areWeDone rule
 * @throws IntrospectionException if a field accessor cannot be built
 * @throws InvalidRuleException if the rule is structurally invalid
 */
private RuleImpl getAreWeDone() throws IntrospectionException,
                               InvalidRuleException {
    final RuleImpl rule = new RuleImpl( "areWeDone" );

    // context : Context( state == Context.CHECK_DONE )
    final Pattern ctxPattern = new Pattern( 0, this.contextType, "context" );
    ctxPattern.addConstraint( getLiteralConstraint( ctxPattern, "state", Context.CHECK_DONE ) );
    rule.addPattern( ctxPattern );
    final Declaration ctxDeclaration = rule.getDeclaration( "context" );

    // LastSeat( lastSeat: seat )
    final Pattern lastSeatPattern = new Pattern( 1, this.lastSeatType );
    setFieldDeclaration( lastSeatPattern, "seat", "lastSeat" );
    rule.addPattern( lastSeatPattern );
    final Declaration lastSeatDecl = rule.getDeclaration( "lastSeat" );

    // Seating( rightSeat == lastSeat ) — unbound; only its existence matters.
    final Pattern seatingPattern = new Pattern( 2, this.seatingType, null );
    seatingPattern.addConstraint( getBoundVariableConstraint( seatingPattern,
                                                              "rightSeat",
                                                              lastSeatDecl,
                                                              "==" ) );
    rule.addPattern( seatingPattern );

    // then: context.setState( Context.PRINT_RESULTS );
    final Consequence consequence = new Consequence() {
        public void evaluate(KnowledgeHelper drools,
                             WorkingMemory workingMemory) throws ConsequenceException {
            try {
                final Tuple tuple = drools.getTuple();
                final Context context = (Context) drools.get( ctxDeclaration );
                context.setState( Context.PRINT_RESULTS );
                drools.update( tuple.get( ctxDeclaration ), context );
            } catch ( Exception e ) {
                throw new ConsequenceException( e );
            }
        }

        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
            // no state to restore
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            // no state to persist
        }

        public String getName() {
            return "default";
        }
    };
    rule.setConsequence( consequence );
    return rule;
}
/**
 * Builds the "continue" rule: the low-priority fallback for CHECK_DONE —
 * when the table is not full yet, sends the context back to ASSIGN_SEATS so
 * another guest can be placed.
 *
 * <pre>
 * rule continue() {
 *     Context context;
 *     when {
 *         context : Context( state == Context.CHECK_DONE )
 *     } then {
 *         context.setState( Context.ASSIGN_SEATS );
 *     }
 * }
 * </pre>
 * @return the assembled continue rule
 * @throws IntrospectionException if a field accessor cannot be built
 * @throws InvalidRuleException if the rule is structurally invalid
 */
private RuleImpl getContinueProcessing() throws IntrospectionException,
                                        InvalidRuleException {
    // NOTE(review): the rule name "continueProcessng" is misspelled (missing
    // an 'i'). Left as-is because the rule name is externally observable
    // (agenda/audit); confirm nothing references it before renaming.
    final RuleImpl rule = new RuleImpl( "continueProcessng" );
    // -----------
    // context : Context( state == Context.CHECK_DONE )
    // -----------
    final Pattern contextPattern = new Pattern( 0,
                                                this.contextType,
                                                "context" );
    contextPattern.addConstraint( getLiteralConstraint( contextPattern,
                                                        "state",
                                                        Context.CHECK_DONE ) );
    rule.addPattern( contextPattern );
    final Declaration contextDeclaration = rule.getDeclaration( "context" );
    // ------------
    // then: context.setState( Context.ASSIGN_SEATS );
    // ------------
    final Consequence consequence = new Consequence() {
        public void evaluate(KnowledgeHelper drools,
                             WorkingMemory workingMemory) throws ConsequenceException {
            try {
                RuleImpl rule = drools.getRule();
                Tuple tuple = drools.getTuple();
                Context context = (Context) drools.get( contextDeclaration );
                context.setState( Context.ASSIGN_SEATS );
                drools.update( tuple.get( contextDeclaration ),
                               context );
                //System.err.println( "continue processing" );
            } catch ( Exception e ) {
                e.printStackTrace();
                throw new ConsequenceException( e );
            }
        }
        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
        }
        public void writeExternal(ObjectOutput out) throws IOException {
        }
        public String getName() {
            return "default";
        }
    };
    rule.setConsequence( consequence );
    return rule;
}
/**
 * Builds the "alldone" rule: fires once the context reaches PRINT_RESULTS
 * and simply announces completion on stderr.
 *
 * <pre>
 * rule all_done() {
 *     Context context;
 *     when {
 *         context : Context( state == Context.PRINT_RESULTS )
 *     } then {
 *     }
 * }
 * </pre>
 *
 * @return the assembled alldone rule
 * @throws IntrospectionException if a field accessor cannot be built
 * @throws InvalidRuleException if the rule is structurally invalid
 */
private RuleImpl getAllDone() throws IntrospectionException,
                             InvalidRuleException {
    final RuleImpl rule = new RuleImpl( "alldone" );

    // Context( state == Context.PRINT_RESULTS ) — created without a binding.
    final Pattern ctxPattern = new Pattern( 0, this.contextType );
    ctxPattern.addConstraint( getLiteralConstraint( ctxPattern, "state", Context.PRINT_RESULTS ) );
    rule.addPattern( ctxPattern );
    // NOTE(review): the pattern above carries no "context" binding, so this
    // lookup presumably yields null; it is unused but retained for parity.
    final Declaration ctxDeclaration = rule.getDeclaration( "context" );

    // then: report completion.
    final Consequence consequence = new Consequence() {
        public void evaluate(KnowledgeHelper drools,
                             WorkingMemory workingMemory) throws ConsequenceException {
            try {
                System.err.println( "all done" );
            } catch ( Exception e ) {
                throw new ConsequenceException( e );
            }
        }

        public void readExternal(ObjectInput in) throws IOException,
                                                ClassNotFoundException {
            // no state to restore
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            // no state to persist
        }

        public String getName() {
            return "default";
        }
    };
    rule.setConsequence( consequence );
    return rule;
}
/**
 * Converts the facts in the given <code>InputStream</code> to a list of
 * objects. Each non-blank, non-comment line (comments start with ';') is a
 * parenthesized s-expression-style fact: guest, last_seat, or context.
 * Unrecognized fact types are silently skipped.
 *
 * The stream is always closed, even when parsing fails (the original leaked
 * it on the error path).
 *
 * @param inputStream fact data; closed before this method returns
 * @return list of Guest / LastSeat / Context facts in input order
 * @throws IOException on read failure or a malformed fact line
 */
protected List getInputObjects(final InputStream inputStream) throws IOException {
    final List list = new ArrayList();
    try {
        // NOTE(review): no explicit charset — platform default, as before.
        final BufferedReader br = new BufferedReader( new InputStreamReader( inputStream ) );
        String line;
        while ( (line = br.readLine()) != null ) {
            // Trim once per line (the original trimmed twice).
            final String trimmed = line.trim();
            if ( trimmed.length() == 0 || trimmed.startsWith( ";" ) ) {
                continue;
            }
            // Parens and spaces are both token separators.
            final StringTokenizer st = new StringTokenizer( line,
                                                            "() " );
            final String type = st.nextToken();
            if ( "guest".equals( type ) ) {
                if ( !"name".equals( st.nextToken() ) ) {
                    throw new IOException( "expected 'name' in: " + line );
                }
                final String name = st.nextToken();
                if ( !"sex".equals( st.nextToken() ) ) {
                    throw new IOException( "expected 'sex' in: " + line );
                }
                final String sex = st.nextToken();
                if ( !"hobby".equals( st.nextToken() ) ) {
                    throw new IOException( "expected 'hobby' in: " + line );
                }
                final String hobby = st.nextToken();
                final Guest guest = new Guest( name,
                                               Sex.resolve( sex ),
                                               Hobby.resolve( hobby ) );
                list.add( guest );
            }
            if ( "last_seat".equals( type ) ) {
                if ( !"seat".equals( st.nextToken() ) ) {
                    throw new IOException( "expected 'seat' in: " + line );
                }
                list.add( new LastSeat( Integer.parseInt( st.nextToken() ) ) );
            }
            if ( "context".equals( type ) ) {
                if ( !"state".equals( st.nextToken() ) ) {
                    throw new IOException( "expected 'state' in: " + line );
                }
                list.add( new Context( st.nextToken() ) );
            }
        }
    } finally {
        // Close on every path, not just on success.
        inputStream.close();
    }
    return list;
}
/**
 * Generates a random Manners fact set as an in-memory stream: one
 * "(guest ...)" line per (guest, hobby) pair, followed by the
 * "(last_seat ...)" line and the initial "(context (state start))" line.
 *
 * NOTE(review): Random is unseeded, so each run produces different data.
 * NOTE(review): for odd numGuests the two cap checks are soft — the second
 * check can flip a guest back to 'm' after the male cap is already full, so
 * counts may exceed numGuests / 2 by one; presumably acceptable for a
 * benchmark, confirm if exact balance matters.
 *
 * @return stream of generated fact lines (platform-default encoding)
 */
private InputStream generateData() {
    final String LINE_SEPARATOR = System.getProperty( "line.separator" );
    final StringWriter writer = new StringWriter();
    final int maxMale = this.numGuests / 2;
    final int maxFemale = this.numGuests / 2;
    int maleCount = 0;
    int femaleCount = 0;
    // init hobbies: h1 .. hMax shared by all guests
    final List hobbyList = new ArrayList();
    for ( int i = 1; i <= this.maxHobbies; i++ ) {
        hobbyList.add( "h" + i );
    }
    final Random rnd = new Random();
    for ( int i = 1; i <= this.numGuests; i++ ) {
        // Pick a sex at random, then steer toward whichever cap has room.
        char sex = rnd.nextBoolean() ? 'm' : 'f';
        if ( sex == 'm' && maleCount == maxMale ) {
            sex = 'f';
        }
        if ( sex == 'f' && femaleCount == maxFemale ) {
            sex = 'm';
        }
        if ( sex == 'm' ) {
            maleCount++;
        }
        if ( sex == 'f' ) {
            femaleCount++;
        }
        // Draw a random-sized subset of distinct hobbies for this guest;
        // each hobby yields its own "(guest ...)" fact line.
        final List guestHobbies = new ArrayList( hobbyList );
        final int numHobbies = this.minHobbies + rnd.nextInt( this.maxHobbies - this.minHobbies + 1 );
        for ( int j = 0; j < numHobbies; j++ ) {
            final int hobbyIndex = rnd.nextInt( guestHobbies.size() );
            final String hobby = (String) guestHobbies.get( hobbyIndex );
            writer.write( "(guest (name n" + i + ") (sex " + sex + ") (hobby " + hobby + "))" + LINE_SEPARATOR );
            guestHobbies.remove( hobbyIndex );
        }
    }
    writer.write( "(last_seat (seat " + this.numSeats + "))" + LINE_SEPARATOR );
    writer.write( LINE_SEPARATOR );
    writer.write( "(context (state start))" + LINE_SEPARATOR );
    return new ByteArrayInputStream( writer.getBuffer().toString().getBytes() );
}
/**
 * Returns the position of the named bean property within the property
 * descriptors reported by {@link Introspector} for the given class.
 *
 * @param clazz bean class to introspect
 * @param name property name to look for
 * @return zero-based index of the property, or -1 when absent
 * @throws IntrospectionException if bean introspection fails
 */
public static int getIndex(final Class clazz,
                           final String name) throws IntrospectionException {
    final PropertyDescriptor[] descriptors = Introspector.getBeanInfo( clazz ).getPropertyDescriptors();
    for ( int index = 0; index < descriptors.length; index++ ) {
        final PropertyDescriptor descriptor = descriptors[index];
        if ( descriptor.getName().equals( name ) ) {
            return index;
        }
    }
    return -1;
}
/**
 * Builds an alpha constraint "fieldName == fieldValue" for an int literal
 * against the pattern's underlying class.
 *
 * @param pattern pattern whose object type supplies the field reader
 * @param fieldName field to constrain
 * @param fieldValue literal value the field must equal
 * @return the literal constraint
 * @throws IntrospectionException if the field reader cannot be built
 */
private AlphaNodeFieldConstraint getLiteralConstraint(final Pattern pattern,
                                                      final String fieldName,
                                                      final int fieldValue) throws IntrospectionException {
    final Class clazz = ((ClassObjectType) pattern.getObjectType()).getClassType();
    final InternalReadAccessor extractor = store.getReader( clazz,
                                                            fieldName );
    // Build the literal wrapper once and pass it along; the original created
    // it twice and left the first instance unused.
    final FieldValue field = new LongFieldImpl( fieldValue );
    return new MvelConstraintTestUtil( fieldName + " == " + fieldValue,
                                       field,
                                       extractor );
}
/**
 * Builds an alpha constraint "fieldName == fieldValue" for a boolean literal
 * against the pattern's underlying class.
 *
 * @param pattern pattern whose object type supplies the field reader
 * @param fieldName field to constrain
 * @param fieldValue literal value the field must equal
 * @return the literal constraint
 * @throws IntrospectionException if the field reader cannot be built
 */
private AlphaNodeFieldConstraint getLiteralConstraint(final Pattern pattern,
                                                      final String fieldName,
                                                      final boolean fieldValue) throws IntrospectionException {
    final Class clazz = ((ClassObjectType) pattern.getObjectType()).getClassType();
    final InternalReadAccessor extractor = store.getReader( clazz,
                                                            fieldName );
    // Build the literal wrapper once and pass it along; the original created
    // it twice and left the first instance unused.
    final FieldValue field = new BooleanFieldImpl( fieldValue );
    return new MvelConstraintTestUtil( fieldName + " == " + fieldValue,
                                       field,
                                       extractor );
}
/**
 * Declares a new binding on the pattern: {@code identifier} reads the given
 * field of the pattern's underlying class.
 *
 * @param pattern pattern to attach the declaration to
 * @param fieldName field the binding reads
 * @param identifier name of the new declaration
 * @throws IntrospectionException if the field reader cannot be built
 */
private void setFieldDeclaration(final Pattern pattern,
                                 final String fieldName,
                                 final String identifier) throws IntrospectionException {
    final Class targetClass = ((ClassObjectType) pattern.getObjectType()).getClassType();
    final InternalReadAccessor reader = store.getReader( targetClass,
                                                         fieldName );
    final Declaration declaration = pattern.addDeclaration( identifier );
    declaration.setReadAccessor( reader );
}
/**
 * Builds a beta (join) constraint comparing the pattern's field against a
 * previously bound declaration, e.g. "hobby == rightGuestHobby".
 *
 * @param pattern pattern whose object type supplies the field reader
 * @param fieldName field on the pattern's class
 * @param declaration previously bound variable to compare against
 * @param operator comparison operator (e.g. "==", "!=")
 * @return the variable constraint
 * @throws IntrospectionException if the field reader cannot be built
 */
private BetaNodeFieldConstraint getBoundVariableConstraint(final Pattern pattern,
                                                           final String fieldName,
                                                           final Declaration declaration,
                                                           final String operator) throws IntrospectionException {
    final Class targetClass = ((ClassObjectType) pattern.getObjectType()).getClassType();
    final InternalReadAccessor reader = store.getReader( targetClass,
                                                         fieldName );
    // MVEL source form of the comparison, e.g. "hobby == rightGuestHobby".
    final String expression = fieldName + " " + operator + " " + declaration.getIdentifier();
    return new MvelConstraintTestUtil( expression,
                                       declaration,
                                       reader );
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.gen;
import com.facebook.presto.bytecode.BytecodeBlock;
import com.facebook.presto.bytecode.ClassDefinition;
import com.facebook.presto.bytecode.DynamicClassLoader;
import com.facebook.presto.bytecode.FieldDefinition;
import com.facebook.presto.bytecode.MethodDefinition;
import com.facebook.presto.bytecode.Parameter;
import com.facebook.presto.bytecode.Variable;
import com.facebook.presto.bytecode.control.IfStatement;
import com.facebook.presto.bytecode.expression.BytecodeExpression;
import com.facebook.presto.bytecode.instruction.JumpInstruction;
import com.facebook.presto.bytecode.instruction.LabelNode;
import com.facebook.presto.operator.JoinProbe;
import com.facebook.presto.operator.JoinProbeFactory;
import com.facebook.presto.operator.LookupJoinOperator;
import com.facebook.presto.operator.LookupJoinOperatorFactory;
import com.facebook.presto.operator.LookupJoinOperators.JoinType;
import com.facebook.presto.operator.LookupSource;
import com.facebook.presto.operator.LookupSourceFactory;
import com.facebook.presto.operator.OperatorFactory;
import com.facebook.presto.operator.SimpleJoinProbe;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ExecutionError;
import com.google.common.util.concurrent.UncheckedExecutionException;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import static com.facebook.presto.bytecode.Access.FINAL;
import static com.facebook.presto.bytecode.Access.PRIVATE;
import static com.facebook.presto.bytecode.Access.PUBLIC;
import static com.facebook.presto.bytecode.Access.a;
import static com.facebook.presto.bytecode.CompilerUtils.defineClass;
import static com.facebook.presto.bytecode.CompilerUtils.makeClassName;
import static com.facebook.presto.bytecode.Parameter.arg;
import static com.facebook.presto.bytecode.ParameterizedType.type;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.constantInt;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.constantLong;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.newInstance;
import static com.facebook.presto.sql.gen.SqlTypeBytecodeExpression.constantType;
public class JoinProbeCompiler
{
// Cache of compiled operator-factory factories keyed by probe layout, so each
// distinct (types, probe channels, hash channel, filter-present) combination
// is bytecode-compiled at most once; bounded at 1000 entries.
// NOTE(review): the loader ignores key.getJoinType() even though it is part of
// the cache key — presumably the compiled probe is join-type independent (the
// join type is supplied later to createHashJoinOperatorFactory); confirm.
private final LoadingCache<JoinOperatorCacheKey, HashJoinOperatorFactoryFactory> joinProbeFactories = CacheBuilder.newBuilder().maximumSize(1000).build(
        new CacheLoader<JoinOperatorCacheKey, HashJoinOperatorFactoryFactory>()
        {
            @Override
            public HashJoinOperatorFactoryFactory load(JoinOperatorCacheKey key)
                    throws Exception
            {
                // Compile a probe specialized for this key's channel layout.
                return internalCompileJoinOperatorFactory(key.getTypes(), key.getProbeChannels(), key.getProbeHashChannel(), key.isFilterFunctionPresent());
            }
        });
/**
 * Returns an operator factory for a lookup join with the given probe layout,
 * compiling (and caching) a specialized probe implementation on first use.
 * Compilation failures are rethrown unwrapped from the cache's exception.
 */
public OperatorFactory compileJoinOperatorFactory(int operatorId,
        PlanNodeId planNodeId,
        LookupSourceFactory lookupSourceFactory,
        List<? extends Type> probeTypes,
        List<Integer> probeJoinChannel,
        Optional<Integer> probeHashChannel,
        JoinType joinType,
        boolean filterFunctionPresent)
{
    try {
        JoinOperatorCacheKey cacheKey = new JoinOperatorCacheKey(probeTypes, probeJoinChannel, probeHashChannel, joinType, filterFunctionPresent);
        return joinProbeFactories.get(cacheKey)
                .createHashJoinOperatorFactory(operatorId, planNodeId, lookupSourceFactory, probeTypes, probeJoinChannel, joinType);
    }
    catch (ExecutionException | UncheckedExecutionException | ExecutionError e) {
        // Surface the original compilation failure, not the cache wrapper.
        throw Throwables.propagate(e.getCause());
    }
}
/**
 * Compiles a specialized JoinProbe class for the given probe layout, wraps it
 * in a generated JoinProbeFactory, and isolates the lookup-join operator
 * classes in the same class loader so the JIT can specialize them per probe.
 *
 * @return factory-of-factories bundling the probe factory with the isolated
 *         operator factory class
 */
public HashJoinOperatorFactoryFactory internalCompileJoinOperatorFactory(List<Type> types, List<Integer> probeJoinChannel, Optional<Integer> probeHashChannel, boolean filterFunctionPresent)
{
    // Generate the specialized probe implementation first; its class loader
    // anchors everything else generated below.
    Class<? extends JoinProbe> joinProbeClass = compileJoinProbe(types, probeJoinChannel, probeHashChannel);
    // Generate a factory whose createJoinProbe(lookupSource, page) simply
    // does "new <JoinProbe>(lookupSource, page)".
    ClassDefinition classDefinition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName("JoinProbeFactory"),
            type(Object.class),
            type(JoinProbeFactory.class));
    classDefinition.declareDefaultConstructor(a(PUBLIC));
    Parameter lookupSource = arg("lookupSource", LookupSource.class);
    Parameter page = arg("page", Page.class);
    MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), "createJoinProbe", type(JoinProbe.class), lookupSource, page);
    method.getBody()
            .newObject(joinProbeClass)
            .dup()
            .append(lookupSource)
            .append(page)
            .invokeConstructor(joinProbeClass, LookupSource.class, Page.class)
            .retObject();
    DynamicClassLoader classLoader = new DynamicClassLoader(joinProbeClass.getClassLoader());
    JoinProbeFactory joinProbeFactory;
    if (probeJoinChannel.isEmpty()) {
        // see comment in PagesIndex#createLookupSource
        // NOTE(review): on this path the generated classDefinition above is
        // never defined/used — wasted work, though harmless.
        joinProbeFactory = new SimpleJoinProbe.SimpleJoinProbeFactory(types, probeJoinChannel, probeHashChannel);
    }
    else {
        Class<? extends JoinProbeFactory> joinProbeFactoryClass = defineClass(classDefinition, JoinProbeFactory.class, classLoader);
        try {
            joinProbeFactory = joinProbeFactoryClass.newInstance();
        }
        catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }
    // Clone the operator classes into the probe's class loader so each probe
    // specialization gets its own JIT profile.
    Class<? extends OperatorFactory> operatorFactoryClass = IsolatedClass.isolateClass(
            classLoader,
            OperatorFactory.class,
            LookupJoinOperatorFactory.class,
            LookupJoinOperator.class);
    return new HashJoinOperatorFactoryFactory(joinProbeFactory, operatorFactoryClass);
}
@VisibleForTesting
public JoinProbeFactory internalCompileJoinProbe(List<Type> types, List<Integer> probeChannels, Optional<Integer> probeHashChannel)
{
    // Test hook: compile the specialized probe class and expose it through a
    // reflection-based factory instead of the generated one.
    Class<? extends JoinProbe> probeClass = compileJoinProbe(types, probeChannels, probeHashChannel);
    return new ReflectionJoinProbeFactory(probeClass);
}
/**
 * Generates the bytecode for a JoinProbe implementation specialized to the given
 * probe row layout, and returns the loaded class.
 *
 * @param types the types of all probe-side channels (one block field is generated per type)
 * @param probeChannels indexes of the channels used as the join key
 * @param probeHashChannel optional index of a precomputed-hash channel
 */
private Class<? extends JoinProbe> compileJoinProbe(List<Type> types, List<Integer> probeChannels, Optional<Integer> probeHashChannel)
{
    // Binder that makes constants (the Type instances) reachable from the
    // generated bytecode via bootstrap call sites.
    CallSiteBinder callSiteBinder = new CallSiteBinder();
    ClassDefinition classDefinition = new ClassDefinition(
            a(PUBLIC, FINAL),
            makeClassName("JoinProbe"),
            type(Object.class),
            type(JoinProbe.class));
    // declare fields
    FieldDefinition lookupSourceField = classDefinition.declareField(a(PRIVATE, FINAL), "lookupSource", LookupSource.class);
    FieldDefinition positionCountField = classDefinition.declareField(a(PRIVATE, FINAL), "positionCount", int.class);
    // One Block field per probe channel ("block_0", "block_1", ...).
    List<FieldDefinition> blockFields = new ArrayList<>();
    for (int i = 0; i < types.size(); i++) {
        FieldDefinition channelField = classDefinition.declareField(a(PRIVATE, FINAL), "block_" + i, Block.class);
        blockFields.add(channelField);
    }
    // One Block field per join-key channel ("probeBlock_0", ...); these alias a
    // subset of the block fields above (wired up in the generated constructor).
    List<FieldDefinition> probeBlockFields = new ArrayList<>();
    for (int i = 0; i < probeChannels.size(); i++) {
        FieldDefinition channelField = classDefinition.declareField(a(PRIVATE, FINAL), "probeBlock_" + i, Block.class);
        probeBlockFields.add(channelField);
    }
    FieldDefinition probeBlocksArrayField = classDefinition.declareField(a(PRIVATE, FINAL), "probeBlocks", Block[].class);
    FieldDefinition probePageField = classDefinition.declareField(a(PRIVATE, FINAL), "probePage", Page.class);
    FieldDefinition pageField = classDefinition.declareField(a(PRIVATE, FINAL), "page", Page.class);
    // Cursor over the probe page; mutable, advanced by advanceNextPosition().
    FieldDefinition positionField = classDefinition.declareField(a(PRIVATE), "position", int.class);
    FieldDefinition probeHashBlockField = classDefinition.declareField(a(PRIVATE, FINAL), "probeHashBlock", Block.class);
    // Generate the constructor and each JoinProbe interface method.
    generateConstructor(classDefinition, probeChannels, probeHashChannel, lookupSourceField, blockFields, probeBlockFields, probeBlocksArrayField, probePageField, pageField, probeHashBlockField, positionField, positionCountField);
    generateGetChannelCountMethod(classDefinition, blockFields.size());
    generateAppendToMethod(classDefinition, callSiteBinder, types, blockFields, positionField);
    generateAdvanceNextPosition(classDefinition, positionField, positionCountField);
    generateGetCurrentJoinPosition(classDefinition, callSiteBinder, lookupSourceField, probePageField, pageField, probeHashChannel, probeHashBlockField, positionField);
    generateCurrentRowContainsNull(classDefinition, probeBlockFields, positionField);
    generateGetPosition(classDefinition, positionField);
    generateGetPage(classDefinition, pageField);
    return defineClass(classDefinition, JoinProbe.class, callSiteBinder.getBindings(), getClass().getClassLoader());
}
/**
 * Generates the constructor of the specialized JoinProbe class:
 * {@code JoinProbe(LookupSource lookupSource, Page page)}.
 * The constructor caches the page's blocks in fields, aliases the join-key
 * blocks, builds the probe-only Page, and initializes the position cursor to -1.
 */
private static void generateConstructor(ClassDefinition classDefinition,
        List<Integer> probeChannels,
        Optional<Integer> probeHashChannel,
        FieldDefinition lookupSourceField,
        List<FieldDefinition> blockFields,
        List<FieldDefinition> probeChannelFields,
        FieldDefinition probeBlocksArrayField,
        FieldDefinition probePageField,
        FieldDefinition pageField,
        FieldDefinition probeHashBlockField,
        FieldDefinition positionField,
        FieldDefinition positionCountField)
{
    Parameter lookupSource = arg("lookupSource", LookupSource.class);
    Parameter page = arg("page", Page.class);
    MethodDefinition constructorDefinition = classDefinition.declareConstructor(a(PUBLIC), lookupSource, page);
    Variable thisVariable = constructorDefinition.getThis();
    BytecodeBlock constructor = constructorDefinition
            .getBody()
            .comment("super();")
            .append(thisVariable)
            .invokeConstructor(Object.class);
    constructor.comment("this.lookupSource = lookupSource;")
            .append(thisVariable.setField(lookupSourceField, lookupSource));
    constructor.comment("this.positionCount = page.getPositionCount();")
            .append(thisVariable.setField(positionCountField, page.invoke("getPositionCount", int.class)));
    // this.block_i = page.getBlock(i), for every channel of the page
    constructor.comment("Set block fields");
    for (int index = 0; index < blockFields.size(); index++) {
        constructor.append(thisVariable.setField(
                blockFields.get(index),
                page.invoke("getBlock", Block.class, constantInt(index))));
    }
    // this.probeBlock_i = this.block_<probeChannels.get(i)> — the join-key blocks
    // alias the corresponding full-channel block fields set above.
    constructor.comment("Set probe channel fields");
    for (int index = 0; index < probeChannelFields.size(); index++) {
        constructor.append(thisVariable.setField(
                probeChannelFields.get(index),
                thisVariable.getField(blockFields.get(probeChannels.get(index)))));
    }
    constructor.comment("this.probeBlocks = new Block[<probeChannelCount>];");
    constructor
            .append(thisVariable)
            .push(probeChannelFields.size())
            .newArray(Block.class)
            .putField(probeBlocksArrayField);
    // probeBlocks[index] = this.probeBlock_<index>
    for (int index = 0; index < probeChannelFields.size(); index++) {
        constructor
                .append(thisVariable)
                .getField(probeBlocksArrayField)
                .push(index)
                .append(thisVariable)
                .getField(probeChannelFields.get(index))
                .putObjectArrayElement();
    }
    constructor.comment("this.page = page")
            .append(thisVariable.setField(pageField, page));
    constructor.comment("this.probePage = new Page(probeBlocks)")
            .append(thisVariable.setField(probePageField, newInstance(Page.class, thisVariable.getField(probeBlocksArrayField))));
    // Only emitted when a precomputed hash channel exists; otherwise the
    // probeHashBlock field is left null and never read by the generated code.
    if (probeHashChannel.isPresent()) {
        Integer index = probeHashChannel.get();
        constructor.comment("this.probeHashBlock = blocks[hashChannel.get()]")
                .append(thisVariable.setField(
                        probeHashBlockField,
                        thisVariable.getField(blockFields.get(index))));
    }
    // Cursor starts before the first row; advanceNextPosition() moves it to 0.
    constructor.comment("this.position = -1;")
            .append(thisVariable.setField(positionField, constantInt(-1)));
    constructor.ret();
}
/**
 * Emits {@code public int getChannelCount() { return <channelCount>; }} —
 * the channel count is baked into the bytecode as a constant.
 */
private static void generateGetChannelCountMethod(ClassDefinition classDefinition, int channelCount)
{
    MethodDefinition method = classDefinition.declareMethod(
            a(PUBLIC),
            "getChannelCount",
            type(int.class));
    method.getBody()
            .push(channelCount)
            .retInt();
}
/**
 * Emits {@code public void appendTo(PageBuilder pageBuilder)}: for every channel,
 * appends the value at the current position to the corresponding block builder
 * using that channel's Type (bound as a bytecode constant).
 */
private static void generateAppendToMethod(
        ClassDefinition classDefinition,
        CallSiteBinder callSiteBinder,
        List<Type> types, List<FieldDefinition> blockFields,
        FieldDefinition positionField)
{
    Parameter pageBuilder = arg("pageBuilder", PageBuilder.class);
    MethodDefinition method = classDefinition.declareMethod(
            a(PUBLIC),
            "appendTo",
            type(void.class),
            pageBuilder);
    Variable thisVariable = method.getThis();
    // One appendTo call per channel; the Type instance is reached through the
    // call-site binder since it cannot be expressed as a bytecode literal.
    for (int index = 0; index < blockFields.size(); index++) {
        Type type = types.get(index);
        method.getBody()
                .comment("%s.appendTo(block_%s, position, pageBuilder.getBlockBuilder(%s));", type.getClass(), index, index)
                .append(constantType(callSiteBinder, type).invoke("appendTo", void.class,
                        thisVariable.getField(blockFields.get(index)),
                        thisVariable.getField(positionField),
                        pageBuilder.invoke("getBlockBuilder", BlockBuilder.class, constantInt(index))));
    }
    method.getBody()
            .ret();
}
/**
 * Emits {@code public boolean advanceNextPosition()}:
 * increments the position cursor and returns whether it is still within the page
 * ({@code position < positionCount}).
 */
private static void generateAdvanceNextPosition(ClassDefinition classDefinition, FieldDefinition positionField, FieldDefinition positionCountField)
{
    MethodDefinition method = classDefinition.declareMethod(
            a(PUBLIC),
            "advanceNextPosition",
            type(boolean.class));
    Variable thisVariable = method.getThis();
    method.getBody()
            .comment("this.position = this.position + 1;")
            .append(thisVariable)
            .append(thisVariable)
            .getField(positionField)
            .push(1)
            .intAdd()
            .putField(positionField);
    // Hand-rolled comparison: jump to "lessThan" when position < positionCount,
    // otherwise fall through and push false; both paths converge at "end".
    LabelNode lessThan = new LabelNode("lessThan");
    LabelNode end = new LabelNode("end");
    method.getBody()
            .comment("return position < positionCount;")
            .append(thisVariable)
            .getField(positionField)
            .append(thisVariable)
            .getField(positionCountField)
            .append(JumpInstruction.jumpIfIntLessThan(lessThan))
            .push(false)
            .gotoLabel(end)
            .visitLabel(lessThan)
            .push(true)
            .visitLabel(end)
            .retBoolean();
}
/**
 * Emits {@code public long getCurrentJoinPosition()}:
 * returns -1 when the current probe row contains a null join-key value;
 * otherwise delegates to lookupSource.getJoinPosition, passing the precomputed
 * hash (read from probeHashBlock as a BIGINT) when a hash channel is present.
 */
private static void generateGetCurrentJoinPosition(ClassDefinition classDefinition,
        CallSiteBinder callSiteBinder,
        FieldDefinition lookupSourceField,
        FieldDefinition probePageField,
        FieldDefinition pageField,
        Optional<Integer> probeHashChannel,
        FieldDefinition probeHashBlockField,
        FieldDefinition positionField)
{
    MethodDefinition method = classDefinition.declareMethod(
            a(PUBLIC),
            "getCurrentJoinPosition",
            type(long.class));
    Variable thisVariable = method.getThis();
    // if (currentRowContainsNull()) return -1;
    BytecodeBlock body = method.getBody()
            .append(new IfStatement()
                    .condition(thisVariable.invoke("currentRowContainsNull", boolean.class))
                    .ifTrue(constantLong(-1).ret()));
    BytecodeExpression position = thisVariable.getField(positionField);
    BytecodeExpression hashChannelsPage = thisVariable.getField(probePageField);
    BytecodeExpression allChannelsPage = thisVariable.getField(pageField);
    BytecodeExpression probeHashBlock = thisVariable.getField(probeHashBlockField);
    if (probeHashChannel.isPresent()) {
        // Precomputed-hash variant: pass BIGINT.getLong(probeHashBlock, position)
        // as the rawHash argument.
        body.append(thisVariable.getField(lookupSourceField).invoke("getJoinPosition", long.class,
                position,
                hashChannelsPage,
                allChannelsPage,
                constantType(callSiteBinder, BigintType.BIGINT).invoke("getLong",
                        long.class,
                        probeHashBlock,
                        position)))
                .retLong();
    }
    else {
        body.append(thisVariable.getField(lookupSourceField).invoke("getJoinPosition", long.class, position, hashChannelsPage, allChannelsPage)).retLong();
    }
}
/**
 * Emits {@code private boolean currentRowContainsNull()}:
 * returns true as soon as any probe (join-key) block is null at the current
 * position, otherwise false.
 */
private static void generateCurrentRowContainsNull(ClassDefinition classDefinition, List<FieldDefinition> probeBlockFields, FieldDefinition positionField)
{
    MethodDefinition method = classDefinition.declareMethod(
            a(PRIVATE),
            "currentRowContainsNull",
            type(boolean.class));
    Variable thisVariable = method.getThis();
    // For each probe block: if (probeBlock_i.isNull(position)) return true;
    for (FieldDefinition probeBlockField : probeBlockFields) {
        LabelNode checkNextField = new LabelNode("checkNextField");
        method.getBody()
                .append(thisVariable.getField(probeBlockField).invoke("isNull", boolean.class, thisVariable.getField(positionField)))
                .ifFalseGoto(checkNextField)
                .push(true)
                .retBoolean()
                .visitLabel(checkNextField);
    }
    // Return the final false with retBoolean() for consistency with the
    // boolean returns above (retInt() emits the same IRETURN instruction for
    // booleans on the JVM, so the generated bytecode is unchanged).
    method.getBody()
            .push(false)
            .retBoolean();
}
/**
 * Emits {@code public int getPosition() { return this.position; }}.
 * Dummy implementation for now: the compiled class is only used in cases
 * where the result of this method is ignored.
 */
private static void generateGetPosition(ClassDefinition classDefinition, FieldDefinition positionField)
{
    MethodDefinition method = classDefinition.declareMethod(
            a(PUBLIC),
            "getPosition",
            type(int.class));
    Variable self = method.getThis();
    method.getBody()
            .append(self.getField(positionField))
            .retInt();
}
/**
 * Emits {@code public Page getPage() { return this.page; }}.
 * Dummy implementation for now: the compiled class is only used in cases
 * where the result of this method is ignored.
 */
private static void generateGetPage(ClassDefinition classDefinition, FieldDefinition pageField)
{
    MethodDefinition method = classDefinition.declareMethod(
            a(PUBLIC),
            "getPage",
            type(Page.class));
    Variable self = method.getThis();
    method.getBody()
            .append(self.getField(pageField))
            .ret(Page.class);
}
/**
 * JoinProbeFactory that instantiates a generated JoinProbe class reflectively
 * through its (LookupSource, Page) constructor. Used for testing the compiled
 * probe without the generated factory class.
 */
public static class ReflectionJoinProbeFactory
        implements JoinProbeFactory
{
    private final Constructor<? extends JoinProbe> constructor;

    public ReflectionJoinProbeFactory(Class<? extends JoinProbe> joinProbeClass)
    {
        // Resolve the constructor eagerly so a malformed probe class fails fast.
        Constructor<? extends JoinProbe> probeConstructor;
        try {
            probeConstructor = joinProbeClass.getConstructor(LookupSource.class, Page.class);
        }
        catch (NoSuchMethodException e) {
            throw Throwables.propagate(e);
        }
        this.constructor = probeConstructor;
    }

    @Override
    public JoinProbe createJoinProbe(LookupSource lookupSource, Page page)
    {
        // Reflection exceptions are rethrown unchecked.
        try {
            return constructor.newInstance(lookupSource, page);
        }
        catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }
}
/**
 * Immutable cache key for compiled join operator factories. Two keys are equal
 * iff every field (types, probe channels, hash channel, join type, and filter
 * presence) matches.
 */
private static final class JoinOperatorCacheKey
{
    private final List<Type> types;
    private final List<Integer> probeChannels;
    private final JoinType joinType;
    private final Optional<Integer> probeHashChannel;
    private final boolean filterFunctionPresent;

    private JoinOperatorCacheKey(List<? extends Type> types,
            List<Integer> probeChannels,
            Optional<Integer> probeHashChannel,
            JoinType joinType,
            boolean filterFunctionPresent)
    {
        this.probeHashChannel = probeHashChannel;
        // Defensive copies: the key must stay immutable while cached.
        this.types = ImmutableList.copyOf(types);
        this.probeChannels = ImmutableList.copyOf(probeChannels);
        this.joinType = joinType;
        this.filterFunctionPresent = filterFunctionPresent;
    }

    private List<Type> getTypes()
    {
        return types;
    }

    private List<Integer> getProbeChannels()
    {
        return probeChannels;
    }

    private Optional<Integer> getProbeHashChannel()
    {
        return probeHashChannel;
    }

    private boolean isFilterFunctionPresent()
    {
        return filterFunctionPresent;
    }

    @Override
    public int hashCode()
    {
        // probeHashChannel participates in equals(), so it is included here as
        // well; previously it was omitted, which weakened the hash distribution
        // (keys differing only in the hash channel collided).
        return Objects.hash(types, probeChannels, probeHashChannel, joinType, filterFunctionPresent);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof JoinOperatorCacheKey)) {
            return false;
        }
        JoinOperatorCacheKey other = (JoinOperatorCacheKey) obj;
        return Objects.equals(this.types, other.types) &&
                Objects.equals(this.probeChannels, other.probeChannels) &&
                Objects.equals(this.probeHashChannel, other.probeHashChannel) &&
                Objects.equals(this.joinType, other.joinType) &&
                this.filterFunctionPresent == other.filterFunctionPresent;
    }
}
/**
 * Creates instances of a generated OperatorFactory class reflectively, binding
 * a fixed JoinProbeFactory into every created factory.
 */
private static class HashJoinOperatorFactoryFactory
{
    private final JoinProbeFactory joinProbeFactory;
    private final Constructor<? extends OperatorFactory> constructor;

    private HashJoinOperatorFactoryFactory(JoinProbeFactory joinProbeFactory, Class<? extends OperatorFactory> operatorFactoryClass)
    {
        this.joinProbeFactory = joinProbeFactory;
        // Resolve the expected constructor eagerly; a missing constructor means
        // the generated class does not match the expected shape.
        Constructor<? extends OperatorFactory> factoryConstructor;
        try {
            factoryConstructor = operatorFactoryClass.getConstructor(
                    int.class,
                    PlanNodeId.class,
                    LookupSourceFactory.class,
                    List.class,
                    JoinType.class,
                    JoinProbeFactory.class);
        }
        catch (NoSuchMethodException e) {
            throw Throwables.propagate(e);
        }
        this.constructor = factoryConstructor;
    }

    public OperatorFactory createHashJoinOperatorFactory(
            int operatorId,
            PlanNodeId planNodeId,
            LookupSourceFactory lookupSourceFactory,
            List<? extends Type> probeTypes,
            List<Integer> probeJoinChannel,
            JoinType joinType)
    {
        // Reflection exceptions are rethrown unchecked.
        try {
            return constructor.newInstance(operatorId, planNodeId, lookupSourceFactory, probeTypes, joinType, joinProbeFactory);
        }
        catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }
}
/**
 * Verifies that two boolean flags agree.
 *
 * @param left first flag
 * @param right second flag
 * @throws IllegalStateException if the flags differ
 */
public static void checkState(boolean left, boolean right)
{
    if (left != right) {
        // Include the values in the message to aid debugging; previously this
        // threw with no message at all.
        throw new IllegalStateException("mismatched state: left=" + left + ", right=" + right);
    }
}
}
| |
// Portions copyright 2002, Google, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.kozaxinan.fixoposcreen.iab.utils;
// This code was converted from code at http://iharder.sourceforge.net/base64/
// Lots of extraneous features were removed.
/* The original code said:
* <p>
* I am placing this code in the Public Domain. Do with it as you will.
* This software comes with no guarantees or warranties but with
* plenty of well-wishing instead!
* Please visit
* <a href="http://iharder.net/xmlizable">http://iharder.net/xmlizable</a>
* periodically to check for updates or to contribute improvements.
* </p>
*
* @author Robert Harder
* @author rharder@usa.net
* @version 1.3
*/
/**
* Base64 converter class. This code is not a complete MIME encoder;
* it simply converts binary data to base64 data and back.
* <p/>
* <p>Note {@link CharBase64} is a GWT-compatible implementation of this
* class.
*/
/**
 * Base64 converter class. This code is not a complete MIME encoder;
 * it simply converts binary data to base64 data and back.
 * <p/>
 * <p>Note {@link CharBase64} is a GWT-compatible implementation of this
 * class.
 */
public class Base64 {
  /**
   * Specify encoding (value is {@code true}).
   */
  public final static boolean ENCODE = true;

  /**
   * Specify decoding (value is {@code false}).
   */
  public final static boolean DECODE = false;

  /**
   * The equals sign (=) as a byte.
   */
  private final static byte EQUALS_SIGN = (byte) '=';

  /**
   * The new line character (\n) as a byte.
   */
  private final static byte NEW_LINE = (byte) '\n';

  /**
   * The 64 valid Base64 values (standard alphabet, RFC 4648 table 1).
   */
  private final static byte[] ALPHABET =
      {(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F',
          (byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K',
          (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P',
          (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U',
          (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z',
          (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e',
          (byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j',
          (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o',
          (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't',
          (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y',
          (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3',
          (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8',
          (byte) '9', (byte) '+', (byte) '/'};

  /**
   * The 64 valid web safe Base64 values ('-' and '_' instead of '+' and '/').
   */
  private final static byte[] WEBSAFE_ALPHABET =
      {(byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F',
          (byte) 'G', (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K',
          (byte) 'L', (byte) 'M', (byte) 'N', (byte) 'O', (byte) 'P',
          (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U',
          (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z',
          (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e',
          (byte) 'f', (byte) 'g', (byte) 'h', (byte) 'i', (byte) 'j',
          (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o',
          (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't',
          (byte) 'u', (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y',
          (byte) 'z', (byte) '0', (byte) '1', (byte) '2', (byte) '3',
          (byte) '4', (byte) '5', (byte) '6', (byte) '7', (byte) '8',
          (byte) '9', (byte) '-', (byte) '_'};

  /**
   * Translates a Base64 value to either its 6-bit reconstruction value
   * or a negative number indicating some other meaning.
   * Indexed by the (7-bit) input byte; -9 = invalid, -5 = whitespace,
   * -1 = padding ('='), 0..63 = decoded 6-bit value.
   */
  private final static byte[] DECODABET = {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8
      -5, -5, // Whitespace: Tab and Linefeed
      -9, -9, // Decimal 11 - 12
      -5, // Whitespace: Carriage Return
      -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26
      -9, -9, -9, -9, -9, // Decimal 27 - 31
      -5, // Whitespace: Space
      -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42
      62, // Plus sign at decimal 43
      -9, -9, -9, // Decimal 44 - 46
      63, // Slash at decimal 47
      52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine
      -9, -9, -9, // Decimal 58 - 60
      -1, // Equals sign at decimal 61
      -9, -9, -9, // Decimal 62 - 64
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N'
      14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z'
      -9, -9, -9, -9, -9, -9, // Decimal 91 - 96
      26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm'
      39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z'
      -9, -9, -9, -9, -9 // Decimal 123 - 127
      /* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243
      -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */
  };

  /**
   * The web safe decodabet (same layout as DECODABET, but '-' maps to 62 and
   * '_' maps to 63 instead of '+' and '/').
   */
  private final static byte[] WEBSAFE_DECODABET =
      {-9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8
          -5, -5, // Whitespace: Tab and Linefeed
          -9, -9, // Decimal 11 - 12
          -5, // Whitespace: Carriage Return
          -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26
          -9, -9, -9, -9, -9, // Decimal 27 - 31
          -5, // Whitespace: Space
          -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 44
          62, // Dash '-' sign at decimal 45
          -9, -9, // Decimal 46-47
          52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine
          -9, -9, -9, // Decimal 58 - 60
          -1, // Equals sign at decimal 61
          -9, -9, -9, // Decimal 62 - 64
          0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N'
          14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z'
          -9, -9, -9, -9, // Decimal 91-94
          63, // Underscore '_' at decimal 95
          -9, // Decimal 96
          26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm'
          39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z'
          -9, -9, -9, -9, -9 // Decimal 123 - 127
          /* ,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 128 - 139
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 140 - 152
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 153 - 165
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 166 - 178
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 179 - 191
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 192 - 204
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 205 - 217
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 218 - 230
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9, // Decimal 231 - 243
          -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9 // Decimal 244 - 255 */
      };

  // Indicates white space in encoding
  private final static byte WHITE_SPACE_ENC = -5;

  // Indicates equals sign in encoding
  private final static byte EQUALS_SIGN_ENC = -1;

  /**
   * Defeats instantiation (utility class — all members are static).
   */
  private Base64() {
  }

  /* ******** E N C O D I N G M E T H O D S ******** */

  /**
   * Encodes up to three bytes of the array <var>source</var>
   * and writes the resulting four Base64 bytes to <var>destination</var>.
   * The source and destination arrays can be manipulated
   * anywhere along their length by specifying
   * <var>srcOffset</var> and <var>destOffset</var>.
   * This method does not check to make sure your arrays
   * are large enough to accommodate <var>srcOffset</var> + 3 for
   * the <var>source</var> array or <var>destOffset</var> + 4 for
   * the <var>destination</var> array.
   * The actual number of significant bytes in your array is
   * given by <var>numSigBytes</var>.
   *
   * @param source the array to convert
   * @param srcOffset the index where conversion begins
   * @param numSigBytes the number of significant bytes in your array
   * @param destination the array to hold the conversion
   * @param destOffset the index where output will be put
   * @param alphabet is the encoding alphabet
   * @return the <var>destination</var> array
   * @since 1.3
   */
  private static byte[] encode3to4(byte[] source, int srcOffset,
      int numSigBytes, byte[] destination, int destOffset, byte[] alphabet) {
    //           1         2         3
    // 01234567890123456789012345678901 Bit position
    // --------000000001111111122222222 Array position from threeBytes
    // --------|    ||    ||    ||    | Six bit groups to index alphabet
    //          >>18  >>12  >> 6  >> 0  Right shift necessary
    //                0x3f  0x3f  0x3f  Additional AND

    // Create buffer with zero-padding if there are only one or two
    // significant bytes passed in the array.
    // We have to shift left 24 in order to flush out the 1's that appear
    // when Java treats a value as negative that is cast from a byte to an int.
    int inBuff =
        (numSigBytes > 0 ? ((source[srcOffset] << 24) >>> 8) : 0)
            | (numSigBytes > 1 ? ((source[srcOffset + 1] << 24) >>> 16) : 0)
            | (numSigBytes > 2 ? ((source[srcOffset + 2] << 24) >>> 24) : 0);

    // Missing input bytes become '=' padding characters.
    switch (numSigBytes) {
      case 3:
        destination[destOffset] = alphabet[(inBuff >>> 18)];
        destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
        destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f];
        destination[destOffset + 3] = alphabet[(inBuff) & 0x3f];
        return destination;
      case 2:
        destination[destOffset] = alphabet[(inBuff >>> 18)];
        destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
        destination[destOffset + 2] = alphabet[(inBuff >>> 6) & 0x3f];
        destination[destOffset + 3] = EQUALS_SIGN;
        return destination;
      case 1:
        destination[destOffset] = alphabet[(inBuff >>> 18)];
        destination[destOffset + 1] = alphabet[(inBuff >>> 12) & 0x3f];
        destination[destOffset + 2] = EQUALS_SIGN;
        destination[destOffset + 3] = EQUALS_SIGN;
        return destination;
      default:
        return destination;
    } // end switch
  } // end encode3to4

  /**
   * Encodes a byte array into Base64 notation.
   * Equivalent to calling
   * {@code encodeBytes(source, 0, source.length)}
   *
   * @param source The data to convert
   * @since 1.4
   */
  public static String encode(byte[] source) {
    return encode(source, 0, source.length, ALPHABET, true);
  }

  /**
   * Encodes a byte array into web safe Base64 notation.
   *
   * @param source The data to convert
   * @param doPadding is {@code true} to pad result with '=' chars
   *        if it does not fall on 3 byte boundaries
   */
  public static String encodeWebSafe(byte[] source, boolean doPadding) {
    return encode(source, 0, source.length, WEBSAFE_ALPHABET, doPadding);
  }

  /**
   * Encodes a byte array into Base64 notation.
   *
   * @param source the data to convert
   * @param off offset in array where conversion should begin
   * @param len length of data to convert
   * @param alphabet the encoding alphabet
   * @param doPadding is {@code true} to pad result with '=' chars
   *        if it does not fall on 3 byte boundaries
   * @since 1.4
   */
  public static String encode(byte[] source, int off, int len, byte[] alphabet,
      boolean doPadding) {
    // Integer.MAX_VALUE line length: never insert newlines.
    byte[] outBuff = encode(source, off, len, alphabet, Integer.MAX_VALUE);
    int outLen = outBuff.length;

    // If doPadding is false, set length to truncate '='
    // padding characters
    while (doPadding == false && outLen > 0) {
      if (outBuff[outLen - 1] != '=') {
        break;
      }
      outLen -= 1;
    }

    // NOTE(review): uses the platform default charset; safe here because the
    // output bytes are all ASCII base64 characters.
    return new String(outBuff, 0, outLen);
  }

  /**
   * Encodes a byte array into Base64 notation.
   *
   * @param source the data to convert
   * @param off offset in array where conversion should begin
   * @param len length of data to convert
   * @param alphabet is the encoding alphabet
   * @param maxLineLength maximum length of one line.
   * @return the BASE64-encoded byte array
   */
  public static byte[] encode(byte[] source, int off, int len, byte[] alphabet,
      int maxLineLength) {
    int lenDiv3 = (len + 2) / 3; // ceil(len / 3)
    int len43 = lenDiv3 * 4;
    byte[] outBuff = new byte[len43 // Main 4:3
        + (len43 / maxLineLength)]; // New lines

    int d = 0;
    int e = 0;
    int len2 = len - 2;
    int lineLength = 0;
    // Process all complete 3-byte groups.
    for (; d < len2; d += 3, e += 4) {

      // The following block of code is the same as
      // encode3to4( source, d + off, 3, outBuff, e, alphabet );
      // but inlined for faster encoding (~20% improvement)
      int inBuff =
          ((source[d + off] << 24) >>> 8)
              | ((source[d + 1 + off] << 24) >>> 16)
              | ((source[d + 2 + off] << 24) >>> 24);
      outBuff[e] = alphabet[(inBuff >>> 18)];
      outBuff[e + 1] = alphabet[(inBuff >>> 12) & 0x3f];
      outBuff[e + 2] = alphabet[(inBuff >>> 6) & 0x3f];
      outBuff[e + 3] = alphabet[(inBuff) & 0x3f];

      lineLength += 4;
      if (lineLength == maxLineLength) {
        outBuff[e + 4] = NEW_LINE;
        e++;
        lineLength = 0;
      } // end if: end of line
    } // end for: each piece of array

    // Handle the trailing 1- or 2-byte group (padded via encode3to4).
    if (d < len) {
      encode3to4(source, d + off, len - d, outBuff, e, alphabet);

      lineLength += 4;
      if (lineLength == maxLineLength) {
        // Add a last newline
        outBuff[e + 4] = NEW_LINE;
        e++;
      }
      e += 4;
    }

    // Sanity check: only observed when running with -ea; the buffer was sized
    // exactly above.
    assert (e == outBuff.length);
    return outBuff;
  }

  /* ******** D E C O D I N G M E T H O D S ******** */

  /**
   * Decodes four bytes from array <var>source</var>
   * and writes the resulting bytes (up to three of them)
   * to <var>destination</var>.
   * The source and destination arrays can be manipulated
   * anywhere along their length by specifying
   * <var>srcOffset</var> and <var>destOffset</var>.
   * This method does not check to make sure your arrays
   * are large enough to accommodate <var>srcOffset</var> + 4 for
   * the <var>source</var> array or <var>destOffset</var> + 3 for
   * the <var>destination</var> array.
   * This method returns the actual number of bytes that
   * were converted from the Base64 encoding.
   *
   * @param source the array to convert
   * @param srcOffset the index where conversion begins
   * @param destination the array to hold the conversion
   * @param destOffset the index where output will be put
   * @param decodabet the decodabet for decoding Base64 content
   * @return the number of decoded bytes converted
   * @since 1.3
   */
  private static int decode4to3(byte[] source, int srcOffset,
      byte[] destination, int destOffset, byte[] decodabet) {
    // Example: Dk== (two pad bytes -> one output byte)
    if (source[srcOffset + 2] == EQUALS_SIGN) {
      int outBuff =
          ((decodabet[source[srcOffset]] << 24) >>> 6)
              | ((decodabet[source[srcOffset + 1]] << 24) >>> 12);
      destination[destOffset] = (byte) (outBuff >>> 16);
      return 1;
    } else if (source[srcOffset + 3] == EQUALS_SIGN) {
      // Example: DkL= (one pad byte -> two output bytes)
      int outBuff =
          ((decodabet[source[srcOffset]] << 24) >>> 6)
              | ((decodabet[source[srcOffset + 1]] << 24) >>> 12)
              | ((decodabet[source[srcOffset + 2]] << 24) >>> 18);
      destination[destOffset] = (byte) (outBuff >>> 16);
      destination[destOffset + 1] = (byte) (outBuff >>> 8);
      return 2;
    } else {
      // Example: DkLE (no padding -> three output bytes)
      int outBuff =
          ((decodabet[source[srcOffset]] << 24) >>> 6)
              | ((decodabet[source[srcOffset + 1]] << 24) >>> 12)
              | ((decodabet[source[srcOffset + 2]] << 24) >>> 18)
              | ((decodabet[source[srcOffset + 3]] << 24) >>> 24);
      destination[destOffset] = (byte) (outBuff >> 16);
      destination[destOffset + 1] = (byte) (outBuff >> 8);
      destination[destOffset + 2] = (byte) (outBuff);
      return 3;
    }
  } // end decodeToBytes

  /**
   * Decodes data from Base64 notation.
   *
   * @param s the string to decode (decoded in default encoding)
   * @return the decoded data
   * @since 1.4
   */
  public static byte[] decode(String s) throws Base64DecoderException {
    // NOTE(review): getBytes() uses the platform default charset. Base64 input
    // should be pure ASCII, where all common charsets agree; non-ASCII input is
    // rejected by decode() anyway.
    byte[] bytes = s.getBytes();
    return decode(bytes, 0, bytes.length);
  }

  /**
   * Decodes data from web safe Base64 notation.
   * Web safe encoding uses '-' instead of '+', '_' instead of '/'
   *
   * @param s the string to decode (decoded in default encoding)
   * @return the decoded data
   */
  public static byte[] decodeWebSafe(String s) throws Base64DecoderException {
    // See charset note in decode(String).
    byte[] bytes = s.getBytes();
    return decodeWebSafe(bytes, 0, bytes.length);
  }

  /**
   * Decodes Base64 content in byte array format and returns
   * the decoded byte array.
   *
   * @param source The Base64 encoded data
   * @return decoded data
   * @throws Base64DecoderException
   * @since 1.3
   */
  public static byte[] decode(byte[] source) throws Base64DecoderException {
    return decode(source, 0, source.length);
  }

  /**
   * Decodes web safe Base64 content in byte array format and returns
   * the decoded data.
   * Web safe encoding uses '-' instead of '+', '_' instead of '/'
   *
   * @param source the string to decode (decoded in default encoding)
   * @return the decoded data
   */
  public static byte[] decodeWebSafe(byte[] source)
      throws Base64DecoderException {
    return decodeWebSafe(source, 0, source.length);
  }

  /**
   * Decodes Base64 content in byte array format and returns
   * the decoded byte array.
   *
   * @param source the Base64 encoded data
   * @param off the offset of where to begin decoding
   * @param len the length of characters to decode
   * @return decoded data
   * @throws Base64DecoderException
   * @since 1.3
   */
  public static byte[] decode(byte[] source, int off, int len)
      throws Base64DecoderException {
    return decode(source, off, len, DECODABET);
  }

  /**
   * Decodes web safe Base64 content in byte array format and returns
   * the decoded byte array.
   * Web safe encoding uses '-' instead of '+', '_' instead of '/'
   *
   * @param source the Base64 encoded data
   * @param off the offset of where to begin decoding
   * @param len the length of characters to decode
   * @return decoded data
   */
  public static byte[] decodeWebSafe(byte[] source, int off, int len)
      throws Base64DecoderException {
    return decode(source, off, len, WEBSAFE_DECODABET);
  }

  /**
   * Decodes Base64 content using the supplied decodabet and returns
   * the decoded byte array.
   *
   * @param source the Base64 encoded data
   * @param off the offset of where to begin decoding
   * @param len the length of characters to decode
   * @param decodabet the decodabet for decoding Base64 content
   * @return decoded data
   * @throws Base64DecoderException on invalid characters or malformed padding
   */
  public static byte[] decode(byte[] source, int off, int len, byte[] decodabet)
      throws Base64DecoderException {
    int len34 = len * 3 / 4;
    byte[] outBuff = new byte[2 + len34]; // Upper limit on size of output
    int outBuffPosn = 0;

    // b4 accumulates up to four significant input bytes before each
    // decode4to3 call; b4Posn is the number currently buffered.
    byte[] b4 = new byte[4];
    int b4Posn = 0;
    int i = 0;
    byte sbiCrop = 0;
    byte sbiDecode = 0;
    for (i = 0; i < len; i++) {
      sbiCrop = (byte) (source[i + off] & 0x7f); // Only the low seven bits
      sbiDecode = decodabet[sbiCrop];

      if (sbiDecode >= WHITE_SPACE_ENC) { // White space Equals sign or better
        if (sbiDecode >= EQUALS_SIGN_ENC) {
          // An equals sign (for padding) must not occur at position 0 or 1
          // and must be the last byte[s] in the encoded value
          if (sbiCrop == EQUALS_SIGN) {
            int bytesLeft = len - i;
            byte lastByte = (byte) (source[len - 1 + off] & 0x7f);
            if (b4Posn == 0 || b4Posn == 1) {
              throw new Base64DecoderException(
                  "invalid padding byte '=' at byte offset " + i);
            } else if ((b4Posn == 3 && bytesLeft > 2)
                || (b4Posn == 4 && bytesLeft > 1)) {
              throw new Base64DecoderException(
                  "padding byte '=' falsely signals end of encoded value "
                      + "at offset " + i
              );
            } else if (lastByte != EQUALS_SIGN && lastByte != NEW_LINE) {
              throw new Base64DecoderException(
                  "encoded value has invalid trailing byte");
            }
            // Valid padding: stop consuming input; the tail is flushed below.
            break;
          }

          b4[b4Posn++] = sbiCrop;
          if (b4Posn == 4) {
            outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet);
            b4Posn = 0;
          }
        }
        // else: whitespace (-5) is silently skipped
      } else {
        throw new Base64DecoderException("Bad Base64 input character at " + i
            + ": " + source[i + off] + "(decimal)");
      }
    }

    // Because web safe encoding allows non padding base64 encodes, we
    // need to pad the rest of the b4 buffer with equal signs when
    // b4Posn != 0. There can be at most 2 equal signs at the end of
    // four characters, so the b4 buffer must have two or three
    // characters. This also catches the case where the input is
    // padded with EQUALS_SIGN
    if (b4Posn != 0) {
      if (b4Posn == 1) {
        throw new Base64DecoderException("single trailing character at offset "
            + (len - 1));
      }
      b4[b4Posn++] = EQUALS_SIGN;
      outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, decodabet);
    }

    // Trim the over-allocated buffer to the actual decoded length.
    byte[] out = new byte[outBuffPosn];
    System.arraycopy(outBuff, 0, out, 0, outBuffPosn);
    return out;
  }
}
| |
package org.spongycastle.asn1;
import java.io.IOException;
/**
 * ASN.1 TaggedObject - in ASN.1 notation this is any object preceded by
 * a [n] where n is some number - these are assumed to follow the construction
 * rules (as with sequences).
 */
public abstract class ASN1TaggedObject
    extends ASN1Primitive
    implements ASN1TaggedObjectParser
{
    int tagNo;
    boolean empty = false;
    boolean explicit = true;
    ASN1Encodable obj = null;

    /**
     * Return the tagged object nested inside another tagged object.
     *
     * @param obj the outer tagged object.
     * @param explicit true if the outer tag is explicit.
     * @return the nested tagged object.
     * @throws IllegalArgumentException if the outer tag is implicit - the
     *         nested object cannot then be recovered unambiguously.
     */
    static public ASN1TaggedObject getInstance(
        ASN1TaggedObject obj,
        boolean explicit)
    {
        if (explicit)
        {
            return (ASN1TaggedObject)obj.getObject();
        }

        throw new IllegalArgumentException("implicitly tagged tagged object");
    }

    /**
     * Return an ASN1TaggedObject from the given object.
     *
     * @param obj the object we want converted (an ASN1TaggedObject, an
     *        encoded byte[], or null).
     * @return a tagged object, or null if null was passed in.
     * @throws IllegalArgumentException if the object cannot be converted.
     */
    static public ASN1TaggedObject getInstance(
        Object obj)
    {
        if (obj == null || obj instanceof ASN1TaggedObject)
        {
            return (ASN1TaggedObject)obj;
        }
        else if (obj instanceof byte[])
        {
            try
            {
                return ASN1TaggedObject.getInstance(fromByteArray((byte[])obj));
            }
            catch (IOException e)
            {
                throw new IllegalArgumentException("failed to construct tagged object from byte[]: " + e.getMessage());
            }
        }

        throw new IllegalArgumentException("unknown object in getInstance: " + obj.getClass().getName());
    }

    /**
     * Create a tagged object with the style given by the value of explicit.
     * <p>
     * If the object implements ASN1Choice the tag style will always be changed
     * to explicit in accordance with the ASN.1 encoding rules.
     * </p>
     * @param explicit true if the object is explicitly tagged.
     * @param tagNo the tag number for this object.
     * @param obj the tagged object.
     */
    public ASN1TaggedObject(
        boolean explicit,
        int tagNo,
        ASN1Encodable obj)
    {
        if (obj instanceof ASN1Choice)
        {
            // CHOICE values carry no tag of their own, so they must always be
            // explicitly tagged regardless of what the caller requested.
            this.explicit = true;
        }
        else
        {
            this.explicit = explicit;
        }

        this.tagNo = tagNo;

        if (this.explicit)
        {
            this.obj = obj;
        }
        else
        {
            // toASN1Primitive() is invoked for its conversion/validation side
            // effects only. The former "instanceof ASN1Set" branch declared an
            // unused local ("ASN1Set s = null;") and was removed as dead code.
            obj.toASN1Primitive();
            this.obj = obj;
        }
    }

    /**
     * Structural equality: same tag number, emptiness and tag style, and
     * equal underlying primitives.
     */
    boolean asn1Equals(
        ASN1Primitive o)
    {
        if (!(o instanceof ASN1TaggedObject))
        {
            return false;
        }

        ASN1TaggedObject other = (ASN1TaggedObject)o;

        if (tagNo != other.tagNo || empty != other.empty || explicit != other.explicit)
        {
            return false;
        }

        if (obj == null)
        {
            if (other.obj != null)
            {
                return false;
            }
        }
        else
        {
            if (!(obj.toASN1Primitive().equals(other.obj.toASN1Primitive())))
            {
                return false;
            }
        }

        return true;
    }

    public int hashCode()
    {
        int code = tagNo;

        // TODO: actually this is wrong - the problem is that a re-encoded
        // object may end up with a different hashCode due to implicit
        // tagging. As implicit tagging is ambiguous if a sequence is involved
        // it seems the only correct method for both equals and hashCode is to
        // compare the encodings...
        if (obj != null)
        {
            code ^= obj.hashCode();
        }

        return code;
    }

    /**
     * @return the tag number of this object.
     */
    public int getTagNo()
    {
        return tagNo;
    }

    /**
     * return whether or not the object may be explicitly tagged.
     * <p>
     * Note: if the object has been read from an input stream, the only
     * time you can be sure if isExplicit is returning the true state of
     * affairs is if it returns false. An implicitly tagged object may appear
     * to be explicitly tagged, so you need to understand the context under
     * which the reading was done as well, see getObject below.
     */
    public boolean isExplicit()
    {
        return explicit;
    }

    /**
     * @return true if this tagged object carries no content.
     */
    public boolean isEmpty()
    {
        return empty;
    }

    /**
     * return whatever was following the tag.
     * <p>
     * Note: tagged objects are generally context dependent if you're
     * trying to extract a tagged object you should be going via the
     * appropriate getInstance method.
     */
    public ASN1Primitive getObject()
    {
        if (obj != null)
        {
            return obj.toASN1Primitive();
        }

        return null;
    }

    /**
     * Return the object held in this tagged object as a parser assuming it has
     * the type of the passed in tag. If the object doesn't have a parser
     * associated with it, the base object is returned.
     */
    public ASN1Encodable getObjectParser(
        int tag,
        boolean isExplicit)
        throws IOException
    {
        switch (tag)
        {
        case BERTags.SET:
            return ASN1Set.getInstance(this, isExplicit).parser();
        case BERTags.SEQUENCE:
            return ASN1Sequence.getInstance(this, isExplicit).parser();
        case BERTags.OCTET_STRING:
            return ASN1OctetString.getInstance(this, isExplicit).parser();
        }

        if (isExplicit)
        {
            return getObject();
        }

        throw new ASN1Exception("implicit tagging not implemented for tag: " + tag);
    }

    public ASN1Primitive getLoadedObject()
    {
        return this.toASN1Primitive();
    }

    ASN1Primitive toDERObject()
    {
        return new DERTaggedObject(explicit, tagNo, obj);
    }

    ASN1Primitive toDLObject()
    {
        return new DLTaggedObject(explicit, tagNo, obj);
    }

    abstract void encode(ASN1OutputStream out)
        throws IOException;

    public String toString()
    {
        return "[" + tagNo + "]" + obj;
    }
}
| |
/*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.photon.bootstrap3.uictrls.datetimepicker;
import java.time.LocalDate;
import java.util.Calendar;
import java.util.EnumSet;
import java.util.Locale;
import java.util.Set;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.helger.commons.ValueEnforcer;
import com.helger.commons.annotation.ReturnsMutableCopy;
import com.helger.commons.collection.CollectionHelper;
import com.helger.commons.datetime.PDTFactory;
import com.helger.commons.datetime.PDTFormatPatterns;
import com.helger.commons.string.StringHelper;
import com.helger.html.css.DefaultCSSClassProvider;
import com.helger.html.css.ICSSClassProvider;
import com.helger.html.hc.IHCConversionSettingsToNode;
import com.helger.html.hc.IHCHasChildrenMutable;
import com.helger.html.hc.IHCNode;
import com.helger.html.hc.html.forms.HCEdit;
import com.helger.html.jquery.JQuery;
import com.helger.html.jquery.JQueryInvocation;
import com.helger.html.jscode.JSArray;
import com.helger.html.jscode.JSAssocArray;
import com.helger.html.jscode.JSInvocation;
import com.helger.html.request.IHCRequestField;
import com.helger.photon.app.html.PhotonCSS;
import com.helger.photon.app.html.PhotonJS;
import com.helger.photon.bootstrap3.EBootstrapIcon;
import com.helger.photon.bootstrap3.inputgroup.BootstrapInputGroup;
import com.helger.photon.bootstrap3.uictrls.EBootstrapUICtrlsCSSPathProvider;
import com.helger.photon.bootstrap3.uictrls.EBootstrapUICtrlsJSPathProvider;
import com.helger.photon.core.form.RequestField;
import com.helger.photon.core.form.RequestFieldDate;
/**
 * This class represents a wrapper around the DateTime Picker for Bootstrap from
 * https://github.com/smalot/bootstrap-datetimepicker<br>
 * By default it is a date selector only. If you want to have times as well, you
 * call {@link #setMinView(EDateTimePickerViewType)} with the type
 * {@link EDateTimePickerViewType#HOUR}.
 *
 * @author Philip Helger
 */
public class BootstrapDateTimePicker extends BootstrapInputGroup
{
  public static final ICSSClassProvider CSS_CLASS_DATE = DefaultCSSClassProvider.create ("date");
  public static final boolean DEFAULT_AUTOCLOSE = true;
  public static final EDateTimePickerViewType DEFAULT_START_VIEW = EDateTimePickerViewType.MONTH;
  public static final EDateTimePickerTodayType DEFAULT_TODAY = EDateTimePickerTodayType.LINKED;
  public static final boolean DEFAULT_TODAY_HIGHLIGHT = true;
  public static final boolean DEFAULT_KEYBOARD_NAVIGATION = true;
  public static final boolean DEFAULT_FORCE_PARSE = false;
  public static final int DEFAULT_MINUTE_STEP = 5;
  public static final boolean DEFAULT_SHOW_MERIDIAN = false;
  public static final boolean DEFAULT_SHOW_RESET_BUTTON = false;

  private static final Logger LOGGER = LoggerFactory.getLogger (BootstrapDateTimePicker.class);

  private final HCEdit m_aEdit;
  private final Locale m_aDisplayLocale;
  private final EDateTimePickerLanguage m_eLanguage;

  // Settings
  private String m_sFormat;
  private EDateTimePickerDayOfWeek m_eWeekStart;
  private LocalDate m_aStartDate;
  private LocalDate m_aEndDate;
  private EnumSet <EDateTimePickerDayOfWeek> m_aDaysOfWeekDisabled;
  private boolean m_bAutoclose = DEFAULT_AUTOCLOSE;
  private EDateTimePickerViewType m_eStartView;
  private EDateTimePickerViewType m_eMinView = EDateTimePickerViewType.MONTH;
  private EDateTimePickerViewType m_eMaxView;
  private EDateTimePickerTodayType m_eTodayBtn = DEFAULT_TODAY;
  private boolean m_bTodayHighlight = DEFAULT_TODAY_HIGHLIGHT;
  private boolean m_bKeyboardNavigation = DEFAULT_KEYBOARD_NAVIGATION;
  private boolean m_bForceParse = DEFAULT_FORCE_PARSE;
  private int m_nMinuteStep = DEFAULT_MINUTE_STEP;
  private EDateTimePickerPositionType m_ePickerPosition;
  private boolean m_bShowMeridian = DEFAULT_SHOW_MERIDIAN;
  private LocalDate m_aInitialDate;

  // UI options
  private boolean m_bShowResetButton = DEFAULT_SHOW_RESET_BUTTON;

  public BootstrapDateTimePicker (@Nonnull final RequestFieldDate aRFD)
  {
    this (aRFD.getFieldName (), aRFD.getRequestValue (), aRFD.getDisplayLocale ());
  }

  public BootstrapDateTimePicker (@Nonnull final IHCRequestField aRF, @Nonnull final Locale aDisplayLocale)
  {
    this (aRF.getFieldName (), aRF.getRequestValue (), aDisplayLocale);
  }

  public BootstrapDateTimePicker (@Nonnull final String sName, @Nullable final String sValue, @Nonnull final Locale aDisplayLocale)
  {
    super (new HCEdit (new RequestField (sName, sValue)).setPlaceholder (""));
    ValueEnforcer.notNull (aDisplayLocale, "DisplayLocale");
    ensureID ();
    m_aEdit = (HCEdit) getInput ();
    m_aDisplayLocale = aDisplayLocale;
    m_eLanguage = EDateTimePickerLanguage.getFromLocaleOrNull (aDisplayLocale);
    if (m_eLanguage == null && !EDateTimePickerLanguage.PREDEFINED_LANGUAGE.equals (aDisplayLocale.getLanguage ()))
      LOGGER.warn ("Unsupported EDateTimePickerLanguage provided: " + aDisplayLocale);
    // Derive the first day of week from the locale's calendar settings
    m_eWeekStart = EDateTimePickerDayOfWeek.getFromJavaValueOrNull (Calendar.getInstance (aDisplayLocale).getFirstDayOfWeek ());
    // Use the calendar icon as default prefix
    prefixes ().addChild (EBootstrapIcon.CALENDAR.getAsNode ());
    // Note: a former "if (false)" block that set a default end date of
    // "today + 1 year" was removed as dead code.
  }

  /**
   * @return The contained edit. You may modify the styles.
   */
  @Nonnull
  public HCEdit getEdit ()
  {
    return m_aEdit;
  }

  @Nonnull
  public BootstrapDateTimePicker setReadOnly (final boolean bReadOnly)
  {
    m_aEdit.setReadOnly (bReadOnly);
    return this;
  }

  /**
   * @return The datetime picker language to use. May be <code>null</code>.
   */
  @Nullable
  public EDateTimePickerLanguage getDateTimeLanguage ()
  {
    return m_eLanguage;
  }

  /**
   * @return The control date format string.
   */
  @Nullable
  public String getFormat ()
  {
    return m_sFormat;
  }

  /**
   * Set an explicit format string. If none is set, a default format is
   * derived from the display locale in {@link #getJSOptions()}.
   *
   * @param sFormat the format string to use. May be <code>null</code>.
   * @return this for chaining
   */
  @Nonnull
  public BootstrapDateTimePicker setFormat (@Nullable final String sFormat)
  {
    m_sFormat = sFormat;
    return this;
  }

  @Nullable
  public EDateTimePickerDayOfWeek getWeekStart ()
  {
    return m_eWeekStart;
  }

  @Nonnull
  public BootstrapDateTimePicker setWeekStart (@Nullable final EDateTimePickerDayOfWeek eWeekStart)
  {
    m_eWeekStart = eWeekStart;
    return this;
  }

  @Nullable
  public LocalDate getStartDate ()
  {
    return m_aStartDate;
  }

  @Nonnull
  public BootstrapDateTimePicker setStartDate (@Nullable final LocalDate aStartDate)
  {
    m_aStartDate = aStartDate;
    return this;
  }

  @Nullable
  public LocalDate getEndDate ()
  {
    return m_aEndDate;
  }

  @Nonnull
  public BootstrapDateTimePicker setEndDate (@Nullable final LocalDate aEndDate)
  {
    m_aEndDate = aEndDate;
    return this;
  }

  /**
   * @return A copy of the disabled days of week. Never <code>null</code>.
   */
  @Nonnull
  @ReturnsMutableCopy
  public Set <EDateTimePickerDayOfWeek> getDaysOfWeekDisabled ()
  {
    return CollectionHelper.newEnumSet (EDateTimePickerDayOfWeek.class, m_aDaysOfWeekDisabled);
  }

  /**
   * Set the days of week on which no selection is possible.
   *
   * @param aDaysOfWeekDisabled the days to disable. May be <code>null</code>.
   * @return this for chaining
   */
  @Nonnull
  public BootstrapDateTimePicker setDaysOfWeekDisabled (@Nullable final Set <EDateTimePickerDayOfWeek> aDaysOfWeekDisabled)
  {
    if (aDaysOfWeekDisabled == null)
      m_aDaysOfWeekDisabled = null;
    else
    {
      // Bugfix: previously this copied the old field value
      // (m_aDaysOfWeekDisabled) instead of the supplied parameter, making
      // the setter a no-op.
      m_aDaysOfWeekDisabled = CollectionHelper.newEnumSet (EDateTimePickerDayOfWeek.class, aDaysOfWeekDisabled);
    }
    return this;
  }

  public boolean isAutoclose ()
  {
    return m_bAutoclose;
  }

  @Nonnull
  public BootstrapDateTimePicker setAutoclose (final boolean bAutoclose)
  {
    m_bAutoclose = bAutoclose;
    return this;
  }

  @Nullable
  public EDateTimePickerViewType getStartView ()
  {
    return m_eStartView;
  }

  /**
   * Set the view shown when the picker opens. Must lie between min and max
   * view if those are set.
   *
   * @param eStartView the start view. May be <code>null</code>.
   * @return this for chaining
   */
  @Nonnull
  public BootstrapDateTimePicker setStartView (@Nullable final EDateTimePickerViewType eStartView)
  {
    if (eStartView != null && m_eMinView != null && eStartView.isLessThan (m_eMinView))
      throw new IllegalArgumentException ("StartView may not be before MinView");
    if (eStartView != null && m_eMaxView != null && eStartView.isGreaterThan (m_eMaxView))
      throw new IllegalArgumentException ("StartView may not be after MaxView");
    m_eStartView = eStartView;
    return this;
  }

  @Nullable
  public EDateTimePickerViewType getMinView ()
  {
    return m_eMinView;
  }

  /**
   * Set the most detailed view the picker may descend to (e.g. HOUR to
   * enable time selection).
   *
   * @param eMinView the minimum view. May be <code>null</code>.
   * @return this for chaining
   */
  @Nonnull
  public BootstrapDateTimePicker setMinView (@Nullable final EDateTimePickerViewType eMinView)
  {
    if (eMinView != null && m_eStartView != null && m_eStartView.isLessThan (eMinView))
      throw new IllegalArgumentException ("StartView may not be before MinView");
    if (eMinView != null && m_eMaxView != null && m_eMaxView.isLessThan (eMinView))
      throw new IllegalArgumentException ("MaxView may not be before MinView");
    m_eMinView = eMinView;
    return this;
  }

  @Nullable
  public EDateTimePickerViewType getMaxView ()
  {
    return m_eMaxView;
  }

  /**
   * Set the coarsest view the picker may ascend to.
   *
   * @param eMaxView the maximum view. May be <code>null</code>.
   * @return this for chaining
   */
  @Nonnull
  public BootstrapDateTimePicker setMaxView (@Nullable final EDateTimePickerViewType eMaxView)
  {
    if (eMaxView != null && m_eStartView != null && m_eStartView.isGreaterThan (eMaxView))
      throw new IllegalArgumentException ("StartView may not be after MaxView");
    if (eMaxView != null && m_eMinView != null && m_eMinView.isGreaterThan (eMaxView))
      throw new IllegalArgumentException ("MinView may not be after MaxView");
    m_eMaxView = eMaxView;
    return this;
  }

  @Nullable
  public EDateTimePickerTodayType getTodayBtn ()
  {
    return m_eTodayBtn;
  }

  @Nonnull
  public BootstrapDateTimePicker setTodayBtn (@Nullable final EDateTimePickerTodayType eTodayBtn)
  {
    m_eTodayBtn = eTodayBtn;
    return this;
  }

  public boolean isTodayHighlight ()
  {
    return m_bTodayHighlight;
  }

  @Nonnull
  public BootstrapDateTimePicker setTodayHighlight (final boolean bTodayHighlight)
  {
    m_bTodayHighlight = bTodayHighlight;
    return this;
  }

  public boolean isKeyboardNavigation ()
  {
    return m_bKeyboardNavigation;
  }

  @Nonnull
  public BootstrapDateTimePicker setKeyboardNavigation (final boolean bKeyboardNavigation)
  {
    m_bKeyboardNavigation = bKeyboardNavigation;
    return this;
  }

  public boolean isForceParse ()
  {
    return m_bForceParse;
  }

  @Nonnull
  public BootstrapDateTimePicker setForceParse (final boolean bForceParse)
  {
    m_bForceParse = bForceParse;
    return this;
  }

  public int getMinuteStep ()
  {
    return m_nMinuteStep;
  }

  /**
   * Set the minute increment used in the time views.
   *
   * @param nMinuteStep step in minutes; must be in the range [1,59].
   * @return this for chaining
   * @throws IllegalArgumentException if the step is out of range
   */
  @Nonnull
  public BootstrapDateTimePicker setMinuteStep (@Nonnegative final int nMinuteStep)
  {
    if (nMinuteStep < 1 || nMinuteStep > 59)
      throw new IllegalArgumentException ("Invalid minute step: " + nMinuteStep);
    m_nMinuteStep = nMinuteStep;
    return this;
  }

  @Nullable
  public EDateTimePickerPositionType getPickerPosition ()
  {
    return m_ePickerPosition;
  }

  @Nonnull
  public BootstrapDateTimePicker setPickerPosition (@Nullable final EDateTimePickerPositionType ePickerPosition)
  {
    m_ePickerPosition = ePickerPosition;
    return this;
  }

  @Nullable
  public LocalDate getInitialDate ()
  {
    return m_aInitialDate;
  }

  @Nonnull
  public BootstrapDateTimePicker setInitialDate (@Nullable final LocalDate aInitialDate)
  {
    m_aInitialDate = aInitialDate;
    return this;
  }

  public boolean isShowMeridian ()
  {
    return m_bShowMeridian;
  }

  @Nonnull
  public BootstrapDateTimePicker setShowMeridian (final boolean bShowMeridian)
  {
    m_bShowMeridian = bShowMeridian;
    return this;
  }

  public boolean isShowResetButton ()
  {
    return m_bShowResetButton;
  }

  @Nonnull
  public BootstrapDateTimePicker setShowResetButton (final boolean bShowResetButton)
  {
    m_bShowResetButton = bShowResetButton;
    return this;
  }

  /**
   * @return <code>true</code> if the current min view allows time selection
   *         (i.e. it is below MONTH or not set at all).
   */
  public boolean isShowTime ()
  {
    return m_eMinView == null || m_eMinView.isLessThan (EDateTimePickerViewType.MONTH);
  }

  @Nonnull
  public static JSInvocation invoke (@Nonnull final JQueryInvocation aJQueryInvocation)
  {
    return aJQueryInvocation.invoke ("datetimepicker");
  }

  @Nonnull
  public JSInvocation invoke ()
  {
    return invoke (JQuery.idRef (this));
  }

  @Nonnull
  public static JSInvocation invoke (@Nonnull final JQueryInvocation aJQueryInvocation, @Nonnull final JSAssocArray aOptions)
  {
    return invoke (aJQueryInvocation).arg (aOptions);
  }

  @Nonnull
  public JSInvocation invoke (@Nonnull final JSAssocArray aOptions)
  {
    return invoke ().arg (aOptions);
  }

  /**
   * @return A {@link JSAssocArray} with all options for this date and time
   *         Picker. Never <code>null</code>.
   */
  @Nonnull
  public JSAssocArray getJSOptions ()
  {
    final JSAssocArray aOptions = new JSAssocArray ();
    if (StringHelper.hasText (m_sFormat))
      aOptions.add ("format", m_sFormat);
    else
    {
      // No explicit format - derive one from the display locale, including
      // time if the min view allows it
      final String sDefaultFormat = BootstrapDateTimePickerFormatBuilder.fromJavaPattern (isShowTime () ? PDTFormatPatterns.getDefaultPatternDateTime (m_aDisplayLocale)
                                                                                                        : PDTFormatPatterns.getDefaultPatternDate (m_aDisplayLocale))
                                                                        .getJSCalendarFormatString ();
      aOptions.add ("format", sDefaultFormat);
    }
    if (m_eWeekStart != null)
      aOptions.add ("weekStart", m_eWeekStart.getJSValue ());
    if (m_aStartDate != null)
    {
      // Print ISO8601 formatted
      aOptions.add ("startDate", m_aStartDate.toString ());
    }
    if (m_aEndDate != null)
    {
      // Print ISO8601 formatted
      aOptions.add ("endDate", m_aEndDate.toString ());
    }
    if (m_aDaysOfWeekDisabled != null && !m_aDaysOfWeekDisabled.isEmpty ())
    {
      final JSArray aArray = new JSArray ();
      for (final EDateTimePickerDayOfWeek eDayOfWeek : m_aDaysOfWeekDisabled)
        aArray.add (eDayOfWeek.getJSValue ());
      aOptions.add ("daysOfWeekDisabled", aArray);
    }
    aOptions.add ("autoclose", m_bAutoclose);
    if (m_eStartView != null)
      aOptions.add ("startView", m_eStartView.getJSValueString ());
    if (m_eMinView != null)
      aOptions.add ("minView", m_eMinView.getJSValueString ());
    if (m_eMaxView != null)
      aOptions.add ("maxView", m_eMaxView.getJSValueString ());
    if (m_eTodayBtn != null)
      aOptions.add ("todayBtn", m_eTodayBtn.getJSValue ());
    aOptions.add ("todayHighlight", m_bTodayHighlight);
    aOptions.add ("keyboardNavigation", m_bKeyboardNavigation);
    if (m_eLanguage != null)
      aOptions.add ("language", m_eLanguage.getLanguageID ());
    aOptions.add ("forceParse", m_bForceParse);
    if (m_nMinuteStep != DEFAULT_MINUTE_STEP)
      aOptions.add ("minuteStep", m_nMinuteStep);
    if (m_ePickerPosition != null)
      aOptions.add ("pickerPosition", m_ePickerPosition.getJSValue ());
    aOptions.add ("showMeridian", m_bShowMeridian);
    if (m_aInitialDate != null)
    {
      // Print ISO8601 formatted
      aOptions.add ("initialDate", m_aInitialDate.toString ());
    }
    return aOptions;
  }

  @Override
  protected void onFinalizeNodeState (@Nonnull final IHCConversionSettingsToNode aConversionSettings,
                                      @Nonnull final IHCHasChildrenMutable <?, ? super IHCNode> aTargetNode)
  {
    super.onFinalizeNodeState (aConversionSettings, aTargetNode);
    if (m_bShowResetButton)
      suffixes ().addChild (EBootstrapIcon.REMOVE.getAsNode ());
    addClass (CSS_CLASS_DATE);
    // Add JS if necessary
    if (!m_aEdit.isReadOnly ())
      addChild (new BootstrapDateTimePickerJS (this));
  }

  @Override
  protected void onRegisterExternalResources (@Nonnull final IHCConversionSettingsToNode aConversionSettings,
                                              final boolean bForceRegistration)
  {
    super.onRegisterExternalResources (aConversionSettings, bForceRegistration);
    PhotonJS.registerJSIncludeForThisRequest (EBootstrapUICtrlsJSPathProvider.DATETIMEPICKER);
    if (m_eLanguage != null)
    {
      // Locales must be after the main datetime picker
      PhotonJS.registerJSIncludeForThisRequest (EBootstrapUICtrlsJSPathProvider.DATETIMEPICKER_LOCALE.getInstance (m_eLanguage.getLanguageID ()));
    }
    PhotonCSS.registerCSSIncludeForThisRequest (EBootstrapUICtrlsCSSPathProvider.DATETIMEPICKER);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.db.compaction;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.db.marshal.AsciiType;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.junit.BeforeClass;
import com.google.common.collect.Sets;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.apache.cassandra.OrderedJUnit4ClassRunner;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.ColumnFilter;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.ISSTableScanner;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.utils.ByteBufferUtil;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(OrderedJUnit4ClassRunner.class)
public class TTLExpiryTest
{
    public static final String KEYSPACE1 = "TTLExpiryTest";
    private static final String CF_STANDARD1 = "Standard1";

    /** Creates the test keyspace/table once; gcGraceSeconds=0 so expired data is purgeable immediately. */
    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE1,
                                    KeyspaceParams.simple(1),
                                    CFMetaData.Builder.create(KEYSPACE1, CF_STANDARD1)
                                                      .addPartitionKey("pKey", AsciiType.instance)
                                                      .addRegularColumn("col1", AsciiType.instance)
                                                      .addRegularColumn("col", AsciiType.instance)
                                                      .addRegularColumn("col311", AsciiType.instance)
                                                      .addRegularColumn("col2", AsciiType.instance)
                                                      .addRegularColumn("col3", AsciiType.instance)
                                                      .addRegularColumn("col7", AsciiType.instance)
                                                      .addRegularColumn("shadow", AsciiType.instance)
                                                      .build().gcGraceSeconds(0));
    }

    /**
     * Verifies that getFullyExpiredSSTables only reports sstables whose data
     * is fully expired AND not needed to shadow newer data elsewhere.
     */
    @Test
    public void testAggressiveFullyExpired()
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore("Standard1");
        cfs.disableAutoCompaction();
        cfs.metadata.gcGraceSeconds(0);

        String key = "ttl";
        // Build 4 sstables with overlapping timestamps so some of them are
        // still required for correct shadowing and must not be dropped.
        new RowUpdateBuilder(cfs.metadata, 1L, 1, key)
                        .add("col1", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        new RowUpdateBuilder(cfs.metadata, 3L, 1, key)
                        .add("col2", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, 2L, 1, key)
                        .add("col1", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        new RowUpdateBuilder(cfs.metadata, 5L, 1, key)
                        .add("col2", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, 4L, 1, key)
                        .add("col1", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        new RowUpdateBuilder(cfs.metadata, 7L, 1, key)
                        .add("shadow", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, 6L, 3, key)
                        .add("shadow", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        new RowUpdateBuilder(cfs.metadata, 8L, 1, key)
                        .add("col2", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                        .build()
                        .applyUnsafe();
        cfs.forceBlockingFlush();

        Set<SSTableReader> sstables = Sets.newHashSet(cfs.getSSTables());
        int now = (int)(System.currentTimeMillis() / 1000);
        // gcBefore is in the future so TTL-1 cells count as gc-able already
        int gcBefore = now + 2;
        Set<SSTableReader> expired = CompactionController.getFullyExpiredSSTables(
                cfs,
                sstables,
                // Fixed raw-typed Collections.EMPTY_SET -> type-safe empty set
                Collections.<SSTableReader>emptySet(),
                gcBefore);
        assertEquals(2, expired.size());

        cfs.clearUnsafe();
    }

    /**
     * All sstables contain only TTL'd data; after expiry a compaction should
     * remove every sstable.
     */
    @Test
    public void testSimpleExpire() throws InterruptedException
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore("Standard1");
        cfs.disableAutoCompaction();
        cfs.metadata.gcGraceSeconds(0);
        long timestamp = System.currentTimeMillis();
        String key = "ttl";
        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
            .add("col", ByteBufferUtil.EMPTY_BYTE_BUFFER)
            .add("col7", ByteBufferUtil.EMPTY_BYTE_BUFFER)
            .build()
            .applyUnsafe();
        cfs.forceBlockingFlush();

        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
            .add("col2", ByteBufferUtil.EMPTY_BYTE_BUFFER)
            .build()
            .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
                    .add("col3", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                    .build()
                    .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
                            .add("col311", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                            .build()
                            .applyUnsafe();

        cfs.forceBlockingFlush();
        Thread.sleep(2000); // wait for ttl to expire
        assertEquals(4, cfs.getSSTables().size());
        cfs.enableAutoCompaction(true);
        // Everything was TTL'd - no sstables should survive compaction
        assertEquals(0, cfs.getSSTables().size());
    }

    /**
     * Mixes TTL'd and non-TTL'd partitions; compaction must keep the sstable
     * content for the non-TTL'd key only.
     */
    @Test
    public void testNoExpire() throws InterruptedException, IOException
    {
        ColumnFamilyStore cfs = Keyspace.open(KEYSPACE1).getColumnFamilyStore("Standard1");
        cfs.disableAutoCompaction();
        cfs.metadata.gcGraceSeconds(0);
        long timestamp = System.currentTimeMillis();
        String key = "ttl";
        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
            .add("col", ByteBufferUtil.EMPTY_BYTE_BUFFER)
            .add("col7", ByteBufferUtil.EMPTY_BYTE_BUFFER)
            .build()
            .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
            .add("col2", ByteBufferUtil.EMPTY_BYTE_BUFFER)
            .build()
            .applyUnsafe();
        cfs.forceBlockingFlush();
        new RowUpdateBuilder(cfs.metadata, timestamp, 1, key)
                    .add("col3", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                    .build()
                    .applyUnsafe();
        cfs.forceBlockingFlush();
        // This partition has no TTL and must survive compaction
        String noTTLKey = "nottl";
        new RowUpdateBuilder(cfs.metadata, timestamp, noTTLKey)
                            .add("col311", ByteBufferUtil.EMPTY_BYTE_BUFFER)
                            .build()
                            .applyUnsafe();
        cfs.forceBlockingFlush();
        Thread.sleep(2000); // wait for ttl to expire
        assertEquals(4, cfs.getSSTables().size());
        cfs.enableAutoCompaction(true);
        assertEquals(1, cfs.getSSTables().size());
        SSTableReader sstable = cfs.getSSTables().iterator().next();
        ISSTableScanner scanner = sstable.getScanner(ColumnFilter.all(sstable.metadata), DataRange.allData(sstable.partitioner), false);
        assertTrue(scanner.hasNext());
        while(scanner.hasNext())
        {
            UnfilteredRowIterator iter = scanner.next();
            // Only the non-TTL'd partition may remain
            assertEquals(Util.dk(noTTLKey), iter.partitionKey());
        }
        scanner.close();
    }
}
| |
package it.finsiel.siged.mvc.presentation.action.protocollo;
import it.finsiel.siged.constant.Constants;
import it.finsiel.siged.constant.ReturnValues;
import it.finsiel.siged.model.Fascicolo;
import it.finsiel.siged.model.organizzazione.Organizzazione;
import it.finsiel.siged.model.organizzazione.Ufficio;
import it.finsiel.siged.model.organizzazione.Utente;
import it.finsiel.siged.mvc.bo.AlberoUfficiBO;
import it.finsiel.siged.mvc.bo.TitolarioBO;
import it.finsiel.siged.mvc.business.FascicoloDelegate;
import it.finsiel.siged.mvc.business.LookupDelegate;
import it.finsiel.siged.mvc.business.ProcedimentoDelegate;
import it.finsiel.siged.mvc.presentation.actionform.protocollo.FaldoneForm;
import it.finsiel.siged.mvc.presentation.actionform.protocollo.ProtocolloForm;
import it.finsiel.siged.mvc.presentation.actionform.protocollo.ProtocolloIngressoForm;
import it.finsiel.siged.mvc.presentation.actionform.protocollo.RicercaProcedimentoForm;
import it.finsiel.siged.mvc.vo.organizzazione.UfficioVO;
import it.finsiel.siged.mvc.vo.protocollo.ProcedimentoVO;
import it.finsiel.siged.mvc.vo.protocollo.ProtocolloProcedimentoVO;
import it.finsiel.siged.util.DateUtil;
import it.finsiel.siged.util.NumberUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.apache.struts.Globals;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.util.MessageResources;
public class RicercaProcedimentoAction extends Action {
// ----------------------------------------------------- Instance Variables
/**
* The <code>Log</code> instance for this application.
*/
static Logger logger = Logger.getLogger(RicercaProcedimentoAction.class
.getName());
public ActionForward execute(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
ActionMessages errors = new ActionMessages();// Report any errors we
HttpSession session = request.getSession();
RicercaProcedimentoForm ricercaForm = (RicercaProcedimentoForm) form;
boolean indietroVisibile = false;
ricercaForm.setIndietroVisibile(indietroVisibile);
Utente utente = (Utente) session.getAttribute(Constants.UTENTE_KEY);
boolean ufficioCompleto = (utente.getUfficioVOInUso().getTipo().equals(
UfficioVO.UFFICIO_CENTRALE) || utente.getUfficioVOInUso()
.getTipo().equals(UfficioVO.UFFICIO_SEMICENTRALE));
boolean preQuery = request.getAttribute("cercaProcedimentiDaFaldoni") != null;
if (Boolean.TRUE.equals(session
.getAttribute("procedimentiDaProtocollo"))) {
}
String cercaOggetto = (String) request
.getAttribute("cercaProcedimentiDaProtocollo");
// ricercaForm.setOggettoProcedimento(cercaOggetto);
if (request.getAttribute("cercaProcedimentiDaProtocollo") == null) {
ricercaForm.setOggettoProcedimento(ricercaForm
.getOggettoProcedimento());
} else {
ricercaForm.setOggettoProcedimento(cercaOggetto);
}
String cercaOggettoDaFaldoni = (String) request
.getAttribute("cercaProcedimentiDaFaldoni");
if (request.getAttribute("cercaProcedimentiDaFaldoni") == null) {
ricercaForm.setOggettoProcedimento(ricercaForm
.getOggettoProcedimento());
} else {
ricercaForm.setOggettoProcedimento(cercaOggettoDaFaldoni);
}
// ricercaForm.setOggettoProcedimento(cercaOggettoDaFaldoni);
String oggettoProcedimento = ricercaForm.getOggettoProcedimento();
if (ricercaForm == null
|| request.getParameter("annullaAction") != null
// "true".equals((String) request
// .getAttribute("btnCercaProcedimentiDaFaldoni"))
) {
if (Boolean.TRUE.equals(session.getAttribute("tornaFaldone"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
}
Organizzazione org = Organizzazione.getInstance();
Ufficio uff = org.getUfficio(utente.getUfficioInUso());
ricercaForm.inizializzaForm();
ricercaForm.setAooId(utente.getValueObject().getAooId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm, ufficioCompleto);
ricercaForm.setStatiProcedimento(LookupDelegate
.getStatiProcedimento());
ricercaForm.setPosizioniProcedimento(LookupDelegate
.getPosizioniProcedimento());
ricercaForm.setUfficioCorrente(uff.getValueObject());
if (preQuery)
ricercaForm.setOggettoProcedimento((String) request
.getAttribute("cercaProcedimentiDaFaldoni"));
session.setAttribute(mapping.getAttribute(), ricercaForm);
if ("fascicoloProcedimenti".equals(session
.getAttribute("provenienza"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
}
// else if ("procedimentiDaFaldoni".equals(session
// .getAttribute("provenienza"))){
// indietroVisibile = true;
// ricercaForm.setIndietroVisibile(indietroVisibile);
// }
else if (Boolean.TRUE.equals(session
.getAttribute("tornaProtocollo"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
} else if (Boolean.TRUE.equals(session
.getAttribute("indietroProcedimentiDaProtocollo"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
} else if (Boolean.TRUE.equals(session
.getAttribute("indietroProcedimentiDaFaldoni"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
} else if ("".equals(cercaOggettoDaFaldoni)) {
// session.removeAttribute("procedimentiDaProtocollo");
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
return (mapping.findForward("input"));
}
else {
indietroVisibile = false;
ricercaForm.setIndietroVisibile(indietroVisibile);
}
return mapping.findForward("input");
}
if (ricercaForm.getUfficioCorrenteId() == 0) {
AlberoUfficiBO.impostaUfficio(utente, ricercaForm, ufficioCompleto);
}
if (ricercaForm.getTitolario() == null) {
impostaTitolario(ricercaForm, utente, 0);
}
if (request.getParameter("impostaUfficioAction") != null) {
ricercaForm.setUfficioCorrenteId(ricercaForm
.getUfficioSelezionatoId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm, ufficioCompleto);
impostaTitolario(ricercaForm, utente, 0);
return mapping.findForward("input");
} else if (request.getParameter("ricercaIniziale") != null) {
Organizzazione org = Organizzazione.getInstance();
Ufficio uff = org.getUfficio(utente.getUfficioInUso());
ricercaForm.inizializzaForm();
ricercaForm.setAooId(utente.getValueObject().getAooId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm, ufficioCompleto);
ricercaForm.setStatiProcedimento(LookupDelegate
.getStatiProcedimento());
ricercaForm.setPosizioniProcedimento(LookupDelegate
.getPosizioniProcedimento());
ricercaForm.setUfficioCorrente(uff.getValueObject());
indietroVisibile = false;
ricercaForm.setIndietroVisibile(indietroVisibile);
session.removeAttribute("cercaProcedimentiDaProtocollo");
session.removeAttribute("tornaFaldone");
session.removeAttribute("tornaProtocollo");
session.removeAttribute("btnCercaProcedimentiDaFaldoni");
session.removeAttribute("tornaFascicolo");
session.removeAttribute("provenienza");
request.removeAttribute("cercaProcedimentiDaProtocollo");
session.removeAttribute("procedimentiDaProtocollo");
session.removeAttribute("indietroProcedimentiDaProtocollo");
session.removeAttribute("risultatiProcedimentiDaProtocollo");
session.removeAttribute("indietroProcedimentiDaFaldoni");
session.setAttribute("ricercaSemplice", Boolean.TRUE);
} else if (request.getParameter("btnRicerca") != null) {
session.removeAttribute("btnCercaFascicoliDaFaldoni");
session.setAttribute("ricercaSemplice", Boolean.TRUE);
session.removeAttribute("provenienza");
if (Boolean.TRUE.equals(session.getAttribute("tornaFaldone"))) {
session.removeAttribute("tornaFaldone");
return (mapping.findForward("tornaFaldone"));
} else {
ricercaForm.resetForm();
Organizzazione org = Organizzazione.getInstance();
Ufficio uff = org.getUfficio(utente.getUfficioInUso());
ricercaForm.setAooId(utente.getValueObject().getAooId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm,
ufficioCompleto);
ricercaForm.setStatiProcedimento(LookupDelegate
.getStatiProcedimento());
ricercaForm.setPosizioniProcedimento(LookupDelegate
.getPosizioniProcedimento());
ricercaForm.setUfficioCorrente(uff.getValueObject());
return mapping.findForward("input");
}
}
else if (request.getParameter("ufficioPrecedenteAction") != null) {
ricercaForm.setUfficioCorrenteId(ricercaForm.getUfficioCorrente()
.getParentId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm, ufficioCompleto);
impostaTitolario(ricercaForm, utente, 0);
} else if (request.getParameter("impostaTitolarioAction") != null) {
if (ricercaForm.getTitolario() != null) {
ricercaForm.setTitolarioPrecedenteId(ricercaForm.getTitolario()
.getId().intValue());
}
TitolarioBO.impostaTitolario(ricercaForm, ricercaForm
.getUfficioCorrenteId(), ricercaForm
.getTitolarioSelezionatoId());
return mapping.findForward("input");
}
else if (request.getParameter("titolarioPrecedenteAction") != null) {
TitolarioBO.impostaTitolario(ricercaForm, ricercaForm
.getUfficioCorrenteId(), ricercaForm
.getTitolarioPrecedenteId());
if (ricercaForm.getTitolario() != null) {
ricercaForm.setTitolarioPrecedenteId(ricercaForm.getTitolario()
.getParentId());
}
return mapping.findForward("input");
}// ---------------------------------------------------------------
// //
else if ("".equals(cercaOggetto)) {
session.removeAttribute("procedimentiDaProtocollo");
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
return (mapping.findForward("input"));
} else if ("".equals(cercaOggettoDaFaldoni)) {
session.removeAttribute("cercaProcedimentiDaFaldoni");
session.removeAttribute("procedimentiDaProtocollo");
session.removeAttribute("tornaFaldone");
Organizzazione org = Organizzazione.getInstance();
Ufficio uff = org.getUfficio(utente.getUfficioInUso());
ricercaForm.inizializzaForm();
ricercaForm.setAooId(utente.getValueObject().getAooId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm, ufficioCompleto);
ricercaForm.setStatiProcedimento(LookupDelegate
.getStatiProcedimento());
ricercaForm.setPosizioniProcedimento(LookupDelegate
.getPosizioniProcedimento());
ricercaForm.setUfficioCorrente(uff.getValueObject());
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
return (mapping.findForward("input"));
} else if (request.getParameter("btnCerca") != null
|| preQuery
|| Boolean.TRUE.equals(session
.getAttribute("procedimentiDaProtocollo"))
|| Boolean.TRUE.equals(session.getAttribute("tornaFaldone"))) {
// errors = ricercaForm.validateParametriRicerca(mapping, request);
if (!errors.isEmpty()) {
saveErrors(request, errors);
return (mapping.findForward("input"));
}
ricercaForm.setProcedimenti(null);
if (Boolean.TRUE.equals(session
.getAttribute("procedimentiDaProtocollo"))) {
String oggetto = (String) request
.getAttribute("cercaProcedimentiDaProtocollo");
ricercaForm.setOggettoProcedimento(oggetto);
} else {
ricercaForm.setOggettoProcedimento(oggettoProcedimento);
;
}
// controllo numero righe di ritorno lista procedimenti
MessageResources bundle = (MessageResources) request
.getAttribute(Globals.MESSAGES_KEY);
int maxRighe = Integer.parseInt(bundle
.getMessage("protocollo.max.righe.lista"));
ProcedimentoDelegate procedimentoDelegate = ProcedimentoDelegate
.getInstance();
HashMap hashMap = getParametriRicerca(ricercaForm, request);
int contaRighe = procedimentoDelegate.contaProcedimenti(utente,
hashMap);
if (contaRighe <= maxRighe) {
SortedMap procedimenti = new TreeMap();
procedimenti = ProcedimentoDelegate.getInstance()
.cercaProcedimenti(utente,
getParametriRicerca(ricercaForm, request));
if (procedimenti == null || procedimenti.size() == 0) {
errors.add("nessun_dato", new ActionMessage("nessun_dato",
"", ""));
} else {
ricercaForm.setProcedimenti(procedimenti.values());
}
if ("fascicoloProcedimenti".equals(session
.getAttribute("provenienza"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
session.removeAttribute("ricercaSemplice");
session.removeAttribute("tornaFaldone");
session.removeAttribute("tornaProtocollo");
session.removeAttribute("btnCercaProcedimentiDaFaldoni");
session.setAttribute("tornaFascicolo", Boolean.TRUE);
}
if ("procedimentiDaFaldoni".equals(session
.getAttribute("provenienza"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
session.removeAttribute("ricercaSemplice");
session.removeAttribute("tornaFaldone");
session.removeAttribute("tornaProtocollo");
session.removeAttribute("btnCercaProcedimentiDaFaldoni");
session.setAttribute("tornaFascicolo", Boolean.TRUE);
}
session.removeAttribute("procedimentiDaProtocollo");
return (mapping.findForward("risultati"));
} else {
if (Boolean.TRUE.equals(session
.getAttribute("procedimentiDaProtocollo"))
|| Boolean.TRUE.equals(session
.getAttribute("tornaFaldone"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
ricercaForm.setOggettoProcedimento("");
errors.add("controllo.maxrighe", new ActionMessage(
"controllo.maxrighe", "" + contaRighe,
"procedimenti", "" + maxRighe));
} else {
errors.add("controllo.maxrighe", new ActionMessage(
"controllo.maxrighe", "" + contaRighe,
"procedimenti", "" + maxRighe));
}
}
}
else if (request.getParameter("btnAnnulla") != null) {
if ("fascicoloProcedimenti".equals(session
.getAttribute("provenienza"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
// return mapping.getInputForward();
return (mapping.findForward("fascicoloProcedimento"));
}
if ("procedimentiDaFaldoni".equals(session
.getAttribute("provenienza"))) {
session.removeAttribute("provenienza");
return (mapping.findForward("tornaFaldone"));
}
if (Boolean.TRUE.equals(session.getAttribute("tornaFaldone"))) {
session.removeAttribute("tornaFaldone");
return (mapping.findForward("tornaFaldone"));
}
if (Boolean.TRUE.equals(session.getAttribute("tornaProtocollo"))) {
Object pForm = session.getAttribute("protocolloForm");
session.removeAttribute("tornaProtocollo");
if (pForm instanceof ProtocolloIngressoForm) {
return (mapping.findForward("tornaProtocolloIngresso"));
} else {
return (mapping.findForward("tornaProtocolloUscita"));
}
}
if (Boolean.TRUE.equals(session
.getAttribute("indietroProcedimentiDaProtocollo"))) {
Object pForm = session.getAttribute("protocolloForm");
session.removeAttribute("tornaProtocollo");
if (pForm instanceof ProtocolloIngressoForm) {
return (mapping.findForward("tornaProtocolloIngresso"));
} else {
return (mapping.findForward("tornaProtocolloUscita"));
}
}
if (Boolean.TRUE.equals(session
.getAttribute("indietroProcedimentiDaFaldoni"))) {
session.removeAttribute("indietroProcedimentiDaFaldoni");
return (mapping.findForward("tornaFaldone"));
}
else {
ricercaForm.resetForm();
Organizzazione org = Organizzazione.getInstance();
Ufficio uff = org.getUfficio(utente.getUfficioInUso());
ricercaForm.setAooId(utente.getValueObject().getAooId());
AlberoUfficiBO.impostaUfficio(utente, ricercaForm,
ufficioCompleto);
ricercaForm.setStatiProcedimento(LookupDelegate
.getStatiProcedimento());
ricercaForm.setPosizioniProcedimento(LookupDelegate
.getPosizioniProcedimento());
ricercaForm.setUfficioCorrente(uff.getValueObject());
return mapping.findForward("input");
}
}
else if (request.getParameter("indietro") != null) {
if ("procedimentiDaFaldoni".equals(session
.getAttribute("provenienza"))) {
return mapping.findForward("tornaFaldone");
} else if ("fascicoloProcedimenti".equals(session
.getAttribute("provenienza"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
// if (!errors.isEmpty()) {
// saveErrors(request, errors);
return mapping.findForward("input");
// }
}
}
else if (request.getParameter("btnSeleziona") != null) {
String[] proSel = ricercaForm.getProcedimentiSelezionati();
// if (session.getAttribute("tornaFaldone") == Boolean.TRUE)
if ("procedimentiDaFaldoni".equals(session
.getAttribute("provenienza"))) {
FaldoneForm fo = (FaldoneForm) session
.getAttribute("faldoneForm");
for (int i = 0; proSel != null && i < proSel.length; i++) {
ProcedimentoVO vo = ProcedimentoDelegate.getInstance()
.getProcedimentoVO(NumberUtil.getInt(proSel[i]));
if (vo != null && vo.getReturnValue() == ReturnValues.FOUND)
fo.aggiungiProcedimento(vo);
}
ricercaForm.inizializzaForm();
session.removeAttribute("tornaFaldone");
return (mapping.findForward("tornaFaldone"));
}
if (session.getAttribute("tornaFascicolo") == Boolean.TRUE) {
Fascicolo fascicolo = (Fascicolo) session
.getAttribute(Constants.FASCICOLO);
FascicoloDelegate.getInstance().salvaProcedimentiFascicolo(
fascicolo.getFascicoloVO(), proSel,
utente.getValueObject().getUsername());
request.setAttribute("fascicoloId", fascicolo.getFascicoloVO()
.getId());
session.removeAttribute("tornaFascicolo");
return (mapping.findForward("tornaFascicolo"));
} else if (session.getAttribute("tornaProtocollo") == Boolean.TRUE) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
ProtocolloForm pf = (ProtocolloForm) session
.getAttribute("protocolloForm");
for (int i = 0; proSel != null && i < proSel.length; i++) {
ProcedimentoVO pVO = ProcedimentoDelegate.getInstance()
.getProcedimentoVO(NumberUtil.getInt(proSel[i]));
if (pVO != null
&& pVO.getReturnValue() == ReturnValues.FOUND) {
ProtocolloProcedimentoVO ppVO = new ProtocolloProcedimentoVO();
ppVO.setProtocolloId(pf.getProtocolloId());
ppVO.setNumeroProtocollo(pf.getNumero());
ppVO.setProcedimentoId(pVO.getId().intValue());
ppVO.setNumeroProcedimento(pVO.getNumeroProcedimento());
ppVO.setOggetto(pVO.getOggetto());
pf.aggiungiProcedimento(ppVO);
}
}
pf.setSezioneVisualizzata("Procedimenti");
ricercaForm.inizializzaForm();
session.removeAttribute("tornaProtocollo");
session.setAttribute("protocolloForm", pf);
if ("I".equals(pf.getFlagTipo()))
return (mapping.findForward("tornaProtocolloIngresso"));
else
return (mapping.findForward("tornaProtocolloUscita"));
} else if (session
.getAttribute("risultatiProcedimentiDaProtocollo") == Boolean.TRUE) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
ProtocolloForm pf = (ProtocolloForm) session
.getAttribute("protocolloForm");
for (int i = 0; proSel != null && i < proSel.length; i++) {
ProcedimentoVO pVO = ProcedimentoDelegate.getInstance()
.getProcedimentoVO(NumberUtil.getInt(proSel[i]));
if (pVO != null
&& pVO.getReturnValue() == ReturnValues.FOUND) {
ProtocolloProcedimentoVO ppVO = new ProtocolloProcedimentoVO();
ppVO.setProtocolloId(pf.getProtocolloId());
ppVO.setNumeroProtocollo(pf.getNumero());
ppVO.setProcedimentoId(pVO.getId().intValue());
ppVO.setNumeroProcedimento(pVO.getNumeroProcedimento());
ppVO.setOggetto(pVO.getOggetto());
pf.aggiungiProcedimento(ppVO);
}
}
pf.setSezioneVisualizzata("Procedimenti");
ricercaForm.inizializzaForm();
session.removeAttribute("tornaProtocollo");
session.setAttribute("protocolloForm", pf);
if ("I".equals(pf.getFlagTipo()))
return (mapping.findForward("tornaProtocolloIngresso"));
else
return (mapping.findForward("tornaProtocolloUscita"));
}
} else if (request.getParameter("visualizzaProcedimento") != null) {
return (mapping.findForward("visualizzaProcedimento"));
} else if ("fascicoloProcedimenti".equals(session
.getAttribute("provenienza"))) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
return mapping.getInputForward();
} else if (session.getAttribute("tornaProtocollo") == Boolean.TRUE) {
indietroVisibile = true;
ricercaForm.setIndietroVisibile(indietroVisibile);
return mapping.getInputForward();
} else if (session.getAttribute("procedimentiDaProtocollo") == Boolean.TRUE) {
indietroVisibile = false;
ricercaForm.setIndietroVisibile(indietroVisibile);
return mapping.getInputForward();
}
if (!errors.isEmpty()) {
saveErrors(request, errors);
}
session.removeAttribute("btnCercaProcedimentiDaFaldoni");
session.removeAttribute("procedimentiDaProtocollo");
session.removeAttribute("tornaFaldone");
ricercaForm.setOggettoProcedimento("");
// session.removeAttribute("provenienza");
return mapping.getInputForward();
}
/**
 * Loads the classification scheme (titolario) into the form, resolving which
 * office the scheme belongs to first.
 *
 * @param form        search form receiving the titolario data
 * @param utente      current user, used to resolve the office in use
 * @param titolarioId id of the titolario node to load (0 = root)
 */
private void impostaTitolario(RicercaProcedimentoForm form, Utente utente,
        int titolarioId) {
    // When the organisational area declares the titolario as office-dependent
    // (flag == 1) use the office currently selected in the form; otherwise
    // fall back to the office the user is operating in.
    final int ufficioId =
            utente.getAreaOrganizzativa().getDipendenzaTitolarioUfficio() == 1
                    ? form.getUfficioCorrenteId()
                    : utente.getUfficioInUso();
    TitolarioBO.impostaTitolario(form, ufficioId, titolarioId);
}
/**
 * Builds the search-criteria map for the "procedimenti" query from the form
 * fields the user filled in.
 *
 * <p>Each map key is a SQL predicate fragment ("column op ?") and its value is
 * the bind parameter for that predicate; the delegate layer appends the
 * fragments to the query. Only non-empty/valid form fields contribute an
 * entry.
 *
 * @param form    search form carrying the user-entered criteria
 * @param request current request (kept for signature compatibility; unused)
 * @return the criteria map, or {@code null} when no criterion was entered
 *         (callers rely on {@code null} meaning "nothing to search")
 * @throws Exception propagated from date/number conversion helpers
 */
public static HashMap getParametriRicerca(RicercaProcedimentoForm form,
        HttpServletRequest request) throws Exception {
    // NOTE: raw HashMap kept to preserve the public signature.
    HashMap sqlDB = new HashMap();
    if (DateUtil.isData(form.getDataAvvioInizio())) {
        sqlDB.put("data_avvio >= ?", DateUtil.toDate(form.getDataAvvioInizio()));
    }
    if (DateUtil.isData(form.getDataAvvioFine())) {
        sqlDB.put("data_avvio <= ?", DateUtil.toDate(form.getDataAvvioFine()));
    }
    if (DateUtil.isData(form.getDataEvidenzaInizio())) {
        sqlDB.put("data_evidenza >= ?",
                DateUtil.toDate(form.getDataEvidenzaInizio()));
    }
    if (DateUtil.isData(form.getDataEvidenzaFine())) {
        sqlDB.put("data_evidenza <= ?",
                DateUtil.toDate(form.getDataEvidenzaFine()));
    }
    if (NumberUtil.isInteger(form.getAnno())) {
        sqlDB.put("anno = ?", Integer.valueOf(form.getAnno()));
    }
    if (NumberUtil.isInteger(form.getNumero())) {
        sqlDB.put("numero = ?", Integer.valueOf(form.getNumero()));
    }
    if (form.getTitolario() != null) {
        sqlDB.put("titolario_id = ?",
                Integer.valueOf(form.getTitolario().getId().intValue()));
    }
    if (form.getUfficioRicercaId() > 0) {
        sqlDB.put("procedimenti.ufficio_id = ?",
                Integer.valueOf(form.getUfficioRicercaId()));
    }
    // Free-text criteria are matched case-insensitively via upper().
    if (form.getOggettoProcedimento() != null
            && !"".equals(form.getOggettoProcedimento())) {
        sqlDB.put(" upper(oggetto) LIKE ?", form.getOggettoProcedimento()
                .toUpperCase());
    }
    if (form.getNote() != null && !"".equals(form.getNote())) {
        sqlDB.put(" upper(note) LIKE ?", form.getNote().toUpperCase());
    }
    if (form.getStatoId() > 0) {
        sqlDB.put("stato_id = ?", Integer.valueOf(form.getStatoId()));
    }
    // "ALL" is the UI sentinel for "any position" and adds no predicate.
    if (form.getPosizione() != null && !"ALL".equals(form.getPosizione())) {
        sqlDB.put("posizione_id = ?", form.getPosizione());
    }
    if (sqlDB.isEmpty())
        return null;
    return sqlDB;
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.winde.comicsweb.domain;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.dbcp.BasicDataSource;
import org.springframework.beans.factory.annotation.Autowired;
/**
*
* @author Winde
*/
/**
 * DB-backed application configuration and favorites store.
 *
 * <p>Config values and the favorites list are cached in static fields, so the
 * cache is shared across all instances. {@code cachedValues} also supplies the
 * default for each known key when the database has no row yet.
 *
 * <p>NOTE(review): the static caches are not synchronized — presumably this is
 * only used from a single request thread at a time; confirm before relying on
 * it concurrently.
 */
public class Config {

    private BasicDataSource datasource;

    /** Cached values; pre-seeded with the defaults for the known keys. */
    private static Map<String, String> cachedValues = new HashMap<String, String>() {
        {
            put("pathComics", "C:\\Comics");
            put("pathLibros", "C:\\Libros");
            put("keepLastRead", "true");
        }
    };

    /** Favorites cache; null until loaded, reset to null on a load failure. */
    private static List<String> favorites = null;

    /** Tracks which keys have already been read from (or written to) the DB. */
    private static Map<String, Boolean> initialized = new HashMap<String, Boolean>() {
        {
            put("pathComics", false);
            put("pathLibros", false);
            put("keepLastRead", false);
        }
    };

    public void setDatasource(BasicDataSource datasource) {
        this.datasource = datasource;
    }

    /** Creates the favorites table on first use; errors are logged only. */
    private void createFavoriteTableIfNotExists(Connection connection) {
        try (PreparedStatement preparedStatement = connection.prepareStatement(
                "CREATE TABLE IF NOT EXISTS favorites (name VARCHAR(500))")) {
            preparedStatement.execute();
        } catch (SQLException ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Creates the config table on first use; errors are logged only. */
    private void createConfigTableIfNotExists(Connection connection) {
        try (PreparedStatement preparedStatement = connection.prepareStatement(
                "CREATE TABLE IF NOT EXISTS config (key VARCHAR(100),value VARCHAR(500))")) {
            preparedStatement.execute();
        } catch (SQLException ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Returns the (cached) favorites list, loading it from the DB on first
     * call.
     *
     * @return the favorites list, or {@code null} when loading failed
     *         (callers such as {@link #addFavorite} treat null as "unavailable")
     */
    public List<String> getFavorites() {
        if (favorites != null) {
            return favorites;
        }
        favorites = new ArrayList<>();
        // try-with-resources guarantees the connection/statement/result set
        // are closed on every path (the original leaked statements).
        try (Connection connection = datasource.getConnection()) {
            createFavoriteTableIfNotExists(connection);
            try (PreparedStatement preparedStatement =
                         connection.prepareStatement("SELECT name FROM favorites");
                 ResultSet rs = preparedStatement.executeQuery()) {
                while (rs.next()) {
                    favorites.add(rs.getString("name"));
                }
            }
        } catch (Exception ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
            favorites = null; // signal "load failed" to callers
        }
        return favorites;
    }

    /**
     * Deletes a favorite from the DB and the in-memory cache.
     *
     * @return true when at least one row was deleted
     */
    public boolean deleteFavorite(String favorite) {
        try (Connection connection = datasource.getConnection()) {
            createFavoriteTableIfNotExists(connection);
            try (PreparedStatement preparedStatement = connection
                    .prepareStatement("DELETE FROM favorites WHERE name =?")) {
                preparedStatement.setString(1, favorite);
                int result = preparedStatement.executeUpdate();
                if (favorites != null) {
                    favorites.remove(favorite);
                }
                return result > 0;
            }
        } catch (Exception ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    }

    /**
     * Adds a favorite to the DB and the in-memory cache.
     *
     * @return true when the row was inserted; false when the favorites cache
     *         is unavailable, the favorite already exists, or the insert failed
     */
    public boolean addFavorite(String favorite) {
        if (favorites == null) {
            this.getFavorites();
        }
        if (favorites == null) {
            return false; // cache could not be loaded
        } else if (favorites.contains(favorite)) {
            return false; // already present; keep the list duplicate-free
        }
        try (Connection connection = datasource.getConnection()) {
            createFavoriteTableIfNotExists(connection);
            try (PreparedStatement preparedStatement = connection
                    .prepareStatement("INSERT INTO favorites (name) VALUES (?)")) {
                preparedStatement.setString(1, favorite);
                int result = preparedStatement.executeUpdate();
                if (favorites != null) {
                    favorites.add(favorite);
                }
                return result > 0;
            }
        } catch (Exception ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    }

    /**
     * Returns the config value for {@code key}, reading it from the DB on the
     * first access and persisting the default when no row exists yet.
     */
    public String getConfigValue(String key) {
        // Boolean.TRUE.equals guards against an unregistered key: a plain
        // if (initialized.get(key)) would NPE on auto-unboxing null.
        if (Boolean.TRUE.equals(initialized.get(key))) {
            return cachedValues.get(key);
        }
        try (Connection connection = datasource.getConnection()) {
            createConfigTableIfNotExists(connection);
            try (PreparedStatement preparedStatement = connection
                    .prepareStatement("SELECT value FROM config WHERE key=?")) {
                preparedStatement.setString(1, key);
                try (ResultSet rs = preparedStatement.executeQuery()) {
                    if (rs.next()) {
                        String salida = rs.getString("value");
                        initialized.put(key, true);
                        cachedValues.put(key, salida);
                        return salida;
                    }
                }
            }
            // No row yet: persist the cached default so the DB becomes the
            // source of truth from now on.
            setConfigValue(key, cachedValues.get(key));
            initialized.put(key, true);
        } catch (SQLException ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
        }
        return cachedValues.get(key);
    }

    /**
     * Stores a config value, updating the cache and inserting or updating the
     * DB row as needed.
     *
     * @return true when the value is already current or was persisted;
     *         false when the DB write failed
     */
    public boolean setConfigValue(String key, String value) {
        if (value.equals(cachedValues.get(key))
                && Boolean.TRUE.equals(initialized.get(key))) {
            return true; // unchanged and already synced — nothing to persist
        }
        cachedValues.put(key, value);
        initialized.put(key, true);
        try (Connection connection = datasource.getConnection()) {
            createConfigTableIfNotExists(connection);
            boolean rowExists;
            try (PreparedStatement select = connection
                    .prepareStatement("SELECT value FROM config WHERE key=?;")) {
                select.setString(1, key);
                try (ResultSet rs = select.executeQuery()) {
                    rowExists = rs.next();
                }
            }
            if (!rowExists) {
                try (PreparedStatement insert = connection.prepareStatement(
                        "INSERT INTO config (key, value) VALUES (?, ?);")) {
                    insert.setString(1, key);
                    insert.setString(2, value);
                    insert.execute();
                }
            } else {
                try (PreparedStatement update = connection.prepareStatement(
                        "UPDATE config SET value=? WHERE key=?; ")) {
                    update.setString(1, value);
                    update.setString(2, key);
                    update.execute();
                }
            }
            return true;
        } catch (SQLException ex) {
            Logger.getLogger(Config.class.getName()).log(Level.SEVERE, null, ex);
        }
        return false;
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.config.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.cloudstack.framework.config.ConfigDepot;
import org.apache.cloudstack.framework.config.ConfigDepotAdmin;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.Configurable;
import org.apache.cloudstack.framework.config.ScopedConfigStorage;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.commons.lang.ObjectUtils;
import org.apache.log4j.Logger;
import com.cloud.utils.Pair;
import com.cloud.utils.exception.CloudRuntimeException;
/**
* ConfigDepotImpl implements the ConfigDepot and ConfigDepotAdmin interface.
* Its functionalities include:
* - Control how dynamic config values are cached and refreshed.
* - Control how scoped config values are stored.
* - Gather all of the Configurable interfaces and insert their config
* variables into the config table.
* - Hide the data source where configs are stored and retrieved.
*
* When dealing with this class, we must be very careful on cluster situations.
*
* TODO:
* - Move the rest of the changes to the config table to here.
* - Add the code to mark the rows in configuration table without
* the corresponding keys to be null.
* - Move all of the configurations to using ConfigDepot
* - Completely eliminate Config.java
* - Figure out the correct categories.
* - Add a scope for management server, where if the scope is management server
* then the override is retrieved from a properties file. Imagine adding a
* new management server node and it is much more capable system than previous
* management servers, you want the adjustments to thread pools etc to be
* very different than other management serves.
* - Add validation methods to ConfigKey<?>. If a validation class is declared
* when constructing a ConfigKey then configuration server should use the
* validation class to validate the value the admin input for the key.
*/
public class ConfigDepotImpl implements ConfigDepot, ConfigDepotAdmin {
private final static Logger s_logger = Logger.getLogger(ConfigDepotImpl.class);
@Inject
ConfigurationDao _configDao;
List<Configurable> _configurables;
List<ScopedConfigStorage> _scopedStorages;
Set<Configurable> _configured = Collections.synchronizedSet(new HashSet<Configurable>());
HashMap<String, Pair<String, ConfigKey<?>>> _allKeys = new HashMap<String, Pair<String, ConfigKey<?>>>(1007);
HashMap<ConfigKey.Scope, Set<ConfigKey<?>>> _scopeLevelConfigsMap = new HashMap<ConfigKey.Scope, Set<ConfigKey<?>>>();
public ConfigDepotImpl() {
ConfigKey.init(this);
_scopeLevelConfigsMap.put(ConfigKey.Scope.Zone, new HashSet<ConfigKey<?>>());
_scopeLevelConfigsMap.put(ConfigKey.Scope.Cluster, new HashSet<ConfigKey<?>>());
_scopeLevelConfigsMap.put(ConfigKey.Scope.StoragePool, new HashSet<ConfigKey<?>>());
_scopeLevelConfigsMap.put(ConfigKey.Scope.Account, new HashSet<ConfigKey<?>>());
_scopeLevelConfigsMap.put(ConfigKey.Scope.ImageStore, new HashSet<ConfigKey<?>>());
_scopeLevelConfigsMap.put(ConfigKey.Scope.Domain, new HashSet<ConfigKey<?>>());
}
@Override
public ConfigKey<?> get(String key) {
Pair<String, ConfigKey<?>> value = _allKeys.get(key);
return value != null ? value.second() : null;
}
@PostConstruct
@Override
public void populateConfigurations() {
Date date = new Date();
for (Configurable configurable : _configurables) {
populateConfiguration(date, configurable);
}
}
protected void populateConfiguration(Date date, Configurable configurable) {
if (_configured.contains(configurable))
return;
s_logger.debug("Retrieving keys from " + configurable.getClass().getSimpleName());
for (ConfigKey<?> key : configurable.getConfigKeys()) {
Pair<String, ConfigKey<?>> previous = _allKeys.get(key.key());
if (previous != null && !previous.first().equals(configurable.getConfigComponentName())) {
throw new CloudRuntimeException("Configurable " + configurable.getConfigComponentName() + " is adding a key that has been added before by " +
previous.first() + ": " + key.toString());
}
_allKeys.put(key.key(), new Pair<String, ConfigKey<?>>(configurable.getConfigComponentName(), key));
createOrupdateConfigObject(date, configurable.getConfigComponentName(), key, null);
if ((key.scope() != null) && (key.scope() != ConfigKey.Scope.Global)) {
Set<ConfigKey<?>> currentConfigs = _scopeLevelConfigsMap.get(key.scope());
currentConfigs.add(key);
}
}
_configured.add(configurable);
}
private void createOrupdateConfigObject(Date date, String componentName, ConfigKey<?> key, String value) {
ConfigurationVO vo = _configDao.findById(key.key());
if (vo == null) {
vo = new ConfigurationVO(componentName, key);
vo.setUpdated(date);
if (value != null) {
vo.setValue(value);
}
_configDao.persist(vo);
} else {
if (vo.isDynamic() != key.isDynamic() || !ObjectUtils.equals(vo.getDescription(), key.description()) || !ObjectUtils.equals(vo.getDefaultValue(), key.defaultValue()) ||
!ObjectUtils.equals(vo.getScope(), key.scope().toString()) ||
!ObjectUtils.equals(vo.getComponent(), componentName)) {
vo.setDynamic(key.isDynamic());
vo.setDescription(key.description());
vo.setDefaultValue(key.defaultValue());
vo.setScope(key.scope().toString());
vo.setComponent(componentName);
vo.setUpdated(date);
_configDao.persist(vo);
}
}
}
@Override
public void populateConfiguration(Configurable configurable) {
populateConfiguration(new Date(), configurable);
}
@Override
public List<String> getComponentsInDepot() {
return new ArrayList<String>();
}
public ConfigurationDao global() {
return _configDao;
}
public ScopedConfigStorage findScopedConfigStorage(ConfigKey<?> config) {
for (ScopedConfigStorage storage : _scopedStorages) {
if (storage.getScope() == config.scope()) {
return storage;
}
}
throw new CloudRuntimeException("Unable to find config storage for this scope: " + config.scope() + " for " + config.key());
}
public List<ScopedConfigStorage> getScopedStorages() {
return _scopedStorages;
}
@Inject
public void setScopedStorages(List<ScopedConfigStorage> scopedStorages) {
_scopedStorages = scopedStorages;
}
public List<Configurable> getConfigurables() {
return _configurables;
}
/**
 * Injection point for the configurables; populated by the DI framework.
 */
@Inject
public void setConfigurables(List<Configurable> configurables) {
    _configurables = configurables;
}
/**
 * Returns the set of config keys declared for the named scope.
 *
 * @param scope scope name; must match a {@code ConfigKey.Scope} enum constant
 */
@Override
public Set<ConfigKey<?>> getConfigListByScope(String scope) {
    final ConfigKey.Scope parsedScope = ConfigKey.Scope.valueOf(scope);
    return _scopeLevelConfigsMap.get(parsedScope);
}
/**
 * Persists the string form of the given value under the key's name in the
 * global configuration store.
 */
@Override
public <T> void set(ConfigKey<T> key, T value) {
    final String stringValue = value.toString();
    _configDao.update(key.key(), stringValue);
}
/**
 * Creates or refreshes the persisted config row for the given key, stamped
 * with the current time.
 */
@Override
public <T> void createOrUpdateConfigObject(String componentName, ConfigKey<T> key, String value) {
    final Date now = new Date();
    createOrupdateConfigObject(now, componentName, key, value);
}
}
| |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server.dreams;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.os.IBinder.DeathRecipient;
import android.os.UserHandle;
import android.service.dreams.DreamService;
import android.service.dreams.IDreamService;
import android.util.Slog;
import android.view.IWindowManager;
import android.view.WindowManager;
import android.view.WindowManagerGlobal;
import java.io.PrintWriter;
import java.util.NoSuchElementException;
/**
* Internal controller for starting and stopping the current dream and managing related state.
*
* Assumes all operations are called from the dream handler thread.
*/
final class DreamController {
    private static final String TAG = "DreamController";

    // How long we wait for a newly bound dream to create the service connection
    private static final int DREAM_CONNECTION_TIMEOUT = 5 * 1000;

    private final Context mContext;
    // Handler for the dream thread; all mutable state below is confined to it.
    private final Handler mHandler;
    private final Listener mListener;
    private final IWindowManager mIWindowManager;

    // Lifecycle broadcasts; FLAG_RECEIVER_REGISTERED_ONLY keeps them from
    // launching manifest-declared receivers.
    private final Intent mDreamingStartedIntent = new Intent(Intent.ACTION_DREAMING_STARTED)
            .addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY);
    private final Intent mDreamingStoppedIntent = new Intent(Intent.ACTION_DREAMING_STOPPED)
            .addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY);
    private final Intent mCloseNotificationShadeIntent = new Intent(Intent.ACTION_CLOSE_SYSTEM_DIALOGS);

    // The dream currently starting or running; null when no dream is active.
    private DreamRecord mCurrentDream;

    // Safety net posted after binding: stops the dream if the service connection
    // never arrived within DREAM_CONNECTION_TIMEOUT.
    private final Runnable mStopUnconnectedDreamRunnable = new Runnable() {
        @Override
        public void run() {
            if (mCurrentDream != null && mCurrentDream.mBound && !mCurrentDream.mConnected) {
                Slog.w(TAG, "Bound dream did not connect in the time allotted");
                stopDream();
            }
        }
    };

    public DreamController(Context context, Handler handler, Listener listener) {
        mContext = context;
        mHandler = handler;
        mListener = listener;
        mIWindowManager = WindowManagerGlobal.getWindowManagerService();
    }

    /** Dumps the controller's current state for debugging. */
    public void dump(PrintWriter pw) {
        pw.println("Dreamland:");
        if (mCurrentDream != null) {
            pw.println(" mCurrentDream:");
            pw.println(" mToken=" + mCurrentDream.mToken);
            pw.println(" mName=" + mCurrentDream.mName);
            pw.println(" mIsTest=" + mCurrentDream.mIsTest);
            pw.println(" mCanDoze=" + mCurrentDream.mCanDoze);
            pw.println(" mUserId=" + mCurrentDream.mUserId);
            pw.println(" mBound=" + mCurrentDream.mBound);
            pw.println(" mService=" + mCurrentDream.mService);
            pw.println(" mSentStartBroadcast=" + mCurrentDream.mSentStartBroadcast);
        } else {
            pw.println(" mCurrentDream: null");
        }
    }

    /**
     * Starts the named dream service, first stopping any dream in progress.
     * Registers a window token, binds the dream service for the given user, and
     * arms a timeout that gives up if the service never connects. On any
     * failure (window manager RemoteException, bind failure or
     * SecurityException) the partially-started dream is stopped again.
     *
     * @param token   binder token identifying this dream instance
     * @param name    component of the dream service to bind
     * @param isTest  if true, the dreaming-started/stopped broadcasts are suppressed
     * @param canDoze passed through to the dream service on attach
     * @param userId  user under which the service is bound
     */
    public void startDream(Binder token, ComponentName name,
            boolean isTest, boolean canDoze, int userId) {
        stopDream();
        // Close the notification shade. Don't need to send to all, but better to be explicit.
        mContext.sendBroadcastAsUser(mCloseNotificationShadeIntent, UserHandle.ALL);
        Slog.i(TAG, "Starting dream: name=" + name
                + ", isTest=" + isTest + ", canDoze=" + canDoze
                + ", userId=" + userId);
        mCurrentDream = new DreamRecord(token, name, isTest, canDoze, userId);
        try {
            // Register the token first so the dream's window can be added under it.
            mIWindowManager.addWindowToken(token, WindowManager.LayoutParams.TYPE_DREAM);
        } catch (RemoteException ex) {
            Slog.e(TAG, "Unable to add window token for dream.", ex);
            stopDream();
            return;
        }
        Intent intent = new Intent(DreamService.SERVICE_INTERFACE);
        intent.setComponent(name);
        intent.addFlags(Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
        try {
            // mCurrentDream is the ServiceConnection; connection callbacks re-post
            // onto mHandler so state stays confined to the dream thread.
            if (!mContext.bindServiceAsUser(intent, mCurrentDream,
                    Context.BIND_AUTO_CREATE, new UserHandle(userId))) {
                Slog.e(TAG, "Unable to bind dream service: " + intent);
                stopDream();
                return;
            }
        } catch (SecurityException ex) {
            Slog.e(TAG, "Unable to bind dream service: " + intent, ex);
            stopDream();
            return;
        }
        mCurrentDream.mBound = true;
        mHandler.postDelayed(mStopUnconnectedDreamRunnable, DREAM_CONNECTION_TIMEOUT);
    }

    /**
     * Stops the current dream, if any: clears mCurrentDream first (so re-entrant
     * calls are no-ops), cancels the connection timeout, sends the
     * dreaming-stopped broadcast if the started broadcast went out, detaches and
     * unlinks the service, unbinds, removes the window token, and finally
     * notifies the listener asynchronously.
     */
    public void stopDream() {
        if (mCurrentDream == null) {
            return;
        }
        final DreamRecord oldDream = mCurrentDream;
        mCurrentDream = null;
        Slog.i(TAG, "Stopping dream: name=" + oldDream.mName
                + ", isTest=" + oldDream.mIsTest + ", canDoze=" + oldDream.mCanDoze
                + ", userId=" + oldDream.mUserId);
        mHandler.removeCallbacks(mStopUnconnectedDreamRunnable);
        if (oldDream.mSentStartBroadcast) {
            mContext.sendBroadcastAsUser(mDreamingStoppedIntent, UserHandle.ALL);
        }
        if (oldDream.mService != null) {
            // Tell the dream that it's being stopped so that
            // it can shut down nicely before we yank its window token out from
            // under it.
            try {
                oldDream.mService.detach();
            } catch (RemoteException ex) {
                // we don't care; this thing is on the way out
            }
            try {
                oldDream.mService.asBinder().unlinkToDeath(oldDream, 0);
            } catch (NoSuchElementException ex) {
                // don't care
            }
            oldDream.mService = null;
        }
        if (oldDream.mBound) {
            mContext.unbindService(oldDream);
        }
        try {
            mIWindowManager.removeWindowToken(oldDream.mToken);
        } catch (RemoteException ex) {
            Slog.w(TAG, "Error removing window token for dream.", ex);
        }
        // Notify asynchronously so the listener runs outside this teardown pass.
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                mListener.onDreamStopped(oldDream.mToken);
            }
        });
    }

    /**
     * Attaches the freshly connected dream service: links a death recipient,
     * hands the service its token, and — unless this is a test dream — sends
     * the dreaming-started broadcast. If the service is already dead, the
     * dream is stopped instead.
     */
    private void attach(IDreamService service) {
        try {
            service.asBinder().linkToDeath(mCurrentDream, 0);
            service.attach(mCurrentDream.mToken, mCurrentDream.mCanDoze);
        } catch (RemoteException ex) {
            Slog.e(TAG, "The dream service died unexpectedly.", ex);
            stopDream();
            return;
        }
        mCurrentDream.mService = service;
        if (!mCurrentDream.mIsTest) {
            mContext.sendBroadcastAsUser(mDreamingStartedIntent, UserHandle.ALL);
            mCurrentDream.mSentStartBroadcast = true;
        }
    }

    /**
     * Callback interface to be implemented by the {@link DreamManagerService}.
     */
    public interface Listener {
        void onDreamStopped(Binder token);
    }

    /**
     * State for a single dream attempt. Doubles as the ServiceConnection for
     * the bound dream service and the DeathRecipient for its binder; all
     * callbacks re-post to mHandler and check {@code mCurrentDream ==
     * DreamRecord.this} so stale records are ignored.
     */
    private final class DreamRecord implements DeathRecipient, ServiceConnection {
        public final Binder mToken;
        public final ComponentName mName;
        public final boolean mIsTest;
        public final boolean mCanDoze;
        public final int mUserId;
        // True once bindServiceAsUser succeeded (we must unbind later).
        public boolean mBound;
        // True once onServiceConnected fired (cancels the connect timeout's effect).
        public boolean mConnected;
        public IDreamService mService;
        // True once ACTION_DREAMING_STARTED was broadcast (stop must mirror it).
        public boolean mSentStartBroadcast;

        public DreamRecord(Binder token, ComponentName name,
                boolean isTest, boolean canDoze, int userId) {
            mToken = token;
            mName = name;
            mIsTest = isTest;
            mCanDoze = canDoze;
            mUserId = userId;
        }

        // May be called on any thread.
        @Override
        public void binderDied() {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mService = null;
                    if (mCurrentDream == DreamRecord.this) {
                        stopDream();
                    }
                }
            });
        }

        // May be called on any thread.
        @Override
        public void onServiceConnected(ComponentName name, final IBinder service) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mConnected = true;
                    if (mCurrentDream == DreamRecord.this && mService == null) {
                        attach(IDreamService.Stub.asInterface(service));
                    }
                }
            });
        }

        // May be called on any thread.
        @Override
        public void onServiceDisconnected(ComponentName name) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mService = null;
                    if (mCurrentDream == DreamRecord.this) {
                        stopDream();
                    }
                }
            });
        }
    }
}
| |
package org.knowm.xchange.bankera;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import org.knowm.xchange.bankera.dto.BankeraException;
import org.knowm.xchange.bankera.dto.account.BankeraUserInfo;
import org.knowm.xchange.bankera.dto.account.BankeraWallet;
import org.knowm.xchange.bankera.dto.marketdata.BankeraOrderBook;
import org.knowm.xchange.bankera.dto.marketdata.BankeraTickerResponse;
import org.knowm.xchange.bankera.dto.marketdata.BankeraTrade;
import org.knowm.xchange.bankera.dto.marketdata.BankeraTradesResponse;
import org.knowm.xchange.bankera.dto.trade.BankeraOpenOrders;
import org.knowm.xchange.bankera.dto.trade.BankeraOrder;
import org.knowm.xchange.bankera.dto.trade.BankeraUserTrades;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.AccountInfo;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.exceptions.ExchangeException;
import org.knowm.xchange.exceptions.ExchangeSecurityException;
/**
 * Static adapters converting Bankera API DTOs into XChange generic types.
 */
public final class BankeraAdapters {

    private static final String ORDER_SIDE_BUY = "buy";

    // Utility class; not instantiable.
    private BankeraAdapters() {}

    /** Adapts the Bankera user info into an XChange AccountInfo (id + wallet). */
    public static AccountInfo adaptAccountInfo(BankeraUserInfo userInfo) {
        return new AccountInfo(
            String.valueOf(userInfo.getUser().getId()), adaptWallet(userInfo.getUser().getWallets()));
    }

    /**
     * Adapts Bankera wallets into a single XChange Wallet, mapping
     * total/balance/reserved to total/available/frozen per currency.
     */
    public static Wallet adaptWallet(List<BankeraWallet> wallets) {
        List<Balance> balances =
            wallets.stream()
                .map(
                    w ->
                        new Balance.Builder()
                            .total(new BigDecimal(w.getTotal()))
                            .available(new BigDecimal(w.getBalance()))
                            .frozen(new BigDecimal(w.getReserved()))
                            .currency(new Currency(w.getCurrency()))
                            .build())
                .collect(Collectors.toList());
        return Wallet.Builder.from(balances).build();
    }

    /**
     * Maps a Bankera error to the XChange exception hierarchy: HTTP 403 becomes
     * an ExchangeSecurityException, everything else a generic ExchangeException
     * that preserves the original cause.
     */
    public static ExchangeException adaptError(BankeraException exception) {
        return exception.getHttpStatusCode() == 403
            ? new ExchangeSecurityException()
            : new ExchangeException(exception.getError(), exception);
    }

    /**
     * Adapts Bankera BankeraTickerResponse to a Ticker
     *
     * @param ticker Specific ticker
     * @param currencyPair BankeraCurrency pair (e.g. ETH/BTC)
     * @return Ticker
     */
    public static Ticker adaptTicker(BankeraTickerResponse ticker, CurrencyPair currencyPair) {
        BigDecimal high = new BigDecimal(ticker.getTicker().getHigh());
        BigDecimal low = new BigDecimal(ticker.getTicker().getLow());
        BigDecimal bid = new BigDecimal(ticker.getTicker().getBid());
        BigDecimal ask = new BigDecimal(ticker.getTicker().getAsk());
        BigDecimal last = new BigDecimal(ticker.getTicker().getLast());
        BigDecimal volume = new BigDecimal(ticker.getTicker().getVolume());
        Date timestamp = new Date(ticker.getTicker().getTimestamp());
        return new Ticker.Builder()
            .currencyPair(currencyPair)
            .high(high)
            .low(low)
            .bid(bid)
            .ask(ask)
            .last(last)
            .volume(volume)
            .timestamp(timestamp)
            .build();
    }

    /** Adapts a Bankera order book; timestamp is not provided by the API, so null. */
    public static OrderBook adaptOrderBook(BankeraOrderBook orderbook, CurrencyPair currencyPair) {
        List<LimitOrder> bids = createOrders(currencyPair, OrderType.BID, orderbook.getBids());
        List<LimitOrder> asks = createOrders(currencyPair, OrderType.ASK, orderbook.getAsks());
        return new OrderBook(null, asks, bids);
    }

    /** Adapts public trades; the "buy" side maps to BID, everything else to ASK. */
    public static Trades adaptTrades(
        BankeraTradesResponse tradesResponse, CurrencyPair currencyPair) {
        List<BankeraTrade> bankeraTrades = tradesResponse.getTrades();
        List<Trade> tradesList = new ArrayList<>();
        bankeraTrades.forEach(
            bankeraTrade -> {
                BigDecimal amount = new BigDecimal(bankeraTrade.getAmount());
                BigDecimal price = new BigDecimal(bankeraTrade.getPrice());
                Date date = new Date(Long.parseLong(bankeraTrade.getTime()));
                OrderType type =
                    bankeraTrade.getSide().equalsIgnoreCase(ORDER_SIDE_BUY)
                        ? OrderType.BID
                        : OrderType.ASK;
                tradesList.add(
                    new Trade.Builder()
                        .type(type)
                        .originalAmount(amount)
                        .currencyPair(currencyPair)
                        .price(price)
                        .timestamp(date)
                        .build());
            });
        return new Trades(tradesList, 0L, Trades.TradeSortType.SortByTimestamp);
    }

    /**
     * Builds LimitOrders of one side of the book. Returns an empty list when the
     * API omits that side (null input).
     */
    private static List<LimitOrder> createOrders(
        CurrencyPair currencyPair,
        OrderType orderType,
        List<BankeraOrderBook.OrderBookOrder> ordersList) {
        List<LimitOrder> limitOrders = new ArrayList<>();
        if (ordersList == null) {
            return limitOrders;
        }
        ordersList.forEach(
            order ->
                limitOrders.add(
                    new LimitOrder(
                        orderType,
                        new BigDecimal(order.getAmount()),
                        currencyPair,
                        String.valueOf(order.getId()),
                        null,
                        new BigDecimal(order.getPrice()))));
        return limitOrders;
    }

    /**
     * Adapts the user's open orders. The market string is "BASE-COUNTER"
     * (assumed from the split on "-"; TODO confirm against the API docs).
     */
    public static List<LimitOrder> adaptOpenOrders(BankeraOpenOrders openOrders) {
        List<LimitOrder> orderList = new ArrayList<>();
        openOrders
            .getOpenOrders()
            .forEach(
                bankeraOrder -> {
                    String[] currencies = bankeraOrder.getMarket().split("-");
                    CurrencyPair pair = new CurrencyPair(currencies[0], currencies[1]);
                    orderList.add(
                        new LimitOrder(
                            // Use the shared side constant instead of a repeated literal.
                            bankeraOrder.getSide().equalsIgnoreCase(ORDER_SIDE_BUY)
                                ? OrderType.BID
                                : OrderType.ASK,
                            new BigDecimal(bankeraOrder.getAmount()),
                            new BigDecimal(bankeraOrder.getRemainingAmount()),
                            pair,
                            String.valueOf(bankeraOrder.getId()),
                            // parseLong avoids the needless boxing of Long.valueOf.
                            new Date(Long.parseLong(bankeraOrder.getCreatedAt())),
                            new BigDecimal(bankeraOrder.getPrice())));
                });
        return orderList;
    }

    /** Adapts the user's trade history; fees are charged in the counter currency. */
    public static List<UserTrade> adaptUserTrades(BankeraUserTrades userTrades) {
        List<UserTrade> tradeList = new ArrayList<>();
        userTrades
            .getTrades()
            .forEach(
                trade -> {
                    String[] currencies = trade.getMarket().split("-");
                    CurrencyPair pair = new CurrencyPair(currencies[0], currencies[1]);
                    Currency feeCurrency = new Currency(currencies[1]);
                    tradeList.add(
                        new UserTrade.Builder()
                            .type(
                                trade.getSide().equalsIgnoreCase(ORDER_SIDE_BUY)
                                    ? OrderType.BID
                                    : OrderType.ASK)
                            .originalAmount(new BigDecimal(trade.getAmount()))
                            .currencyPair(pair)
                            .price(new BigDecimal(trade.getPrice()))
                            .timestamp(new Date(Long.parseLong(trade.getCompletedAt())))
                            .id(String.valueOf(trade.getId()))
                            .orderId(String.valueOf(trade.getOrderId()))
                            .feeAmount(new BigDecimal(trade.getFeeAmount()))
                            .feeCurrency(feeCurrency)
                            .build());
                });
        return tradeList;
    }

    /**
     * Adapts a single Bankera order. The API exposes no separate average price,
     * so the limit price is supplied for both price fields.
     */
    public static Order adaptOrder(BankeraOrder bankeraOrder) {
        String[] currencies = bankeraOrder.getMarket().split("-");
        CurrencyPair pair = new CurrencyPair(currencies[0], currencies[1]);
        // Dead DecimalFormat setup removed: it was created and configured but never used.
        return new LimitOrder(
            bankeraOrder.getSide().equalsIgnoreCase(ORDER_SIDE_BUY) ? OrderType.BID : OrderType.ASK,
            new BigDecimal(bankeraOrder.getAmount()),
            pair,
            String.valueOf(bankeraOrder.getId()),
            new Date(Long.parseLong(bankeraOrder.getCreatedAt())),
            new BigDecimal(bankeraOrder.getPrice()),
            new BigDecimal(bankeraOrder.getPrice()),
            new BigDecimal(bankeraOrder.getExecutedAmount()),
            bankeraOrder.getTotalFee(),
            adaptOrderStatus(bankeraOrder.getStatus()));
    }

    /** Maps Bankera's textual order status onto the XChange OrderStatus enum. */
    private static Order.OrderStatus adaptOrderStatus(String status) {
        switch (status.toLowerCase()) {
            case "open":
                return Order.OrderStatus.NEW;
            case "completed":
                return Order.OrderStatus.FILLED;
            case "cancelled":
                return Order.OrderStatus.CANCELED;
            case "rejected":
                return Order.OrderStatus.REJECTED;
            case "pending cancel":
                return Order.OrderStatus.PENDING_CANCEL;
            default:
                return Order.OrderStatus.UNKNOWN;
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ecr.model;
import java.io.Serializable;
/**
*
*/
/**
 * Result object for the BatchDeleteImage operation: the identifiers that were
 * deleted plus any per-image failures.
 */
public class BatchDeleteImageResult implements Serializable, Cloneable {

    /** The image IDs of the deleted images; may be {@code null}. */
    private java.util.List<ImageIdentifier> imageIds;

    /** Any failures associated with the call; may be {@code null}. */
    private java.util.List<ImageFailure> failures;

    /**
     * @return the image IDs of the deleted images, or {@code null} if unset
     */
    public java.util.List<ImageIdentifier> getImageIds() {
        return imageIds;
    }

    /**
     * Replaces the image ID list with a defensive copy of the given collection
     * ({@code null} clears it).
     */
    public void setImageIds(java.util.Collection<ImageIdentifier> imageIds) {
        this.imageIds = (imageIds == null)
                ? null
                : new java.util.ArrayList<ImageIdentifier>(imageIds);
    }

    /**
     * Appends the given image IDs to the existing list (creating it if absent)
     * and returns {@code this} for chaining. Use
     * {@link #setImageIds(java.util.Collection)} or
     * {@link #withImageIds(java.util.Collection)} to replace instead of append.
     */
    public BatchDeleteImageResult withImageIds(ImageIdentifier... imageIds) {
        if (this.imageIds == null) {
            this.imageIds = new java.util.ArrayList<ImageIdentifier>(imageIds.length);
        }
        java.util.Collections.addAll(this.imageIds, imageIds);
        return this;
    }

    /**
     * Replaces the image ID list (see {@link #setImageIds(java.util.Collection)})
     * and returns {@code this} for chaining.
     */
    public BatchDeleteImageResult withImageIds(
            java.util.Collection<ImageIdentifier> imageIds) {
        setImageIds(imageIds);
        return this;
    }

    /**
     * @return any failures associated with the call, or {@code null} if unset
     */
    public java.util.List<ImageFailure> getFailures() {
        return failures;
    }

    /**
     * Replaces the failure list with a defensive copy of the given collection
     * ({@code null} clears it).
     */
    public void setFailures(java.util.Collection<ImageFailure> failures) {
        this.failures = (failures == null)
                ? null
                : new java.util.ArrayList<ImageFailure>(failures);
    }

    /**
     * Appends the given failures to the existing list (creating it if absent)
     * and returns {@code this} for chaining. Use
     * {@link #setFailures(java.util.Collection)} or
     * {@link #withFailures(java.util.Collection)} to replace instead of append.
     */
    public BatchDeleteImageResult withFailures(ImageFailure... failures) {
        if (this.failures == null) {
            this.failures = new java.util.ArrayList<ImageFailure>(failures.length);
        }
        java.util.Collections.addAll(this.failures, failures);
        return this;
    }

    /**
     * Replaces the failure list (see {@link #setFailures(java.util.Collection)})
     * and returns {@code this} for chaining.
     */
    public BatchDeleteImageResult withFailures(
            java.util.Collection<ImageFailure> failures) {
        setFailures(failures);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only fields that are set appear in the output.
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getImageIds() != null) {
            buf.append("ImageIds: ").append(getImageIds()).append(",");
        }
        if (getFailures() != null) {
            buf.append("Failures: ").append(getFailures());
        }
        return buf.append("}").toString();
    }

    /** Two results are equal when both lists compare equal (null-safe). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof BatchDeleteImageResult)) {
            return false;
        }
        BatchDeleteImageResult other = (BatchDeleteImageResult) obj;
        return java.util.Objects.equals(getImageIds(), other.getImageIds())
                && java.util.Objects.equals(getFailures(), other.getFailures());
    }

    /** Hash code consistent with {@link #equals(Object)}; null fields hash as 0. */
    @Override
    public int hashCode() {
        int hashCode = 1;
        hashCode = 31 * hashCode + java.util.Objects.hashCode(getImageIds());
        hashCode = 31 * hashCode + java.util.Objects.hashCode(getFailures());
        return hashCode;
    }

    /** Shallow copy via {@link Object#clone()}; the class is Cloneable by contract. */
    @Override
    public BatchDeleteImageResult clone() {
        try {
            return (BatchDeleteImageResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 1999-2019 Seata.io Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.seata.server.coordinator;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import io.seata.core.context.RootContext;
import io.seata.core.exception.BranchTransactionException;
import io.seata.core.exception.GlobalTransactionException;
import io.seata.core.exception.TransactionException;
import io.seata.core.exception.TransactionExceptionCode;
import io.seata.core.model.BranchStatus;
import io.seata.core.model.BranchType;
import io.seata.core.model.GlobalStatus;
import io.seata.core.protocol.transaction.BranchCommitRequest;
import io.seata.core.protocol.transaction.BranchCommitResponse;
import io.seata.core.protocol.transaction.BranchRollbackRequest;
import io.seata.core.protocol.transaction.BranchRollbackResponse;
import io.seata.core.rpc.RemotingServer;
import io.seata.server.lock.LockManager;
import io.seata.server.lock.LockerManagerFactory;
import io.seata.server.session.BranchSession;
import io.seata.server.session.GlobalSession;
import io.seata.server.session.SessionHelper;
import io.seata.server.session.SessionHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import static io.seata.core.exception.TransactionExceptionCode.BranchTransactionNotExist;
import static io.seata.core.exception.TransactionExceptionCode.FailedToAddBranch;
import static io.seata.core.exception.TransactionExceptionCode.GlobalTransactionNotActive;
import static io.seata.core.exception.TransactionExceptionCode.GlobalTransactionStatusInvalid;
import static io.seata.core.exception.TransactionExceptionCode.FailedToSendBranchCommitRequest;
import static io.seata.core.exception.TransactionExceptionCode.FailedToSendBranchRollbackRequest;
/**
* The type abstract core.
*
* @author ph3636
*/
public abstract class AbstractCore implements Core {
    protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractCore.class);

    // Shared lock manager, resolved once from the factory at class construction.
    protected LockManager lockManager = LockerManagerFactory.getLockManager();

    // Server used to send branch commit/rollback requests to resource managers.
    protected RemotingServer remotingServer;

    /**
     * Creates the core.
     *
     * @param remotingServer transport for RM communication; must not be null
     * @throws IllegalArgumentException if remotingServer is null
     */
    public AbstractCore(RemotingServer remotingServer) {
        if (remotingServer == null) {
            throw new IllegalArgumentException("remotingServer must be not null");
        }
        this.remotingServer = remotingServer;
    }

    /** The branch type this concrete core handles. */
    public abstract BranchType getHandleBranchType();

    /**
     * Registers a new branch under the global session identified by {@code xid}.
     * Runs under the session lock: verifies the session is active and in Begin
     * status, creates the branch, acquires branch locks (subclass hook), and adds
     * the branch to the session — unlocking again if the add fails.
     *
     * @return the new branch id
     * @throws TransactionException if the session is missing/inactive/in the
     *         wrong status, or the branch cannot be stored
     */
    @Override
    public Long branchRegister(BranchType branchType, String resourceId, String clientId, String xid,
                               String applicationData, String lockKeys) throws TransactionException {
        GlobalSession globalSession = assertGlobalSessionNotNull(xid, false);
        return SessionHolder.lockAndExecute(globalSession, () -> {
            globalSessionStatusCheck(globalSession);
            globalSession.addSessionLifecycleListener(SessionHolder.getRootSessionManager());
            BranchSession branchSession = SessionHelper.newBranchByGlobal(globalSession, branchType, resourceId,
                    applicationData, lockKeys, clientId);
            // Expose the branch id in the logging context for subsequent log lines.
            MDC.put(RootContext.MDC_KEY_BRANCH_ID, String.valueOf(branchSession.getBranchId()));
            branchSessionLock(globalSession, branchSession);
            try {
                globalSession.addBranch(branchSession);
            } catch (RuntimeException ex) {
                // Storing the branch failed: release the locks taken just above.
                branchSessionUnlock(branchSession);
                throw new BranchTransactionException(FailedToAddBranch, String
                        .format("Failed to store branch xid = %s branchId = %s", globalSession.getXid(),
                                branchSession.getBranchId()), ex);
            }
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("Register branch successfully, xid = {}, branchId = {}, resourceId = {} ,lockKeys = {}",
                        globalSession.getXid(), branchSession.getBranchId(), resourceId, lockKeys);
            }
            return branchSession.getBranchId();
        });
    }

    /**
     * Rejects branch registration unless the global session is active and still
     * in {@code Begin} status.
     */
    protected void globalSessionStatusCheck(GlobalSession globalSession) throws GlobalTransactionException {
        if (!globalSession.isActive()) {
            throw new GlobalTransactionException(GlobalTransactionNotActive, String.format(
                    "Could not register branch into global session xid = %s status = %s, cause by globalSession not active",
                    globalSession.getXid(), globalSession.getStatus()));
        }
        if (globalSession.getStatus() != GlobalStatus.Begin) {
            throw new GlobalTransactionException(GlobalTransactionStatusInvalid, String
                    .format("Could not register branch into global session xid = %s status = %s while expecting %s",
                            globalSession.getXid(), globalSession.getStatus(), GlobalStatus.Begin));
        }
    }

    // Hook: acquire branch-level locks. No-op here; lock-based branch types
    // (e.g. AT) are expected to override. TODO confirm against subclasses.
    protected void branchSessionLock(GlobalSession globalSession, BranchSession branchSession) throws TransactionException {
    }

    // Hook: release branch-level locks; counterpart of branchSessionLock.
    protected void branchSessionUnlock(BranchSession branchSession) throws TransactionException {
    }

    /**
     * Looks up the global session for {@code xid}, optionally loading its
     * branches, and fails with GlobalTransactionNotExist when absent.
     */
    private GlobalSession assertGlobalSessionNotNull(String xid, boolean withBranchSessions)
            throws TransactionException {
        GlobalSession globalSession = SessionHolder.findGlobalSession(xid, withBranchSessions);
        if (globalSession == null) {
            throw new GlobalTransactionException(TransactionExceptionCode.GlobalTransactionNotExist,
                    String.format("Could not found global transaction xid = %s, may be has finished.", xid));
        }
        return globalSession;
    }

    /**
     * Records a status report from a branch: updates the branch's application
     * data and status within its global session.
     *
     * @throws TransactionException if the global session or branch is not found
     */
    @Override
    public void branchReport(BranchType branchType, String xid, long branchId, BranchStatus status,
                             String applicationData) throws TransactionException {
        GlobalSession globalSession = assertGlobalSessionNotNull(xid, true);
        BranchSession branchSession = globalSession.getBranch(branchId);
        if (branchSession == null) {
            throw new BranchTransactionException(BranchTransactionNotExist,
                    String.format("Could not found branch session xid = %s branchId = %s", xid, branchId));
        }
        branchSession.setApplicationData(applicationData);
        globalSession.addSessionLifecycleListener(SessionHolder.getRootSessionManager());
        globalSession.changeBranchStatus(branchSession, status);
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Report branch status successfully, xid = {}, branchId = {}", globalSession.getXid(),
                    branchSession.getBranchId());
        }
    }

    // Default lock query: no locks tracked at this level, so always lockable.
    @Override
    public boolean lockQuery(BranchType branchType, String resourceId, String xid, String lockKeys)
            throws TransactionException {
        return true;
    }

    /**
     * Sends a branch-commit request to the branch's resource manager and returns
     * the RM-reported branch status.
     *
     * @throws TransactionException wrapping IO/timeout failures of the send
     */
    @Override
    public BranchStatus branchCommit(GlobalSession globalSession, BranchSession branchSession) throws TransactionException {
        try {
            BranchCommitRequest request = new BranchCommitRequest();
            request.setXid(branchSession.getXid());
            request.setBranchId(branchSession.getBranchId());
            request.setResourceId(branchSession.getResourceId());
            request.setApplicationData(branchSession.getApplicationData());
            request.setBranchType(branchSession.getBranchType());
            return branchCommitSend(request, globalSession, branchSession);
        } catch (IOException | TimeoutException e) {
            throw new BranchTransactionException(FailedToSendBranchCommitRequest,
                    String.format("Send branch commit failed, xid = %s branchId = %s", branchSession.getXid(),
                            branchSession.getBranchId()), e);
        }
    }

    // Synchronous send of the commit request to the branch's RM; overridable.
    protected BranchStatus branchCommitSend(BranchCommitRequest request, GlobalSession globalSession,
                                            BranchSession branchSession) throws IOException, TimeoutException {
        BranchCommitResponse response = (BranchCommitResponse) remotingServer.sendSyncRequest(
                branchSession.getResourceId(), branchSession.getClientId(), request);
        return response.getBranchStatus();
    }

    /**
     * Sends a branch-rollback request to the branch's resource manager and
     * returns the RM-reported branch status.
     *
     * @throws TransactionException wrapping IO/timeout failures of the send
     */
    @Override
    public BranchStatus branchRollback(GlobalSession globalSession, BranchSession branchSession) throws TransactionException {
        try {
            BranchRollbackRequest request = new BranchRollbackRequest();
            request.setXid(branchSession.getXid());
            request.setBranchId(branchSession.getBranchId());
            request.setResourceId(branchSession.getResourceId());
            request.setApplicationData(branchSession.getApplicationData());
            request.setBranchType(branchSession.getBranchType());
            return branchRollbackSend(request, globalSession, branchSession);
        } catch (IOException | TimeoutException e) {
            throw new BranchTransactionException(FailedToSendBranchRollbackRequest,
                    String.format("Send branch rollback failed, xid = %s branchId = %s",
                            branchSession.getXid(), branchSession.getBranchId()), e);
        }
    }

    // Synchronous send of the rollback request to the branch's RM; overridable.
    protected BranchStatus branchRollbackSend(BranchRollbackRequest request, GlobalSession globalSession,
                                              BranchSession branchSession) throws IOException, TimeoutException {
        BranchRollbackResponse response = (BranchRollbackResponse) remotingServer.sendSyncRequest(
                branchSession.getResourceId(), branchSession.getClientId(), request);
        return response.getBranchStatus();
    }

    // The remaining Core operations are global-transaction-level; this class
    // only implements branch-level behavior, so they are inert defaults
    // (presumably overridden by the concrete coordinator core — confirm there).

    @Override
    public String begin(String applicationId, String transactionServiceGroup, String name, int timeout)
            throws TransactionException {
        return null;
    }

    @Override
    public GlobalStatus commit(String xid) throws TransactionException {
        return null;
    }

    @Override
    public boolean doGlobalCommit(GlobalSession globalSession, boolean retrying) throws TransactionException {
        return true;
    }

    @Override
    public GlobalStatus globalReport(String xid, GlobalStatus globalStatus) throws TransactionException {
        return null;
    }

    @Override
    public GlobalStatus rollback(String xid) throws TransactionException {
        return null;
    }

    @Override
    public boolean doGlobalRollback(GlobalSession globalSession, boolean retrying) throws TransactionException {
        return true;
    }

    @Override
    public GlobalStatus getStatus(String xid) throws TransactionException {
        return null;
    }

    @Override
    public void doGlobalReport(GlobalSession globalSession, String xid, GlobalStatus globalStatus) throws TransactionException {
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.identitytoolkit.model;
/**
 * Response of setting the account information.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Identity Toolkit API. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class SetAccountInfoResponse extends com.google.api.client.json.GenericJson {
/**
 * The name of the user.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String displayName;
/**
 * The email of the user.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String email;
/**
 * If email has been verified.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean emailVerified;
/**
 * If idToken is STS id token, then this field will be expiration time of STS id token in seconds.
 * The value may be {@code null}.
 */
// @JsonString: the 64-bit value is transmitted as a quoted JSON string (avoids precision loss in JavaScript clients).
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long expiresIn;
/**
 * The Gitkit id token used to log in the newly signed-up user.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String idToken;
/**
 * The fixed string "identitytoolkit#SetAccountInfoResponse".
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String kind;
/**
 * The local ID of the user.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String localId;
/**
 * The new email the user attempts to change to.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String newEmail;
/**
 * The user's hashed password.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String passwordHash;
/**
 * The photo url of the user.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String photoUrl;
/**
 * The user's profiles at the associated IdPs.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<ProviderUserInfo> providerUserInfo;
static {
// hack to force ProGuard to consider ProviderUserInfo used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(ProviderUserInfo.class);
}
/**
 * If idToken is STS id token, then this field will be refresh token.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String refreshToken;
/**
 * The name of the user.
 * @return value or {@code null} for none
 */
public java.lang.String getDisplayName() {
return displayName;
}
/**
 * The name of the user.
 * @param displayName displayName or {@code null} for none
 */
public SetAccountInfoResponse setDisplayName(java.lang.String displayName) {
this.displayName = displayName;
return this;
}
/**
 * The email of the user.
 * @return value or {@code null} for none
 */
public java.lang.String getEmail() {
return email;
}
/**
 * The email of the user.
 * @param email email or {@code null} for none
 */
public SetAccountInfoResponse setEmail(java.lang.String email) {
this.email = email;
return this;
}
/**
 * If email has been verified.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getEmailVerified() {
return emailVerified;
}
/**
 * If email has been verified.
 * @param emailVerified emailVerified or {@code null} for none
 */
public SetAccountInfoResponse setEmailVerified(java.lang.Boolean emailVerified) {
this.emailVerified = emailVerified;
return this;
}
/**
 * If idToken is STS id token, then this field will be expiration time of STS id token in seconds.
 * @return value or {@code null} for none
 */
public java.lang.Long getExpiresIn() {
return expiresIn;
}
/**
 * If idToken is STS id token, then this field will be expiration time of STS id token in seconds.
 * @param expiresIn expiresIn or {@code null} for none
 */
public SetAccountInfoResponse setExpiresIn(java.lang.Long expiresIn) {
this.expiresIn = expiresIn;
return this;
}
/**
 * The Gitkit id token used to log in the newly signed-up user.
 * @return value or {@code null} for none
 */
public java.lang.String getIdToken() {
return idToken;
}
/**
 * The Gitkit id token used to log in the newly signed-up user.
 * @param idToken idToken or {@code null} for none
 */
public SetAccountInfoResponse setIdToken(java.lang.String idToken) {
this.idToken = idToken;
return this;
}
/**
 * The fixed string "identitytoolkit#SetAccountInfoResponse".
 * @return value or {@code null} for none
 */
public java.lang.String getKind() {
return kind;
}
/**
 * The fixed string "identitytoolkit#SetAccountInfoResponse".
 * @param kind kind or {@code null} for none
 */
public SetAccountInfoResponse setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
 * The local ID of the user.
 * @return value or {@code null} for none
 */
public java.lang.String getLocalId() {
return localId;
}
/**
 * The local ID of the user.
 * @param localId localId or {@code null} for none
 */
public SetAccountInfoResponse setLocalId(java.lang.String localId) {
this.localId = localId;
return this;
}
/**
 * The new email the user attempts to change to.
 * @return value or {@code null} for none
 */
public java.lang.String getNewEmail() {
return newEmail;
}
/**
 * The new email the user attempts to change to.
 * @param newEmail newEmail or {@code null} for none
 */
public SetAccountInfoResponse setNewEmail(java.lang.String newEmail) {
this.newEmail = newEmail;
return this;
}
/**
 * The user's hashed password.
 * @see #decodePasswordHash()
 * @return value or {@code null} for none
 */
public java.lang.String getPasswordHash() {
return passwordHash;
}
/**
 * The user's hashed password.
 * <p>Decoding tolerates both the standard and URL-safe Base64 alphabets
 * (Apache Commons Codec {@code Base64.decodeBase64} semantics).</p>
 * @see #getPasswordHash()
 * @return Base64 decoded value or {@code null} for none
 *
 * @since 1.14
 */
public byte[] decodePasswordHash() {
return com.google.api.client.util.Base64.decodeBase64(passwordHash);
}
/**
 * The user's hashed password.
 * @see #encodePasswordHash()
 * @param passwordHash passwordHash or {@code null} for none
 */
public SetAccountInfoResponse setPasswordHash(java.lang.String passwordHash) {
this.passwordHash = passwordHash;
return this;
}
/**
 * The user's hashed password.
 * <p>Encodes using the URL-safe Base64 alphabet, so the stored string differs
 * from a standard-alphabet encoding of the same bytes.</p>
 * @see #setPasswordHash()
 *
 * <p>
 * The value is encoded Base64 or {@code null} for none.
 * </p>
 *
 * @since 1.14
 */
public SetAccountInfoResponse encodePasswordHash(byte[] passwordHash) {
this.passwordHash = com.google.api.client.util.Base64.encodeBase64URLSafeString(passwordHash);
return this;
}
/**
 * The photo url of the user.
 * @return value or {@code null} for none
 */
public java.lang.String getPhotoUrl() {
return photoUrl;
}
/**
 * The photo url of the user.
 * @param photoUrl photoUrl or {@code null} for none
 */
public SetAccountInfoResponse setPhotoUrl(java.lang.String photoUrl) {
this.photoUrl = photoUrl;
return this;
}
/**
 * The user's profiles at the associated IdPs.
 * @return value or {@code null} for none
 */
public java.util.List<ProviderUserInfo> getProviderUserInfo() {
return providerUserInfo;
}
/**
 * The user's profiles at the associated IdPs.
 * @param providerUserInfo providerUserInfo or {@code null} for none
 */
public SetAccountInfoResponse setProviderUserInfo(java.util.List<ProviderUserInfo> providerUserInfo) {
this.providerUserInfo = providerUserInfo;
return this;
}
/**
 * If idToken is STS id token, then this field will be refresh token.
 * @return value or {@code null} for none
 */
public java.lang.String getRefreshToken() {
return refreshToken;
}
/**
 * If idToken is STS id token, then this field will be refresh token.
 * @param refreshToken refreshToken or {@code null} for none
 */
public SetAccountInfoResponse setRefreshToken(java.lang.String refreshToken) {
this.refreshToken = refreshToken;
return this;
}
@Override
public SetAccountInfoResponse set(String fieldName, Object value) {
return (SetAccountInfoResponse) super.set(fieldName, value);
}
@Override
public SetAccountInfoResponse clone() {
return (SetAccountInfoResponse) super.clone();
}
/**
 * Model definition for SetAccountInfoResponseProviderUserInfo.
 */
public static final class ProviderUserInfo extends com.google.api.client.json.GenericJson {
/**
 * The user's display name at the IDP.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String displayName;
/**
 * User's identifier at IDP.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String federatedId;
/**
 * The user's photo url at the IDP.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String photoUrl;
/**
 * The IdP ID. For whitelisted IdPs it's a short domain name, e.g., google.com, aol.com, live.net
 * and yahoo.com. For other OpenID IdPs it's the OP identifier.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String providerId;
/**
 * The user's display name at the IDP.
 * @return value or {@code null} for none
 */
public java.lang.String getDisplayName() {
return displayName;
}
/**
 * The user's display name at the IDP.
 * @param displayName displayName or {@code null} for none
 */
public ProviderUserInfo setDisplayName(java.lang.String displayName) {
this.displayName = displayName;
return this;
}
/**
 * User's identifier at IDP.
 * @return value or {@code null} for none
 */
public java.lang.String getFederatedId() {
return federatedId;
}
/**
 * User's identifier at IDP.
 * @param federatedId federatedId or {@code null} for none
 */
public ProviderUserInfo setFederatedId(java.lang.String federatedId) {
this.federatedId = federatedId;
return this;
}
/**
 * The user's photo url at the IDP.
 * @return value or {@code null} for none
 */
public java.lang.String getPhotoUrl() {
return photoUrl;
}
/**
 * The user's photo url at the IDP.
 * @param photoUrl photoUrl or {@code null} for none
 */
public ProviderUserInfo setPhotoUrl(java.lang.String photoUrl) {
this.photoUrl = photoUrl;
return this;
}
/**
 * The IdP ID. For whitelisted IdPs it's a short domain name, e.g., google.com, aol.com, live.net
 * and yahoo.com. For other OpenID IdPs it's the OP identifier.
 * @return value or {@code null} for none
 */
public java.lang.String getProviderId() {
return providerId;
}
/**
 * The IdP ID. For whitelisted IdPs it's a short domain name, e.g., google.com, aol.com, live.net
 * and yahoo.com. For other OpenID IdPs it's the OP identifier.
 * @param providerId providerId or {@code null} for none
 */
public ProviderUserInfo setProviderId(java.lang.String providerId) {
this.providerId = providerId;
return this;
}
@Override
public ProviderUserInfo set(String fieldName, Object value) {
return (ProviderUserInfo) super.set(fieldName, value);
}
@Override
public ProviderUserInfo clone() {
return (ProviderUserInfo) super.clone();
}
}
}
| |
/*
* Copyright (C) 2018 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.internal.codegen;
import static com.google.testing.compile.CompilationSubject.assertThat;
import static dagger.internal.codegen.Compilers.daggerCompiler;
import static dagger.internal.codegen.TestUtils.endsWithMessage;
import com.google.testing.compile.Compilation;
import com.google.testing.compile.JavaFileObjects;
import java.util.regex.Pattern;
import javax.tools.JavaFileObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Compile-tests for the {@code -Adagger.fullBindingGraphValidation} processor option (and its
 * equivalent alias {@code -Adagger.moduleBindingValidation}): problems found in a module's or
 * subcomponent's full binding graph can be ignored (NONE, the default), reported as warnings
 * (WARNING), or reported as errors (ERROR).
 */
@RunWith(JUnit4.class)
public final class FullBindingGraphValidationTest {
// Module whose own full binding graph binds Object twice (a duplicate-binding error).
private static final JavaFileObject MODULE_WITH_ERRORS =
JavaFileObjects.forSourceLines(
"test.ModuleWithErrors",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"",
"@Module",
"interface ModuleWithErrors {",
" @Binds Object object1(String string);",
" @Binds Object object2(Long l);",
" @Binds Number missingDependency(Integer i);",
"}");
// Make sure the error doesn't show other bindings or a dependency trace afterwards.
private static final Pattern MODULE_WITH_ERRORS_MESSAGE =
endsWithMessage(
"[Dagger/DuplicateBindings] java.lang.Object is bound multiple times:",
" @Binds Object test.ModuleWithErrors.object1(String)",
" @Binds Object test.ModuleWithErrors.object2(Long)");
@Test
public void moduleWithErrors_validationTypeNone() {
Compilation compilation = daggerCompiler().compile(MODULE_WITH_ERRORS);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void moduleWithErrors_validationTypeError() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=ERROR")
.compile(MODULE_WITH_ERRORS);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContainingMatch(MODULE_WITH_ERRORS_MESSAGE)
.inFile(MODULE_WITH_ERRORS)
.onLineContaining("interface ModuleWithErrors");
assertThat(compilation).hadErrorCount(1);
}
@Test
public void moduleWithErrors_validationTypeWarning() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=WARNING")
.compile(MODULE_WITH_ERRORS);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContainingMatch(MODULE_WITH_ERRORS_MESSAGE)
.inFile(MODULE_WITH_ERRORS)
.onLineContaining("interface ModuleWithErrors");
assertThat(compilation).hadWarningCount(1);
}
// Module that is itself clean but transitively includes the broken module above.
private static final JavaFileObject INCLUDES_MODULE_WITH_ERRORS =
JavaFileObjects.forSourceLines(
"test.IncludesModuleWithErrors",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"",
"@Module(includes = ModuleWithErrors.class)",
"interface IncludesModuleWithErrors {}");
@Test
public void includesModuleWithErrors_validationTypeNone() {
Compilation compilation =
daggerCompiler().compile(MODULE_WITH_ERRORS, INCLUDES_MODULE_WITH_ERRORS);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void includesModuleWithErrors_validationTypeError() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=ERROR")
.compile(MODULE_WITH_ERRORS, INCLUDES_MODULE_WITH_ERRORS);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContainingMatch(MODULE_WITH_ERRORS_MESSAGE)
.inFile(MODULE_WITH_ERRORS)
.onLineContaining("interface ModuleWithErrors");
assertThat(compilation)
.hadErrorContainingMatch("test.ModuleWithErrors has errors")
.inFile(INCLUDES_MODULE_WITH_ERRORS)
.onLineContaining("ModuleWithErrors.class");
assertThat(compilation).hadErrorCount(2);
}
@Test
public void includesModuleWithErrors_validationTypeWarning() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=WARNING")
.compile(MODULE_WITH_ERRORS, INCLUDES_MODULE_WITH_ERRORS);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContainingMatch(MODULE_WITH_ERRORS_MESSAGE)
.inFile(MODULE_WITH_ERRORS)
.onLineContaining("interface ModuleWithErrors");
// TODO(b/130284666)
assertThat(compilation)
.hadWarningContainingMatch(MODULE_WITH_ERRORS_MESSAGE)
.inFile(INCLUDES_MODULE_WITH_ERRORS)
.onLineContaining("interface IncludesModuleWithErrors");
assertThat(compilation).hadWarningCount(2);
}
// Binds Object once; combined with the modules/subcomponents below to create duplicate bindings.
private static final JavaFileObject A_MODULE =
JavaFileObjects.forSourceLines(
"test.AModule",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"",
"@Module",
"interface AModule {",
" @Binds Object object(String string);",
"}");
private static final JavaFileObject COMBINED_WITH_A_MODULE_HAS_ERRORS =
JavaFileObjects.forSourceLines(
"test.CombinedWithAModuleHasErrors",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"",
"@Module(includes = AModule.class)",
"interface CombinedWithAModuleHasErrors {",
" @Binds Object object(Long l);",
"}");
// Make sure the error doesn't show other bindings or a dependency trace afterwards.
private static final Pattern COMBINED_WITH_A_MODULE_HAS_ERRORS_MESSAGE =
endsWithMessage(
"[Dagger/DuplicateBindings] java.lang.Object is bound multiple times:",
" @Binds Object test.AModule.object(String)",
" @Binds Object test.CombinedWithAModuleHasErrors.object(Long)");
@Test
public void moduleIncludingModuleWithCombinedErrors_validationTypeNone() {
Compilation compilation = daggerCompiler().compile(A_MODULE, COMBINED_WITH_A_MODULE_HAS_ERRORS);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void moduleIncludingModuleWithCombinedErrors_validationTypeError() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=ERROR")
.compile(A_MODULE, COMBINED_WITH_A_MODULE_HAS_ERRORS);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContainingMatch(COMBINED_WITH_A_MODULE_HAS_ERRORS_MESSAGE)
.inFile(COMBINED_WITH_A_MODULE_HAS_ERRORS)
.onLineContaining("interface CombinedWithAModuleHasErrors");
assertThat(compilation).hadErrorCount(1);
}
@Test
public void moduleIncludingModuleWithCombinedErrors_validationTypeWarning() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=WARNING")
.compile(A_MODULE, COMBINED_WITH_A_MODULE_HAS_ERRORS);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContainingMatch(COMBINED_WITH_A_MODULE_HAS_ERRORS_MESSAGE)
.inFile(COMBINED_WITH_A_MODULE_HAS_ERRORS)
.onLineContaining("interface CombinedWithAModuleHasErrors");
assertThat(compilation).hadWarningCount(1);
}
// Subcomponent whose @BindsInstance Object collides with AModule's Object binding.
private static final JavaFileObject SUBCOMPONENT_WITH_ERRORS =
JavaFileObjects.forSourceLines(
"test.SubcomponentWithErrors",
"package test;",
"",
"import dagger.BindsInstance;",
"import dagger.Subcomponent;",
"",
"@Subcomponent(modules = AModule.class)",
"interface SubcomponentWithErrors {",
" @Subcomponent.Builder",
" interface Builder {",
" @BindsInstance Builder object(Object object);",
" SubcomponentWithErrors build();",
" }",
"}");
// Make sure the error doesn't show other bindings or a dependency trace afterwards.
private static final Pattern SUBCOMPONENT_WITH_ERRORS_MESSAGE =
endsWithMessage(
"[Dagger/DuplicateBindings] java.lang.Object is bound multiple times:",
" @Binds Object test.AModule.object(String)",
" @BindsInstance test.SubcomponentWithErrors.Builder"
+ " test.SubcomponentWithErrors.Builder.object(Object)");
@Test
public void subcomponentWithErrors_validationTypeNone() {
Compilation compilation = daggerCompiler().compile(SUBCOMPONENT_WITH_ERRORS, A_MODULE);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void subcomponentWithErrors_validationTypeError() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=ERROR")
.compile(SUBCOMPONENT_WITH_ERRORS, A_MODULE);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContainingMatch(SUBCOMPONENT_WITH_ERRORS_MESSAGE)
.inFile(SUBCOMPONENT_WITH_ERRORS)
.onLineContaining("interface SubcomponentWithErrors");
assertThat(compilation).hadErrorCount(1);
}
@Test
public void subcomponentWithErrors_validationTypeWarning() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=WARNING")
.compile(SUBCOMPONENT_WITH_ERRORS, A_MODULE);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContainingMatch(SUBCOMPONENT_WITH_ERRORS_MESSAGE)
.inFile(SUBCOMPONENT_WITH_ERRORS)
.onLineContaining("interface SubcomponentWithErrors");
assertThat(compilation).hadWarningCount(1);
}
private static final JavaFileObject MODULE_WITH_SUBCOMPONENT_WITH_ERRORS =
JavaFileObjects.forSourceLines(
"test.ModuleWithSubcomponentWithErrors",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"",
"@Module(subcomponents = SubcomponentWithErrors.class)",
"interface ModuleWithSubcomponentWithErrors {}");
@Test
public void moduleWithSubcomponentWithErrors_validationTypeNone() {
Compilation compilation =
daggerCompiler()
.compile(MODULE_WITH_SUBCOMPONENT_WITH_ERRORS, SUBCOMPONENT_WITH_ERRORS, A_MODULE);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void moduleWithSubcomponentWithErrors_validationTypeError() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=ERROR")
.compile(MODULE_WITH_SUBCOMPONENT_WITH_ERRORS, SUBCOMPONENT_WITH_ERRORS, A_MODULE);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContainingMatch(SUBCOMPONENT_WITH_ERRORS_MESSAGE)
.inFile(MODULE_WITH_SUBCOMPONENT_WITH_ERRORS)
.onLineContaining("interface ModuleWithSubcomponentWithErrors");
// TODO(b/130283677)
assertThat(compilation)
.hadErrorContainingMatch(SUBCOMPONENT_WITH_ERRORS_MESSAGE)
.inFile(SUBCOMPONENT_WITH_ERRORS)
.onLineContaining("interface SubcomponentWithErrors");
assertThat(compilation).hadErrorCount(2);
}
@Test
public void moduleWithSubcomponentWithErrors_validationTypeWarning() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=WARNING")
.compile(MODULE_WITH_SUBCOMPONENT_WITH_ERRORS, SUBCOMPONENT_WITH_ERRORS, A_MODULE);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContainingMatch(SUBCOMPONENT_WITH_ERRORS_MESSAGE)
.inFile(MODULE_WITH_SUBCOMPONENT_WITH_ERRORS)
.onLineContaining("interface ModuleWithSubcomponentWithErrors");
// TODO(b/130283677)
assertThat(compilation)
.hadWarningContainingMatch(SUBCOMPONENT_WITH_ERRORS_MESSAGE)
.inFile(SUBCOMPONENT_WITH_ERRORS)
.onLineContaining("interface SubcomponentWithErrors");
assertThat(compilation).hadWarningCount(2);
}
// Subcomponent with no problems of its own; errors only arise when combined below.
private static final JavaFileObject A_SUBCOMPONENT =
JavaFileObjects.forSourceLines(
"test.ASubcomponent",
"package test;",
"",
"import dagger.BindsInstance;",
"import dagger.Subcomponent;",
"",
"@Subcomponent(modules = AModule.class)",
"interface ASubcomponent {",
" @Subcomponent.Builder",
" interface Builder {",
" ASubcomponent build();",
" }",
"}");
private static final JavaFileObject COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS =
JavaFileObjects.forSourceLines(
"test.CombinedWithASubcomponentHasErrors",
"package test;",
"",
"import dagger.Binds;",
"import dagger.Module;",
"",
"@Module(subcomponents = ASubcomponent.class)",
"interface CombinedWithASubcomponentHasErrors {",
" @Binds Object object(Number number);",
"}");
// Make sure the error doesn't show other bindings or a dependency trace afterwards.
private static final Pattern COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS_MESSAGE =
endsWithMessage(
"[Dagger/DuplicateBindings] java.lang.Object is bound multiple times:",
" @Binds Object test.AModule.object(String)",
" @Binds Object test.CombinedWithASubcomponentHasErrors.object(Number)");
@Test
public void moduleWithSubcomponentWithCombinedErrors_validationTypeNone() {
Compilation compilation =
daggerCompiler().compile(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS, A_SUBCOMPONENT, A_MODULE);
assertThat(compilation).succeededWithoutWarnings();
}
@Test
public void moduleWithSubcomponentWithCombinedErrors_validationTypeError() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=ERROR")
.compile(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS, A_SUBCOMPONENT, A_MODULE);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContainingMatch(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS_MESSAGE)
.inFile(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS)
.onLineContaining("interface CombinedWithASubcomponentHasErrors");
assertThat(compilation).hadErrorCount(1);
}
@Test
public void moduleWithSubcomponentWithCombinedErrors_validationTypeWarning() {
Compilation compilation =
daggerCompiler()
.withOptions("-Adagger.fullBindingGraphValidation=WARNING")
.compile(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS, A_SUBCOMPONENT, A_MODULE);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContainingMatch(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS_MESSAGE)
.inFile(COMBINED_WITH_A_SUBCOMPONENT_HAS_ERRORS)
.onLineContaining("interface CombinedWithASubcomponentHasErrors");
assertThat(compilation).hadWarningCount(1);
}
// Supplying both the new option and its alias with different values must be rejected.
@Test
public void bothAliasesDifferentValues() {
Compilation compilation =
daggerCompiler()
.withOptions(
"-Adagger.moduleBindingValidation=NONE",
"-Adagger.fullBindingGraphValidation=ERROR")
.compile(MODULE_WITH_ERRORS);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(
"Only one of the equivalent options "
+ "(-Adagger.fullBindingGraphValidation, -Adagger.moduleBindingValidation)"
+ " should be used; prefer -Adagger.fullBindingGraphValidation");
assertThat(compilation).hadErrorCount(1);
}
// Supplying both with the same value is tolerated, but still warns.
@Test
public void bothAliasesSameValue() {
Compilation compilation =
daggerCompiler()
.withOptions(
"-Adagger.moduleBindingValidation=NONE", "-Adagger.fullBindingGraphValidation=NONE")
.compile(MODULE_WITH_ERRORS);
assertThat(compilation).succeeded();
assertThat(compilation)
.hadWarningContaining(
"Only one of the equivalent options "
+ "(-Adagger.fullBindingGraphValidation, -Adagger.moduleBindingValidation)"
+ " should be used; prefer -Adagger.fullBindingGraphValidation");
}
}
| |
/*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014 The Billing Project, LLC
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.analytics.dao.factory;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.UUID;
import javax.sql.DataSource;
import org.killbill.billing.invoice.api.InvoiceItem;
import org.killbill.billing.invoice.api.InvoiceItemType;
import org.killbill.billing.plugin.analytics.AnalyticsTestSuiteNoDB;
import org.killbill.billing.plugin.analytics.BusinessExecutor;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceItemBaseModelDao;
import org.killbill.billing.plugin.analytics.dao.model.BusinessInvoiceItemBaseModelDao.ItemSource;
import org.killbill.billing.plugin.analytics.utils.BusinessInvoiceUtils;
import org.killbill.killbill.osgi.libs.killbill.OSGIKillbillDataSource;
import org.killbill.killbill.osgi.libs.killbill.OSGIKillbillLogService;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
public class TestBusinessInvoiceFactory extends AnalyticsTestSuiteNoDB {
private BusinessInvoiceFactory invoiceFactory;
@Override
@BeforeMethod(groups = "fast")
public void setUp() throws Exception {
super.setUp();
final OSGIKillbillDataSource osgiKillbillDataSource = Mockito.mock(OSGIKillbillDataSource.class);
final DataSource dataSource = Mockito.mock(DataSource.class);
Mockito.when(osgiKillbillDataSource.getDataSource()).thenReturn(dataSource);
final OSGIKillbillLogService osgiKillbillLogService = Mockito.mock(OSGIKillbillLogService.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(final InvocationOnMock invocation) throws Throwable {
logger.info(Arrays.toString(invocation.getArguments()));
return null;
}
}).when(osgiKillbillLogService).log(Mockito.anyInt(), Mockito.anyString());
invoiceFactory = new BusinessInvoiceFactory(BusinessExecutor.newCachedThreadPool(osgiConfigPropertiesService));
}
@Test(groups = "fast")
public void testRevenueRecognizableClassicAccountCredit() throws Exception {
final UUID invoiceId = UUID.randomUUID();
// Classic account credit ($10), from the perspective of the CREDIT_ADJ item
final BusinessInvoiceItemBaseModelDao businessCreditAdjItem = invoiceFactory.createBusinessInvoiceItem(account,
invoice,
createInvoiceItem(invoiceId, InvoiceItemType.CREDIT_ADJ, new BigDecimal("-10")),
ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.CBA_ADJ, new BigDecimal("10"))),
null,
null,
null,
invoiceItemRecordId,
currencyConverter,
auditLog,
accountRecordId,
tenantRecordId,
reportGroup);
// We ignore these
Assert.assertNull(businessCreditAdjItem);
// Classic account credit ($10), from the perspective of the CBA_ADJ item
final BusinessInvoiceItemBaseModelDao businessCreditItem = invoiceFactory.createBusinessInvoiceItem(account,
invoice,
createInvoiceItem(invoiceId, InvoiceItemType.CBA_ADJ, new BigDecimal("10")),
ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.CREDIT_ADJ, new BigDecimal("-10"))),
null,
null,
null,
invoiceItemRecordId,
currencyConverter,
auditLog,
accountRecordId,
tenantRecordId,
reportGroup);
// We treat these as NOT recognizable account credits
Assert.assertEquals(businessCreditItem.getAmount().compareTo(new BigDecimal("10")), 0);
Assert.assertEquals(businessCreditItem.getItemType(), InvoiceItemType.CBA_ADJ.toString());
Assert.assertEquals(businessCreditItem.getItemSource(), ItemSource.user.toString());
// Invoice adjustment, not to be mixed with credits!
final BusinessInvoiceItemBaseModelDao businessInvoiceAdjustmentItem = invoiceFactory.createBusinessInvoiceItem(account,
invoice,
createInvoiceItem(invoiceId, InvoiceItemType.CREDIT_ADJ, new BigDecimal("-10")),
ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.RECURRING, new BigDecimal("10"))),
null,
null,
null,
invoiceItemRecordId,
currencyConverter,
auditLog,
accountRecordId,
tenantRecordId,
reportGroup);
Assert.assertEquals(businessInvoiceAdjustmentItem.getAmount().compareTo(new BigDecimal("-10")), 0);
Assert.assertEquals(businessInvoiceAdjustmentItem.getItemType(), InvoiceItemType.CREDIT_ADJ.toString());
Assert.assertEquals(businessInvoiceAdjustmentItem.getItemSource(), ItemSource.user.toString());
// Invoice adjustment via refund
final BusinessInvoiceItemBaseModelDao businessRefundInvoiceAdjustmentItem = invoiceFactory.createBusinessInvoiceItem(account,
invoice,
createInvoiceItem(invoiceId, InvoiceItemType.REFUND_ADJ, new BigDecimal("-10")),
ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.RECURRING, new BigDecimal("10"))),
null,
null,
null,
invoiceItemRecordId,
currencyConverter,
auditLog,
accountRecordId,
tenantRecordId,
reportGroup);
Assert.assertEquals(businessRefundInvoiceAdjustmentItem.getAmount().compareTo(new BigDecimal("-10")), 0);
Assert.assertEquals(businessRefundInvoiceAdjustmentItem.getItemType(), InvoiceItemType.REFUND_ADJ.toString());
Assert.assertEquals(businessRefundInvoiceAdjustmentItem.getItemSource(), ItemSource.user.toString());
// Item adjustment
final BusinessInvoiceItemBaseModelDao businessInvoiceItemAdjustmentItem = invoiceFactory.createBusinessInvoiceItem(account,
invoice,
createInvoiceItem(invoiceId, InvoiceItemType.ITEM_ADJ, new BigDecimal("-10")),
ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.RECURRING, new BigDecimal("10"))),
null,
null,
null,
invoiceItemRecordId,
currencyConverter,
auditLog,
accountRecordId,
tenantRecordId,
reportGroup);
Assert.assertEquals(businessInvoiceItemAdjustmentItem.getAmount().compareTo(new BigDecimal("-10")), 0);
Assert.assertEquals(businessInvoiceItemAdjustmentItem.getItemType(), InvoiceItemType.ITEM_ADJ.toString());
Assert.assertEquals(businessInvoiceItemAdjustmentItem.getItemSource(), ItemSource.user.toString());
// System generated account credit
final BusinessInvoiceItemBaseModelDao businessCBAItem = invoiceFactory.createBusinessInvoiceItem(account,
invoice,
createInvoiceItem(invoiceId, InvoiceItemType.CBA_ADJ, new BigDecimal("10")),
ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.RECURRING, new BigDecimal("30")),
createInvoiceItem(invoiceId, InvoiceItemType.REPAIR_ADJ, new BigDecimal("-30")),
createInvoiceItem(invoiceId, InvoiceItemType.RECURRING, new BigDecimal("20"))),
null,
null,
null,
invoiceItemRecordId,
currencyConverter,
auditLog,
accountRecordId,
tenantRecordId,
reportGroup
);
Assert.assertEquals(businessCBAItem.getAmount().compareTo(new BigDecimal("10")), 0);
Assert.assertEquals(businessCBAItem.getItemType(), InvoiceItemType.CBA_ADJ.toString());
Assert.assertEquals(businessCBAItem.getItemSource(), BusinessInvoiceItemBaseModelDao.DEFAULT_ITEM_SOURCE);
}
@Test(groups = "fast")
public void testInvoiceAdjustment() throws Exception {
    final UUID invoiceId = UUID.randomUUID();

    // A plain recurring charge is never an invoice-level adjustment.
    final InvoiceItem recurringItem = createInvoiceItem(invoiceId, InvoiceItemType.RECURRING);
    Assert.assertFalse(BusinessInvoiceUtils.isInvoiceAdjustmentItem(recurringItem, ImmutableList.<InvoiceItem>of()));

    // A refund adjustment always counts as an invoice adjustment, regardless of other items.
    Assert.assertTrue(BusinessInvoiceUtils.isInvoiceAdjustmentItem(createInvoiceItem(invoiceId, InvoiceItemType.REFUND_ADJ),
                                                                   ImmutableList.<InvoiceItem>of()));

    final InvoiceItem creditAdjustment = createInvoiceItem(invoiceId, InvoiceItemType.CREDIT_ADJ);

    // Credit exactly offset by a CBA item of the opposite amount: this is an account credit,
    // not an invoice adjustment.
    final InvoiceItem matchingCba = createInvoiceItem(invoiceId, InvoiceItemType.CBA_ADJ, creditAdjustment.getAmount().negate());
    Assert.assertFalse(BusinessInvoiceUtils.isInvoiceAdjustmentItem(creditAdjustment, ImmutableList.<InvoiceItem>of(matchingCba)));

    // CBA amount that does not exactly offset the credit: treated as an invoice adjustment.
    final InvoiceItem mismatchedCba = createInvoiceItem(invoiceId, InvoiceItemType.CBA_ADJ,
                                                        creditAdjustment.getAmount().negate().add(BigDecimal.ONE));
    Assert.assertTrue(BusinessInvoiceUtils.isInvoiceAdjustmentItem(creditAdjustment, ImmutableList.<InvoiceItem>of(mismatchedCba)));

    // Additional items alongside the matching CBA also make it an invoice adjustment.
    Assert.assertTrue(BusinessInvoiceUtils.isInvoiceAdjustmentItem(creditAdjustment,
                                                                   ImmutableList.<InvoiceItem>of(createInvoiceItem(invoiceId, InvoiceItemType.RECURRING),
                                                                                                 createInvoiceItem(invoiceId, InvoiceItemType.CBA_ADJ, creditAdjustment.getAmount().negate()))));
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.http.HttpChannel;
import org.elasticsearch.http.HttpRequest;
import org.elasticsearch.http.HttpResponse;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class FakeRestRequest extends RestRequest {
public FakeRestRequest() {
this(NamedXContentRegistry.EMPTY, new FakeHttpRequest(Method.GET, "", BytesArray.EMPTY, new HashMap<>()), new HashMap<>(),
new FakeHttpChannel(null));
}
private FakeRestRequest(NamedXContentRegistry xContentRegistry, HttpRequest httpRequest, Map<String, String> params,
HttpChannel httpChannel) {
super(xContentRegistry, params, httpRequest.uri(), httpRequest.getHeaders(), httpRequest, httpChannel);
}
private static class FakeHttpRequest implements HttpRequest {
private final Method method;
private final String uri;
private final BytesReference content;
private final Map<String, List<String>> headers;
private final Exception inboundException;
private FakeHttpRequest(Method method, String uri, BytesReference content, Map<String, List<String>> headers) {
this(method, uri, content, headers, null);
}
private FakeHttpRequest(Method method, String uri, BytesReference content, Map<String, List<String>> headers,
Exception inboundException) {
this.method = method;
this.uri = uri;
this.content = content;
this.headers = headers;
this.inboundException = inboundException;
}
@Override
public Method method() {
return method;
}
@Override
public String uri() {
return uri;
}
@Override
public BytesReference content() {
return content;
}
@Override
public Map<String, List<String>> getHeaders() {
return headers;
}
@Override
public List<String> strictCookies() {
return Collections.emptyList();
}
@Override
public HttpVersion protocolVersion() {
return HttpVersion.HTTP_1_1;
}
@Override
public HttpRequest removeHeader(String header) {
headers.remove(header);
return this;
}
@Override
public HttpResponse createResponse(RestStatus status, BytesReference content) {
Map<String, String> headers = new HashMap<>();
return new HttpResponse() {
@Override
public void addHeader(String name, String value) {
headers.put(name, value);
}
@Override
public boolean containsHeader(String name) {
return headers.containsKey(name);
}
};
}
@Override
public void release() {
}
@Override
public HttpRequest releaseAndCopy() {
return this;
}
@Override
public Exception getInboundException() {
return inboundException;
}
}
private static class FakeHttpChannel implements HttpChannel {
private final InetSocketAddress remoteAddress;
private FakeHttpChannel(InetSocketAddress remoteAddress) {
this.remoteAddress = remoteAddress;
}
@Override
public void sendResponse(HttpResponse response, ActionListener<Void> listener) {
}
@Override
public InetSocketAddress getLocalAddress() {
return null;
}
@Override
public InetSocketAddress getRemoteAddress() {
return remoteAddress;
}
@Override
public void addCloseListener(ActionListener<Void> listener) {
}
@Override
public boolean isOpen() {
return true;
}
@Override
public void close() {
}
}
public static class Builder {
private final NamedXContentRegistry xContentRegistry;
private Map<String, List<String>> headers = new HashMap<>();
private Map<String, String> params = new HashMap<>();
private BytesReference content = BytesArray.EMPTY;
private String path = "/";
private Method method = Method.GET;
private InetSocketAddress address = null;
private Exception inboundException;
public Builder(NamedXContentRegistry xContentRegistry) {
this.xContentRegistry = xContentRegistry;
}
public Builder withHeaders(Map<String, List<String>> headers) {
this.headers = headers;
return this;
}
public Builder withParams(Map<String, String> params) {
this.params = params;
return this;
}
public Builder withContent(BytesReference content, XContentType xContentType) {
this.content = content;
if (xContentType != null) {
headers.put("Content-Type", Collections.singletonList(xContentType.mediaType()));
}
return this;
}
public Builder withPath(String path) {
this.path = path;
return this;
}
public Builder withMethod(Method method) {
this.method = method;
return this;
}
public Builder withRemoteAddress(InetSocketAddress address) {
this.address = address;
return this;
}
public Builder withInboundException(Exception exception) {
this.inboundException = exception;
return this;
}
public FakeRestRequest build() {
FakeHttpRequest fakeHttpRequest = new FakeHttpRequest(method, path, content, headers, inboundException);
return new FakeRestRequest(xContentRegistry, fakeHttpRequest, params, new FakeHttpChannel(address));
}
}
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.integration;
import org.ehcache.Cache;
import org.ehcache.CacheManager;
import org.ehcache.core.EhcacheWithLoaderWriter;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.config.units.EntryUnit;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.event.CacheEvent;
import org.ehcache.event.CacheEventListener;
import org.ehcache.event.EventFiring;
import org.ehcache.event.EventOrdering;
import org.ehcache.event.EventType;
import org.ehcache.expiry.Duration;
import org.ehcache.expiry.Expirations;
import org.ehcache.impl.internal.TimeSourceConfiguration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import static org.ehcache.config.builders.CacheConfigurationBuilder.newCacheConfigurationBuilder;
import static org.ehcache.config.builders.ResourcePoolsBuilder.heap;
import static org.ehcache.config.builders.ResourcePoolsBuilder.newResourcePoolsBuilder;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
 * End-to-end tests for cache event notifications: verifies that listeners registered
 * on a cache's runtime configuration observe CREATED / UPDATED / REMOVED / EVICTED /
 * EXPIRED events, for synchronous and asynchronous firing, ordered and unordered
 * delivery, and under concurrent mutation from multiple threads.
 */
public class EventNotificationTest {
  // Controllable clock so expiration can be triggered deterministically.
  private static final TestTimeSource testTimeSource = new TestTimeSource();

  Listener listener1 = new Listener();
  Listener listener2 = new Listener();
  Listener listener3 = new Listener();
  AsynchronousListener asyncListener = new AsynchronousListener();

  @Test
  public void testNotificationForCacheOperations() throws InterruptedException {
    CacheConfiguration<Long, String> cacheConfiguration = newCacheConfigurationBuilder(Long.class, String.class, heap(5)).build();
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration)
        .build(true);
    Cache<Long, String> cache = cacheManager.getCache("cache", Long.class, String.class);
    cache.getRuntimeConfiguration().registerCacheEventListener(listener1, EventOrdering.UNORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.EVICTED, EventType.CREATED, EventType.UPDATED, EventType.REMOVED));
    cache.getRuntimeConfiguration().registerCacheEventListener(listener2, EventOrdering.UNORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.EVICTED, EventType.CREATED, EventType.UPDATED, EventType.REMOVED));

    // put of a new key fires CREATED on both listeners
    cache.put(1L, "1");
    assertEquals(1, listener1.created.get());
    assertEquals(0, listener1.updated.get());
    assertEquals(0, listener1.removed.get());
    assertEquals(1, listener2.created.get());
    assertEquals(0, listener2.updated.get());
    assertEquals(0, listener2.removed.get());

    // putAll of two new keys fires one CREATED per mapping
    Map<Long, String> entries = new HashMap<Long, String>();
    entries.put(2L, "2");
    entries.put(3L, "3");
    cache.putAll(entries);
    assertEquals(3, listener1.created.get());
    assertEquals(0, listener1.updated.get());
    assertEquals(0, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(0, listener2.updated.get());
    assertEquals(0, listener2.removed.get());

    // put over an existing key fires UPDATED
    cache.put(1L, "01");
    assertEquals(3, listener1.created.get());
    assertEquals(1, listener1.updated.get());
    assertEquals(0, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(1, listener2.updated.get());
    assertEquals(0, listener2.removed.get());

    // remove fires REMOVED
    cache.remove(2L);
    assertEquals(3, listener1.created.get());
    assertEquals(1, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(1, listener2.updated.get());
    assertEquals(1, listener2.removed.get());

    // replace fires UPDATED
    cache.replace(1L, "001");
    assertEquals(3, listener1.created.get());
    assertEquals(2, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(2, listener2.updated.get());
    assertEquals(1, listener2.removed.get());

    // conditional replace with a matching old value also fires UPDATED
    cache.replace(3L, "3", "03");
    assertEquals(3, listener1.created.get());
    assertEquals(3, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(3, listener2.updated.get());
    assertEquals(1, listener2.removed.get());

    // read-only operations (get / containsKey / getAll) must not fire any event
    cache.get(1L);
    assertEquals(3, listener1.created.get());
    assertEquals(3, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(3, listener2.updated.get());
    assertEquals(1, listener2.removed.get());
    cache.containsKey(1L);
    assertEquals(3, listener1.created.get());
    assertEquals(3, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(3, listener2.updated.get());
    assertEquals(1, listener2.removed.get());
    cache.put(1L, "0001");
    assertEquals(3, listener1.created.get());
    assertEquals(4, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(4, listener2.updated.get());
    assertEquals(1, listener2.removed.get());
    Set<Long> keys = new HashSet<Long>();
    keys.add(1L);
    cache.getAll(keys);
    assertEquals(3, listener1.created.get());
    assertEquals(4, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(4, listener2.updated.get());
    assertEquals(1, listener2.removed.get());

    // a listener registered later only sees events from its registration onwards
    cache.getRuntimeConfiguration().registerCacheEventListener(listener3, EventOrdering.ORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.EVICTED, EventType.CREATED, EventType.UPDATED, EventType.REMOVED));
    cache.replace(1L, "00001");
    assertEquals(3, listener1.created.get());
    assertEquals(5, listener1.updated.get());
    assertEquals(1, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(5, listener2.updated.get());
    assertEquals(1, listener2.removed.get());
    assertEquals(0, listener3.created.get());
    assertEquals(1, listener3.updated.get());
    assertEquals(0, listener3.removed.get());
    cache.remove(1L);
    assertEquals(3, listener1.created.get());
    assertEquals(5, listener1.updated.get());
    assertEquals(2, listener1.removed.get());
    assertEquals(3, listener2.created.get());
    assertEquals(5, listener2.updated.get());
    assertEquals(2, listener2.removed.get());
    assertEquals(0, listener3.created.get());
    assertEquals(1, listener3.updated.get());
    assertEquals(1, listener3.removed.get());

    // async listener: 7 puts into a 5-entry heap -> 7 CREATED + 3 EVICTED = 10 events
    asyncListener.resetLatchCount(10);
    cache.getRuntimeConfiguration().registerCacheEventListener(asyncListener, EventOrdering.ORDERED, EventFiring.ASYNCHRONOUS, EnumSet
        .of(EventType.EVICTED, EventType.CREATED, EventType.UPDATED, EventType.REMOVED));
    entries.clear();
    entries.put(4L, "4");
    entries.put(5L, "5");
    entries.put(6L, "6");
    entries.put(7L, "7");
    entries.put(8L, "8");
    entries.put(9L, "9");
    entries.put(10L, "10");
    cache.putAll(entries);
    // wait until the asynchronous listener has observed all 10 events
    asyncListener.latch.await();
    cacheManager.close();
    assertEquals(10, listener1.created.get());
    assertEquals(5, listener1.updated.get());
    assertEquals(2, listener1.removed.get());
    assertEquals(3, listener1.evicted.get());
    assertEquals(10, listener2.created.get());
    assertEquals(5, listener2.updated.get());
    assertEquals(2, listener2.removed.get());
    assertEquals(3, listener2.evicted.get());
    assertEquals(7, listener3.created.get());
    assertEquals(1, listener3.updated.get());
    assertEquals(1, listener3.removed.get());
    assertEquals(3, listener3.evicted.get());
    assertEquals(7, asyncListener.created.get());
    assertEquals(3, asyncListener.evicted.get());
    assertEquals(0, asyncListener.removed.get());
    assertEquals(0, asyncListener.updated.get());
  }

  @Test
  public void testEventOrderForUpdateThatTriggersEviction () {
    // Heap holds a single entry; putting a second large value must evict before creating.
    CacheConfiguration<Long, SerializableObject> cacheConfiguration = newCacheConfigurationBuilder(Long.class, SerializableObject.class,
        newResourcePoolsBuilder()
            .heap(1L, EntryUnit.ENTRIES).offheap(1L, MemoryUnit.MB).build()).build();
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration)
        .build(true);
    Cache<Long, SerializableObject> cache = cacheManager.getCache("cache", Long.class, SerializableObject.class);
    cache.getRuntimeConfiguration().registerCacheEventListener(listener1, EventOrdering.ORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.EVICTED, EventType.CREATED, EventType.UPDATED, EventType.REMOVED));
    SerializableObject object1 = new SerializableObject(0xAAE60); // 700 KB
    SerializableObject object2 = new SerializableObject(0xDBBA0); // 900 KB
    cache.put(1L, object1);
    cache.put(1L, object2);
    // EVICTED must have been delivered before CREATED (lower sequence number)
    assertThat(listener1.eventTypeHashMap.get(EventType.EVICTED), lessThan(listener1.eventTypeHashMap.get(EventType.CREATED)));
    cacheManager.close();
  }

  @Test
  public void testEventFiringInCacheIterator() {
    Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class.getName() + "-EventNotificationTest");
    CacheConfiguration<Long, String> cacheConfiguration = newCacheConfigurationBuilder(Long.class, String.class,
        newResourcePoolsBuilder()
            .heap(5L, EntryUnit.ENTRIES).build())
        .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)))
        .build();
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration)
        .using(new TimeSourceConfiguration(testTimeSource))
        .build(true);
    testTimeSource.setTimeMillis(0);
    Cache<Long, String> cache = cacheManager.getCache("cache", Long.class, String.class);
    cache.getRuntimeConfiguration().registerCacheEventListener(listener1, EventOrdering.UNORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.EXPIRED));
    cache.put(1L, "1");
    cache.put(2L, "2");
    cache.put(3L, "3");
    cache.put(4L, "4");
    cache.put(5L, "5");
    assertThat(listener1.expired.get(), is(0));
    // within the TTL: iteration sees live entries and fires nothing
    for (Cache.Entry<Long, String> entry : cache) {
      logger.info("Iterating over key : {}", entry.getKey());
    }
    // advance past the 1s TTL: iterating now must fire EXPIRED for every entry
    testTimeSource.setTimeMillis(2000);
    for (Cache.Entry<Long, String> entry : cache) {
      logger.info("Iterating over key : {}", entry.getKey());
    }
    cacheManager.close();
    assertThat(listener1.expired.get(), is(5));
  }

  @Test
  public void testMultiThreadedSyncAsyncNotifications() throws InterruptedException {
    AsynchronousListener asyncListener = new AsynchronousListener();
    asyncListener.resetLatchCount(100);
    CacheConfiguration<Number, Number> cacheConfiguration = newCacheConfigurationBuilder(Number.class, Number.class,
        newResourcePoolsBuilder().heap(10L, EntryUnit.ENTRIES))
        .withExpiry(Expirations.timeToLiveExpiration(new Duration(1, TimeUnit.SECONDS)))
        .build();
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration)
        .using(new TimeSourceConfiguration(testTimeSource))
        .build(true);
    testTimeSource.setTimeMillis(0);
    Cache<Number, Number> cache = cacheManager.getCache("cache", Number.class, Number.class);
    cache.getRuntimeConfiguration().registerCacheEventListener(asyncListener, EventOrdering.UNORDERED, EventFiring.ASYNCHRONOUS, EnumSet
        .of(EventType.CREATED, EventType.EXPIRED));
    cache.getRuntimeConfiguration().registerCacheEventListener(listener1, EventOrdering.ORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.CREATED, EventType.EXPIRED));
    // 10 threads x 10 puts = 100 CREATED events
    Thread[] operators = new Thread[10];
    for (int i = 0; i < 10; i++) {
      operators[i] = new Thread(new CachePutOperator(cache, i), "CACHE-PUT-OPERATOR_" + i);
      operators[i].start();
    }
    for (int i = 0; i < 10; i++) {
      operators[i].join();
    }
    // count the survivors (the rest were evicted, which is not being listened to here)
    int entryCount = 0;
    for (Cache.Entry<Number, Number> entry : cache) {
      entryCount++;
    }
    // advance past the TTL; gets now observe expiration
    testTimeSource.setTimeMillis(2000);
    operators = new Thread[10];
    for (int i = 0; i < 10; i++) {
      operators[i] = new Thread(new CacheGetOperator(cache, i), "CACHE-GET-OPERATOR_" + i);
      operators[i].start();
    }
    for (int i = 0; i < 10; i++) {
      operators[i].join();
    }
    cacheManager.close();
    assertEquals(100, listener1.created.get());
    assertEquals(entryCount, listener1.expired.get());
    assertEquals(100, asyncListener.created.get());
    assertEquals(entryCount, asyncListener.expired.get());
  }

  @Test
  public void testMultiThreadedSyncAsyncNotificationsWithOffheap() throws InterruptedException {
    AsynchronousListener asyncListener = new AsynchronousListener();
    asyncListener.resetLatchCount(100);
    CacheConfiguration<Number, Number> cacheConfiguration = newCacheConfigurationBuilder(Number.class, Number.class,
        newResourcePoolsBuilder()
            .heap(10L, EntryUnit.ENTRIES).offheap(10, MemoryUnit.MB))
        .build();
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration)
        .build(true);
    Cache<Number, Number> cache = cacheManager.getCache("cache", Number.class, Number.class);
    cache.getRuntimeConfiguration().registerCacheEventListener(asyncListener, EventOrdering.UNORDERED, EventFiring.ASYNCHRONOUS, EnumSet
        .of(EventType.CREATED, EventType.EXPIRED));
    cache.getRuntimeConfiguration().registerCacheEventListener(listener1, EventOrdering.ORDERED, EventFiring.SYNCHRONOUS, EnumSet
        .of(EventType.CREATED, EventType.EXPIRED));
    // offheap tier is large enough that all 100 creations land without eviction
    Thread[] operators = new Thread[10];
    for (int i = 0; i < 10; i++) {
      operators[i] = new Thread(new CachePutOperator(cache, i), "CACHE-PUT-OPERATOR_" + i);
      operators[i].start();
    }
    for (int i = 0; i < 10; i++) {
      operators[i].join();
    }
    cacheManager.close();
    assertEquals(100, listener1.created.get());
    assertEquals(100, asyncListener.created.get());
  }

  @Test
  public void testMultiThreadedSyncNotifications() throws InterruptedException {
    CacheConfiguration<Number, Number> cacheConfiguration = newCacheConfigurationBuilder(Number.class, Number.class,
        newResourcePoolsBuilder()
            .heap(10L, EntryUnit.ENTRIES))
        .build();
    CacheManager cacheManager = CacheManagerBuilder.newCacheManagerBuilder().withCache("cache", cacheConfiguration)
        .build(true);
    Cache<Number, Number> cache = cacheManager.getCache("cache", Number.class, Number.class);
    cache.getRuntimeConfiguration()
        .registerCacheEventListener(listener1, EventOrdering.UNORDERED, EventFiring.SYNCHRONOUS, EnumSet
            .of(EventType.CREATED, EventType.EVICTED));
    Thread[] operators = new Thread[10];
    for (int i = 0; i < 10; i++) {
      operators[i] = new Thread(new CachePutOperator(cache, i), "CACHE-PUT-OPERATOR_" + i);
      operators[i].start();
    }
    for (int i = 0; i < 10; i++) {
      operators[i].join();
    }
    // whatever did not survive the 10-entry heap must have produced an EVICTED event
    int entryCount = 0;
    for (Cache.Entry<Number, Number> entry : cache) {
      entryCount++;
    }
    cacheManager.close();
    assertEquals(100, listener1.created.get());
    assertEquals(100 - entryCount, listener1.evicted.get());
  }

  /**
   * Counting listener. Synchronous listeners are invoked on the mutating threads,
   * so the counters are atomic and the sequence map is a ConcurrentHashMap.
   */
  public static class Listener implements CacheEventListener<Object, Object> {
    private final AtomicInteger evicted = new AtomicInteger();
    private final AtomicInteger created = new AtomicInteger();
    private final AtomicInteger updated = new AtomicInteger();
    private final AtomicInteger removed = new AtomicInteger();
    private final AtomicInteger expired = new AtomicInteger();
    // monotonically increasing sequence number assigned to each observed event
    private final AtomicInteger eventCounter = new AtomicInteger();
    // event type -> sequence number of the most recent event of that type
    private final Map<EventType, Integer> eventTypeHashMap = new ConcurrentHashMap<EventType, Integer>();

    @Override
    public void onEvent(CacheEvent<? extends Object, ? extends Object> event) {
      Logger logger = LoggerFactory.getLogger(EhcacheWithLoaderWriter.class.getName() + "-EventNotificationTest");
      logger.info(event.getType().toString());
      // single atomic fetch-and-increment avoids a read/increment race between threads
      eventTypeHashMap.put(event.getType(), eventCounter.getAndIncrement());
      switch (event.getType()) {
        case EVICTED:
          evicted.getAndIncrement();
          break;
        case CREATED:
          created.getAndIncrement();
          break;
        case UPDATED:
          updated.getAndIncrement();
          break;
        case REMOVED:
          removed.getAndIncrement();
          break;
        case EXPIRED:
          expired.getAndIncrement();
          break;
        default:
          break;
      }
    }
  }

  /**
   * Counting listener with a latch so tests can block until an expected number
   * of asynchronously delivered events has arrived.
   */
  public static class AsynchronousListener implements CacheEventListener<Object, Object> {
    private final AtomicInteger evicted = new AtomicInteger();
    private final AtomicInteger created = new AtomicInteger();
    private final AtomicInteger updated = new AtomicInteger();
    private final AtomicInteger removed = new AtomicInteger();
    private final AtomicInteger expired = new AtomicInteger();
    private CountDownLatch latch;

    // must be called before registration with the number of events to wait for
    private void resetLatchCount(int operations) {
      this.latch = new CountDownLatch(operations);
    }

    @Override
    public void onEvent(final CacheEvent<? extends Object, ? extends Object> event) {
      Logger logger = LoggerFactory.getLogger(EventNotificationTest.class.getName() + "-EventNotificationTest");
      logger.info(event.getType().toString());
      switch (event.getType()) {
        case EVICTED:
          evicted.getAndIncrement();
          break;
        case CREATED:
          created.getAndIncrement();
          break;
        case UPDATED:
          updated.getAndIncrement();
          break;
        case REMOVED:
          removed.getAndIncrement();
          break;
        case EXPIRED:
          expired.getAndIncrement();
          break;
        default:
          break;
      }
      latch.countDown();
    }
  }

  /**
   * Serializable payload whose serialized footprint is driven by the Byte[] length,
   * used to force size-based evictions.
   */
  public static class SerializableObject implements Serializable {
    private final int size;
    private final Byte[] data;

    SerializableObject(int size) {
      this.size = size;
      this.data = new Byte[size];
    }
  }

  /** Puts 10 sequential entries starting at (number * 100). */
  private static class CachePutOperator implements Runnable {
    Logger logger = LoggerFactory.getLogger(EventNotificationTest.class.getName() + "-EventNotificationTest");
    Cache<Number, Number> cache;
    int number;

    CachePutOperator(Cache<Number, Number> cache, int number) {
      this.cache = cache;
      this.number = number * 100;
    }

    @Override
    public void run() {
      for (int i = number; i < number + 10; i++) {
        cache.put(i , i);
        logger.info(Thread.currentThread().getName() + " putting " + i);
      }
    }
  }

  /** Gets 10 sequential keys starting at (number * 100), triggering expiration checks. */
  private static class CacheGetOperator implements Runnable {
    Cache<Number, Number> cache;
    int number;

    CacheGetOperator(Cache<Number, Number> cache, int number) {
      this.cache = cache;
      this.number = number * 100;
    }

    @Override
    public void run() {
      for (int i = number; i < number + 10; i++) {
        cache.get(i);
      }
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.optimizations;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import io.prestosql.Session;
import io.prestosql.metadata.Metadata;
import io.prestosql.metadata.TableProperties;
import io.prestosql.spi.connector.ColumnHandle;
import io.prestosql.spi.connector.LocalProperty;
import io.prestosql.sql.planner.Partitioning.ArgumentBinding;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.TypeAnalyzer;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.plan.AggregationNode;
import io.prestosql.sql.planner.plan.ApplyNode;
import io.prestosql.sql.planner.plan.AssignUniqueId;
import io.prestosql.sql.planner.plan.CorrelatedJoinNode;
import io.prestosql.sql.planner.plan.DeleteNode;
import io.prestosql.sql.planner.plan.DistinctLimitNode;
import io.prestosql.sql.planner.plan.EnforceSingleRowNode;
import io.prestosql.sql.planner.plan.ExchangeNode;
import io.prestosql.sql.planner.plan.ExplainAnalyzeNode;
import io.prestosql.sql.planner.plan.FilterNode;
import io.prestosql.sql.planner.plan.GroupIdNode;
import io.prestosql.sql.planner.plan.IndexJoinNode;
import io.prestosql.sql.planner.plan.IndexSourceNode;
import io.prestosql.sql.planner.plan.JoinNode;
import io.prestosql.sql.planner.plan.LimitNode;
import io.prestosql.sql.planner.plan.MarkDistinctNode;
import io.prestosql.sql.planner.plan.OutputNode;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.PlanVisitor;
import io.prestosql.sql.planner.plan.ProjectNode;
import io.prestosql.sql.planner.plan.RowNumberNode;
import io.prestosql.sql.planner.plan.SampleNode;
import io.prestosql.sql.planner.plan.SemiJoinNode;
import io.prestosql.sql.planner.plan.SortNode;
import io.prestosql.sql.planner.plan.SpatialJoinNode;
import io.prestosql.sql.planner.plan.StatisticsWriterNode;
import io.prestosql.sql.planner.plan.TableDeleteNode;
import io.prestosql.sql.planner.plan.TableFinishNode;
import io.prestosql.sql.planner.plan.TableScanNode;
import io.prestosql.sql.planner.plan.TableWriterNode;
import io.prestosql.sql.planner.plan.TopNNode;
import io.prestosql.sql.planner.plan.TopNRowNumberNode;
import io.prestosql.sql.planner.plan.UnionNode;
import io.prestosql.sql.planner.plan.UnnestNode;
import io.prestosql.sql.planner.plan.ValuesNode;
import io.prestosql.sql.planner.plan.WindowNode;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.SymbolReference;
import javax.annotation.concurrent.Immutable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static io.prestosql.SystemSessionProperties.isSpillEnabled;
import static io.prestosql.spi.predicate.TupleDomain.extractFixedValues;
import static io.prestosql.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION;
import static io.prestosql.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.FIXED;
import static io.prestosql.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.MULTIPLE;
import static io.prestosql.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.SINGLE;
import static io.prestosql.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static java.util.Objects.requireNonNull;
public final class StreamPropertyDerivations
{
private StreamPropertyDerivations() {}
/**
 * Derives the stream properties of {@code node} bottom-up: first computes the
 * properties of each source recursively, then derives this node's properties
 * from them.
 */
public static StreamProperties derivePropertiesRecursively(PlanNode node, Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
{
    ImmutableList.Builder<StreamProperties> sourceProperties = ImmutableList.builder();
    for (PlanNode source : node.getSources()) {
        sourceProperties.add(derivePropertiesRecursively(source, metadata, session, types, typeAnalyzer));
    }
    return deriveProperties(node, sourceProperties.build(), metadata, session, types, typeAnalyzer);
}
/**
 * Convenience overload for single-source nodes: wraps the lone input property
 * set in a singleton list and delegates to the list-based overload.
 */
public static StreamProperties deriveProperties(PlanNode node, StreamProperties inputProperties, Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
{
    List<StreamProperties> singleInput = ImmutableList.of(inputProperties);
    return deriveProperties(node, singleInput, metadata, session, types, typeAnalyzer);
}
/**
 * Derives the stream properties of {@code node} from the already-derived
 * properties of its sources, and verifies that every symbol the result refers
 * to is actually produced by the node.
 */
public static StreamProperties deriveProperties(PlanNode node, List<StreamProperties> inputProperties, Metadata metadata, Session session, TypeProvider types, TypeAnalyzer typeAnalyzer)
{
    requireNonNull(node, "node is null");
    requireNonNull(inputProperties, "inputProperties is null");
    requireNonNull(metadata, "metadata is null");
    requireNonNull(session, "session is null");
    requireNonNull(types, "types is null");
    requireNonNull(typeAnalyzer, "typeAnalyzer is null");

    // otherActualProperties is always non-null on anything handed out by this method:
    // external callers can only obtain StreamProperties from here, and
    // withOtherActualProperties below guarantees the field is populated.
    List<ActualProperties> inputActualProperties = inputProperties.stream()
            .map(properties -> properties.otherActualProperties)
            .collect(toImmutableList());
    ActualProperties otherProperties = PropertyDerivations.streamBackdoorDeriveProperties(node, inputActualProperties, metadata, session, types, typeAnalyzer);

    StreamProperties result = node.accept(new Visitor(metadata, session), inputProperties)
            .withOtherActualProperties(otherProperties);

    // Sanity check: partitioning columns must be a subset of the node's output symbols.
    result.getPartitioningColumns().ifPresent(columns ->
            verify(node.getOutputSymbols().containsAll(columns), "Stream-level partitioning properties contain columns not present in node's output"));

    // Sanity check: the same must hold for every column referenced by a local property.
    Set<Symbol> localPropertyColumns = result.getLocalProperties().stream()
            .flatMap(property -> property.getColumns().stream())
            .collect(toImmutableSet());
    verify(node.getOutputSymbols().containsAll(localPropertyColumns), "Stream-level local properties contain columns not present in node's output");

    return result;
}
/**
 * Computes the stream properties for each supported plan node type from the stream
 * properties of its inputs. Node types without an explicit visit method are rejected
 * by {@link #visitPlan}.
 */
private static class Visitor
        extends PlanVisitor<StreamProperties, List<StreamProperties>>
{
    private final Metadata metadata;
    private final Session session;

    private Visitor(Metadata metadata, Session session)
    {
        this.metadata = metadata;
        this.session = session;
    }

    @Override
    protected StreamProperties visitPlan(PlanNode node, List<StreamProperties> inputProperties)
    {
        // Fallback for node types with no explicit handler: fail loudly instead of guessing.
        throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
    }

    //
    // Joins
    //

    @Override
    public StreamProperties visitJoin(JoinNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties leftProperties = inputProperties.get(0);
        // when spilling is possible the operator may emit rows out of order
        boolean unordered = spillPossible(session, node);

        switch (node.getType()) {
            case INNER:
                return leftProperties
                        .translate(column -> PropertyDerivations.filterOrRewrite(node.getOutputSymbols(), node.getCriteria(), column))
                        .unordered(unordered);
            case LEFT:
                return leftProperties
                        .translate(column -> PropertyDerivations.filterIfMissing(node.getOutputSymbols(), column))
                        .unordered(unordered);
            case RIGHT:
                // since this is a right join, none of the matched output rows will contain nulls
                // in the left partitioning columns, and all of the unmatched rows will have
                // null for all left columns. therefore, the output is still partitioned on the
                // left columns. the only change is there will be at least two streams so the
                // output is multiple
                // There is one exception to this. If the left is partitioned on empty set, we
                // we can't say that the output is partitioned on empty set, but we can say that
                // it is partitioned on the left join symbols
                // todo do something smarter after https://github.com/prestodb/presto/pull/5877 is merged
                return new StreamProperties(MULTIPLE, Optional.empty(), false);
            case FULL:
                // the left can contain nulls in any stream so we can't say anything about the
                // partitioning, and nulls from the right are produced from a extra new stream
                // so we will always have multiple streams.
                return new StreamProperties(MULTIPLE, Optional.empty(), false);
            default:
                throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
        }
    }

    // Returns true when the session enables spilling AND the join node was marked spillable.
    // Throws if the spillable flag was never set by the planner.
    private static boolean spillPossible(Session session, JoinNode node)
    {
        return isSpillEnabled(session) && node.isSpillable().orElseThrow(() -> new IllegalArgumentException("spillable not yet set"));
    }

    @Override
    public StreamProperties visitSpatialJoin(SpatialJoinNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties leftProperties = inputProperties.get(0);
        switch (node.getType()) {
            case INNER:
            case LEFT:
                // output properties follow the probe (left) side, restricted to surviving symbols
                return leftProperties.translate(column -> PropertyDerivations.filterIfMissing(node.getOutputSymbols(), column));
            default:
                throw new IllegalArgumentException("Unsupported spatial join type: " + node.getType());
        }
    }

    @Override
    public StreamProperties visitIndexJoin(IndexJoinNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties probeProperties = inputProperties.get(0);
        switch (node.getType()) {
            case INNER:
                return probeProperties;
            case SOURCE_OUTER:
                // the probe can contain nulls in any stream so we can't say anything about the
                // partitioning but the other properties of the probe will be maintained.
                return probeProperties.withUnspecifiedPartitioning();
            default:
                throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
        }
    }

    //
    // Source nodes
    //

    @Override
    public StreamProperties visitValues(ValuesNode node, List<StreamProperties> context)
    {
        // values always produces a single stream
        return StreamProperties.singleStream();
    }

    @Override
    public StreamProperties visitTableScan(TableScanNode node, List<StreamProperties> inputProperties)
    {
        TableProperties layout = metadata.getTableProperties(session, node.getTable());
        // invert the scan's column->symbol assignments so we can translate column handles to symbols
        Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();

        // Globally constant assignments
        Set<ColumnHandle> constants = new HashSet<>();
        extractFixedValues(layout.getPredicate())
                .orElse(ImmutableMap.of())
                .entrySet().stream()
                .filter(entry -> !entry.getValue().isNull()) // TODO consider allowing nulls
                .forEach(entry -> constants.add(entry.getKey()));

        Optional<Set<Symbol>> streamPartitionSymbols = layout.getStreamPartitioningColumns()
                .flatMap(columns -> getNonConstantSymbols(columns, assignments, constants));

        // if we are partitioned on empty set, we must say multiple of unknown partitioning, because
        // the connector does not guarantee a single split in this case (since it might not understand
        // that the value is a constant).
        if (streamPartitionSymbols.isPresent() && streamPartitionSymbols.get().isEmpty()) {
            return new StreamProperties(MULTIPLE, Optional.empty(), false);
        }
        return new StreamProperties(MULTIPLE, streamPartitionSymbols, false);
    }

    // Translates partitioning column handles into output symbols, dropping global constants;
    // returns empty when some non-constant partitioning column has no corresponding symbol.
    private static Optional<Set<Symbol>> getNonConstantSymbols(Set<ColumnHandle> columnHandles, Map<ColumnHandle, Symbol> assignments, Set<ColumnHandle> globalConstants)
    {
        // Strip off the constants from the partitioning columns (since those are not required for translation)
        Set<ColumnHandle> constantsStrippedPartitionColumns = columnHandles.stream()
                .filter(column -> !globalConstants.contains(column))
                .collect(toImmutableSet());
        ImmutableSet.Builder<Symbol> builder = ImmutableSet.builder();
        for (ColumnHandle column : constantsStrippedPartitionColumns) {
            Symbol translated = assignments.get(column);
            if (translated == null) {
                return Optional.empty();
            }
            builder.add(translated);
        }
        return Optional.of(builder.build());
    }

    @Override
    public StreamProperties visitExchange(ExchangeNode node, List<StreamProperties> inputProperties)
    {
        if (node.getOrderingScheme().isPresent()) {
            return StreamProperties.ordered();
        }

        if (node.getScope() == REMOTE) {
            // TODO: correctly determine if stream is parallelised
            // based on session properties
            return StreamProperties.fixedStreams();
        }

        switch (node.getType()) {
            case GATHER:
                return StreamProperties.singleStream();
            case REPARTITION:
                if (node.getPartitioningScheme().getPartitioning().getHandle().equals(FIXED_ARBITRARY_DISTRIBUTION)) {
                    // round-robin style repartitioning: fixed streams, but no column-based partitioning
                    return new StreamProperties(FIXED, Optional.empty(), false);
                }
                return new StreamProperties(
                        FIXED,
                        Optional.of(node.getPartitioningScheme().getPartitioning().getArguments().stream()
                                .map(ArgumentBinding::getColumn)
                                .collect(toImmutableList())), false);
            case REPLICATE:
                return new StreamProperties(MULTIPLE, Optional.empty(), false);
        }
        throw new UnsupportedOperationException("not yet implemented");
    }

    //
    // Nodes that rewrite and/or drop symbols
    //

    @Override
    public StreamProperties visitProject(ProjectNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // We can describe properties in terms of inputs that are projected unmodified (i.e., identity projections)
        Map<Symbol, Symbol> identities = computeIdentityTranslations(node.getAssignments().getMap());
        return properties.translate(column -> Optional.ofNullable(identities.get(column)));
    }

    // Maps each input symbol that is projected unchanged (a bare symbol reference) to its output symbol.
    private static Map<Symbol, Symbol> computeIdentityTranslations(Map<Symbol, Expression> assignments)
    {
        Map<Symbol, Symbol> inputToOutput = new HashMap<>();
        for (Map.Entry<Symbol, Expression> assignment : assignments.entrySet()) {
            if (assignment.getValue() instanceof SymbolReference) {
                inputToOutput.put(Symbol.from(assignment.getValue()), assignment.getKey());
            }
        }
        return inputToOutput;
    }

    @Override
    public StreamProperties visitGroupId(GroupIdNode node, List<StreamProperties> inputProperties)
    {
        Map<Symbol, Symbol> inputToOutputMappings = new HashMap<>();
        for (Map.Entry<Symbol, Symbol> setMapping : node.getGroupingColumns().entrySet()) {
            if (node.getCommonGroupingColumns().contains(setMapping.getKey())) {
                // TODO: Add support for translating a property on a single column to multiple columns
                // when GroupIdNode is copying a single input grouping column into multiple output grouping columns (i.e. aliases), this is basically picking one arbitrarily
                inputToOutputMappings.putIfAbsent(setMapping.getValue(), setMapping.getKey());
            }
        }

        // TODO: Add support for translating a property on a single column to multiple columns
        // this is deliberately placed after the grouping columns, because preserving properties has a bigger perf impact
        for (Symbol argument : node.getAggregationArguments()) {
            inputToOutputMappings.putIfAbsent(argument, argument);
        }

        return Iterables.getOnlyElement(inputProperties).translate(column -> Optional.ofNullable(inputToOutputMappings.get(column)));
    }

    @Override
    public StreamProperties visitAggregation(AggregationNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // Only grouped symbols projected symbols are passed through
        return properties.translate(symbol -> node.getGroupingKeys().contains(symbol) ? Optional.of(symbol) : Optional.empty());
    }

    @Override
    public StreamProperties visitStatisticsWriterNode(StatisticsWriterNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // analyze finish only outputs row count
        return properties.withUnspecifiedPartitioning();
    }

    @Override
    public StreamProperties visitTableFinish(TableFinishNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // table finish only outputs the row count
        return properties.withUnspecifiedPartitioning();
    }

    @Override
    public StreamProperties visitTableDelete(TableDeleteNode node, List<StreamProperties> inputProperties)
    {
        // delete only outputs a single row count
        return StreamProperties.singleStream();
    }

    @Override
    public StreamProperties visitDelete(DeleteNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // delete only outputs the row count
        return properties.withUnspecifiedPartitioning();
    }

    @Override
    public StreamProperties visitTableWriter(TableWriterNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // table writer only outputs the row count
        return properties.withUnspecifiedPartitioning();
    }

    @Override
    public StreamProperties visitUnnest(UnnestNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // We can describe properties in terms of inputs that are projected unmodified (i.e., not the unnested symbols)
        Set<Symbol> passThroughInputs = ImmutableSet.copyOf(node.getReplicateSymbols());
        StreamProperties translatedProperties = properties.translate(column -> {
            if (passThroughInputs.contains(column)) {
                return Optional.of(column);
            }
            return Optional.empty();
        });
        switch (node.getJoinType()) {
            case INNER:
            case LEFT:
                return translatedProperties;
            case RIGHT:
            case FULL:
                // RIGHT/FULL unnest drop the ordering guarantee of the input
                return translatedProperties.unordered(true);
            default:
                throw new UnsupportedOperationException("Unknown UNNEST join type: " + node.getJoinType());
        }
    }

    @Override
    public StreamProperties visitExplainAnalyze(ExplainAnalyzeNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        // explain only outputs the plan string
        return properties.withUnspecifiedPartitioning();
    }

    //
    // Nodes that gather data into a single stream
    //

    @Override
    public StreamProperties visitIndexSource(IndexSourceNode node, List<StreamProperties> context)
    {
        return StreamProperties.singleStream();
    }

    @Override
    public StreamProperties visitUnion(UnionNode node, List<StreamProperties> context)
    {
        // union is implemented using a local gather exchange
        return StreamProperties.singleStream();
    }

    @Override
    public StreamProperties visitEnforceSingleRow(EnforceSingleRowNode node, List<StreamProperties> context)
    {
        return StreamProperties.singleStream();
    }

    @Override
    public StreamProperties visitAssignUniqueId(AssignUniqueId node, List<StreamProperties> inputProperties)
    {
        StreamProperties properties = Iterables.getOnlyElement(inputProperties);

        if (properties.getPartitioningColumns().isPresent()) {
            // preserve input (possibly preferred) partitioning
            return properties;
        }

        // otherwise the generated unique id column itself partitions the stream
        return new StreamProperties(properties.getDistribution(),
                Optional.of(ImmutableList.of(node.getIdColumn())),
                properties.isOrdered());
    }

    //
    // Simple nodes that pass through stream properties
    //

    @Override
    public StreamProperties visitOutput(OutputNode node, List<StreamProperties> inputProperties)
    {
        return Iterables.getOnlyElement(inputProperties)
                .translate(column -> PropertyDerivations.filterIfMissing(node.getOutputSymbols(), column));
    }

    @Override
    public StreamProperties visitMarkDistinct(MarkDistinctNode node, List<StreamProperties> inputProperties)
    {
        // stream properties pass through unchanged
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitWindow(WindowNode node, List<StreamProperties> inputProperties)
    {
        // stream properties pass through unchanged
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitRowNumber(RowNumberNode node, List<StreamProperties> inputProperties)
    {
        // stream properties pass through unchanged
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitTopNRowNumber(TopNRowNumberNode node, List<StreamProperties> inputProperties)
    {
        // stream properties pass through unchanged
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitTopN(TopNNode node, List<StreamProperties> inputProperties)
    {
        // Partial TopN doesn't guarantee that stream is ordered
        if (node.getStep() == TopNNode.Step.PARTIAL) {
            return Iterables.getOnlyElement(inputProperties);
        }

        return StreamProperties.ordered();
    }

    @Override
    public StreamProperties visitSort(SortNode node, List<StreamProperties> inputProperties)
    {
        StreamProperties sourceProperties = Iterables.getOnlyElement(inputProperties);
        if (sourceProperties.isSingleStream()) {
            // stream is only sorted if sort operator is executed without parallelism
            return StreamProperties.ordered();
        }

        return sourceProperties;
    }

    @Override
    public StreamProperties visitLimit(LimitNode node, List<StreamProperties> inputProperties)
    {
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitDistinctLimit(DistinctLimitNode node, List<StreamProperties> inputProperties)
    {
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitSemiJoin(SemiJoinNode node, List<StreamProperties> inputProperties)
    {
        // output properties follow the source (first) input
        return inputProperties.get(0);
    }

    @Override
    public StreamProperties visitApply(ApplyNode node, List<StreamProperties> inputProperties)
    {
        // ApplyNode is expected to be rewritten away before property derivation runs
        throw new IllegalStateException("Unexpected node: " + node.getClass());
    }

    @Override
    public StreamProperties visitCorrelatedJoin(CorrelatedJoinNode node, List<StreamProperties> inputProperties)
    {
        // CorrelatedJoinNode is expected to be rewritten away before property derivation runs
        throw new IllegalStateException("Unexpected node: " + node.getClass());
    }

    @Override
    public StreamProperties visitFilter(FilterNode node, List<StreamProperties> inputProperties)
    {
        // filtering rows does not change stream properties
        return Iterables.getOnlyElement(inputProperties);
    }

    @Override
    public StreamProperties visitSample(SampleNode node, List<StreamProperties> inputProperties)
    {
        // sampling rows does not change stream properties
        return Iterables.getOnlyElement(inputProperties);
    }
}
@Immutable
/**
 * Describes how a plan node's output is distributed across local streams (drivers):
 * the stream distribution, the optional partitioning columns, and whether the single
 * stream is ordered. Also carries the derived {@link ActualProperties} so local
 * properties can be queried.
 */
public static final class StreamProperties
{
    public enum StreamDistribution
    {
        SINGLE, MULTIPLE, FIXED
    }

    private final StreamDistribution distribution;
    private final Optional<List<Symbol>> partitioningColumns; // if missing => partitioned with some unknown scheme
    private final boolean ordered;

    // We are only interested in the local properties, but PropertyDerivations requires input
    // ActualProperties, so we hold on to the whole object
    private final ActualProperties otherActualProperties;

    // NOTE: Partitioning on zero columns (or effectively zero columns if the columns are constant) indicates that all
    // the rows will be partitioned into a single stream.
    private StreamProperties(StreamDistribution distribution, Optional<? extends Iterable<Symbol>> partitioningColumns, boolean ordered)
    {
        // otherActualProperties starts as null; it is filled in later via withOtherActualProperties
        this(distribution, partitioningColumns, ordered, null);
    }

    private StreamProperties(
            StreamDistribution distribution,
            Optional<? extends Iterable<Symbol>> partitioningColumns,
            boolean ordered,
            ActualProperties otherActualProperties)
    {
        this.distribution = requireNonNull(distribution, "distribution is null");

        this.partitioningColumns = requireNonNull(partitioningColumns, "partitioningProperties is null")
                .map(ImmutableList::copyOf);

        // invariant: SINGLE <=> partitioned on the empty column set
        checkArgument(distribution != SINGLE || this.partitioningColumns.equals(Optional.of(ImmutableList.of())),
                "Single stream must be partitioned on empty set");
        checkArgument(distribution == SINGLE || !this.partitioningColumns.equals(Optional.of(ImmutableList.of())),
                "Multiple streams must not be partitioned on empty set");

        this.ordered = ordered;
        // invariant: only a single stream can be ordered
        checkArgument(!ordered || distribution == SINGLE, "Ordered must be a single stream");

        this.otherActualProperties = otherActualProperties;
    }

    /**
     * Returns the local (within-stream) properties. Only valid after
     * {@code withOtherActualProperties} has been applied.
     */
    public List<LocalProperty<Symbol>> getLocalProperties()
    {
        checkState(otherActualProperties != null, "otherActualProperties not set");
        return otherActualProperties.getLocalProperties();
    }

    // single unordered stream
    private static StreamProperties singleStream()
    {
        return new StreamProperties(SINGLE, Optional.of(ImmutableSet.of()), false);
    }

    // fixed number of streams with unknown partitioning
    private static StreamProperties fixedStreams()
    {
        return new StreamProperties(FIXED, Optional.empty(), false);
    }

    // single ordered stream
    private static StreamProperties ordered()
    {
        return new StreamProperties(SINGLE, Optional.of(ImmutableSet.of()), true);
    }

    /**
     * Returns a copy of these properties with ordering dropped when {@code unordered}
     * is true; otherwise returns {@code this} unchanged.
     */
    private StreamProperties unordered(boolean unordered)
    {
        if (unordered) {
            ActualProperties updatedProperies = null;
            if (otherActualProperties != null) {
                updatedProperies = ActualProperties.builderFrom(otherActualProperties)
                        .unordered(true)
                        .build();
            }
            return new StreamProperties(
                    distribution,
                    partitioningColumns,
                    false,
                    updatedProperies);
        }
        return this;
    }

    public boolean isSingleStream()
    {
        return distribution == SINGLE;
    }

    public StreamDistribution getDistribution()
    {
        return distribution;
    }

    // true only when the partitioning columns are known and equal to the given columns, in order
    public boolean isExactlyPartitionedOn(Iterable<Symbol> columns)
    {
        return partitioningColumns.isPresent() && columns.equals(ImmutableList.copyOf(partitioningColumns.get()));
    }

    public boolean isPartitionedOn(Iterable<Symbol> columns)
    {
        if (partitioningColumns.isEmpty()) {
            return false;
        }

        // partitioned on (k_1, k_2, ..., k_n) => partitioned on (k_1, k_2, ..., k_n, k_n+1, ...)
        // can safely ignore all constant columns when comparing partition properties
        return ImmutableSet.copyOf(columns).containsAll(partitioningColumns.get());
    }

    public boolean isOrdered()
    {
        return ordered;
    }

    /**
     * Drops the partitioning columns, keeping the distribution. A single stream is
     * returned unchanged since it carries no partitioning symbols.
     */
    private StreamProperties withUnspecifiedPartitioning()
    {
        // a single stream has no symbols
        if (isSingleStream()) {
            return this;
        }
        // otherwise we are distributed on some symbols, but since we are trying to remove all symbols,
        // just say we have multiple partitions with an unknown scheme
        return new StreamProperties(distribution, Optional.empty(), ordered);
    }

    private StreamProperties withOtherActualProperties(ActualProperties actualProperties)
    {
        return new StreamProperties(distribution, partitioningColumns, ordered, actualProperties);
    }

    /**
     * Rewrites the partitioning columns through {@code translator}; if any partitioning
     * column cannot be translated, the partitioning becomes unknown (empty Optional).
     */
    public StreamProperties translate(Function<Symbol, Optional<Symbol>> translator)
    {
        return new StreamProperties(
                distribution,
                partitioningColumns.flatMap(partitioning -> {
                    ImmutableList.Builder<Symbol> newPartitioningColumns = ImmutableList.builder();
                    for (Symbol partitioningColumn : partitioning) {
                        Optional<Symbol> translated = translator.apply(partitioningColumn);
                        if (translated.isEmpty()) {
                            return Optional.empty();
                        }
                        newPartitioningColumns.add(translated.get());
                    }
                    return Optional.of(newPartitioningColumns.build());
                }),
                ordered, otherActualProperties.translate(translator));
    }

    public Optional<List<Symbol>> getPartitioningColumns()
    {
        return partitioningColumns;
    }

    // NOTE: hashCode/equals deliberately ignore ordered and otherActualProperties
    @Override
    public int hashCode()
    {
        return Objects.hash(distribution, partitioningColumns);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        StreamProperties other = (StreamProperties) obj;
        return this.distribution == other.distribution &&
                Objects.equals(this.partitioningColumns, other.partitioningColumns);
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("distribution", distribution)
                .add("partitioningColumns", partitioningColumns)
                .toString();
    }
}
}
| |
/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.nui.layouts;
import com.google.common.collect.Lists;
import org.terasology.input.Keyboard;
import org.terasology.math.geom.Rect2i;
import org.terasology.math.TeraMath;
import org.terasology.math.geom.Vector2i;
import org.terasology.math.geom.Vector2f;
import org.terasology.rendering.nui.BaseInteractionListener;
import org.terasology.rendering.nui.Canvas;
import org.terasology.rendering.nui.CoreLayout;
import org.terasology.rendering.nui.InteractionListener;
import org.terasology.rendering.nui.LayoutHint;
import org.terasology.rendering.nui.UIWidget;
import org.terasology.rendering.nui.events.NUIMouseClickEvent;
import org.terasology.rendering.nui.events.NUIMouseDragEvent;
import org.terasology.rendering.nui.events.NUIMouseOverEvent;
import org.terasology.rendering.nui.events.NUIMouseWheelEvent;
import java.util.Iterator;
import java.util.List;
/**
* A layout that allows positioning to a virtual coordinate system, which is mapped to screen coordinates using a
* viewport.
*
* @author synopia
*/
public class ZoomableLayout extends CoreLayout {
    private List<PositionalWidget> widgets = Lists.newArrayList();
    // pixels per world unit on each axis; recomputed by calculateSizes()
    private Vector2f pixelSize;
    private Vector2i screenSize;
    // top-left corner of the visible window, in world coordinates
    private Vector2f windowPosition = new Vector2f();
    // extent of the visible window, in world units
    private Vector2f windowSize = new Vector2f(50, 50);
    private Vector2i lastMousePosition;

    // Pans the view while dragging and zooms on SHIFT + mouse wheel.
    private InteractionListener dragListener = new BaseInteractionListener() {
        @Override
        public void onMouseOver(NUIMouseOverEvent event) {
            lastMousePosition = new Vector2i(event.getRelativeMousePosition());
        }

        @Override
        public boolean onMouseClick(NUIMouseClickEvent event) {
            return true;
        }

        @Override
        public void onMouseDrag(NUIMouseDragEvent event) {
            // shift the window by the world-space distance the mouse moved
            Vector2f delta = screenToWorld(lastMousePosition);
            delta.sub(screenToWorld(event.getRelativeMousePosition()));
            delta.add(windowPosition);
            setWindowPosition(delta);
        }

        @Override
        public boolean onMouseWheel(NUIMouseWheelEvent event) {
            if (event.getKeyboard().isKeyDown(Keyboard.Key.LEFT_SHIFT.getId())) {
                float factor = 1 + event.getWheelTurns() * 0.05f;
                zoom(factor, factor, event.getRelativeMousePosition());
            }
            return false;
        }
    };

    public ZoomableLayout() {
    }

    public ZoomableLayout(String id) {
        super(id);
    }

    @Override
    public void addWidget(UIWidget element, LayoutHint hint) {
        // only positional widgets can participate in this layout
        if (element instanceof PositionalWidget) {
            addWidget((PositionalWidget) element);
        }
    }

    public void addWidget(PositionalWidget widget) {
        if (widget == null) {
            return;
        }
        widgets.add(widget);
        widget.onAdded(this);
    }

    @Override
    public void removeWidget(UIWidget element) {
        if (element instanceof PositionalWidget) {
            removeWidget((PositionalWidget) element);
        }
    }

    public void removeWidget(PositionalWidget widget) {
        if (widget == null) {
            return;
        }
        widget.onRemoved(this);
        widgets.remove(widget);
    }

    public void removeAll() {
        // notify every widget before dropping them all at once
        for (PositionalWidget widget : widgets) {
            widget.onRemoved(this);
        }
        widgets.clear();
    }

    @Override
    public void onDraw(Canvas canvas) {
        setScreenSize(canvas.size());
        calculateSizes();
        canvas.addInteractionRegion(dragListener);
        for (PositionalWidget widget : widgets) {
            if (!widget.isVisible()) {
                continue;
            }
            // map the widget's world-space rectangle onto the screen
            Vector2i topLeft = worldToScreen(widget.getPosition());
            Vector2f worldBottomRight = new Vector2f(widget.getPosition());
            worldBottomRight.add(widget.getSize());
            Vector2i bottomRight = worldToScreen(worldBottomRight);
            canvas.drawWidget(widget, Rect2i.createFromMinAndMax(topLeft, bottomRight));
        }
    }

    @Override
    public Vector2i getPreferredContentSize(Canvas canvas, Vector2i sizeHint) {
        return Vector2i.zero();
    }

    @Override
    public Vector2i getMaxContentSize(Canvas canvas) {
        return new Vector2i(Integer.MAX_VALUE, Integer.MAX_VALUE);
    }

    @Override
    public void update(float delta) {
        for (PositionalWidget widget : widgets) {
            widget.update(delta);
        }
    }

    public List<PositionalWidget> getWidgets() {
        return widgets;
    }

    @Override
    public Iterator<UIWidget> iterator() {
        // expose the positional widgets through the UIWidget iterator contract
        return new Iterator<UIWidget>() {
            private Iterator<PositionalWidget> inner = widgets.iterator();

            @Override
            public boolean hasNext() {
                return inner.hasNext();
            }

            @Override
            public UIWidget next() {
                return inner.next();
            }

            @Override
            public void remove() {
                inner.remove();
            }
        };
    }

    public Vector2f screenToWorld(Vector2i screenPos) {
        float worldX = screenPos.x / pixelSize.x + windowPosition.x;
        float worldY = screenPos.y / pixelSize.y + windowPosition.y;
        return new Vector2f(worldX, worldY);
    }

    public Vector2i worldToScreen(Vector2f world) {
        int screenX = TeraMath.ceilToInt((world.x - windowPosition.x) * pixelSize.x);
        int screenY = TeraMath.ceilToInt((world.y - windowPosition.y) * pixelSize.y);
        return new Vector2i(screenX, screenY);
    }

    public void setWindowPosition(Vector2f pos) {
        windowPosition = pos;
    }

    public void setWindowSize(Vector2f size) {
        windowSize = size;
    }

    public void setScreenSize(Vector2i size) {
        screenSize = size;
    }

    public Vector2f getPixelSize() {
        return pixelSize;
    }

    public Vector2i getScreenSize() {
        return screenSize;
    }

    public Vector2f getWindowPosition() {
        return windowPosition;
    }

    public Vector2f getWindowSize() {
        return windowSize;
    }

    public void calculateSizes() {
        // collapse the window to a square using the smaller side
        float side = Math.min(windowSize.x, windowSize.y);
        windowSize.x = side;
        windowSize.y = side;
        // then stretch it along the longer screen axis to preserve the screen's aspect ratio
        if (screenSize.x != 0 && screenSize.y != 0) {
            if (screenSize.x > screenSize.y) {
                windowSize.x *= (float) screenSize.x / screenSize.y;
            } else {
                windowSize.y *= (float) screenSize.y / screenSize.x;
            }
        }
        if (windowSize.x > 0 && windowSize.y > 0) {
            pixelSize = new Vector2f(screenSize.x / windowSize.x, screenSize.y / windowSize.y);
        } else {
            pixelSize = new Vector2f();
        }
    }

    public void zoom(float zoomX, float zoomY, Vector2i mousePos) {
        // keep the world point under the mouse fixed while rescaling the window
        Vector2f anchorBefore = screenToWorld(mousePos);
        windowSize.x *= zoomX;
        windowSize.y *= zoomY;
        calculateSizes();
        Vector2f anchorAfter = screenToWorld(mousePos);
        windowPosition.x -= anchorAfter.x - anchorBefore.x;
        windowPosition.y -= anchorAfter.y - anchorBefore.y;
    }

    public interface PositionalWidget<L extends ZoomableLayout> extends UIWidget {
        Vector2f getPosition();

        Vector2f getSize();

        void onAdded(L layout);

        void onRemoved(L layout);
    }
}
| |
/*
Copyright 2015 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Database.PostgreSQL.Savers;
import com.google.common.base.Preconditions;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Iterables;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.AbstractSQLProvider;
import com.google.security.zynamics.binnavi.Database.CConnection;
import com.google.security.zynamics.binnavi.Database.CTableNames;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions.PostgreSQLInstructionFunctions;
import com.google.security.zynamics.binnavi.Database.PostgreSQL.Functions.PostgreSQLNodeFunctions;
import com.google.security.zynamics.binnavi.Exceptions.MaybeNullException;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.CComment;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
import com.google.security.zynamics.binnavi.Tagging.CTag;
import com.google.security.zynamics.binnavi.disassembly.CCodeNode;
import com.google.security.zynamics.binnavi.disassembly.CFunctionNode;
import com.google.security.zynamics.binnavi.disassembly.CTextNode;
import com.google.security.zynamics.binnavi.disassembly.INaviCodeNode;
import com.google.security.zynamics.binnavi.disassembly.INaviFunction;
import com.google.security.zynamics.binnavi.disassembly.INaviFunctionNode;
import com.google.security.zynamics.binnavi.disassembly.INaviGroupNode;
import com.google.security.zynamics.binnavi.disassembly.INaviInstruction;
import com.google.security.zynamics.binnavi.disassembly.INaviTextNode;
import com.google.security.zynamics.binnavi.disassembly.INaviViewNode;
import com.google.security.zynamics.zylib.general.Pair;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
public final class PostgreSQLNodeSaver {
private static String CODE = "code";
private static String FUNCTION = "function";
private static String GROUP = "group";
private static String TEXT = "text";
/**
* Class is only used to provide state less methods and thus should not be instantiated.
*/
private PostgreSQLNodeSaver() {
}
/**
* Saves the nodes to the nodes table. As a side effect, this function also fills index lists that
* store the indices into the nodes list for all node types. TODO: This method should probably be
* split into two methods.
*
* @param provider Provides the connection to the database.
* @param newViewId ID of the new view that is being saved.
* @param nodes The nodes to save.
* @param functionNodeIndices Index into the nodes list that identifies the function nodes.
* @param codeNodeIndices Index into the nodes list that identifies the code nodes.
* @param textNodeIndices Index into the nodes list that identifies the text nodes.
* @param groupNodeIndices Index into the nodes list that identifies the group nodes.
* @param groupNodeMap Maps between node IDs and group node objects.
* @return The ID of the first node saved to the database.
* @throws SQLException Thrown if saving the nodes failed.
*/
/**
 * Writes the raw node rows of a view to the nodes table and records, per node type, the index of
 * each node in the input list so that the type-specific tables can be filled afterwards.
 *
 * @param provider The provider used to access the database.
 * @param newViewId The ID of the view the nodes belong to.
 * @param nodes The nodes to save.
 * @param functionNodeIndices Filled with the indices of function nodes in {@code nodes}.
 * @param codeNodeIndices Filled with the indices of code nodes in {@code nodes}.
 * @param textNodeIndices Filled with the indices of text nodes in {@code nodes}.
 * @param groupNodeIndices Filled with the indices of group nodes in {@code nodes}.
 * @param groupNodeMap Filled with a mapping between node indices and group node objects.
 *
 * @return The ID of the first node saved to the database, or 0 if no key was generated.
 *
 * @throws SQLException Thrown if saving the nodes failed.
 */
private static int saveNodes(final AbstractSQLProvider provider, final int newViewId,
    final List<INaviViewNode> nodes, final List<Integer> functionNodeIndices,
    final List<Integer> codeNodeIndices, final List<Integer> textNodeIndices,
    final List<Integer> groupNodeIndices, final BiMap<Integer, INaviGroupNode> groupNodeMap)
    throws SQLException {
  final String query =
      "INSERT INTO " + CTableNames.NODES_TABLE
          + "( view_id, parent_id, type, x, y, width, height, color, bordercolor, "
          + " selected, visible) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
  final PreparedStatement preparedStatement =
      provider.getConnection().getConnection()
          .prepareStatement(query, java.sql.Statement.RETURN_GENERATED_KEYS);
  // Close the statement in a finally block so it can not leak if filling or executing the
  // batch throws. (The original code only closed it after a successful executeBatch().)
  try {
    int counter = 0;
    for (final INaviViewNode node : nodes) {
      String nodeType = null;
      if (node instanceof CCodeNode) {
        nodeType = CODE;
        codeNodeIndices.add(counter);
      } else if (node instanceof CFunctionNode) {
        nodeType = FUNCTION;
        functionNodeIndices.add(counter);
      } else if (node instanceof INaviGroupNode) {
        nodeType = GROUP;
        groupNodeIndices.add(counter);
        groupNodeMap.put(counter, (INaviGroupNode) node);
      } else if (node instanceof CTextNode) {
        nodeType = TEXT;
        textNodeIndices.add(counter);
      }
      counter++;
      preparedStatement.setInt(1, newViewId);
      // The parent is assigned later by saveParentGroups() once all node IDs are known.
      preparedStatement.setNull(2, Types.INTEGER);
      preparedStatement.setObject(3, nodeType, Types.OTHER);
      preparedStatement.setDouble(4, node.getX());
      preparedStatement.setDouble(5, node.getY());
      preparedStatement.setDouble(6, node.getWidth());
      preparedStatement.setDouble(7, node.getHeight());
      preparedStatement.setInt(8, node.getColor().getRGB());
      preparedStatement.setInt(9, node.getBorderColor().getRGB());
      preparedStatement.setBoolean(10, node.isSelected());
      preparedStatement.setBoolean(11, node.isVisible());
      preparedStatement.addBatch();
    }
    preparedStatement.executeBatch();
    // Only the key of the first inserted row is needed; the rest of the code assumes the
    // remaining generated IDs are consecutive (firstNode + index).
    final ResultSet resultSet = preparedStatement.getGeneratedKeys();
    try {
      return resultSet.next() ? resultSet.getInt(1) : 0;
    } finally {
      // Close the result set before its owning statement, not after.
      resultSet.close();
    }
  } finally {
    preparedStatement.close();
  }
}
/**
 * Validates the argument set shared by the node saving functions.
 *
 * @param provider The provider used to access the database. Must not be null.
 * @param newViewId The ID of the view the nodes belong to. Must be positive.
 * @param nodes The nodes to save. Must not be null.
 */
protected static void checkArguments(final AbstractSQLProvider provider, final int newViewId,
    final List<INaviViewNode> nodes) {
  Preconditions.checkNotNull(
      provider, "IE01992: Provider argument can not be null");
  Preconditions.checkArgument(
      newViewId > 0, "IE01993: New View ID argument must be greater then zero");
  Preconditions.checkNotNull(
      nodes, "IE01994: Nodes argument can not be null");
}
/**
 * Saves the mapping between code nodes and their instructions to the database.
 *
 * @param provider The provider used to access the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param codeNodeIndices Index into the nodes list that identifies the code nodes.
 *
 * @return The code node / instruction pairs whose local comments have not been saved yet. Never
 *         null; an empty list is returned if there is nothing to save.
 *
 * @throws SQLException Thrown if saving the code node instructions failed.
 */
protected static ArrayList<Pair<INaviCodeNode, INaviInstruction>> saveCodeNodeInstructions(
    final SQLProvider provider, final List<INaviViewNode> nodes, final int firstNode,
    final List<Integer> codeNodeIndices) throws SQLException {
  final ArrayList<Pair<INaviCodeNode, INaviInstruction>> instructionsWithUnsavedLocalComments =
      new ArrayList<Pair<INaviCodeNode, INaviInstruction>>();
  // Return an empty list instead of null: callers (e.g. saveCodeNodes) iterate over the result
  // without a null check, so a null return risks a NullPointerException.
  if (nodes.isEmpty()) {
    return instructionsWithUnsavedLocalComments;
  }
  // Make sure all instructions referenced by the code nodes exist in the database before the
  // mapping rows referencing them are written.
  final Set<INaviInstruction> unsavedInstructions = new HashSet<INaviInstruction>();
  for (final int index : codeNodeIndices) {
    final CCodeNode node = (CCodeNode) nodes.get(index);
    for (final INaviInstruction instruction : node.getInstructions()) {
      if (!instruction.isStored()) {
        unsavedInstructions.add(instruction);
      }
    }
  }
  PostgreSQLInstructionFunctions.createInstructions(provider, unsavedInstructions);
  final String query =
      "INSERT INTO " + CTableNames.CODENODE_INSTRUCTIONS_TABLE
          + " (module_id, node_id, position, address, comment_id) VALUES (?, ?, ?, ?, ?)";
  final PreparedStatement preparedStatement =
      provider.getConnection().getConnection().prepareStatement(query);
  try {
    for (final Integer index : codeNodeIndices) {
      final INaviCodeNode codeNode = (INaviCodeNode) nodes.get(index);
      int position = 0;
      for (final INaviInstruction instruction : codeNode.getInstructions()) {
        final List<IComment> comments =
            codeNode.getComments().getLocalInstructionComment(instruction);
        // A comment chain that exists but whose last element has no ID has not been written
        // to the database yet; such instructions are reported to the caller for fix-up.
        final Integer commentId =
            comments == null ? null : comments.size() == 0 ? null : Iterables.getLast(comments)
                .getId();
        if ((comments != null) && (comments.size() != 0) && (commentId == null)) {
          instructionsWithUnsavedLocalComments.add(new Pair<INaviCodeNode, INaviInstruction>(
              codeNode, instruction));
        }
        final int moduleId = instruction.getModule().getConfiguration().getId();
        preparedStatement.setInt(1, moduleId);
        preparedStatement.setInt(2, firstNode + index);
        preparedStatement.setInt(3, position);
        preparedStatement.setObject(4, instruction.getAddress().toBigInteger(), Types.BIGINT);
        if (commentId == null) {
          preparedStatement.setNull(5, Types.INTEGER);
        } else {
          preparedStatement.setInt(5, commentId);
        }
        position++;
        preparedStatement.addBatch();
      }
    }
    preparedStatement.executeBatch();
  } finally {
    preparedStatement.close();
  }
  return instructionsWithUnsavedLocalComments;
}
/**
 * Saves the code nodes to the database.
 *
 * @param provider The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param codeNodeIndices Index into the nodes list that identifies the code nodes.
 *
 * @throws SQLException Thrown if saving the code node instructions failed.
 */
protected static void saveCodeNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
    final int firstNode, final List<Integer> codeNodeIndices) throws SQLException {
  if (!codeNodeIndices.isEmpty()) {
    // Persist the per-node instruction mappings first; the call also reports which
    // instructions carry local comments that have no database ID yet so they can be
    // written below.
    final List<Pair<INaviCodeNode, INaviInstruction>> instructionsWithUnsavedLocalComments =
        PostgreSQLNodeSaver.saveCodeNodeInstructions(provider, nodes, firstNode, codeNodeIndices);
    final String query =
        "INSERT INTO " + CTableNames.CODE_NODES_TABLE
            + "(module_id, node_id, parent_function, comment_id) VALUES (?, ?, ?, ?)";
    final ArrayList<INaviCodeNode> codeNodesWithUnsavedComments = new ArrayList<INaviCodeNode>();
    final PreparedStatement preparedStatement =
        provider.getConnection().getConnection().prepareStatement(query);
    try {
      for (final int index : codeNodeIndices) {
        final INaviCodeNode codeNode = (INaviCodeNode) nodes.get(index);
        // Node IDs are consecutive starting at firstNode (see saveNodes/updateNodeIds).
        codeNode.setId(firstNode + index);
        INaviFunction function = null;
        try {
          function = codeNode.getParentFunction();
        } catch (final MaybeNullException e) {
          // A code node without a parent function is legal; the column is set to NULL below.
        }
        final int moduleId =
            Iterables.getLast(codeNode.getInstructions()).getModule().getConfiguration().getId();
        final List<IComment> comment = codeNode.getComments().getLocalCodeNodeComment();
        // A comment chain whose last element has no ID has not been stored yet; remember the
        // node so the comment can be appended after the batch insert.
        final Integer commentId =
            comment == null ? null : comment.size() == 0 ? null : Iterables.getLast(comment)
                .getId();
        if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
          codeNodesWithUnsavedComments.add(codeNode);
        }
        preparedStatement.setInt(1, moduleId);
        preparedStatement.setInt(2, firstNode + index);
        if (function == null) {
          preparedStatement.setNull(3, Types.BIGINT);
        } else {
          preparedStatement.setObject(3, function.getAddress().toBigInteger(), Types.BIGINT);
        }
        if (commentId == null) {
          preparedStatement.setNull(4, Types.INTEGER);
        } else {
          preparedStatement.setInt(4, commentId);
        }
        preparedStatement.addBatch();
      }
      preparedStatement.executeBatch();
    } finally {
      preparedStatement.close();
    }
    // TODO (timkornau): this is not the best solution and is more a test then a full fledged
    // implementation.
    // Append each previously-unsaved local code node comment one by one and rebuild the
    // in-memory comment list with the IDs the database assigned.
    for (final INaviCodeNode codeNode : codeNodesWithUnsavedComments) {
      final ArrayList<IComment> codeNodecomments = new ArrayList<IComment>();
      for (final IComment comment : codeNode.getComments().getLocalCodeNodeComment()) {
        try {
          final Integer commentId =
              PostgreSQLNodeFunctions.appendLocalCodeNodeComment(provider, codeNode,
                  comment.getComment(), comment.getUser().getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          codeNodecomments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          // Best-effort: log and continue with the remaining comments.
          CUtilityFunctions.logException(exception);
        }
      }
      codeNode.getComments().initializeLocalCodeNodeComment(codeNodecomments);
    }
    // TODO (timkornau): this is not the best solution and is more a test then a full fledged
    // implementation.
    // Same fix-up for instruction-level local comments reported by saveCodeNodeInstructions.
    for (final Pair<INaviCodeNode, INaviInstruction> pair : instructionsWithUnsavedLocalComments) {
      final ArrayList<IComment> localInstructionComments = new ArrayList<IComment>();
      for (final IComment comment : pair.first().getComments()
          .getLocalInstructionComment(pair.second())) {
        try {
          final int commentId =
              PostgreSQLInstructionFunctions.appendLocalInstructionComment(provider,
                  pair.first(), pair.second(), comment.getComment(), comment.getUser()
                      .getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          localInstructionComments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          // Best-effort: log and continue with the remaining comments.
          CUtilityFunctions.logException(exception);
        }
      }
      pair.first().getComments()
          .initializeLocalInstructionComment(pair.second(), localInstructionComments);
    }
  }
}
/**
 * Saves the function nodes to the database.
 *
 * @param provider The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param functionNodeIndices Index into the nodes list that identifies the function nodes.
 *
 * @throws SQLException Thrown if saving the function nodes failed.
 */
protected static void saveFunctionNodes(final SQLProvider provider,
    final List<INaviViewNode> nodes, final int firstNode, final List<Integer> functionNodeIndices)
    throws SQLException {
  if (functionNodeIndices.isEmpty()) {
    return;
  }
  final String query =
      "INSERT INTO " + CTableNames.FUNCTION_NODES_TABLE
          + "(module_id, node_id, function, comment_id) VALUES (?, ?, ?, ?)";
  final ArrayList<INaviFunctionNode> functionNodesWithUnsavedComments =
      new ArrayList<INaviFunctionNode>();
  final PreparedStatement preparedStatement =
      provider.getConnection().getConnection().prepareStatement(query);
  try {
    for (final int index : functionNodeIndices) {
      final CFunctionNode node = (CFunctionNode) nodes.get(index);
      final INaviFunction function = node.getFunction();
      final List<IComment> comments = node.getLocalFunctionComment();
      // A comment chain whose last element has no ID has not been stored yet; remember the
      // node so the comment can be appended after the batch insert.
      final Integer commentId =
          comments == null ? null : comments.size() == 0 ? null : Iterables.getLast(comments)
              .getId();
      if ((comments != null) && (comments.size() != 0) && (commentId == null)) {
        functionNodesWithUnsavedComments.add(node);
      }
      preparedStatement.setInt(1, function.getModule().getConfiguration().getId());
      // Node IDs are consecutive starting at firstNode (see saveNodes/updateNodeIds).
      preparedStatement.setInt(2, firstNode + index);
      preparedStatement.setObject(3, function.getAddress().toBigInteger(), Types.BIGINT);
      if (commentId == null) {
        preparedStatement.setNull(4, Types.INTEGER);
      } else {
        preparedStatement.setInt(4, commentId);
      }
      preparedStatement.addBatch();
    }
    preparedStatement.executeBatch();
  } finally {
    preparedStatement.close();
  }
  // Append each previously-unsaved function node comment and rebuild the in-memory comment
  // list with the IDs the database assigned.
  for (final INaviFunctionNode functionNode : functionNodesWithUnsavedComments) {
    final ArrayList<IComment> functionNodeComments = new ArrayList<IComment>();
    for (final IComment comment : functionNode.getLocalFunctionComment()) {
      try {
        final Integer commentId =
            provider.appendFunctionNodeComment(functionNode, comment.getComment(), comment
                .getUser().getUserId());
        final IComment newComment =
            new CComment(commentId, comment.getUser(), comment.getParent(), comment.getComment());
        functionNodeComments.add(newComment);
      } catch (final CouldntSaveDataException exception) {
        // Best-effort: log and continue with the remaining comments.
        CUtilityFunctions.logException(exception);
      }
    }
    functionNode.initializeLocalFunctionComment(functionNodeComments);
  }
}
/**
 * Saves the group nodes to the database.
 *
 * @param provider The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param groupNodeIndices Index into the nodes list that identifies the group nodes.
 *
 * @throws SQLException Thrown if saving the group nodes failed.
 */
protected static void saveGroupNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
    final int firstNode, final List<Integer> groupNodeIndices) throws SQLException {
  Preconditions.checkNotNull(provider, "IE02525: connection argument can not be null");
  Preconditions.checkNotNull(nodes, "IE02526: nodes argument can not be null");
  Preconditions
      .checkNotNull(groupNodeIndices, "Error: groupNodeIndices argument can not be null");
  if (!groupNodeIndices.isEmpty()) {
    final String query =
        "INSERT INTO " + CTableNames.GROUP_NODES_TABLE
            + "(node_id, collapsed, comment_id) VALUES (?, ?, ?)";
    final PreparedStatement preparedStatement =
        provider.getConnection().getConnection().prepareStatement(query);
    final List<INaviGroupNode> groupNodesWithUnsavedComments = new ArrayList<INaviGroupNode>();
    try {
      for (final Integer index : groupNodeIndices) {
        final INaviGroupNode node = (INaviGroupNode) nodes.get(index);
        // Node IDs are consecutive starting at firstNode (see saveNodes/updateNodeIds).
        preparedStatement.setInt(1, firstNode + index);
        preparedStatement.setBoolean(2, node.isCollapsed());
        final List<IComment> comment = node.getComments();
        // A comment chain whose last element has no ID has not been stored yet; remember the
        // node so the comment can be appended after the batch insert.
        final Integer commentId =
            comment == null ? null : comment.size() == 0 ? null : Iterables.getLast(comment)
                .getId();
        if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
          groupNodesWithUnsavedComments.add(node);
        }
        if (commentId == null) {
          preparedStatement.setNull(3, Types.INTEGER);
        } else {
          preparedStatement.setInt(3, commentId);
        }
        preparedStatement.addBatch();
      }
      preparedStatement.executeBatch();
    } finally {
      preparedStatement.close();
    }
    // TODO (timkornau): this can work better.
    // Append each previously-unsaved group node comment and rebuild the in-memory comment
    // list with the IDs the database assigned.
    for (final INaviGroupNode groupNode : groupNodesWithUnsavedComments) {
      final ArrayList<IComment> groupNodeComments = new ArrayList<IComment>();
      for (final IComment comment : groupNode.getComments()) {
        try {
          final Integer commentId =
              provider.appendGroupNodeComment(groupNode, comment.getComment(), comment.getUser()
                  .getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          groupNodeComments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          // Best-effort: log and continue with the remaining comments.
          CUtilityFunctions.logException(exception);
        }
      }
      groupNode.initializeComment(groupNodeComments);
    }
  }
}
/**
 * Stores parent groups for all nodes that have an assigned parent group.
 *
 * @param connection Provides the connection to the database.
 * @param nodes All nodes that were saved.
 * @param firstNode The database index of the first node.
 * @param groupNodeMap Maps between node IDs and parent group node objects.
 *
 * @throws SQLException Thrown if the parent groups could not be assigned.
 */
protected static void saveParentGroups(final CConnection connection,
    final List<INaviViewNode> nodes, final int firstNode,
    final BiMap<Integer, INaviGroupNode> groupNodeMap) throws SQLException {
  // Node IDs are consecutive starting at firstNode, so the list position of a node directly
  // yields its database ID.
  for (int index = 0; index < nodes.size(); index++) {
    final INaviViewNode node = nodes.get(index);
    if (node.getParentGroup() == null) {
      continue;
    }
    // Look up the list position of the parent group via the inverse map to get its ID.
    final int parentId = firstNode + groupNodeMap.inverse().get(node.getParentGroup());
    final int childId = firstNode + index;
    connection.executeUpdate(String.format("UPDATE " + CTableNames.NODES_TABLE
        + " set parent_id = %d WHERE id = %d", parentId, childId), true);
  }
}
/**
 * Saves the node tags to the database. All previous tag associations of the saved nodes are
 * deleted before the current associations are written.
 *
 * @param connection The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode Database index of the first node.
 *
 * @throws SQLException Thrown if saving the tags failed.
 */
protected static void saveTags(final CConnection connection, final List<INaviViewNode> nodes,
    final int firstNode) throws SQLException {
  final String deleteStatement =
      "DELETE FROM " + CTableNames.TAGGED_NODES_TABLE + " WHERE node_id IN (%s)";
  final String insertStatement = "INSERT INTO " + CTableNames.TAGGED_NODES_TABLE + " VALUES %s ";
  // Build the complete ID range [firstNode, firstNode + nodes.size() - 1]. The previous
  // version did not advance the counter on the first iteration, so it emitted the first node
  // ID twice and never emitted the last node ID, leaving stale tag rows behind.
  final StringBuilder range = new StringBuilder();
  for (int i = 0; i < nodes.size(); i++) {
    if (i != 0) {
      range.append(", ");
    }
    range.append(firstNode + i);
  }
  if (range.length() != 0) {
    connection.executeUpdate(String.format(deleteStatement, range.toString()), true);
  }
  // Insert one (node_id, tag_id) row per tag of each node; node IDs are consecutive starting
  // at firstNode.
  int counter = firstNode;
  final StringBuilder insert = new StringBuilder();
  boolean isFirst = true;
  for (final INaviViewNode node : nodes) {
    final Iterator<CTag> it = node.getTagsIterator();
    while (it.hasNext()) {
      final CTag tag = it.next();
      insert.append(isFirst ? "" : ",");
      insert.append('(');
      insert.append(counter);
      insert.append(", ");
      insert.append(tag.getId());
      insert.append(')');
      isFirst = false;
    }
    ++counter;
  }
  if (insert.length() != 0) {
    connection.executeUpdate(String.format(insertStatement, insert.toString()), true);
  }
}
/**
 * Saves the text nodes to the database.
 *
 * @param provider The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param textNodeIndices Index into the nodes list that identifies the text nodes.
 *
 * @throws SQLException Thrown if saving the text nodes failed.
 */
protected static void saveTextNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
    final int firstNode, final List<Integer> textNodeIndices) throws SQLException {
  Preconditions.checkNotNull(provider, "IE02527: provider argument can not be null");
  Preconditions.checkNotNull(nodes, "IE02528: nodes argument can not be null");
  Preconditions
      .checkNotNull(textNodeIndices, "IE02529: textNodeIndices argument can not be null");
  if (!textNodeIndices.isEmpty()) {
    final String query =
        "INSERT INTO " + CTableNames.TEXT_NODES_TABLE + "(node_id, comment_id) VALUES (?, ?)";
    final PreparedStatement preparedStatement =
        provider.getConnection().getConnection().prepareStatement(query);
    final List<INaviTextNode> textNodesWithUnsavedComments = new ArrayList<INaviTextNode>();
    try {
      for (final Integer index : textNodeIndices) {
        final INaviTextNode node = (INaviTextNode) nodes.get(index);
        final List<IComment> comment = node.getComments();
        // A comment chain whose last element has no ID has not been stored yet; remember the
        // node so the comment can be appended after the batch insert.
        final Integer commentId =
            comment == null ? null : comment.size() == 0 ? null : Iterables.getLast(comment)
                .getId();
        if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
          textNodesWithUnsavedComments.add(node);
        }
        // Node IDs are consecutive starting at firstNode (see saveNodes/updateNodeIds).
        preparedStatement.setInt(1, firstNode + index);
        if (commentId == null) {
          preparedStatement.setNull(2, Types.INTEGER);
        } else {
          preparedStatement.setInt(2, commentId);
        }
        preparedStatement.addBatch();
      }
      preparedStatement.executeBatch();
    } finally {
      preparedStatement.close();
    }
    // TODO (timkornau): this needs to be reworked once I have thought of a better idea for the
    // unsaved comments to be stored. Possibly one can handle all of those in one query.
    for (final INaviTextNode textNode : textNodesWithUnsavedComments) {
      final ArrayList<IComment> textNodeComments = new ArrayList<IComment>();
      for (final IComment comment : textNode.getComments()) {
        try {
          final Integer commentId =
              provider.appendTextNodeComment(textNode, comment.getComment(), comment.getUser()
                  .getUserId());
          final IComment newComment =
              new CComment(commentId, comment.getUser(), comment.getParent(),
                  comment.getComment());
          textNodeComments.add(newComment);
        } catch (final CouldntSaveDataException exception) {
          // Best-effort: log and continue with the remaining comments.
          CUtilityFunctions.logException(exception);
        }
      }
      textNode.initializeComment(textNodeComments);
    }
  }
}
/**
 * Sorts the group nodes of a view in a way that makes sure that group nodes inside other group
 * nodes come later in the list.
 *
 * @param groupNodeIndices Database indices of the group nodes to sort.
 * @param groupNodeMap Maps between group node database indices and objects.
 *
 * @return The sorted list of group node indices.
 *
 * @throws IllegalStateException If the parent relation between the group nodes is cyclic and the
 *         nodes can therefore not be ordered.
 */
protected static List<Integer> sortGroupNodes(final List<Integer> groupNodeIndices,
    final BiMap<Integer, INaviGroupNode> groupNodeMap) {
  final List<Integer> sortedList = new ArrayList<Integer>();
  final List<Integer> clonedList = new ArrayList<Integer>(groupNodeIndices);
  final Set<INaviGroupNode> addedNodes = new HashSet<INaviGroupNode>();
  while (!clonedList.isEmpty()) {
    boolean progressed = false;
    for (final Integer id : clonedList) {
      final INaviGroupNode node = groupNodeMap.get(id);
      // A node can be emitted once its parent has already been emitted (or it has none).
      if ((node.getParentGroup() == null) || addedNodes.contains(node.getParentGroup())) {
        addedNodes.add(node);
        sortedList.add(id);
        // remove(Object) by Integer identity, not remove(int) by position.
        clonedList.remove(id);
        progressed = true;
        break;
      }
    }
    // Guard against an endless loop: if no node could be emitted in a full pass, the parent
    // relation is cyclic (the previous version spun forever on such input).
    if (!progressed) {
      throw new IllegalStateException("Error: Cyclic parent relation between group nodes");
    }
  }
  return sortedList;
}
/**
 * Updates the node IDs of the nodes that were saved to the database. The nodes receive
 * consecutive IDs beginning with {@code firstNode}, matching the order of the list.
 *
 * @param nodes The nodes whose IDs are updated.
 * @param firstNode The new ID of the first node.
 */
protected static void updateNodeIds(final List<INaviViewNode> nodes, final int firstNode) {
  for (int index = 0; index < nodes.size(); index++) {
    nodes.get(index).setId(firstNode + index);
  }
}
/**
 * Writes the nodes of a view to the database.
 *
 * @param provider The connection to the database.
 * @param newViewId The ID of the view the nodes belong to.
 * @param nodes The nodes to save.
 *
 * @throws SQLException Thrown if saving the nodes failed.
 */
public static void writeNodes(final AbstractSQLProvider provider, final int newViewId,
    final List<INaviViewNode> nodes) throws SQLException {
  // Reuse the shared validation helper instead of duplicating the identical checks inline.
  checkArguments(provider, newViewId, nodes);
  if (nodes.isEmpty()) {
    return;
  }
  final ArrayList<Integer> functionNodeIndices = new ArrayList<Integer>();
  final ArrayList<Integer> codeNodeIndices = new ArrayList<Integer>();
  final ArrayList<Integer> textNodeIndices = new ArrayList<Integer>();
  final ArrayList<Integer> groupNodeIndices = new ArrayList<Integer>();
  final BiMap<Integer, INaviGroupNode> groupNodeMap = HashBiMap.create();
  final int firstNode =
      saveNodes(provider, newViewId, nodes, functionNodeIndices, codeNodeIndices,
          textNodeIndices, groupNodeIndices, groupNodeMap);
  // After this point, the nodes table has been filled
  // After each saving, the node IDs have to be updated
  PostgreSQLNodeSaver.updateNodeIds(nodes, firstNode);
  // Now, the individual node type tables can be saved. Group nodes are sorted first so that
  // parent groups are always saved before the groups nested inside them.
  PostgreSQLNodeSaver.saveGroupNodes(provider, nodes, firstNode,
      PostgreSQLNodeSaver.sortGroupNodes(groupNodeIndices, groupNodeMap));
  PostgreSQLNodeSaver.saveFunctionNodes(provider, nodes, firstNode, functionNodeIndices);
  PostgreSQLNodeSaver.saveCodeNodes(provider, nodes, firstNode, codeNodeIndices);
  PostgreSQLNodeSaver.saveTextNodes(provider, nodes, firstNode, textNodeIndices);
  // Once all nodes are saved, the parent nodes can be saved too
  final CConnection connection = provider.getConnection();
  PostgreSQLNodeSaver.saveParentGroups(connection, nodes, firstNode, groupNodeMap);
  // And finally, we can save the tags associated with the nodes
  PostgreSQLNodeSaver.saveTags(connection, nodes, firstNode);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.database;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.mem.unsafe.UnsafeMemoryProvider;
import org.apache.ignite.internal.metric.IoStatisticsHolderNoOp;
import org.apache.ignite.internal.pagemem.PageIdAllocator;
import org.apache.ignite.internal.pagemem.PageMemory;
import org.apache.ignite.internal.pagemem.PageUtils;
import org.apache.ignite.internal.pagemem.impl.PageMemoryNoStoreImpl;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.CacheObjectContext;
import org.apache.ignite.internal.processors.cache.CacheObjectValueContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.cache.persistence.DataRegion;
import org.apache.ignite.internal.processors.cache.persistence.DataRegionMetricsImpl;
import org.apache.ignite.internal.processors.cache.persistence.evict.NoOpPageEvictionTracker;
import org.apache.ignite.internal.processors.cache.persistence.freelist.CacheFreeList;
import org.apache.ignite.internal.processors.cache.persistence.freelist.FreeList;
import org.apache.ignite.internal.processors.cache.persistence.tree.io.CacheVersionIO;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.metric.GridMetricManager;
import org.apache.ignite.internal.processors.metric.impl.LongAdderMetric;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.spi.metric.noop.NoopMetricExporterSpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.GridTestKernalContext;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.logger.GridTestLog4jLogger;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import static org.apache.ignite.internal.processors.database.DataRegionMetricsSelfTest.NO_OP_METRICS;
/**
*
*/
public class CacheFreeListSelfTest extends GridCommonAbstractTest {
/** Number of processors available to the JVM. */
private static final int CPUS = Runtime.getRuntime().availableProcessors();
/** One megabyte in bytes. */
private static final long MB = 1024L * 1024L;
/** Number of rows accumulated before a batched insertDataRows call. */
private static final int BATCH_SIZE = 100;
/** Page memory under test; created by createFreeList and released in afterTest. */
private PageMemory pageMem;
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
    super.afterTest();

    // Release the off-heap memory acquired by the test so it does not leak into the next one.
    if (pageMem != null) {
        pageMem.stop(true);
    }

    pageMem = null;
}
/**
 * Single-threaded batched insert/delete with 1024 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_batched_1024() throws Exception {
    checkInsertDeleteSingleThreaded(1024, true);
}

/**
 * Single-threaded batched insert/delete with 2048 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_batched_2048() throws Exception {
    checkInsertDeleteSingleThreaded(2048, true);
}

/**
 * Single-threaded batched insert/delete with 4096 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_batched_4096() throws Exception {
    checkInsertDeleteSingleThreaded(4096, true);
}

/**
 * Single-threaded batched insert/delete with 8192 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_batched_8192() throws Exception {
    checkInsertDeleteSingleThreaded(8192, true);
}

/**
 * Single-threaded batched insert/delete with 16384 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_batched_16384() throws Exception {
    checkInsertDeleteSingleThreaded(16384, true);
}

/**
 * Single-threaded row-by-row insert/delete with 1024 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_1024() throws Exception {
    checkInsertDeleteSingleThreaded(1024);
}

/**
 * Single-threaded row-by-row insert/delete with 2048 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_2048() throws Exception {
    checkInsertDeleteSingleThreaded(2048);
}

/**
 * Single-threaded row-by-row insert/delete with 4096 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_4096() throws Exception {
    checkInsertDeleteSingleThreaded(4096);
}

/**
 * Single-threaded row-by-row insert/delete with 8192 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_8192() throws Exception {
    checkInsertDeleteSingleThreaded(8192);
}

/**
 * Single-threaded row-by-row insert/delete with 16384 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteSingleThreaded_16384() throws Exception {
    checkInsertDeleteSingleThreaded(16384);
}

/**
 * Multi-threaded row-by-row insert/delete with 1024 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_1024() throws Exception {
    checkInsertDeleteMultiThreaded(1024);
}

/**
 * Multi-threaded row-by-row insert/delete with 2048 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_2048() throws Exception {
    checkInsertDeleteMultiThreaded(2048);
}

/**
 * Multi-threaded row-by-row insert/delete with 4096 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_4096() throws Exception {
    checkInsertDeleteMultiThreaded(4096);
}

/**
 * Multi-threaded row-by-row insert/delete with 8192 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_8192() throws Exception {
    checkInsertDeleteMultiThreaded(8192);
}

/**
 * Multi-threaded row-by-row insert/delete with 16384 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_16384() throws Exception {
    checkInsertDeleteMultiThreaded(16384);
}

/**
 * Multi-threaded batched insert/delete with 1024 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_batched_1024() throws Exception {
    checkInsertDeleteMultiThreaded(1024, true);
}

/**
 * Multi-threaded batched insert/delete with 2048 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_batched_2048() throws Exception {
    checkInsertDeleteMultiThreaded(2048, true);
}

/**
 * Multi-threaded batched insert/delete with 4096 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_batched_4096() throws Exception {
    checkInsertDeleteMultiThreaded(4096, true);
}

/**
 * Multi-threaded batched insert/delete with 8192 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_batched_8192() throws Exception {
    checkInsertDeleteMultiThreaded(8192, true);
}

/**
 * Multi-threaded batched insert/delete with 16384 byte pages.
 *
 * @throws Exception if failed.
 */
@Test
public void testInsertDeleteMultiThreaded_batched_16384() throws Exception {
    checkInsertDeleteMultiThreaded(16384, true);
}
/**
 * Runs the multi-threaded insert/delete scenario in row-by-row (non-batched) mode.
 *
 * @param pageSize Page size.
 * @throws Exception if failed.
 */
protected void checkInsertDeleteMultiThreaded(int pageSize) throws Exception {
    checkInsertDeleteMultiThreaded(pageSize, false);
}
/**
 * Concurrently inserts and removes rows through the free list from 8 threads, alternating
 * between growth and shrink phases driven by the shared map size.
 *
 * @param pageSize Page size.
 * @param batched Batch mode flag: if {@code true}, rows are inserted {@link #BATCH_SIZE} at a
 *        time via {@code insertDataRows}; otherwise one by one.
 * @throws Exception If failed.
 */
protected void checkInsertDeleteMultiThreaded(final int pageSize, final boolean batched) throws Exception {
    final FreeList<CacheDataRow> list = createFreeList(pageSize);

    Random rnd = new Random();

    // Shared link -> row map; a successful remove from this map grants the right to delete
    // the row from the free list.
    final ConcurrentMap<Long, CacheDataRow> stored = new ConcurrentHashMap<>();

    // Seed the list with rows whose key/value sizes straddle the page size so both inline
    // and multi-page (fragmented) layouts are exercised.
    for (int i = 0; i < 100; i++) {
        int keySize = rnd.nextInt(pageSize * 3 / 2) + 10;
        int valSize = rnd.nextInt(pageSize * 5 / 2) + 10;

        TestDataRow row = new TestDataRow(keySize, valSize);

        list.insertDataRow(row, IoStatisticsHolderNoOp.INSTANCE);

        // A zero link means the row was not actually written.
        assertTrue(row.link() != 0L);

        CacheDataRow old = stored.put(row.link(), row);

        assertNull(old);
    }

    // Global grow/shrink flag shared by all worker threads.
    final AtomicBoolean grow = new AtomicBoolean(true);

    GridTestUtils.runMultiThreaded(new Callable<Object>() {
        @Override public Object call() throws Exception {
            List<CacheDataRow> rows = new ArrayList<>(BATCH_SIZE);

            Random rnd = ThreadLocalRandom.current();

            for (int i = 0; i < 200_000; i++) {
                boolean grow0 = grow.get();

                // Flip to shrinking once the population is large enough ...
                if (grow0) {
                    if (stored.size() > 20_000) {
                        if (grow.compareAndSet(true, false))
                            info("Shrink... [" + stored.size() + ']');

                        grow0 = false;
                    }
                }
                // ... and back to growing once it has drained.
                else {
                    if (stored.size() < 1_000) {
                        if (grow.compareAndSet(false, true))
                            info("Grow... [" + stored.size() + ']');

                        grow0 = true;
                    }
                }

                // 70/30 bias towards the current phase's operation.
                boolean insert = rnd.nextInt(100) < 70 == grow0;

                if (insert) {
                    int keySize = rnd.nextInt(pageSize * 3 / 2) + 10;
                    int valSize = rnd.nextInt(pageSize * 3 / 2) + 10;

                    TestDataRow row = new TestDataRow(keySize, valSize);

                    if (batched) {
                        rows.add(row);

                        if (rows.size() == BATCH_SIZE) {
                            list.insertDataRows(rows, IoStatisticsHolderNoOp.INSTANCE);

                            for (CacheDataRow row0 : rows) {
                                assertTrue(row0.link() != 0L);

                                CacheDataRow old = stored.put(row0.link(), row0);

                                assertNull(old);
                            }

                            rows.clear();
                        }

                        continue;
                    }

                    list.insertDataRow(row, IoStatisticsHolderNoOp.INSTANCE);

                    assertTrue(row.link() != 0L);

                    CacheDataRow old = stored.put(row.link(), row);

                    assertNull(old);
                }
                else {
                    // Retry until this thread wins the removal race for some row: only the
                    // thread whose map remove succeeds may delete the row from the list.
                    while (!stored.isEmpty()) {
                        Iterator<CacheDataRow> it = stored.values().iterator();

                        if (it.hasNext()) {
                            CacheDataRow row = it.next();

                            CacheDataRow rmvd = stored.remove(row.link());

                            if (rmvd != null) {
                                list.removeDataRowByLink(row.link(), IoStatisticsHolderNoOp.INSTANCE);

                                break;
                            }
                        }
                    }
                }
            }

            return null;
        }
    }, 8, "runner");
}
/**
 * Runs the single-threaded insert/delete scenario in row-by-row (non-batched) mode.
 *
 * @param pageSize Page size.
 * @throws Exception if failed.
 */
protected void checkInsertDeleteSingleThreaded(int pageSize) throws Exception {
    checkInsertDeleteSingleThreaded(pageSize, false);
}
/**
 * Inserts and removes rows through the free list from a single thread, alternating between
 * growth and shrink phases driven by the stored map size.
 *
 * @param pageSize Page size.
 * @param batched Batch mode flag: if {@code true}, rows are inserted {@link #BATCH_SIZE} at a
 *        time via {@code insertDataRows}; otherwise one by one.
 * @throws Exception if failed.
 */
protected void checkInsertDeleteSingleThreaded(int pageSize, boolean batched) throws Exception {
    FreeList<CacheDataRow> list = createFreeList(pageSize);

    Random rnd = new Random();

    // Link -> row map mirroring the expected content of the free list.
    Map<Long, CacheDataRow> stored = new HashMap<>();

    // Seed the list with rows whose key/value sizes straddle the page size so both inline
    // and multi-page (fragmented) layouts are exercised.
    for (int i = 0; i < 100; i++) {
        int keySize = rnd.nextInt(pageSize * 3 / 2) + 10;
        int valSize = rnd.nextInt(pageSize * 5 / 2) + 10;

        TestDataRow row = new TestDataRow(keySize, valSize);

        list.insertDataRow(row, IoStatisticsHolderNoOp.INSTANCE);

        // A zero link means the row was not actually written.
        assertTrue(row.link() != 0L);

        CacheDataRow old = stored.put(row.link(), row);

        assertNull(old);
    }

    boolean grow = true;

    List<CacheDataRow> rows = new ArrayList<>(BATCH_SIZE);

    for (int i = 0; i < 1_000_000; i++) {
        // Flip to shrinking once the population is large enough, and back to growing once
        // it has drained.
        if (grow) {
            if (stored.size() > 20_000) {
                grow = false;

                info("Shrink... [" + stored.size() + ']');
            }
        }
        else {
            if (stored.size() < 1_000) {
                grow = true;

                info("Grow... [" + stored.size() + ']');
            }
        }

        // 70/30 bias towards the current phase's operation.
        boolean insert = rnd.nextInt(100) < 70 == grow;

        if (insert) {
            int keySize = rnd.nextInt(pageSize * 3 / 2) + 10;
            int valSize = rnd.nextInt(pageSize * 3 / 2) + 10;

            TestDataRow row = new TestDataRow(keySize, valSize);

            if (batched) {
                rows.add(row);

                if (rows.size() == BATCH_SIZE) {
                    list.insertDataRows(rows, IoStatisticsHolderNoOp.INSTANCE);

                    for (CacheDataRow row0 : rows) {
                        assertTrue(row0.link() != 0L);

                        CacheDataRow old = stored.put(row0.link(), row0);

                        assertNull(old);
                    }

                    rows.clear();
                }

                continue;
            }

            list.insertDataRow(row, IoStatisticsHolderNoOp.INSTANCE);

            assertTrue(row.link() != 0L);

            CacheDataRow old = stored.put(row.link(), row);

            assertNull(old);
        }
        else {
            // Remove an arbitrary stored row and delete it from the free list by its link.
            Iterator<CacheDataRow> it = stored.values().iterator();

            if (it.hasNext()) {
                CacheDataRow row = it.next();

                CacheDataRow rmvd = stored.remove(row.link());

                assertTrue(rmvd == row);

                list.removeDataRowByLink(row.link(), IoStatisticsHolderNoOp.INSTANCE);
            }
        }
    }
}
/**
 * Creates and starts an in-memory (no persistent store) page memory instance.
 *
 * @param pageSize Page size.
 * @param plcCfg Data region configuration backing the page memory.
 * @return Page memory.
 * @throws Exception If failed.
 */
protected PageMemory createPageMemory(int pageSize, DataRegionConfiguration plcCfg) throws Exception {
    PageMemory pageMem = new PageMemoryNoStoreImpl(log,
        new UnsafeMemoryProvider(log),
        null,
        pageSize,
        plcCfg,
        new LongAdderMetric("NO_OP", null), // Allocation tracking is not asserted here.
        true);

    pageMem.start();

    return pageMem;
}
/**
 * Creates a cache free list backed by a fresh 1 GB in-memory data region
 * with a newly allocated metadata page.
 *
 * @param pageSize Page size.
 * @return Free list.
 * @throws Exception If failed.
 */
protected FreeList<CacheDataRow> createFreeList(int pageSize) throws Exception {
    DataRegionConfiguration plcCfg = new DataRegionConfiguration()
        .setInitialSize(1024 * MB)
        .setMaxSize(1024 * MB);

    // NOTE(review): stored in a field rather than a local — presumably released
    // in test teardown; confirm against the enclosing class.
    pageMem = createPageMemory(pageSize, plcCfg);

    long metaPageId = pageMem.allocatePage(1, 1, PageIdAllocator.FLAG_DATA);

    IgniteConfiguration cfg = new IgniteConfiguration().setMetricExporterSpi(new NoopMetricExporterSpi());

    DataRegionMetricsImpl regionMetrics = new DataRegionMetricsImpl(plcCfg,
        new GridMetricManager(new GridTestKernalContext(new GridTestLog4jLogger(), cfg)),
        NO_OP_METRICS);

    DataRegion dataRegion = new DataRegion(pageMem, plcCfg, regionMetrics, new NoOpPageEvictionTracker());

    return new CacheFreeList(
        1,
        "freelist",
        regionMetrics,
        dataRegion,
        null,
        metaPageId,
        true,
        null,
        new GridTestKernalContext(log),
        null,
        PageIdAllocator.FLAG_IDX
    );
}
/**
 * Minimal {@code CacheDataRow} implementation with fixed-size synthetic key and
 * value payloads, used to drive free-list insert/remove tests. MVCC accessors
 * are stubbed to zero and {@link #hash()} is unsupported.
 */
private static class TestDataRow implements CacheDataRow {
    /** Link assigned by the free list on insertion (0 until inserted). */
    private long link;

    /** Synthetic key payload. */
    private TestCacheObject key;

    /** Synthetic value payload. */
    private TestCacheObject val;

    /** Version derived from the payload sizes (test-only, not meaningful). */
    private GridCacheVersion ver;

    /**
     * @param keySize Key size.
     * @param valSize Value size.
     */
    private TestDataRow(int keySize, int valSize) {
        key = new TestCacheObject(keySize);
        val = new TestCacheObject(valSize);
        ver = new GridCacheVersion(keySize, valSize, 1);
    }

    /** {@inheritDoc} */
    @Override public KeyCacheObject key() {
        return key;
    }

    /** {@inheritDoc} */
    @Override public void key(KeyCacheObject key) {
        this.key = (TestCacheObject)key;
    }

    /** {@inheritDoc} */
    @Override public CacheObject value() {
        return val;
    }

    /** {@inheritDoc} */
    @Override public GridCacheVersion version() {
        return ver;
    }

    /** {@inheritDoc} */
    @Override public long expireTime() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public int partition() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public int size() throws IgniteCheckedException {
        // Stored size = key bytes + value bytes + serialized version + 8 extra
        // bytes (presumably the expire-time field — TODO confirm), plus 4 bytes
        // for the cache id when present (never here: cacheId() == 0).
        int len = key().valueBytesLength(null);
        len += value().valueBytesLength(null) + CacheVersionIO.size(version(), false) + 8;
        return len + (cacheId() != 0 ? 4 : 0);
    }

    /** {@inheritDoc} */
    @Override public int headerSize() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public long link() {
        return link;
    }

    /** {@inheritDoc} */
    @Override public void link(long link) {
        this.link = link;
    }

    /** {@inheritDoc} */
    @Override public int hash() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public int cacheId() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public long newMvccCoordinatorVersion() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public long newMvccCounter() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public int newMvccOperationCounter() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public long mvccCoordinatorVersion() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public long mvccCounter() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public int mvccOperationCounter() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public byte mvccTxState() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public byte newMvccTxState() {
        return 0;
    }
}
/**
 * Test key/value object backed by a byte array filled with its own size.
 * Only the methods needed for free-list serialization are functional; all
 * marshalling/network callbacks are guarded with {@code assert false}.
 */
private static class TestCacheObject implements KeyCacheObject {
    /** Payload; every byte equals the (truncated) object size. */
    private byte[] data;

    /**
     * @param size Object size.
     */
    private TestCacheObject(int size) {
        data = new byte[size];

        Arrays.fill(data, (byte)size);
    }

    /** {@inheritDoc} */
    @Override public boolean internal() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public int partition() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public void partition(int part) {
        assert false;
    }

    /** {@inheritDoc} */
    @Override public KeyCacheObject copy(int part) {
        assert false;

        return null;
    }

    /** {@inheritDoc} */
    @Nullable @Override public <T> T value(CacheObjectValueContext ctx, boolean cpy) {
        return value(ctx, cpy, null);
    }

    /** {@inheritDoc} */
    @Override public <T> @Nullable T value(CacheObjectValueContext ctx, boolean cpy, ClassLoader ldr) {
        return (T)data;
    }

    /** {@inheritDoc} */
    @Override public byte[] valueBytes(CacheObjectValueContext ctx) throws IgniteCheckedException {
        return data;
    }

    /** {@inheritDoc} */
    @Override public int valueBytesLength(CacheObjectContext ctx) throws IgniteCheckedException {
        return data.length;
    }

    /** {@inheritDoc} */
    @Override public boolean putValue(ByteBuffer buf) throws IgniteCheckedException {
        buf.put(data);

        return true;
    }

    /** {@inheritDoc} */
    @Override public int putValue(long addr) throws IgniteCheckedException {
        PageUtils.putBytes(addr, 0, data);

        return data.length;
    }

    /** {@inheritDoc} */
    @Override public boolean putValue(ByteBuffer buf, int off, int len) throws IgniteCheckedException {
        buf.put(data, off, len);

        return true;
    }

    /** {@inheritDoc} */
    @Override public byte cacheObjectType() {
        return 42; // Arbitrary test-only type code.
    }

    /** {@inheritDoc} */
    @Override public boolean isPlatformType() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public CacheObject prepareForCache(CacheObjectContext ctx) {
        assert false;

        return this;
    }

    /** {@inheritDoc} */
    @Override public void finishUnmarshal(CacheObjectValueContext ctx, ClassLoader ldr)
        throws IgniteCheckedException {
        assert false;
    }

    /** {@inheritDoc} */
    @Override public void prepareMarshal(CacheObjectValueContext ctx) throws IgniteCheckedException {
        assert false;
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        assert false;

        return false;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        assert false;

        return false;
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        assert false;

        return 0;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        assert false;

        return 0;
    }

    /** {@inheritDoc} */
    @Override public void onAckReceived() {
        assert false;
    }
}
}
| |
// ========================================================================
// Copyright (c) 2010 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.websocket;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.http.HttpURI;
import org.eclipse.jetty.io.AbstractConnection;
import org.eclipse.jetty.io.AsyncEndPoint;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.ByteArrayBuffer;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.io.nio.IndirectNIOBuffer;
import org.eclipse.jetty.util.QuotedStringTokenizer;
import org.eclipse.jetty.util.StringUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.websocket.WebSocket.OnFrame;
/**
 * WebSocket connection for the draft-00 (hixie-76) protocol. Handles the
 * hixie handshake (8 random bytes + MD5 reply) and the two draft-00 frame
 * styles: 0xFF-sentinel-terminated text frames and length-prefixed binary
 * frames. Control frames, fragmentation and close codes do not exist in
 * this draft, so the corresponding {@code FrameConnection} queries are
 * constant no-ops.
 */
public class WebSocketConnectionD00 extends AbstractConnection implements WebSocketConnection, WebSocket.FrameConnection
{
    private static final Logger LOG = Log.getLogger(WebSocketConnectionD00.class);

    /** Opcode bit marking length-prefixed (binary) frames. */
    public final static byte LENGTH_FRAME=(byte)0x80;
    /** Opcode for sentinel-terminated (text) frames. */
    public final static byte SENTINEL_FRAME=(byte)0x00;

    final WebSocketParser _parser;
    final WebSocketGenerator _generator;
    final WebSocket _websocket;
    final String _protocol;

    /** Hixie handshake keys; only set while the draft-76 handshake is pending. */
    String _key1;
    String _key2;
    /** Accumulates the 8 random handshake bytes, then holds the 16-byte MD5 reply. */
    ByteArrayBuffer _hixieBytes;

    /**
     * @param websocket application callback object
     * @param endpoint transport endpoint
     * @param buffers shared buffer pool for parser/generator
     * @param timestamp connection creation time
     * @param maxIdleTime idle timeout applied to the endpoint (ms)
     * @param protocol negotiated sub-protocol (may be null)
     * @throws IOException on endpoint configuration failure
     */
    public WebSocketConnectionD00(WebSocket websocket, EndPoint endpoint, WebSocketBuffers buffers, long timestamp, int maxIdleTime, String protocol)
        throws IOException
    {
        super(endpoint,timestamp);
        _endp.setMaxIdleTime(maxIdleTime);
        _websocket = websocket;
        _protocol=protocol;
        _generator = new WebSocketGeneratorD00(buffers, _endp);
        _parser = new WebSocketParserD00(buffers, endpoint, new FrameHandlerD00(_websocket));
    }

    /* ------------------------------------------------------------ */
    public org.eclipse.jetty.websocket.WebSocket.Connection getConnection()
    {
        return this;
    }

    /* ------------------------------------------------------------ */
    public void setHixieKeys(String key1,String key2)
    {
        _key1=key1;
        _key2=key2;
        _hixieBytes=new IndirectNIOBuffer(16);
    }

    /* ------------------------------------------------------------ */
    /**
     * I/O-ready callback. First completes the pending hixie handshake (if any),
     * then pumps the generator/parser until no progress is made.
     */
    public Connection handle() throws IOException
    {
        try
        {
            // handle stupid hixie random bytes
            if (_hixieBytes!=null)
            {
                // take any available bytes from the parser buffer, which may have already been read
                Buffer buffer=_parser.getBuffer();
                if (buffer!=null && buffer.length()>0)
                {
                    int l=buffer.length();
                    if (l>(8-_hixieBytes.length()))
                        l=8-_hixieBytes.length();
                    _hixieBytes.put(buffer.peek(buffer.getIndex(),l));
                    buffer.skip(l);
                }

                // while we are not blocked
                while(_endp.isOpen())
                {
                    // do we now have enough
                    if (_hixieBytes.length()==8)
                    {
                        // we have the silly random bytes
                        // so let's work out the stupid 16 byte reply.
                        doTheHixieHixieShake();
                        _endp.flush(_hixieBytes);
                        _hixieBytes=null;
                        _endp.flush();
                        break;
                    }

                    // no, then let's fill
                    int filled=_endp.fill(_hixieBytes);
                    if (filled<0)
                    {
                        _endp.close();
                        break;
                    }
                }

                if (_websocket instanceof OnFrame)
                    ((OnFrame)_websocket).onHandshake(this);
                _websocket.onOpen(this);
                return this;
            }

            // handle the framing protocol: alternate flushing pending output and
            // parsing input until neither side advances.
            boolean progress=true;

            while (progress)
            {
                int flushed=_generator.flush();
                int filled=_parser.parseNext();

                progress = flushed>0 || filled>0;

                if (filled<0 || flushed<0)
                {
                    _endp.close();
                    break;
                }
            }
        }
        catch(IOException e)
        {
            LOG.debug(e);
            try
            {
                _endp.close();
            }
            catch(IOException e2)
            {
                LOG.ignore(e2);
            }
            throw e;
        }
        finally
        {
            if (_endp.isOpen())
            {
                if (_endp.isInputShutdown() && _generator.isBufferEmpty())
                    _endp.close();
                else
                    checkWriteable();
                // FIX: removed a second, unconditional checkWriteable() here; it
                // duplicated the else-branch call and scheduled a redundant write
                // even when the endpoint had just been closed above.
            }
        }
        return this;
    }

    /* ------------------------------------------------------------ */
    public void onInputShutdown() throws IOException
    {
        // TODO
    }

    /* ------------------------------------------------------------ */
    /** Replaces the 8 buffered random bytes with the 16-byte MD5 handshake reply. */
    private void doTheHixieHixieShake()
    {
        byte[] result=WebSocketConnectionD00.doTheHixieHixieShake(
                WebSocketConnectionD00.hixieCrypt(_key1),
                WebSocketConnectionD00.hixieCrypt(_key2),
                _hixieBytes.asArray());
        _hixieBytes.clear();
        _hixieBytes.put(result);
    }

    /* ------------------------------------------------------------ */
    public boolean isOpen()
    {
        return _endp!=null&&_endp.isOpen();
    }

    /* ------------------------------------------------------------ */
    public boolean isIdle()
    {
        return _parser.isBufferEmpty() && _generator.isBufferEmpty();
    }

    /* ------------------------------------------------------------ */
    public boolean isSuspended()
    {
        return false;
    }

    /* ------------------------------------------------------------ */
    public void onClose()
    {
        _websocket.onClose(WebSocketConnectionD06.CLOSE_NORMAL,"");
    }

    /* ------------------------------------------------------------ */
    /** Sends a UTF-8 text message as a sentinel-terminated frame. */
    public void sendMessage(String content) throws IOException
    {
        byte[] data = content.getBytes(StringUtil.__UTF8);
        _generator.addFrame((byte)0,SENTINEL_FRAME,data,0,data.length);
        _generator.flush();
        checkWriteable();
    }

    /* ------------------------------------------------------------ */
    /** Sends a binary message as a length-prefixed frame. */
    public void sendMessage(byte[] data, int offset, int length) throws IOException
    {
        _generator.addFrame((byte)0,LENGTH_FRAME,data,offset,length);
        _generator.flush();
        checkWriteable();
    }

    /* ------------------------------------------------------------ */
    public boolean isMore(byte flags)
    {
        return (flags&0x8) != 0;
    }

    /* ------------------------------------------------------------ */
    /**
     * {@inheritDoc}
     * Draft-00 has no control frames, so this is a no-op.
     */
    public void sendControl(byte code, byte[] content, int offset, int length) throws IOException
    {
    }

    /* ------------------------------------------------------------ */
    public void sendFrame(byte flags,byte opcode, byte[] content, int offset, int length) throws IOException
    {
        _generator.addFrame((byte)0,opcode,content,offset,length);
        _generator.flush();
        checkWriteable();
    }

    /* ------------------------------------------------------------ */
    public void close(int code, String message)
    {
        // Draft-00 has no close frame with status codes.
        throw new UnsupportedOperationException();
    }

    /* ------------------------------------------------------------ */
    public void disconnect()
    {
        close();
    }

    /* ------------------------------------------------------------ */
    public void close()
    {
        try
        {
            _generator.flush();
            _endp.close();
        }
        catch(IOException e)
        {
            LOG.ignore(e);
        }
    }

    /* ------------------------------------------------------------ */
    public void fillBuffersFrom(Buffer buffer)
    {
        _parser.fill(buffer);
    }

    /* ------------------------------------------------------------ */
    /** Schedules an async write if output is still pending on an async endpoint. */
    private void checkWriteable()
    {
        if (!_generator.isBufferEmpty() && _endp instanceof AsyncEndPoint)
            ((AsyncEndPoint)_endp).scheduleWrite();
    }

    /* ------------------------------------------------------------ */
    /**
     * Draft-76 key decoding: concatenated digits divided by the space count.
     * Per the draft the key always contains at least one space; a malformed
     * key with none would divide by zero here.
     */
    static long hixieCrypt(String key)
    {
        // Don't ask me what all this is about.
        // I think it's pretend secret stuff, kind of
        // like talking in pig latin!
        long number=0;
        int spaces=0;
        for (char c : key.toCharArray())
        {
            if (Character.isDigit(c))
                number=number*10+(c-'0');
            else if (c==' ')
                spaces++;
        }
        return number/spaces;
    }

    /**
     * Computes the 16-byte draft-76 handshake reply:
     * MD5(key1-as-4-bytes + key2-as-4-bytes + 8 random bytes).
     */
    public static byte[] doTheHixieHixieShake(long key1,long key2,byte[] key3)
    {
        try
        {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte [] fodder = new byte[16];

            fodder[0]=(byte)(0xff&(key1>>24));
            fodder[1]=(byte)(0xff&(key1>>16));
            fodder[2]=(byte)(0xff&(key1>>8));
            fodder[3]=(byte)(0xff&key1);
            fodder[4]=(byte)(0xff&(key2>>24));
            fodder[5]=(byte)(0xff&(key2>>16));
            fodder[6]=(byte)(0xff&(key2>>8));
            fodder[7]=(byte)(0xff&key2);
            System.arraycopy(key3, 0, fodder, 8, 8);
            md.update(fodder);
            return md.digest();
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Completes the HTTP upgrade response. Uses the Sec-WebSocket-* headers for
     * draft-76 clients (key1 present) and the older WebSocket-* headers otherwise.
     */
    public void handshake(HttpServletRequest request, HttpServletResponse response, String subprotocol) throws IOException
    {
        String uri=request.getRequestURI();
        String query=request.getQueryString();
        if (query!=null && query.length()>0)
            uri+="?"+query;
        uri=new HttpURI(uri).toString();
        String host=request.getHeader("Host");

        String origin=request.getHeader("Sec-WebSocket-Origin");
        if (origin==null)
            origin=request.getHeader("Origin");
        if (origin!=null)
            origin= QuotedStringTokenizer.quoteIfNeeded(origin, "\r\n");

        String key1 = request.getHeader("Sec-WebSocket-Key1");

        if (key1!=null)
        {
            String key2 = request.getHeader("Sec-WebSocket-Key2");
            setHixieKeys(key1,key2);

            response.setHeader("Upgrade","WebSocket");
            response.addHeader("Connection","Upgrade");
            if (origin!=null)
                response.addHeader("Sec-WebSocket-Origin",origin);
            response.addHeader("Sec-WebSocket-Location",(request.isSecure()?"wss://":"ws://")+host+uri);
            if (subprotocol!=null)
                response.addHeader("Sec-WebSocket-Protocol",subprotocol);
            response.sendError(101,"WebSocket Protocol Handshake");
            // onOpen is deferred until the 8 random bytes arrive; see handle().
        }
        else
        {
            response.setHeader("Upgrade","WebSocket");
            response.addHeader("Connection","Upgrade");
            response.addHeader("WebSocket-Origin",origin);
            response.addHeader("WebSocket-Location",(request.isSecure()?"wss://":"ws://")+host+uri);
            if (subprotocol!=null)
                response.addHeader("WebSocket-Protocol",subprotocol);
            response.sendError(101,"Web Socket Protocol Handshake");
            response.flushBuffer();

            if (_websocket instanceof OnFrame)
                ((OnFrame)_websocket).onHandshake(this);
            _websocket.onOpen(this);
        }
    }

    public void setMaxTextMessageSize(int size)
    {
        // Unsupported in draft-00; ignored.
    }

    public void setMaxIdleTime(int ms)
    {
        try
        {
            _endp.setMaxIdleTime(ms);
        }
        catch(IOException e)
        {
            LOG.warn(e);
        }
    }

    public void setMaxBinaryMessageSize(int size)
    {
        // Unsupported in draft-00; ignored.
    }

    public int getMaxTextMessageSize()
    {
        return -1;
    }

    public int getMaxIdleTime()
    {
        return _endp.getMaxIdleTime();
    }

    public int getMaxBinaryMessageSize()
    {
        return -1;
    }

    public String getProtocol()
    {
        return _protocol;
    }

    /** Dispatches parsed draft-00 frames to text/binary message listeners. */
    static class FrameHandlerD00 implements WebSocketParser.FrameHandler
    {
        final WebSocket _websocket;

        FrameHandlerD00(WebSocket websocket)
        {
            _websocket=websocket;
        }

        public void onFrame(byte flags, byte opcode, Buffer buffer)
        {
            try
            {
                byte[] array=buffer.array();

                if (opcode==0)
                {
                    if (_websocket instanceof WebSocket.OnTextMessage)
                        ((WebSocket.OnTextMessage)_websocket).onMessage(buffer.toString(StringUtil.__UTF8));
                }
                else
                {
                    if (_websocket instanceof WebSocket.OnBinaryMessage)
                        ((WebSocket.OnBinaryMessage)_websocket).onMessage(array,buffer.getIndex(),buffer.length());
                }
            }
            catch(Throwable th)
            {
                LOG.warn(th);
            }
        }

        public void close(int code,String message)
        {
        }
    }

    public boolean isMessageComplete(byte flags)
    {
        return true;
    }

    public byte binaryOpcode()
    {
        return LENGTH_FRAME;
    }

    public byte textOpcode()
    {
        return SENTINEL_FRAME;
    }

    public boolean isControl(byte opcode)
    {
        return false;
    }

    public boolean isText(byte opcode)
    {
        return (opcode&LENGTH_FRAME)==0;
    }

    public boolean isBinary(byte opcode)
    {
        return (opcode&LENGTH_FRAME)!=0;
    }

    public boolean isContinuation(byte opcode)
    {
        return false;
    }

    public boolean isClose(byte opcode)
    {
        return false;
    }

    public boolean isPing(byte opcode)
    {
        return false;
    }

    public boolean isPong(byte opcode)
    {
        return false;
    }

    public List<Extension> getExtensions()
    {
        return Collections.emptyList();
    }

    public byte continuationOpcode()
    {
        return 0;
    }

    public byte finMask()
    {
        return 0;
    }

    public void setAllowFrameFragmentation(boolean allowFragmentation)
    {
    }

    public boolean isAllowFrameFragmentation()
    {
        return false;
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonShortcuts;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Factory;
import com.intellij.util.IconUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
/**
 * Builds add / remove / move-up / move-down actions for a {@link JList} and
 * hands each one to a subclass-defined sink via
 * {@link #addActionDescription(ActionDescription)} (e.g. a {@link DefaultActionGroup},
 * see {@link #create}).
 *
 * @param <T> list element type
 * @author dyoma
 */
public abstract class ReorderableListController <T> {
  private final JList myList;
  private static final Icon REMOVE_ICON = IconUtil.getRemoveIcon();

  protected ReorderableListController(final JList list) {
    myList = list;
  }

  public JList getList() {
    return myList;
  }

  public RemoveActionDescription addRemoveAction(final String actionName) {
    final RemoveActionDescription description = new RemoveActionDescription(actionName);
    addActionDescription(description);
    return description;
  }

  // Sink for created actions; subclasses decide where they are registered.
  protected abstract void addActionDescription(ActionDescription description);

  public AddActionDescription addAddAction(final String actionName, final Factory<? extends T> creator, final boolean createShortcut) {
    final AddActionDescription description = new AddActionDescription(actionName, creator, createShortcut);
    addActionDescription(description);
    return description;
  }

  public AddMultipleActionDescription addAddMultipleAction(final String actionName, final Factory<? extends Collection<T>> creator, final boolean createShortcut) {
    final AddMultipleActionDescription description = new AddMultipleActionDescription(actionName, creator, createShortcut);
    addActionDescription(description);
    return description;
  }

  public void addMoveUpAction() {
    addAction(new AnAction(UIBundle.message("move.up.action.name"), null, IconUtil.getMoveUpIcon()) {
      @Override
      public void actionPerformed(@NotNull final AnActionEvent e) {
        ListUtil.moveSelectedItemsUp(myList);
      }

      @Override
      public void update(@NotNull final AnActionEvent e) {
        e.getPresentation().setEnabled(ListUtil.canMoveSelectedItemsUp(myList));
      }
    });
  }

  public void addMoveDownAction() {
    addAction(new AnAction(UIBundle.message("move.down.action.name"), null, AllIcons.Actions.MoveDown) {
      @Override
      public void actionPerformed(@NotNull final AnActionEvent e) {
        ListUtil.moveSelectedItemsDown(myList);
      }

      @Override
      public void update(@NotNull final AnActionEvent e) {
        e.getPresentation().setEnabled(ListUtil.canMoveSelectedItemsDown(myList));
      }
    });
  }

  public void addAction(final AnAction action) {
    addActionDescription(new FixedActionDescription(action));
  }

  // Inserts the element (sorted insert for SortedListModel, append otherwise)
  // and moves the selection onto it.
  private void handleNewElement(final T element) {
    final ListModel listModel = myList.getModel();
    if (listModel instanceof SortedListModel) {
      ((SortedListModel<T>)listModel).add(element);
    }
    else {
      ((DefaultListModel)listModel).addElement(element);
    }
    myList.clearSelection();
    ScrollingUtil.selectItem(myList, element);
  }

  /** Controller whose created actions are added straight into {@code actionGroup}. */
  public static <T> ReorderableListController<T> create(final JList list, final DefaultActionGroup actionGroup) {
    return new ReorderableListController<T>(list) {
      @Override
      protected void addActionDescription(final ActionDescription description) {
        actionGroup.add(description.createAction(list));
      }
    };
  }

  protected static abstract class ActionDescription {
    public abstract AnAction createAction(JComponent component);
  }

  /** Callback invoked after an action ran, receiving the change it produced. */
  public interface ActionNotification <T> {
    void afterActionPerformed(T change);
  }

  /**
   * Action description with post-run notification support and an optional
   * "show text in toolbar" presentation.
   *
   * @param <V> type of the change reported to post handlers
   */
  public static abstract class CustomActionDescription <V> extends ActionDescription {
    private final ArrayList<ActionNotification<V>> myPostHandlers = new ArrayList<>(1);
    private boolean myShowText = false;

    public void addPostHandler(final ActionNotification<V> runnable) {
      myPostHandlers.add(runnable);
    }

    protected void runPostHandlers(final V change) {
      for (final ActionNotification<V> runnable : myPostHandlers) {
        runnable.afterActionPerformed(change);
      }
    }

    @Override
    public abstract CustomActionDescription.BaseAction createAction(JComponent component);

    BaseAction createAction(final ActionBehaviour behaviour) {
      return myShowText ?
             new ActionWithText(this, getActionName(), null, getActionIcon(), behaviour) :
             new BaseAction(this, getActionName(), null, getActionIcon(), behaviour);
    }

    protected abstract Icon getActionIcon();

    protected abstract String getActionName();

    public void setShowText(final boolean showText) {
      myShowText = showText;
    }

    /** Delegates execution/update to an {@link ActionBehaviour} and fires post handlers. */
    protected static class BaseAction<V> extends DumbAwareAction {
      private final ActionBehaviour<? extends V> myBehaviour;
      private final CustomActionDescription<? super V> myCustomActionDescription;

      public BaseAction(final CustomActionDescription<? super V> customActionDescription,
                        final String text, final String description, final Icon icon, final ActionBehaviour<? extends V> behaviour) {
        super(text, description, icon);
        myBehaviour = behaviour;
        this.myCustomActionDescription = customActionDescription;
      }

      @Override
      public void actionPerformed(@NotNull final AnActionEvent e) {
        // A null change means nothing happened: post handlers are skipped.
        final V change = myBehaviour.performAction(e);
        if (change == null) return;
        myCustomActionDescription.runPostHandlers(change);
      }

      @Override
      public void update(@NotNull final AnActionEvent e) {
        myBehaviour.updateAction(e);
      }
    }

    private static class ActionWithText<V> extends BaseAction {
      ActionWithText(final CustomActionDescription<? super V> customActionDescription, final String text,
                     final String description,
                     final Icon icon,
                     final ActionBehaviour<? extends V> behaviour) {
        super(customActionDescription, text, description, icon, behaviour);
      }

      @Override
      public boolean displayTextInToolbar() {
        return true;
      }
    }
  }

  interface ActionBehaviour<T> {
    T performAction(@NotNull AnActionEvent e);
    void updateAction(@NotNull AnActionEvent e);
  }

  /** Removes the selected items, optionally gated by a confirmation condition. */
  public class RemoveActionDescription extends CustomActionDescription<List<T>> {
    private final String myActionName;
    private Condition<? super List<T>> myConfirmation;
    private Condition<? super T> myEnableCondition;

    public RemoveActionDescription(final String actionName) {
      myActionName = actionName;
    }

    @Override
    public BaseAction createAction(final JComponent component) {
      final ActionBehaviour<List<T>> behaviour = new ActionBehaviour<List<T>>() {
        @Override
        public List<T> performAction(@NotNull final AnActionEvent e) {
          // Confirmation veto: report an empty change so post handlers are skipped.
          if (myConfirmation != null && !myConfirmation.value((List<T>)Arrays.asList(myList.getSelectedValues()))) {
            return Collections.emptyList();
          }
          return ListUtil.removeSelectedItems(myList, myEnableCondition);
        }

        @Override
        public void updateAction(@NotNull final AnActionEvent e) {
          e.getPresentation().setEnabled(ListUtil.canRemoveSelectedItems(myList, myEnableCondition));
        }
      };
      final BaseAction action = createAction(behaviour);
      action.registerCustomShortcutSet(CommonShortcuts.getDelete(), component);
      return action;
    }

    @Override
    protected Icon getActionIcon() {
      return REMOVE_ICON;
    }

    @Override
    protected String getActionName() {
      return myActionName;
    }

    public void setConfirmation(final Condition<? super List<T>> confirmation) {
      myConfirmation = confirmation;
    }

    public void setEnableCondition(final Condition<? super T> enableCondition) {
      myEnableCondition = enableCondition;
    }

    public JList getList() {
      return myList;
    }
  }

  /** Common machinery for add actions: a factory produces the value(s) to insert. */
  public abstract static class AddActionDescriptionBase<V> extends CustomActionDescription<V> {
    private final String myActionDescription;
    private final Factory<? extends V> myAddHandler;
    private final boolean myCreateShortcut;
    private Icon myIcon = IconUtil.getAddIcon();

    public AddActionDescriptionBase(final String actionDescription, final Factory<? extends V> addHandler, final boolean createShortcut) {
      myActionDescription = actionDescription;
      myAddHandler = addHandler;
      myCreateShortcut = createShortcut;
    }

    @Override
    public BaseAction createAction(final JComponent component) {
      final ActionBehaviour<V> behaviour = new ActionBehaviour<V>() {
        @Override
        public V performAction(@NotNull final AnActionEvent e) {
          return addInternal(myAddHandler.create());
        }

        @Override
        public void updateAction(@NotNull final AnActionEvent e) {}
      };
      final BaseAction action = createAction(behaviour);
      if (myCreateShortcut) {
        action.registerCustomShortcutSet(CommonShortcuts.INSERT, component);
      }
      return action;
    }

    // Inserts the created value(s) into the list model; returns the change (or null).
    @Nullable
    protected abstract V addInternal(final V v);

    @Override
    public Icon getActionIcon() {
      return myIcon;
    }

    @Override
    public String getActionName() {
      return myActionDescription;
    }

    public void setIcon(final Icon icon) {
      myIcon = icon;
    }
  }

  public class AddActionDescription extends AddActionDescriptionBase<T> {
    public AddActionDescription(final String actionDescription, final Factory<? extends T> addHandler, final boolean createShortcut) {
      super(actionDescription, addHandler, createShortcut);
    }

    @Override
    protected T addInternal(final T t) {
      if (t != null) {
        handleNewElement(t);
      }
      return t;
    }
  }

  public class AddMultipleActionDescription extends AddActionDescriptionBase<Collection<T>> {
    public AddMultipleActionDescription(final String actionDescription, final Factory<? extends Collection<T>> addHandler, final boolean createShortcut) {
      super(actionDescription, addHandler, createShortcut);
    }

    @Override
    protected Collection<T> addInternal(final Collection<T> t) {
      if (t != null) {
        for (T element : t) {
          handleNewElement(element);
        }
      }
      return t;
    }
  }

  /** Wraps a pre-built action so it can pass through the description sink. */
  private static class FixedActionDescription extends ActionDescription {
    private final AnAction myAction;

    FixedActionDescription(final AnAction action) {
      myAction = action;
    }

    @Override
    public AnAction createAction(final JComponent component) {
      return myAction;
    }
  }
}
| |
package de.lessvoid.nifty.controls.scrollbar;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Test the ScrollbarImpl.
*
* @author void
*/
public class ScrollbarImplTest {
private ScrollbarImpl scrollbar = new ScrollbarImpl();
private ScrollbarView view;
private float viewSize = 4.f;
@Before
public void before() {
    // Fresh mock per test; area size 4 and min handle size 1 are stubbed for all
    // tests. replay() is NOT called here — each test records its own
    // expectations first, then calls replay(view) itself.
    view = createMock(ScrollbarView.class);
    expect(view.getAreaSize()).andReturn(4).anyTimes();
    expect(view.getMinHandleSize()).andReturn(1).anyTimes();
}
@After
public void after() {
    // Fails the test if any recorded expectation was not consumed.
    verify(view);
}
@Test
public void testNoEntries() {
    // Binding with world max == view size: handle fills the area, value stays 0.
    view.setHandle(0, 4);
    view.valueChanged(0.f);
    replay(view);
    scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
    assertEquals(0.f, scrollbar.getValue());
}
@Test
public void testMaxEqualView() {
    // World max equals the view size: full-area handle at position 0.
    view.setHandle(0, 4);
    view.valueChanged(0.f);
    replay(view);
    scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
}
@Test
public void testMaxDoubleView() {
    // World max twice the view size: handle covers half the area (2 of 4).
    view.setHandle(0, 2);
    view.valueChanged(0.f);
    replay(view);
    scrollbar.bindToView(view, 0.f, 8.f, viewSize, 1.f, 4.f);
}
@Test
public void testMaxView() {
    // World max 4x the view size: handle shrinks to the minimum size (1).
    view.setHandle(0, 1);
    view.valueChanged(0.f);
    replay(view);
    scrollbar.bindToView(view, 0.f, 16.f, viewSize, 1.f, 4.f);
}
@Test
public void testCurrentValueTooBigValue() {
    // An initial value that cannot be scrolled to (world fits the view) is clamped to 0.
    view.setHandle(0, 4);
    view.valueChanged(0.f);
    replay(view);
    scrollbar.bindToView(view, 1.f, 4.f, viewSize, 1.f, 4.f);
}
@Test
public void testCurrentValueMaximumValue() {
    // Initial value at world max with nothing to scroll is also clamped to 0.
    view.setHandle(0, 4);
    view.valueChanged(0.f);
    replay(view);
    scrollbar.bindToView(view, 4.f, 4.f, viewSize, 1.f, 4.f);
}
@Test
public void testMovingCurrentValue() {
    // Value 8 of world 16 maps the minimum-size handle to the area midpoint.
    view.setHandle(2, 1);
    view.valueChanged(8.f);
    replay(view);
    scrollbar.bindToView(view, 8.f, 16.f, viewSize, 1.f, 4.f);
}
@Test
public void testStepUp() {
    // One button step (size 2) moves the value 0 -> 2 and the handle 0 -> 1.
    view.setHandle(0, 2);
    view.valueChanged(0.f);
    view.setHandle(1, 2);
    view.valueChanged(2.f);
    replay(view);
    scrollbar.bindToView(view, 0.f, 8.f, viewSize, 2.f, 4.f);
    scrollbar.stepUp();
}
@Test
public void testStepUpLimit() {
    // Stepping up at the maximum keeps the handle where it is (no valueChanged).
    view.setHandle(2, 2);
    view.valueChanged(4.f);
    view.setHandle(2, 2);
    replay(view);
    scrollbar.bindToView(view, 8.f, 8.f, viewSize, 1.f, 4.f);
    scrollbar.stepUp();
}
@Test
public void testStepDownLimit() {
    // Stepping down at the minimum keeps the handle where it is (no valueChanged).
    view.setHandle(0, 2);
    view.valueChanged(0.f);
    view.setHandle(0, 2);
    replay(view);
    scrollbar.bindToView(view, 0.f, 8.f, viewSize, 2.f, 4.f);
    scrollbar.stepDown();
}
@Test
public void testStepDown() {
    // One button step (size 2) moves the value 4 -> 2 and the handle 2 -> 1.
    view.setHandle(2, 2);
    view.valueChanged(4.f);
    view.setHandle(1, 2);
    view.valueChanged(2.f);
    replay(view);
    scrollbar.bindToView(view, 4.f, 8.f, viewSize, 2.f, 4.f);
    scrollbar.stepDown();
}
@Test
public void testPageUp() {
    // One page step (size 4) moves the value 0 -> 4 and the handle 0 -> 2.
    view.setHandle(0, 2);
    view.valueChanged(0.f);
    view.setHandle(2, 2);
    view.valueChanged(4.f);
    replay(view);
    scrollbar.bindToView(view, 0.f, 8.f, viewSize, 2.f, 4.f);
    scrollbar.stepPageUp();
}
@Test
public void testPageUpLimit() {
    // Paging up at the maximum keeps the handle in place (no valueChanged).
    view.setHandle(2, 2);
    view.valueChanged(4.f);
    view.setHandle(2, 2);
    replay(view);
    scrollbar.bindToView(view, 8.f, 8.f, viewSize, 1.f, 4.f);
    scrollbar.stepPageUp();
}
@Test
public void testPageDownLimit() {
    // Paging down at the minimum keeps the handle in place (no valueChanged).
    view.setHandle(0, 2);
    view.valueChanged(0.f);
    view.setHandle(0, 2);
    replay(view);
    scrollbar.bindToView(view, 0.f, 8.f, viewSize, 2.f, 4.f);
    scrollbar.stepPageDown();
}
@Test
public void testPageDown() {
// expect: bind at value 4, then stepPageDown() jumps back by one page step
// (4.f) -> handle slot 0, value 0.
view.setHandle(2, 2);
view.valueChanged(4.f);
view.setHandle(0, 2);
view.valueChanged(0.f);
replay(view);
scrollbar.bindToView(view, 4.f, 8.f, viewSize, 2.f, 4.f);
scrollbar.stepPageDown();
}
@Test
public void testSetValue() {
// expect: with world max 4 there is nothing to scroll, so setValue(4)
// leaves the value clamped at 0 (handle re-set, no new valueChanged).
view.setHandle(0, 4);
view.valueChanged(0.f);
view.setHandle(0, 4);
replay(view);
scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
scrollbar.setValue(4.f);
assertEquals(0.f, scrollbar.getValue());
}
@Test
public void testSetValueMinLimit() {
// expect: a negative value is clamped to the minimum 0.
view.setHandle(0, 4);
view.valueChanged(0.f);
view.setHandle(0, 4);
replay(view);
scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
scrollbar.setValue(-4.f);
assertEquals(0.f, scrollbar.getValue());
}
@Test
public void testSetValueMaxLimit() {
// expect: a value far beyond the world max is clamped back; with world max
// 4 and nothing to scroll, the result is 0.
view.setHandle(0, 4);
view.valueChanged(0.f);
view.setHandle(0, 4);
replay(view);
scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
scrollbar.setValue(40.f);
assertEquals(0.f, scrollbar.getValue());
}
@Test
public void testSetup() {
// expect: setup() rebinds everything at once (value 4, world max 8, button
// step 2, page step 4); the view is updated and the getters reflect the
// new configuration.
view.setHandle(0, 4);
view.valueChanged(0.f);
view.setHandle(2, 2);
view.valueChanged(4.f);
replay(view);
scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
scrollbar.setup(4.f, 8.f, viewSize, 2.f, 4.f);
assertEquals(2.f, scrollbar.getButtonStepSize());
assertEquals(4.f, scrollbar.getValue());
assertEquals(4.f, scrollbar.getPageStepSize());
}
@Test
public void testSetMax() {
// expect: growing the world max from 4 to 8 shrinks the handle from size 4
// to size 2, and the getter reports the new max.
view.setHandle(0, 4);
view.valueChanged(0.f);
view.setHandle(0, 2);
replay(view);
scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
scrollbar.setWorldMax(8.f);
assertEquals(8.f, scrollbar.getWorldMax());
}
@Test
public void testSetPageStepSize() {
// expect: a pure setter/getter round-trip with no view interaction at all
// (replay with zero recorded expectations).
replay(view);
scrollbar.setPageStepSize(12.f);
assertEquals(12.f, scrollbar.getPageStepSize());
}
@Test
public void testInteractionCantMove() {
// expect: with a full-span handle (0,4) there is nowhere to scroll, so a
// click at position 3 triggers no further view updates.
view.setHandle(0, 4);
view.valueChanged(0.f);
replay(view);
scrollbar.bindToView(view, 0.f, 4.f, viewSize, 1.f, 4.f);
scrollbar.interactionClick(3);
}
@Test
public void testPageDownInteractionClick() {
// expect: a click at position 3 (past the handle at 0,2) pages the value
// from 0 to 4 and moves the handle to slot 2.
view.setHandle(0, 2);
// Consistency fix: every sibling test passes float literals (0.f / 4.f) to
// valueChanged; the bare int literals 0 and 4 widened silently. Behavior is
// identical, but the mixed style hid the parameter type.
view.valueChanged(0.f);
view.setHandle(2, 2);
view.valueChanged(4.f);
replay(view);
scrollbar.bindToView(view, 0.f, 8.f, viewSize, 1.f, 4.f);
scrollbar.interactionClick(3);
}
@Test
public void testPageUpInteractionClick() {
// expect: a click at position 1 (before the handle at 2,2) pages the value
// from 4 back to 0 and moves the handle to slot 0.
view.setHandle(2, 2);
view.valueChanged(4.f);
view.setHandle(0, 2);
view.valueChanged(0.f);
replay(view);
scrollbar.bindToView(view, 4.f, 8.f, viewSize, 1.f, 4.f);
scrollbar.interactionClick(1);
}
@Test
public void testMoveHandle() {
// expect: click at position 3 grabs the handle (at 2,2 / value 4), then
// dragging to position 0 moves the handle to slot 0 and the value to 0.
view.setHandle(2, 2);
view.valueChanged(4.f);
view.setHandle(0, 2);
view.valueChanged(0.f);
replay(view);
scrollbar.bindToView(view, 4.f, 8.f, viewSize, 1.f, 4.f);
scrollbar.interactionClick(3);
scrollbar.interactionMove(0);
}
}
| |
/*
* Copyright 2011 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.handler.codec.replay;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBufferFactory;
import org.jboss.netty.buffer.ChannelBufferIndexFinder;
import org.jboss.netty.buffer.ChannelBuffers;
/**
 * A {@link ChannelBuffer} wrapper handed to a replaying decoder.  It pretends
 * to be infinitely large: any read that would pass the underlying buffer's
 * writer index throws the shared {@link #REPLAY} error instead of failing,
 * which the decoder framework catches in order to retry the decode once more
 * data has arrived.  Write and most view/conversion operations are rejected
 * with {@link UnreplayableOperationException} because they cannot be replayed
 * deterministically.
 */
class ReplayingDecoderBuffer implements ChannelBuffer {

    // Pre-allocated sentinel thrown whenever an operation needs bytes that are
    // not available yet.  One shared instance is reused so that no stack trace
    // is filled in on this hot path.
    private static final Error REPLAY = new ReplayError();

    private final ChannelBuffer buffer;

    // Once true, the buffer stops pretending to be unbounded and reports the
    // wrapped buffer's real capacity and readability.
    private boolean terminated;

    // FIX: this public static field was not final, so any code could reassign
    // the shared empty-buffer singleton.  Made final; reads by existing
    // callers are unaffected.
    public static final ReplayingDecoderBuffer EMPTY_BUFFER =
            new ReplayingDecoderBuffer(ChannelBuffers.EMPTY_BUFFER);

    static {
        EMPTY_BUFFER.terminate();
    }

    ReplayingDecoderBuffer(ChannelBuffer buffer) {
        this.buffer = buffer;
    }

    /** Ends the "infinite buffer" illusion; called when no more data can arrive. */
    void terminate() {
        terminated = true;
    }

    public int capacity() {
        if (terminated) {
            return buffer.capacity();
        } else {
            // Still replaying: advertise an effectively unbounded capacity.
            return Integer.MAX_VALUE;
        }
    }

    public boolean isDirect() {
        return buffer.isDirect();
    }

    public boolean hasArray() {
        // Never expose the backing array: callers could read past valid data.
        return false;
    }

    public byte[] array() {
        throw new UnsupportedOperationException();
    }

    public int arrayOffset() {
        throw new UnsupportedOperationException();
    }

    public void clear() {
        throw new UnreplayableOperationException();
    }

    @Override
    public boolean equals(Object obj) {
        // Identity equality only: content comparison may touch unreceived bytes.
        return this == obj;
    }

    public int compareTo(ChannelBuffer buffer) {
        throw new UnreplayableOperationException();
    }

    public ChannelBuffer copy() {
        throw new UnreplayableOperationException();
    }

    public ChannelBuffer copy(int index, int length) {
        checkIndex(index, length);
        return buffer.copy(index, length);
    }

    public void discardReadBytes() {
        throw new UnreplayableOperationException();
    }

    public void ensureWritableBytes(int writableBytes) {
        throw new UnreplayableOperationException();
    }

    public ChannelBuffer duplicate() {
        throw new UnreplayableOperationException();
    }

    // --- Absolute getters: bounds-check against the replay window, then
    // --- delegate to the wrapped buffer.

    public byte getByte(int index) {
        checkIndex(index);
        return buffer.getByte(index);
    }

    public short getUnsignedByte(int index) {
        checkIndex(index);
        return buffer.getUnsignedByte(index);
    }

    public void getBytes(int index, byte[] dst, int dstIndex, int length) {
        checkIndex(index, length);
        buffer.getBytes(index, dst, dstIndex, length);
    }

    public void getBytes(int index, byte[] dst) {
        checkIndex(index, dst.length);
        buffer.getBytes(index, dst);
    }

    public void getBytes(int index, ByteBuffer dst) {
        // Transfer length depends on dst's remaining(), which cannot be
        // replay-checked deterministically.
        throw new UnreplayableOperationException();
    }

    public void getBytes(int index, ChannelBuffer dst, int dstIndex, int length) {
        checkIndex(index, length);
        buffer.getBytes(index, dst, dstIndex, length);
    }

    public void getBytes(int index, ChannelBuffer dst, int length) {
        throw new UnreplayableOperationException();
    }

    public void getBytes(int index, ChannelBuffer dst) {
        throw new UnreplayableOperationException();
    }

    public int getBytes(int index, GatheringByteChannel out, int length)
            throws IOException {
        throw new UnreplayableOperationException();
    }

    public void getBytes(int index, OutputStream out, int length)
            throws IOException {
        throw new UnreplayableOperationException();
    }

    public int getInt(int index) {
        checkIndex(index, 4);
        return buffer.getInt(index);
    }

    public long getUnsignedInt(int index) {
        checkIndex(index, 4);
        return buffer.getUnsignedInt(index);
    }

    public long getLong(int index) {
        checkIndex(index, 8);
        return buffer.getLong(index);
    }

    public int getMedium(int index) {
        checkIndex(index, 3);
        return buffer.getMedium(index);
    }

    public int getUnsignedMedium(int index) {
        checkIndex(index, 3);
        return buffer.getUnsignedMedium(index);
    }

    public short getShort(int index) {
        checkIndex(index, 2);
        return buffer.getShort(index);
    }

    public int getUnsignedShort(int index) {
        checkIndex(index, 2);
        return buffer.getUnsignedShort(index);
    }

    public char getChar(int index) {
        checkIndex(index, 2);
        return buffer.getChar(index);
    }

    public float getFloat(int index) {
        checkIndex(index, 4);
        return buffer.getFloat(index);
    }

    public double getDouble(int index) {
        checkIndex(index, 8);
        return buffer.getDouble(index);
    }

    @Override
    public int hashCode() {
        throw new UnreplayableOperationException();
    }

    // --- Search operations: a miss means the terminator may simply not have
    // --- arrived yet, so a negative result triggers a replay.

    public int indexOf(int fromIndex, int toIndex, byte value) {
        int endIndex = buffer.indexOf(fromIndex, toIndex, value);
        if (endIndex < 0) {
            throw REPLAY;
        }
        return endIndex;
    }

    public int indexOf(int fromIndex, int toIndex,
            ChannelBufferIndexFinder indexFinder) {
        int endIndex = buffer.indexOf(fromIndex, toIndex, indexFinder);
        if (endIndex < 0) {
            throw REPLAY;
        }
        return endIndex;
    }

    public int bytesBefore(byte value) {
        int bytes = buffer.bytesBefore(value);
        if (bytes < 0) {
            throw REPLAY;
        }
        return bytes;
    }

    public int bytesBefore(ChannelBufferIndexFinder indexFinder) {
        int bytes = buffer.bytesBefore(indexFinder);
        if (bytes < 0) {
            throw REPLAY;
        }
        return bytes;
    }

    public int bytesBefore(int length, byte value) {
        checkReadableBytes(length);
        int bytes = buffer.bytesBefore(length, value);
        if (bytes < 0) {
            throw REPLAY;
        }
        return bytes;
    }

    public int bytesBefore(int length, ChannelBufferIndexFinder indexFinder) {
        checkReadableBytes(length);
        int bytes = buffer.bytesBefore(length, indexFinder);
        if (bytes < 0) {
            throw REPLAY;
        }
        return bytes;
    }

    public int bytesBefore(int index, int length, byte value) {
        int bytes = buffer.bytesBefore(index, length, value);
        if (bytes < 0) {
            throw REPLAY;
        }
        return bytes;
    }

    public int bytesBefore(int index, int length,
            ChannelBufferIndexFinder indexFinder) {
        int bytes = buffer.bytesBefore(index, length, indexFinder);
        if (bytes < 0) {
            throw REPLAY;
        }
        return bytes;
    }

    public void markReaderIndex() {
        buffer.markReaderIndex();
    }

    public void markWriterIndex() {
        throw new UnreplayableOperationException();
    }

    public ChannelBufferFactory factory() {
        return buffer.factory();
    }

    public ByteOrder order() {
        return buffer.order();
    }

    public boolean readable() {
        // While replaying, more data can always arrive, so report readable.
        return terminated? buffer.readable() : true;
    }

    public int readableBytes() {
        if (terminated) {
            return buffer.readableBytes();
        } else {
            // Pretend everything up to Integer.MAX_VALUE is still to come.
            return Integer.MAX_VALUE - buffer.readerIndex();
        }
    }

    // --- Relative reads: verify enough bytes have arrived, then delegate.

    public byte readByte() {
        checkReadableBytes(1);
        return buffer.readByte();
    }

    public short readUnsignedByte() {
        checkReadableBytes(1);
        return buffer.readUnsignedByte();
    }

    public void readBytes(byte[] dst, int dstIndex, int length) {
        checkReadableBytes(length);
        buffer.readBytes(dst, dstIndex, length);
    }

    public void readBytes(byte[] dst) {
        checkReadableBytes(dst.length);
        buffer.readBytes(dst);
    }

    public void readBytes(ByteBuffer dst) {
        throw new UnreplayableOperationException();
    }

    public void readBytes(ChannelBuffer dst, int dstIndex, int length) {
        checkReadableBytes(length);
        buffer.readBytes(dst, dstIndex, length);
    }

    public void readBytes(ChannelBuffer dst, int length) {
        throw new UnreplayableOperationException();
    }

    public void readBytes(ChannelBuffer dst) {
        throw new UnreplayableOperationException();
    }

    @Deprecated
    public ChannelBuffer readBytes(ChannelBufferIndexFinder endIndexFinder) {
        int endIndex = buffer.indexOf(buffer.readerIndex(), buffer.writerIndex(), endIndexFinder);
        if (endIndex < 0) {
            throw REPLAY;
        }
        return buffer.readBytes(endIndex - buffer.readerIndex());
    }

    public int readBytes(GatheringByteChannel out, int length)
            throws IOException {
        throw new UnreplayableOperationException();
    }

    public ChannelBuffer readBytes(int length) {
        checkReadableBytes(length);
        return buffer.readBytes(length);
    }

    @Deprecated
    public ChannelBuffer readSlice(
            ChannelBufferIndexFinder endIndexFinder) {
        int endIndex = buffer.indexOf(buffer.readerIndex(), buffer.writerIndex(), endIndexFinder);
        if (endIndex < 0) {
            throw REPLAY;
        }
        return buffer.readSlice(endIndex - buffer.readerIndex());
    }

    public ChannelBuffer readSlice(int length) {
        checkReadableBytes(length);
        return buffer.readSlice(length);
    }

    public void readBytes(OutputStream out, int length) throws IOException {
        throw new UnreplayableOperationException();
    }

    public int readerIndex() {
        return buffer.readerIndex();
    }

    public void readerIndex(int readerIndex) {
        buffer.readerIndex(readerIndex);
    }

    public int readInt() {
        checkReadableBytes(4);
        return buffer.readInt();
    }

    public long readUnsignedInt() {
        checkReadableBytes(4);
        return buffer.readUnsignedInt();
    }

    public long readLong() {
        checkReadableBytes(8);
        return buffer.readLong();
    }

    public int readMedium() {
        checkReadableBytes(3);
        return buffer.readMedium();
    }

    public int readUnsignedMedium() {
        checkReadableBytes(3);
        return buffer.readUnsignedMedium();
    }

    public short readShort() {
        checkReadableBytes(2);
        return buffer.readShort();
    }

    public int readUnsignedShort() {
        checkReadableBytes(2);
        return buffer.readUnsignedShort();
    }

    public char readChar() {
        checkReadableBytes(2);
        return buffer.readChar();
    }

    public float readFloat() {
        checkReadableBytes(4);
        return buffer.readFloat();
    }

    public double readDouble() {
        checkReadableBytes(8);
        return buffer.readDouble();
    }

    public void resetReaderIndex() {
        buffer.resetReaderIndex();
    }

    public void resetWriterIndex() {
        throw new UnreplayableOperationException();
    }

    // --- All mutating setters are unreplayable by design.

    public void setByte(int index, int value) {
        throw new UnreplayableOperationException();
    }

    public void setBytes(int index, byte[] src, int srcIndex, int length) {
        throw new UnreplayableOperationException();
    }

    public void setBytes(int index, byte[] src) {
        throw new UnreplayableOperationException();
    }

    public void setBytes(int index, ByteBuffer src) {
        throw new UnreplayableOperationException();
    }

    public void setBytes(int index, ChannelBuffer src, int srcIndex, int length) {
        throw new UnreplayableOperationException();
    }

    public void setBytes(int index, ChannelBuffer src, int length) {
        throw new UnreplayableOperationException();
    }

    public void setBytes(int index, ChannelBuffer src) {
        throw new UnreplayableOperationException();
    }

    public int setBytes(int index, InputStream in, int length)
            throws IOException {
        throw new UnreplayableOperationException();
    }

    public void setZero(int index, int length) {
        throw new UnreplayableOperationException();
    }

    public int setBytes(int index, ScatteringByteChannel in, int length)
            throws IOException {
        throw new UnreplayableOperationException();
    }

    public void setIndex(int readerIndex, int writerIndex) {
        throw new UnreplayableOperationException();
    }

    public void setInt(int index, int value) {
        throw new UnreplayableOperationException();
    }

    public void setLong(int index, long value) {
        throw new UnreplayableOperationException();
    }

    public void setMedium(int index, int value) {
        throw new UnreplayableOperationException();
    }

    public void setShort(int index, int value) {
        throw new UnreplayableOperationException();
    }

    public void setChar(int index, int value) {
        throw new UnreplayableOperationException();
    }

    public void setFloat(int index, float value) {
        throw new UnreplayableOperationException();
    }

    public void setDouble(int index, double value) {
        throw new UnreplayableOperationException();
    }

    @Deprecated
    public int skipBytes(ChannelBufferIndexFinder firstIndexFinder) {
        int oldReaderIndex = buffer.readerIndex();
        int newReaderIndex = buffer.indexOf(oldReaderIndex, buffer.writerIndex(), firstIndexFinder);
        if (newReaderIndex < 0) {
            throw REPLAY;
        }
        buffer.readerIndex(newReaderIndex);
        return newReaderIndex - oldReaderIndex;
    }

    public void skipBytes(int length) {
        checkReadableBytes(length);
        buffer.skipBytes(length);
    }

    public ChannelBuffer slice() {
        throw new UnreplayableOperationException();
    }

    public ChannelBuffer slice(int index, int length) {
        checkIndex(index, length);
        return buffer.slice(index, length);
    }

    public ByteBuffer toByteBuffer() {
        throw new UnreplayableOperationException();
    }

    public ByteBuffer toByteBuffer(int index, int length) {
        checkIndex(index, length);
        return buffer.toByteBuffer(index, length);
    }

    public ByteBuffer[] toByteBuffers() {
        throw new UnreplayableOperationException();
    }

    public ByteBuffer[] toByteBuffers(int index, int length) {
        checkIndex(index, length);
        return buffer.toByteBuffers(index, length);
    }

    public String toString(int index, int length, Charset charset) {
        checkIndex(index, length);
        return buffer.toString(index, length, charset);
    }

    public String toString(Charset charsetName) {
        throw new UnreplayableOperationException();
    }

    @Deprecated
    public String toString(int index, int length, String charsetName) {
        checkIndex(index, length);
        return buffer.toString(index, length, charsetName);
    }

    @Deprecated
    public String toString(
            int index, int length, String charsetName,
            ChannelBufferIndexFinder terminatorFinder) {
        checkIndex(index, length);
        return buffer.toString(index, length, charsetName, terminatorFinder);
    }

    @Deprecated
    public String toString(String charsetName) {
        throw new UnreplayableOperationException();
    }

    @Deprecated
    public String toString(
            String charsetName, ChannelBufferIndexFinder terminatorFinder) {
        throw new UnreplayableOperationException();
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + '(' +
               "ridx=" +
               readerIndex() +
               ", " +
               "widx=" +
               writerIndex() +
               ')';
    }

    // --- Writes are always rejected: this buffer only replays reads.

    public boolean writable() {
        return false;
    }

    public int writableBytes() {
        return 0;
    }

    public void writeByte(int value) {
        throw new UnreplayableOperationException();
    }

    public void writeBytes(byte[] src, int srcIndex, int length) {
        throw new UnreplayableOperationException();
    }

    public void writeBytes(byte[] src) {
        throw new UnreplayableOperationException();
    }

    public void writeBytes(ByteBuffer src) {
        throw new UnreplayableOperationException();
    }

    public void writeBytes(ChannelBuffer src, int srcIndex, int length) {
        throw new UnreplayableOperationException();
    }

    public void writeBytes(ChannelBuffer src, int length) {
        throw new UnreplayableOperationException();
    }

    public void writeBytes(ChannelBuffer src) {
        throw new UnreplayableOperationException();
    }

    public int writeBytes(InputStream in, int length) throws IOException {
        throw new UnreplayableOperationException();
    }

    public int writeBytes(ScatteringByteChannel in, int length)
            throws IOException {
        throw new UnreplayableOperationException();
    }

    public void writeInt(int value) {
        throw new UnreplayableOperationException();
    }

    public void writeLong(long value) {
        throw new UnreplayableOperationException();
    }

    public void writeMedium(int value) {
        throw new UnreplayableOperationException();
    }

    public void writeZero(int length) {
        throw new UnreplayableOperationException();
    }

    public int writerIndex() {
        return buffer.writerIndex();
    }

    public void writerIndex(int writerIndex) {
        throw new UnreplayableOperationException();
    }

    public void writeShort(int value) {
        throw new UnreplayableOperationException();
    }

    public void writeChar(int value) {
        throw new UnreplayableOperationException();
    }

    public void writeFloat(float value) {
        throw new UnreplayableOperationException();
    }

    public void writeDouble(double value) {
        throw new UnreplayableOperationException();
    }

    /** Replays unless {@code index} is within the received data. */
    private void checkIndex(int index) {
        if (index > buffer.writerIndex()) {
            throw REPLAY;
        }
    }

    /** Replays unless {@code [index, index + length)} is within the received data. */
    private void checkIndex(int index, int length) {
        // FIX: written as a subtraction to avoid int overflow.  The original
        // "index + length > buffer.writerIndex()" wraps negative for large
        // index + length, wrongly passing the check and letting the access
        // fail inside the wrapped buffer instead of triggering a replay.
        if (length > buffer.writerIndex() - index) {
            throw REPLAY;
        }
    }

    /** Replays unless at least {@code readableBytes} bytes have arrived. */
    private void checkReadableBytes(int readableBytes) {
        if (buffer.readableBytes() < readableBytes) {
            throw REPLAY;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker;
import java.net.URI;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import org.apache.activemq.broker.region.Destination;
import org.apache.activemq.broker.region.MessageReference;
import org.apache.activemq.broker.region.Subscription;
import org.apache.activemq.broker.region.virtual.VirtualDestination;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.BrokerId;
import org.apache.activemq.command.BrokerInfo;
import org.apache.activemq.command.ConnectionInfo;
import org.apache.activemq.command.ConsumerControl;
import org.apache.activemq.command.ConsumerInfo;
import org.apache.activemq.command.DestinationInfo;
import org.apache.activemq.command.Message;
import org.apache.activemq.command.MessageAck;
import org.apache.activemq.command.MessageDispatch;
import org.apache.activemq.command.MessageDispatchNotification;
import org.apache.activemq.command.MessagePull;
import org.apache.activemq.command.ProducerInfo;
import org.apache.activemq.command.RemoveSubscriptionInfo;
import org.apache.activemq.command.Response;
import org.apache.activemq.command.SessionInfo;
import org.apache.activemq.command.TransactionId;
import org.apache.activemq.store.PListStore;
import org.apache.activemq.thread.Scheduler;
import org.apache.activemq.usage.Usage;
/**
 * A {@link Broker} implementation installed once a broker has been stopped:
 * nearly every operation throws a {@link BrokerStoppedException} carrying the
 * message supplied at construction.  The few deliberate exceptions to that
 * pattern — the empty destination accessors, {@link #getAdaptor},
 * {@link #isStopped} — are commented inline.
 */
public class ErrorBroker implements Broker {
    // Explanation reported to every caller of a stopped-broker operation.
    private final String message;
    public ErrorBroker(String message) {
        this.message = message;
    }
    // Intentionally non-throwing: a stopped broker simply has no destinations.
    @Override
    public Map<ActiveMQDestination, Destination> getDestinationMap() {
        return Collections.emptyMap();
    }
    // Intentionally non-throwing: see getDestinationMap().
    @Override
    public Map<ActiveMQDestination, Destination> getDestinationMap(ActiveMQDestination destination) {
        return Collections.emptyMap();
    }
    // Intentionally non-throwing: see getDestinationMap().
    @Override
    public Set<Destination> getDestinations(ActiveMQDestination destination) {
        return Collections.emptySet();
    }
    // Intentionally non-throwing: adaptor lookup must still work on a stopped
    // broker so callers can discover what they are talking to.
    @Override
    public Broker getAdaptor(Class<?> type) {
        return type.isInstance(this) ? this : null;
    }
    // --- Everything below uniformly signals "broker stopped". ---
    @Override
    public BrokerId getBrokerId() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public String getBrokerName() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void addConnection(ConnectionContext context, ConnectionInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeConnection(ConnectionContext context, ConnectionInfo info, Throwable error) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void addSession(ConnectionContext context, SessionInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeSession(ConnectionContext context, SessionInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void addProducer(ConnectionContext context, ProducerInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeProducer(ConnectionContext context, ProducerInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public Connection[] getClients() throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public ActiveMQDestination[] getDestinations() throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public TransactionId[] getPreparedTransactions(ConnectionContext context) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void beginTransaction(ConnectionContext context, TransactionId xid) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public int prepareTransaction(ConnectionContext context, TransactionId xid) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void rollbackTransaction(ConnectionContext context, TransactionId xid) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void commitTransaction(ConnectionContext context, TransactionId xid, boolean onePhase) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void forgetTransaction(ConnectionContext context, TransactionId transactionId) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public Destination addDestination(ConnectionContext context, ActiveMQDestination destination,boolean flag) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeDestination(ConnectionContext context, ActiveMQDestination destination, long timeout) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public Subscription addConsumer(ConnectionContext context, ConsumerInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeConsumer(ConnectionContext context, ConsumerInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeSubscription(ConnectionContext context, RemoveSubscriptionInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void send(ProducerBrokerExchange producerExchange, Message message) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void acknowledge(ConsumerBrokerExchange consumerExchange, MessageAck ack) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void gc() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void start() throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void stop() throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void addBroker(Connection connection, BrokerInfo info) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeBroker(Connection connection, BrokerInfo info) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public BrokerInfo[] getPeerBrokerInfos() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void preProcessDispatch(MessageDispatch messageDispatch) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void postProcessDispatch(MessageDispatch messageDispatch) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void processDispatchNotification(MessageDispatchNotification messageDispatchNotification) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    // Intentionally non-throwing: this broker exists precisely because the
    // real broker is stopped.
    @Override
    public boolean isStopped() {
        return true;
    }
    @Override
    public Set<ActiveMQDestination> getDurableDestinations() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void addDestinationInfo(ConnectionContext context, DestinationInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void removeDestinationInfo(ConnectionContext context, DestinationInfo info) throws Exception {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public boolean isFaultTolerantConfiguration() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public ConnectionContext getAdminConnectionContext() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void setAdminConnectionContext(ConnectionContext adminConnectionContext) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public Response messagePull(ConnectionContext context, MessagePull pull) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public PListStore getTempDataStore() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public URI getVmConnectorURI() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void brokerServiceStarted() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public BrokerService getBrokerService() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public boolean isExpired(MessageReference messageReference) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void messageExpired(ConnectionContext context, MessageReference message, Subscription subscription) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public boolean sendToDeadLetterQueue(ConnectionContext context, MessageReference messageReference,
                                         Subscription subscription, Throwable poisonCause) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public Broker getRoot() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public long getBrokerSequenceId() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void fastProducer(ConnectionContext context,ProducerInfo producerInfo,ActiveMQDestination destination) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void isFull(ConnectionContext context,Destination destination, Usage<?> usage) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void messageConsumed(ConnectionContext context,MessageReference messageReference) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void messageDelivered(ConnectionContext context,MessageReference messageReference) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void messageDiscarded(ConnectionContext context, Subscription sub, MessageReference messageReference) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void slowConsumer(ConnectionContext context, Destination destination,Subscription subs) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void virtualDestinationAdded(ConnectionContext context,
                                        VirtualDestination virtualDestination) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void virtualDestinationRemoved(ConnectionContext context,
                                          VirtualDestination virtualDestination) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void nowMasterBroker() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void processConsumerControl(ConsumerBrokerExchange consumerExchange,
                                       ConsumerControl control) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void reapplyInterceptor() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public Scheduler getScheduler() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public ThreadPoolExecutor getExecutor() {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void networkBridgeStarted(BrokerInfo brokerInfo, boolean createdByDuplex, String remoteIp) {
        throw new BrokerStoppedException(this.message);
    }
    @Override
    public void networkBridgeStopped(BrokerInfo brokerInfo) {
        throw new BrokerStoppedException(this.message);
    }
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.sesame.marketdata.scenarios;
/**
* <p/>
* Please see distribution for license.
*/
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve;
import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderDiscount;
import com.opengamma.sesame.MulticurveBundle;
import com.opengamma.sesame.marketdata.MarketDataId;
import com.opengamma.sesame.marketdata.MulticurveId;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableConstructor;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* Filter that matches curves in a {@link MulticurveBundle} based on whether they are curves used for forecasting.
*/
@BeanDefinition
public final class AllForecastingCurvesFilter implements MarketDataFilter, ImmutableBean {
@ImmutableConstructor
public AllForecastingCurvesFilter() {
    // Stateless filter: no fields to initialize.
}
@Override
public Set<MulticurveMatchDetails> apply(MarketDataId<?> marketDataId) {
    // Resolve the curve configuration behind the id and emit one match per
    // forecasting curve name it declares.
    MulticurveId multicurveId = (MulticurveId) marketDataId;
    MulticurveMetadata curveMetadata =
        MulticurveMetadata.forConfiguration(multicurveId.resolveConfig());
    ImmutableSet.Builder<MulticurveMatchDetails> matches = ImmutableSet.builder();
    for (String forecastingCurveName : curveMetadata.getForecastingCurveNames()) {
        matches.add(StandardMatchDetails.multicurve(forecastingCurveName));
    }
    return matches.build();
}
@Override
public Set<MulticurveMatchDetails> apply(MarketDataId<?> marketDataId, Object marketData) {
    // Inspect the built bundle itself: every ibor-forward and overnight-forward
    // curve in the multicurve provider counts as a forecasting curve.
    MulticurveProviderDiscount provider =
        ((MulticurveBundle) marketData).getMulticurveProvider();
    Set<MulticurveMatchDetails> matches = Sets.newHashSet();
    addCurveMatches(matches, provider.getForwardIborCurves().values());
    addCurveMatches(matches, provider.getForwardONCurves().values());
    return matches;
}

/**
 * Adds one match per curve in {@code curves} to {@code matches}.
 */
private static void addCurveMatches(Set<MulticurveMatchDetails> matches,
                                    Iterable<YieldAndDiscountCurve> curves) {
    for (YieldAndDiscountCurve curve : curves) {
        matches.add(StandardMatchDetails.multicurve(curve.getName()));
    }
}
@Override
public Class<?> getMarketDataType() {
return MulticurveBundle.class;
}
@Override
public Class<MulticurveId> getMarketDataIdType() {
return MulticurveId.class;
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code AllForecastingCurvesFilter}.
* @return the meta-bean, not null
*/
public static AllForecastingCurvesFilter.Meta meta() {
return AllForecastingCurvesFilter.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(AllForecastingCurvesFilter.Meta.INSTANCE);
}
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static AllForecastingCurvesFilter.Builder builder() {
return new AllForecastingCurvesFilter.Builder();
}
@Override
public AllForecastingCurvesFilter.Meta metaBean() {
return AllForecastingCurvesFilter.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
return true;
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(32);
buf.append("AllForecastingCurvesFilter{");
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code AllForecastingCurvesFilter}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, null);
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
public AllForecastingCurvesFilter.Builder builder() {
return new AllForecastingCurvesFilter.Builder();
}
@Override
public Class<? extends AllForecastingCurvesFilter> beanType() {
return AllForecastingCurvesFilter.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code AllForecastingCurvesFilter}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<AllForecastingCurvesFilter> {
/**
* Restricted constructor.
*/
private Builder() {
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
throw new NoSuchElementException("Unknown property: " + propertyName);
}
@Override
public Builder set(String propertyName, Object newValue) {
throw new NoSuchElementException("Unknown property: " + propertyName);
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public AllForecastingCurvesFilter build() {
return new AllForecastingCurvesFilter();
}
//-----------------------------------------------------------------------
@Override
public String toString() {
return "AllForecastingCurvesFilter.Builder{}";
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.jvm.java;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleParams;
import com.facebook.buck.core.rules.common.BuildRules;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.util.immutables.BuckStyleValueWithBuilder;
import com.facebook.buck.jvm.core.HasJavaAbi;
import com.facebook.buck.jvm.core.JavaAbis;
import com.facebook.buck.jvm.java.JarBuildStepsFactory.JavaDependencyInfo;
import com.facebook.buck.util.stream.RichStream;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import java.util.Objects;
import org.immutables.builder.Builder;
import org.immutables.value.Value;
/**
 * Computes the sets of build rules and source paths that make up the various
 * classpaths (full, ABI, source-only ABI) used when compiling a Java library.
 * Expensive derived values are memoized via {@code @Value.Lazy}.
 */
@BuckStyleValueWithBuilder
abstract class DefaultJavaLibraryClasspaths {

  @Builder.Parameter
  abstract ActionGraphBuilder getActionGraphBuilder();

  abstract BuildRuleParams getBuildRuleParams();

  abstract JavaLibraryDeps getDeps();

  // FULL compiles against full jars; ABI / SOURCE_ONLY_ABI compile against ABI jars.
  @Value.Default
  public CompileAgainstLibraryType getCompileAgainstLibraryType() {
    return CompileAgainstLibraryType.FULL;
  }

  @Value.Default
  public boolean shouldCreateSourceOnlyAbi() {
    return false;
  }

  /** First-order deps that get packaged; empty when building a source-only ABI. */
  @Value.Lazy
  public ImmutableSortedSet<BuildRule> getFirstOrderPackageableDeps() {
    if (shouldCreateSourceOnlyAbi()) {
      // Nothing is packaged based on a source ABI rule
      return ImmutableSortedSet.of();
    }
    return getDeps().getDeps();
  }

  /** Output paths of the compile-time classpath rules; rules without output are skipped. */
  @Value.Lazy
  protected ImmutableSortedSet<SourcePath> getCompileTimeClasspathSourcePaths() {
    return getCompileTimeClasspathDeps().stream()
        .map(BuildRule::getSourcePathToOutput)
        .filter(Objects::nonNull)
        .collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural()));
  }

  /** Selects full or ABI classpath deps according to the compile-against mode. */
  ImmutableSortedSet<BuildRule> getCompileTimeClasspathDeps() {
    ImmutableSortedSet<BuildRule> buildRules;
    switch (getCompileAgainstLibraryType()) {
      case FULL:
        buildRules = getCompileTimeClasspathFullDeps();
        break;
      case ABI:
      case SOURCE_ONLY_ABI:
        buildRules = getCompileTimeClasspathAbiDeps();
        break;
      default:
        throw new IllegalStateException();
    }
    return buildRules;
  }

  // Only rules that expose a Java ABI can contribute to the compile classpath.
  @Value.Lazy
  protected ImmutableSortedSet<BuildRule> getCompileTimeClasspathFullDeps() {
    return getCompileTimeClasspathUnfilteredFullDeps().stream()
        .filter(dep -> dep instanceof HasJavaAbi)
        .collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural()));
  }

  /** Maps the full classpath deps to their ABI rules. */
  @Value.Lazy
  protected ImmutableSortedSet<BuildRule> getCompileTimeClasspathAbiDeps() {
    if (getCompileAgainstLibraryType() == CompileAgainstLibraryType.SOURCE_ONLY_ABI) {
      return getCompileTimeClasspathSourceOnlyAbiDeps();
    }
    Iterable<BuildRule> classpathFullDeps = getCompileTimeClasspathFullDeps();
    if (shouldCreateSourceOnlyAbi()) {
      // For source-only ABIs, restrict to rules required for source-only ABI
      // generation plus the explicitly declared source-only ABI deps.
      classpathFullDeps =
          Iterables.concat(
              rulesRequiredForSourceOnlyAbi(classpathFullDeps), getDeps().getSourceOnlyAbiDeps());
    }
    return JavaLibraryRules.getAbiRules(getActionGraphBuilder(), classpathFullDeps);
  }

  // Same narrowing as above, but resolving to source-only ABI rules.
  private ImmutableSortedSet<BuildRule> getCompileTimeClasspathSourceOnlyAbiDeps() {
    Iterable<BuildRule> classpathFullDeps = getCompileTimeClasspathFullDeps();
    if (shouldCreateSourceOnlyAbi()) {
      classpathFullDeps =
          Iterables.concat(
              rulesRequiredForSourceOnlyAbi(classpathFullDeps), getDeps().getSourceOnlyAbiDeps());
    }
    return JavaLibraryRules.getSourceOnlyAbiRules(getActionGraphBuilder(), classpathFullDeps);
  }

  // Declared deps plus provided and exported-provided deps.
  // NOTE(review): getDeps() is called both with and without requireNonNull here;
  // presumably it never returns null — confirm and unify.
  @Value.Lazy
  protected Iterable<BuildRule> getCompileTimeFirstOrderDeps() {
    return Iterables.concat(
        getDeps().getDeps(),
        Objects.requireNonNull(getDeps()).getProvidedDeps(),
        Objects.requireNonNull(getDeps()).getExportedProvidedDeps());
  }

  /** First-order deps plus everything they transitively export, deduplicated. */
  @Value.Lazy
  protected ImmutableSet<BuildRule> getCompileTimeClasspathUnfilteredFullDeps() {
    Iterable<BuildRule> firstOrderDeps = getCompileTimeFirstOrderDeps();
    ImmutableSet<BuildRule> rulesExportedByDependencies =
        BuildRules.getUnsortedExportedRules(firstOrderDeps);
    return RichStream.from(Iterables.concat(firstOrderDeps, rulesExportedByDependencies))
        .collect(ImmutableSet.toImmutableSet());
  }

  /** A copy of this classpaths object configured for source-only ABI generation. */
  @Value.Lazy
  DefaultJavaLibraryClasspaths getSourceOnlyAbiClasspaths() {
    if (shouldCreateSourceOnlyAbi()) {
      return this;
    }
    return ImmutableDefaultJavaLibraryClasspaths.builder()
        .from(this)
        .setShouldCreateSourceOnlyAbi(true)
        .build();
  }

  // Keeps only rules that declare themselves required for source-only ABI generation.
  private Iterable<BuildRule> rulesRequiredForSourceOnlyAbi(Iterable<BuildRule> rules) {
    return RichStream.from(rules)
        .filter(
            rule -> {
              if (rule instanceof MaybeRequiredForSourceOnlyAbi) {
                MaybeRequiredForSourceOnlyAbi maybeRequired = (MaybeRequiredForSourceOnlyAbi) rule;
                return maybeRequired.getRequiredForSourceOnlyAbi();
              }
              return false;
            })
        .toOnceIterable();
  }

  // Keys each rule by its library target so ABI and full rules for the same
  // library can be correlated.
  private ImmutableSortedMap<BuildTarget, BuildRule> toLibraryTargetKeyedMap(
      Iterable<BuildRule> rules) {
    return RichStream.from(rules)
        .collect(
            ImmutableSortedMap.toImmutableSortedMap(
                Ordering.natural(), this::toLibraryTarget, rule -> rule));
  }

  // Normalizes an ABI target back to the library target it was derived from.
  private BuildTarget toLibraryTarget(BuildRule rule) {
    return JavaAbis.isLibraryTarget(rule.getBuildTarget())
        ? rule.getBuildTarget()
        : JavaAbis.getLibraryTarget(rule.getBuildTarget());
  }

  /**
   * Builds the per-dependency info (full jar path, ABI jar path, whether the dep is
   * required for source-only ABI generation) for every compile-time classpath dep.
   */
  @Value.Lazy
  public ImmutableList<JavaDependencyInfo> getDependencyInfos() {
    ImmutableList.Builder<JavaDependencyInfo> builder = ImmutableList.builder();
    ImmutableSortedMap<BuildTarget, BuildRule> abiDeps =
        toLibraryTargetKeyedMap(getCompileTimeClasspathAbiDeps());
    ImmutableSortedMap<BuildTarget, BuildRule> sourceOnlyAbiDeps =
        toLibraryTargetKeyedMap(getSourceOnlyAbiClasspaths().getCompileTimeClasspathDeps());
    for (BuildRule compileTimeDep : getCompileTimeClasspathDeps()) {
      Preconditions.checkState(compileTimeDep instanceof HasJavaAbi);
      BuildTarget compileTimeDepLibraryTarget = toLibraryTarget(compileTimeDep);
      boolean requiredForSourceOnlyAbi = sourceOnlyAbiDeps.containsKey(compileTimeDepLibraryTarget);
      boolean isAbiDep = abiDeps.containsKey(compileTimeDepLibraryTarget);
      SourcePath compileTimeSourcePath = compileTimeDep.getSourcePathToOutput();
      // Some deps might not actually contain any source files. In that case, they have no output.
      // Just skip them.
      if (compileTimeSourcePath == null) {
        continue;
      }
      SourcePath abiClasspath;
      if (isAbiDep) {
        abiClasspath =
            Objects.requireNonNull(
                abiDeps.get(compileTimeDepLibraryTarget).getSourcePathToOutput());
      } else {
        // No ABI rule for this dep: compile against its full output.
        abiClasspath = compileTimeSourcePath;
      }
      builder.add(
          new JavaDependencyInfo(compileTimeSourcePath, abiClasspath, requiredForSourceOnlyAbi));
    }
    return builder.build();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.client.consumer.store;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.client.log.ClientLogger;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.help.FAQUrl;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.slf4j.Logger;
/**
 * Local storage implementation of {@code OffsetStore}: consume offsets are cached in
 * memory and persisted as JSON to a file under {@link #LOCAL_OFFSET_STORE_DIR}.
 * Broker-related operations are no-ops.
 */
public class LocalFileOffsetStore implements OffsetStore {
    public final static String LOCAL_OFFSET_STORE_DIR = System.getProperty(
        "rocketmq.client.localOffsetStoreDir",
        System.getProperty("user.home") + File.separator + ".rocketmq_offsets");
    private final static Logger log = ClientLogger.getLog();
    private final MQClientInstance mQClientFactory;
    private final String groupName;
    // Path of the JSON file holding the persisted offsets for this client/group.
    private final String storePath;
    private ConcurrentHashMap<MessageQueue, AtomicLong> offsetTable =
        new ConcurrentHashMap<MessageQueue, AtomicLong>();

    public LocalFileOffsetStore(MQClientInstance mQClientFactory, String groupName) {
        this.mQClientFactory = mQClientFactory;
        this.groupName = groupName;
        this.storePath = LOCAL_OFFSET_STORE_DIR + File.separator + //
            this.mQClientFactory.getClientId() + File.separator + //
            this.groupName + File.separator + //
            "offsets.json";
    }

    /**
     * Loads the persisted offsets from disk (falling back to the .bak file) into the
     * in-memory table, logging each loaded queue/offset pair.
     */
    @Override
    public void load() throws MQClientException {
        OffsetSerializeWrapper offsetSerializeWrapper = this.readLocalOffset();
        if (offsetSerializeWrapper != null && offsetSerializeWrapper.getOffsetTable() != null) {
            offsetTable.putAll(offsetSerializeWrapper.getOffsetTable());
            // Iterate entries directly instead of keySet() + get() to avoid a second
            // lookup per queue.
            for (Map.Entry<MessageQueue, AtomicLong> entry : offsetSerializeWrapper.getOffsetTable().entrySet()) {
                log.info("load consumer's offset, {} {} {}",
                    this.groupName,
                    entry.getKey(),
                    entry.getValue().get());
            }
        }
    }

    /**
     * Updates the in-memory offset for a queue. When {@code increaseOnly} is true the
     * stored value is only ever moved forward.
     */
    @Override
    public void updateOffset(MessageQueue mq, long offset, boolean increaseOnly) {
        if (mq != null) {
            AtomicLong offsetOld = this.offsetTable.get(mq);
            if (null == offsetOld) {
                // putIfAbsent returns null when our new value won the race, in which
                // case the offset is already stored and there is nothing left to do.
                offsetOld = this.offsetTable.putIfAbsent(mq, new AtomicLong(offset));
            }
            if (null != offsetOld) {
                if (increaseOnly) {
                    MixAll.compareAndIncreaseOnly(offsetOld, offset);
                } else {
                    offsetOld.set(offset);
                }
            }
        }
    }

    /**
     * Reads the offset for a queue according to {@code type}; returns -1 when no
     * offset is known (or the store cannot be read).
     */
    @Override
    public long readOffset(final MessageQueue mq, final ReadOffsetType type) {
        if (mq != null) {
            switch (type) {
                case MEMORY_FIRST_THEN_STORE:
                case READ_FROM_MEMORY: {
                    AtomicLong offset = this.offsetTable.get(mq);
                    if (offset != null) {
                        return offset.get();
                    } else if (ReadOffsetType.READ_FROM_MEMORY == type) {
                        return -1;
                    }
                    // Deliberate fall-through: MEMORY_FIRST_THEN_STORE falls back to
                    // READ_FROM_STORE when the offset is not cached in memory.
                }
                case READ_FROM_STORE: {
                    OffsetSerializeWrapper offsetSerializeWrapper;
                    try {
                        offsetSerializeWrapper = this.readLocalOffset();
                    } catch (MQClientException e) {
                        return -1;
                    }
                    if (offsetSerializeWrapper != null && offsetSerializeWrapper.getOffsetTable() != null) {
                        AtomicLong offset = offsetSerializeWrapper.getOffsetTable().get(mq);
                        if (offset != null) {
                            // Refresh the in-memory table with the value read from disk.
                            this.updateOffset(mq, offset.get(), false);
                            return offset.get();
                        }
                    }
                }
                default:
                    break;
            }
        }
        return -1;
    }

    /**
     * Persists the offsets of the given queues to the local JSON file.
     * Queues not present in {@code mqs} are not written.
     */
    @Override
    public void persistAll(Set<MessageQueue> mqs) {
        if (null == mqs || mqs.isEmpty())
            return;
        OffsetSerializeWrapper offsetSerializeWrapper = new OffsetSerializeWrapper();
        for (Map.Entry<MessageQueue, AtomicLong> entry : this.offsetTable.entrySet()) {
            if (mqs.contains(entry.getKey())) {
                AtomicLong offset = entry.getValue();
                offsetSerializeWrapper.getOffsetTable().put(entry.getKey(), offset);
            }
        }
        String jsonString = offsetSerializeWrapper.toJson(true);
        if (jsonString != null) {
            try {
                MixAll.string2File(jsonString, this.storePath);
            } catch (IOException e) {
                // Best-effort persistence: log and keep running with in-memory offsets.
                log.error("persistAll consumer offset Exception, " + this.storePath, e);
            }
        }
    }

    // No-op: single-queue persistence is handled in bulk via persistAll.
    @Override
    public void persist(MessageQueue mq) {
    }

    // No-op: offsets are only removed implicitly when the file is rewritten.
    @Override
    public void removeOffset(MessageQueue mq) {
    }

    // No-op: the local file store never syncs offsets to a broker.
    @Override
    public void updateConsumeOffsetToBroker(final MessageQueue mq, final long offset, final boolean isOneway)
        throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
    }

    /**
     * Returns a snapshot of the in-memory offsets, optionally restricted to one topic.
     *
     * @param topic the topic to filter by, or blank for all topics
     */
    @Override
    public Map<MessageQueue, Long> cloneOffsetTable(String topic) {
        Map<MessageQueue, Long> cloneOffsetTable = new HashMap<MessageQueue, Long>();
        for (Map.Entry<MessageQueue, AtomicLong> entry : this.offsetTable.entrySet()) {
            MessageQueue mq = entry.getKey();
            if (!UtilAll.isBlank(topic) && !topic.equals(mq.getTopic())) {
                continue;
            }
            cloneOffsetTable.put(mq, entry.getValue().get());
        }
        return cloneOffsetTable;
    }

    // Reads and deserializes the offset file, falling back to the .bak file when the
    // primary file is missing, empty, or corrupt.
    private OffsetSerializeWrapper readLocalOffset() throws MQClientException {
        String content = MixAll.file2String(this.storePath);
        if (null == content || content.length() == 0) {
            return this.readLocalOffsetBak();
        } else {
            OffsetSerializeWrapper offsetSerializeWrapper = null;
            try {
                offsetSerializeWrapper =
                    OffsetSerializeWrapper.fromJson(content, OffsetSerializeWrapper.class);
            } catch (Exception e) {
                log.warn("readLocalOffset Exception, and try to correct", e);
                return this.readLocalOffsetBak();
            }
            return offsetSerializeWrapper;
        }
    }

    // Reads the backup offset file; returns null when it does not exist or is empty,
    // and throws when it exists but cannot be parsed.
    private OffsetSerializeWrapper readLocalOffsetBak() throws MQClientException {
        String content = MixAll.file2String(this.storePath + ".bak");
        if (content != null && content.length() > 0) {
            OffsetSerializeWrapper offsetSerializeWrapper = null;
            try {
                offsetSerializeWrapper =
                    OffsetSerializeWrapper.fromJson(content, OffsetSerializeWrapper.class);
            } catch (Exception e) {
                log.warn("readLocalOffset Exception", e);
                throw new MQClientException("readLocalOffset Exception, maybe fastjson version too low" //
                    + FAQUrl.suggestTodo(FAQUrl.LOAD_JSON_EXCEPTION), //
                    e);
            }
            return offsetSerializeWrapper;
        }
        return null;
    }
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.firefox;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.openqa.selenium.io.FileHandler;
import org.openqa.selenium.io.TemporaryFilesystem;
import org.openqa.selenium.io.Zip;
import org.openqa.selenium.testing.InProject;
import org.openqa.selenium.testing.drivers.Firebug;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
/** Tests for {@code FirefoxProfile}: preference handling, extension installation, JSON round-trips. */
@RunWith(JUnit4.class)
public class FirefoxProfileTest {
  private static final String FIREBUG_PATH = "third_party/firebug/firebug-1.5.0-fx.xpi";
  private static final String FIREBUG_RESOURCE_PATH =
      "/org/openqa/selenium/testing/drivers/firebug-1.5.0-fx.xpi";

  private FirefoxProfile profile;

  @Before
  public void setUp() throws Exception {
    profile = new FirefoxProfile();
  }

  @Test
  public void shouldQuoteStringsWhenSettingStringProperties() throws Exception {
    profile.setPreference("cheese", "brie");
    assertPreferenceValueEquals("cheese", "\"brie\"");
  }

  @Test
  public void getStringPreferenceShouldReturnUserSuppliedValueWhenSet() throws Exception {
    String key = "cheese";
    String value = "brie";
    profile.setPreference(key, value);
    String defaultValue = "edam";
    assertEquals(value, profile.getStringPreference(key, defaultValue));
  }

  @Test
  public void getStringPreferenceShouldReturnDefaultValueWhenSet() throws Exception {
    String key = "cheese";
    String defaultValue = "brie";
    assertEquals(defaultValue, profile.getStringPreference(key, defaultValue));
  }

  @Test
  public void shouldSetIntegerPreferences() throws Exception {
    profile.setPreference("cheese", 1234);
    assertPreferenceValueEquals("cheese", 1234);
  }

  @Test
  public void getIntegerPreferenceShouldReturnUserSuppliedValueWhenSet() throws Exception {
    String key = "cheese";
    int value = 1234;
    profile.setPreference(key, value);
    int defaultValue = -42;
    assertEquals(1234, profile.getIntegerPreference(key, defaultValue));
  }

  @Test
  public void getIntegerPreferenceShouldReturnDefaultValueWhenSet() throws Exception {
    String key = "cheese";
    int defaultValue = 42;
    assertEquals(defaultValue, profile.getIntegerPreference(key, defaultValue));
  }

  @Test
  public void shouldSetBooleanPreferences() throws Exception {
    profile.setPreference("cheese", false);
    assertPreferenceValueEquals("cheese", false);
  }

  @Test
  public void getBooleanPreferenceShouldReturnUserSuppliedValueWhenSet() throws Exception {
    String key = "cheese";
    boolean value = true;
    profile.setPreference(key, value);
    boolean defaultValue = false;
    assertEquals(value, profile.getBooleanPreference(key, defaultValue));
  }

  @Test
  public void getBooleanPreferenceShouldReturnDefaultValueWhenSet() throws Exception {
    String key = "cheese";
    boolean defaultValue = true;
    assertEquals(defaultValue, profile.getBooleanPreference(key, defaultValue));
  }

  @Test
  public void shouldSetDefaultPreferences() throws Exception {
    assertPreferenceValueEquals("network.http.phishy-userpass-length", 255);
  }

  @Test
  public void shouldNotResetFrozenPreferences() throws Exception {
    try {
      profile.setPreference("network.http.phishy-userpass-length", 1024);
      fail("Should not be able to reset a frozen preference");
    } catch (IllegalArgumentException ex) {
      // expected
    }
    assertPreferenceValueEquals("network.http.phishy-userpass-length", 255);
  }

  @Test
  public void shouldInstallExtensionFromZip() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    profile.addExtension(InProject.locate(FIREBUG_PATH));
    File profileDir = profile.layoutOnDisk();
    File extensionDir = new File(profileDir, "extensions/firebug@software.joehewitt.com");
    assertTrue(extensionDir.exists());
  }

  @Test
  public void shouldInstallExtensionFromDirectory() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    File extension = InProject.locate(FIREBUG_PATH);
    File unzippedExtension = FileHandler.unzip(new FileInputStream(extension));
    profile.addExtension(unzippedExtension);
    File profileDir = profile.layoutOnDisk();
    File extensionDir = new File(profileDir, "extensions/firebug@software.joehewitt.com");
    assertTrue(extensionDir.exists());
  }

  @Test
  public void shouldInstallExtensionUsingClasspath() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    profile.addExtension(Firebug.class, FIREBUG_RESOURCE_PATH);
    File profileDir = profile.layoutOnDisk();
    File extensionDir = new File(profileDir, "extensions/firebug@software.joehewitt.com");
    assertTrue(extensionDir.exists());
  }

  @Test
  public void shouldConvertItselfIntoAMeaningfulRepresentation() throws IOException {
    FirefoxProfile profile = new FirefoxProfile();
    profile.setPreference("i.like.cheese", true);
    String json = profile.toJson();
    assertNotNull(json);
    File dir = TemporaryFilesystem.getDefaultTmpFS().createTempDir("webdriver", "duplicated");
    new Zip().unzip(json, dir);
    File prefs = new File(dir, "user.js");
    assertTrue(prefs.exists());
    assertTrue(FileHandler.readAsString(prefs).contains("i.like.cheese"));
  }

  /** Lays the profile out on disk and returns the lines of its generated user.js. */
  private List<String> readGeneratedProperties(FirefoxProfile profile) throws Exception {
    File generatedProfile = profile.layoutOnDisk();
    File prefs = new File(generatedProfile, "user.js");
    List<String> prefLines = new ArrayList<String>();
    // try-with-resources closes the reader even if readLine() throws; the original
    // explicit close() leaked the stream on any exception.
    try (BufferedReader reader = new BufferedReader(new FileReader(prefs))) {
      for (String line = reader.readLine(); line != null; line = reader.readLine()) {
        prefLines.add(line);
      }
    }
    return prefLines;
  }

  @Test
  public void layoutOnDiskSetsUserPreferences() throws IOException {
    profile.setPreference("browser.startup.homepage", "http://www.example.com");
    Preferences parsedPrefs = parseUserPrefs(profile);
    assertEquals("http://www.example.com", parsedPrefs.getPreference("browser.startup.homepage"));
  }

  @Test
  public void userPrefsArePreservedWhenConvertingToAndFromJson() throws IOException {
    profile.setPreference("browser.startup.homepage", "http://www.example.com");
    String json = profile.toJson();
    FirefoxProfile rebuilt = FirefoxProfile.fromJson(json);
    Preferences parsedPrefs = parseUserPrefs(rebuilt);
    assertEquals("http://www.example.com", parsedPrefs.getPreference("browser.startup.homepage"));
  }

  @Test
  public void backslashedCharsArePreservedWhenConvertingToAndFromJson() throws IOException {
    String dir = "c:\\aaa\\bbb\\ccc\\ddd\\eee\\fff\\ggg\\hhh\\iii\\jjj\\kkk\\lll\\mmm\\nnn\\ooo\\ppp\\qqq\\rrr\\sss\\ttt\\uuu\\vvv\\www\\xxx\\yyy\\zzz";
    profile.setPreference("browser.download.dir", dir);
    String json = profile.toJson();
    FirefoxProfile rebuilt = FirefoxProfile.fromJson(json);
    Preferences parsedPrefs = parseUserPrefs(rebuilt);
    assertEquals(dir, parsedPrefs.getPreference("browser.download.dir"));
  }

  /** Asserts that the generated user.js contains a line setting {@code key} to {@code value}. */
  private void assertPreferenceValueEquals(String key, Object value) throws Exception {
    List<String> props = readGeneratedProperties(profile);
    boolean seenKey = false;
    for (String line : props) {
      if (line.contains(key) && line.contains(", " + value + ")")) {
        seenKey = true;
      }
    }
    assertTrue("Did not see value being set correctly", seenKey);
  }

  /** Lays the profile out on disk and parses its user.js into a Preferences object. */
  private Preferences parseUserPrefs(FirefoxProfile profile) throws IOException {
    File directory = profile.layoutOnDisk();
    File userPrefs = new File(directory, "user.js");
    // NOTE(review): assumes Preferences consumes the reader inside its constructor,
    // so it is safe to close it once construction completes — confirm. The original
    // never closed this reader at all.
    try (FileReader reader = new FileReader(userPrefs)) {
      return new Preferences(new StringReader("{\"mutable\": {}, \"frozen\": {}}"), reader);
    }
  }
}
| |
/*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.persistence.ocfl.impl;
import org.fcrepo.kernel.api.ContainmentIndex;
import org.fcrepo.kernel.api.Transaction;
import org.fcrepo.kernel.api.TransactionManager;
import org.fcrepo.kernel.api.identifiers.FedoraId;
import org.fcrepo.kernel.api.operations.CreateResourceOperation;
import org.fcrepo.kernel.api.operations.NonRdfSourceOperation;
import org.fcrepo.kernel.api.operations.RdfSourceOperation;
import org.fcrepo.persistence.api.PersistentStorageSession;
import org.fcrepo.persistence.api.PersistentStorageSessionManager;
import org.fcrepo.persistence.api.exceptions.PersistentStorageException;
import org.fcrepo.persistence.ocfl.api.FedoraOCFLMappingNotFoundException;
import org.fcrepo.persistence.ocfl.api.FedoraToOCFLObjectIndex;
import org.fcrepo.persistence.ocfl.api.IndexBuilder;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import static java.lang.System.currentTimeMillis;
import static org.fcrepo.kernel.api.operations.ResourceOperationType.CREATE;
import static org.fcrepo.persistence.ocfl.impl.OCFLPersistentStorageUtils.createRepository;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;
import static org.springframework.test.util.ReflectionTestUtils.setField;
/**
* @author dbernstein
* @since 6.0.0
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class IndexBuilderImplTest {
private PersistentStorageSessionManager sessionManager;
private FedoraToOCFLObjectIndex index;
private IndexBuilder indexBuilder;
@Mock
private TransactionManager transactionManager;
@Mock
private Transaction transaction;
@Mock
private ContainmentIndex containmentIndex;
private final String session1Id = "session1";
private final FedoraId resource1 = FedoraId.create("info:fedora/resource1");
private final FedoraId resource2 = FedoraId.create(resource1 + "/resource2");
@Before
public void setup() throws IOException {
final var targetDir = new File("target");
final var dataDir = new File(targetDir, "test-fcrepo-data-" + currentTimeMillis());
final var repoDir = new File(dataDir,"ocfl-repo");
final var workDir = new File(dataDir,"ocfl-work");
final var staging = new File(dataDir,"ocfl-staging");
final var repository = createRepository(repoDir, workDir);
index = new FedoraToOCFLObjectIndexImpl();
index.reset();
final var ocflObjectSessionFactory = new DefaultOCFLObjectSessionFactory(staging);
setField(ocflObjectSessionFactory, "ocflRepository", repository);
sessionManager = new OCFLPersistentSessionManager();
setField(sessionManager, "fedoraOcflIndex", index);
setField(sessionManager, "objectSessionFactory", ocflObjectSessionFactory);
indexBuilder = new IndexBuilderImpl();
setField(indexBuilder, "ocflRepository", repository);
setField(indexBuilder, "fedoraToOCFLObjectIndex", index);
setField(indexBuilder, "objectSessionFactory", ocflObjectSessionFactory);
setField(indexBuilder, "containmentIndex", containmentIndex);
setField(indexBuilder, "transactionManager", transactionManager);
when(transaction.getId()).thenReturn("tx-id");
when(transactionManager.create()).thenReturn(transaction);
}
@Test
public void rebuildWhenRepoContainsArchivalGroupObject() throws Exception {
final var session = sessionManager.getSession(session1Id);
createResource(session, resource1, true);
createChildResource(session, resource1, resource2);
session.commit();
assertHasOcflId("resource1", resource1);
assertHasOcflId("resource1", resource2);
index.reset();
assertDoesNotHaveOcflId(resource1);
assertDoesNotHaveOcflId(resource2);
indexBuilder.rebuild();
assertHasOcflId("resource1", resource1);
assertHasOcflId("resource1", resource2);
verify(transaction).getId();
verify(transactionManager).create();
verify(containmentIndex).addContainedBy(transaction.getId(), FedoraId.getRepositoryRootId(), resource1);
verify(containmentIndex).addContainedBy(transaction.getId(), resource1, resource2);
verify(containmentIndex).commitTransaction(transaction);
}
@Test
public void rebuildWhenRepoContainsNonArchivalGroupObject() throws Exception {
    // Persist a plain (non archival-group) parent and child; each gets its own OCFL object.
    final var persistSession = sessionManager.getSession(session1Id);
    createResource(persistSession, resource1, false);
    createChildResource(persistSession, resource1, resource2);
    persistSession.commit();
    assertHasOcflId("resource1", resource1);
    assertHasOcflId("resource1_resource2", resource2);

    // Clear the index so the rebuild starts from nothing.
    index.reset();
    assertDoesNotHaveOcflId(resource1);
    assertDoesNotHaveOcflId(resource2);

    indexBuilder.rebuild();

    // Both distinct OCFL ids must be re-derived, and containment recorded transactionally.
    assertHasOcflId("resource1", resource1);
    assertHasOcflId("resource1_resource2", resource2);
    verify(transaction).getId();
    verify(transactionManager).create();
    verify(containmentIndex).addContainedBy(transaction.getId(), FedoraId.getRepositoryRootId(), resource1);
    verify(containmentIndex).addContainedBy(transaction.getId(), resource1, resource2);
    verify(containmentIndex).commitTransaction(transaction);
}
/**
 * Asserts that the index holds no OCFL mapping for the given resource id.
 */
private void assertDoesNotHaveOcflId(final FedoraId resourceId) {
    try {
        index.getMapping(resourceId.getResourceId());
    } catch (final FedoraOCFLMappingNotFoundException expected) {
        // Mapping is absent, exactly as required.
        return;
    }
    fail(resourceId + " should not exist in index");
}
/**
 * Asserts that the index maps the given resource id to the expected OCFL object id.
 */
private void assertHasOcflId(final String expectedOcflId, final FedoraId resourceId)
        throws FedoraOCFLMappingNotFoundException {
    final var mapping = index.getMapping(resourceId.getResourceId());
    assertEquals(expectedOcflId, mapping.getOcflObjectId());
}
/**
 * Persists a new RDF resource through the given session using a mocked create operation.
 *
 * @param session         storage session to persist through
 * @param resourceId      Fedora id of the resource to create
 * @param isArchivalGroup whether the new resource is an archival group
 */
private void createResource(final PersistentStorageSession session,
                            final FedoraId resourceId, final boolean isArchivalGroup)
        throws PersistentStorageException {
    final var createOp = mock(RdfSourceOperation.class, withSettings().extraInterfaces(
        CreateResourceOperation.class));
    when(createOp.getType()).thenReturn(CREATE);
    when(createOp.getResourceId()).thenReturn(resourceId.getResourceId());
    when(((CreateResourceOperation) createOp).isArchivalGroup()).thenReturn(isArchivalGroup);
    session.persist(createOp);
}
/**
 * Persists a new non-RDF (binary) child resource under the given parent using a mocked
 * create operation with small fixed content.
 *
 * @param session  storage session to persist through
 * @param parentId Fedora id of the parent resource
 * @param childId  Fedora id of the binary child to create
 */
private void createChildResource(final PersistentStorageSession session,
                                 final FedoraId parentId, final FedoraId childId)
        throws PersistentStorageException {
    final var binaryOp = mock(NonRdfSourceOperation.class, withSettings().extraInterfaces(
        CreateResourceOperation.class));
    when(binaryOp.getType()).thenReturn(CREATE);
    when(binaryOp.getResourceId()).thenReturn(childId.getResourceId());
    when(((CreateResourceOperation) binaryOp).getParentId()).thenReturn(parentId.getResourceId());
    // Give the binary a tiny fixed payload so content size/stream stubs are consistent.
    final var content = "test".getBytes();
    when(binaryOp.getContentSize()).thenReturn((long) content.length);
    when(binaryOp.getContentStream()).thenReturn(new ByteArrayInputStream(content));
    when(binaryOp.getMimeType()).thenReturn("text/plain");
    when(binaryOp.getFilename()).thenReturn("test");
    session.persist(binaryOp);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authc.oidc;
import com.nimbusds.jose.JOSEException;
import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jose.jwk.JWK;
import com.nimbusds.jose.jwk.JWKSelector;
import com.nimbusds.jose.jwk.JWKSet;
import com.nimbusds.jose.jwk.source.JWKSource;
import com.nimbusds.jose.proc.BadJOSEException;
import com.nimbusds.jose.proc.JWSVerificationKeySelector;
import com.nimbusds.jose.proc.SecurityContext;
import com.nimbusds.jose.util.IOUtils;
import com.nimbusds.jwt.JWT;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.oauth2.sdk.AuthorizationCode;
import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant;
import com.nimbusds.oauth2.sdk.ErrorObject;
import com.nimbusds.oauth2.sdk.ResponseType;
import com.nimbusds.oauth2.sdk.TokenErrorResponse;
import com.nimbusds.oauth2.sdk.auth.ClientAuthenticationMethod;
import com.nimbusds.oauth2.sdk.auth.ClientSecretJWT;
import com.nimbusds.oauth2.sdk.auth.Secret;
import com.nimbusds.oauth2.sdk.id.State;
import com.nimbusds.oauth2.sdk.token.AccessToken;
import com.nimbusds.oauth2.sdk.token.BearerTokenError;
import com.nimbusds.oauth2.sdk.util.JSONObjectUtils;
import com.nimbusds.openid.connect.sdk.AuthenticationErrorResponse;
import com.nimbusds.openid.connect.sdk.AuthenticationResponse;
import com.nimbusds.openid.connect.sdk.AuthenticationResponseParser;
import com.nimbusds.openid.connect.sdk.AuthenticationSuccessResponse;
import com.nimbusds.openid.connect.sdk.Nonce;
import com.nimbusds.openid.connect.sdk.OIDCTokenResponse;
import com.nimbusds.openid.connect.sdk.claims.AccessTokenHash;
import com.nimbusds.openid.connect.sdk.token.OIDCTokens;
import com.nimbusds.openid.connect.sdk.validators.AccessTokenValidator;
import com.nimbusds.openid.connect.sdk.validators.IDTokenValidator;
import net.minidev.json.JSONArray;
import org.apache.commons.codec.Charsets;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthenticationException;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.entity.ContentType;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager;
import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.nio.conn.NoopIOSessionStrategy;
import org.apache.http.nio.conn.SchemeIOSessionStrategy;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.nio.reactor.ConnectingIOReactor;
import org.apache.http.util.EntityUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.util.concurrent.ListenableFuture;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.core.security.authc.RealmSettings;
import org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings;
import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLService;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.ALLOWED_CLOCK_SKEW;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_CONNECTION_READ_TIMEOUT;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_CONNECT_TIMEOUT;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_MAX_CONNECTIONS;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_MAX_ENDPOINT_CONNECTIONS;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_PROXY_HOST;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_PROXY_PORT;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_PROXY_SCHEME;
import static org.elasticsearch.xpack.core.security.authc.oidc.OpenIdConnectRealmSettings.HTTP_SOCKET_TIMEOUT;
/**
* Handles an OpenID Connect Authentication response as received by the facilitator. In the case of an implicit flow, validates
* the ID Token and extracts the elasticsearch user properties from it. In the case of an authorization code flow, it first
* exchanges the code in the authentication response for an ID Token at the token endpoint of the OpenID Connect Provider.
*/
public class OpenIdConnectAuthenticator {
private final RealmConfig realmConfig;
private final OpenIdConnectProviderConfiguration opConfig;
private final RelyingPartyConfiguration rpConfig;
private final SSLService sslService;
private AtomicReference<IDTokenValidator> idTokenValidator = new AtomicReference<>();
private final CloseableHttpAsyncClient httpClient;
private final ResourceWatcherService watcherService;
private static final Logger LOGGER = LogManager.getLogger(OpenIdConnectAuthenticator.class);
/**
 * Builds an authenticator for production use, eagerly creating the shared async HTTP client
 * and the ID Token validator.
 * <p>
 * Ordering matters here: {@code createHttpClient()} reads {@code realmConfig} and
 * {@code sslService}, and {@code createIdTokenValidator(true)} presumably reads the realm/RP
 * configuration as well (its definition is elsewhere in this class — confirm), so the field
 * assignments must precede those calls.
 *
 * @param realmConfig    the realm configuration this authenticator belongs to
 * @param opConfig       configuration of the OpenID Connect Provider (OP)
 * @param rpConfig       configuration of this Relying Party (RP)
 * @param sslService     provides the SSL context used by the HTTP client
 * @param watcherService watches resources (e.g. a local JWK set file) for changes
 */
public OpenIdConnectAuthenticator(RealmConfig realmConfig, OpenIdConnectProviderConfiguration opConfig,
                                  RelyingPartyConfiguration rpConfig, SSLService sslService, ResourceWatcherService watcherService) {
    this.realmConfig = realmConfig;
    this.opConfig = opConfig;
    this.rpConfig = rpConfig;
    this.sslService = sslService;
    this.httpClient = createHttpClient();
    this.watcherService = watcherService;
    this.idTokenValidator.set(createIdTokenValidator(true));
}
/**
 * Testing-only constructor: identical to the production constructor except that the caller
 * supplies a pre-built {@link IDTokenValidator} instead of one derived from the realm
 * configuration.
 */
OpenIdConnectAuthenticator(RealmConfig realmConfig, OpenIdConnectProviderConfiguration opConfig, RelyingPartyConfiguration rpConfig,
                           SSLService sslService, IDTokenValidator idTokenValidator, ResourceWatcherService watcherService) {
    this.realmConfig = realmConfig;
    this.opConfig = opConfig;
    this.rpConfig = rpConfig;
    this.sslService = sslService;
    this.httpClient = createHttpClient();
    this.idTokenValidator.set(idTokenValidator);
    this.watcherService = watcherService;
}
/**
* Processes an OpenID Connect Response to an Authentication Request that comes in the form of a URL with the necessary parameters,
* that is contained in the provided Token. If the response is valid, it calls the provided listener with a set of OpenID Connect
* claims that identify the authenticated user. If the UserInfo endpoint is specified in the configuration, we attempt to make a
* UserInfo request and add the returned claims to the Id Token claims.
*
* @param token The OpenIdConnectToken to consume
* @param listener The listener to notify with the resolved {@link JWTClaimsSet}
*/
public void authenticate(OpenIdConnectToken token, final ActionListener<JWTClaimsSet> listener) {
    try {
        // Parse the redirect URL that the OP sent the user back with.
        AuthenticationResponse authenticationResponse = AuthenticationResponseParser.parse(new URI(token.getRedirectUrl()));
        final Nonce expectedNonce = token.getNonce();
        State expectedState = token.getState();
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("OpenID Connect Provider redirected user to [{}]. Expected Nonce is [{}] and expected State is [{}]",
                token.getRedirectUrl(), expectedNonce, expectedState);
        }
        if (authenticationResponse instanceof AuthenticationErrorResponse) {
            ErrorObject error = ((AuthenticationErrorResponse) authenticationResponse).getErrorObject();
            // Bug fix: the two literals previously ran together ("...failureCode=[{}]") — add the separator.
            listener.onFailure(new ElasticsearchSecurityException("OpenID Connect Provider response indicates authentication " +
                "failure. Code=[{}], Description=[{}]", error.getCode(), error.getDescription()));
            return;
        }
        final AuthenticationSuccessResponse response = authenticationResponse.toSuccessResponse();
        // The state must match what we stored in the user's session; the response type must match the RP config.
        validateState(expectedState, response.getState());
        validateResponseType(response);
        if (rpConfig.getResponseType().impliesCodeFlow()) {
            // Authorization code flow: exchange the code for tokens at the OP's token endpoint first.
            final AuthorizationCode code = response.getAuthorizationCode();
            exchangeCodeForToken(code, ActionListener.wrap(tokens -> {
                final AccessToken accessToken = tokens.v1();
                final JWT idToken = tokens.v2();
                validateAccessToken(accessToken, idToken);
                getUserClaims(accessToken, idToken, expectedNonce, true, listener);
            }, listener::onFailure));
        } else {
            // Implicit flow: the tokens arrive directly in the authentication response.
            final JWT idToken = response.getIDToken();
            final AccessToken accessToken = response.getAccessToken();
            validateAccessToken(accessToken, idToken);
            getUserClaims(accessToken, idToken, expectedNonce, true, listener);
        }
    } catch (ElasticsearchSecurityException e) {
        // Don't wrap in a new ElasticsearchSecurityException
        listener.onFailure(e);
    } catch (Exception e) {
        listener.onFailure(new ElasticsearchSecurityException("Failed to consume the OpenID connect response. ", e));
    }
}
/**
 * Collects all the user claims we can get for the authenticated user. This happens in two steps:
 * <ul>
 * <li>First we attempt to validate the Id Token we have received and get any claims it contains</li>
 * <li>If we have received an Access Token and the UserInfo endpoint is configured, we also attempt to get the user info response
 * from there and parse the returned claims,
 * see {@link OpenIdConnectAuthenticator#getAndCombineUserInfoClaims(AccessToken, JWTClaimsSet, ActionListener)}</li>
 * </ul>
 *
 * @param accessToken    The {@link AccessToken} that the OP has issued for this user
 * @param idToken        The {@link JWT} Id Token that the OP has issued for this user
 * @param expectedNonce  The nonce value we sent in the authentication request and should be contained in the Id Token
 * @param shouldRetry    When true, a signature validation failure triggers one reload of the remotely hosted JWK set followed by a
 *                       single retry with this flag set to false (preventing endless retry loops)
 * @param claimsListener The listener to notify with the resolved {@link JWTClaimsSet}
 */
private void getUserClaims(@Nullable AccessToken accessToken, JWT idToken, Nonce expectedNonce, boolean shouldRetry,
                           ActionListener<JWTClaimsSet> claimsListener) {
    try {
        JWTClaimsSet verifiedIdTokenClaims = idTokenValidator.get().validate(idToken, expectedNonce).toJWTClaimsSet();
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Received and validated the Id Token for the user: [{}]", verifiedIdTokenClaims);
        }
        // Add the Id Token string as a synthetic claim so downstream consumers (e.g. logout) can access the raw token.
        final Map<String, Object> verifiedIdTokenClaimsObject = verifiedIdTokenClaims.toJSONObject();
        final JWTClaimsSet idTokenClaim = new JWTClaimsSet.Builder().claim("id_token_hint", idToken.serialize()).build();
        mergeObjects(verifiedIdTokenClaimsObject, idTokenClaim.toJSONObject());
        final JWTClaimsSet enrichedVerifiedIdTokenClaims = JWTClaimsSet.parse(verifiedIdTokenClaimsObject);
        if (accessToken != null && opConfig.getUserinfoEndpoint() != null) {
            // UserInfo claims get merged into the Id Token claims; the listener is notified from there.
            getAndCombineUserInfoClaims(accessToken, enrichedVerifiedIdTokenClaims, claimsListener);
        } else {
            if (accessToken == null && opConfig.getUserinfoEndpoint() != null) {
                LOGGER.debug("UserInfo endpoint is configured but the OP didn't return an access token so we can't query it");
            } else if (accessToken != null && opConfig.getUserinfoEndpoint() == null) {
                LOGGER.debug("OP returned an access token but the UserInfo endpoint is not configured.");
            }
            claimsListener.onResponse(enrichedVerifiedIdTokenClaims);
        }
    } catch (BadJOSEException e) {
        // We only try to update the cached JWK set once if a remote source is used and
        // RSA or ECDSA is used for signatures
        if (shouldRetry
            && JWSAlgorithm.Family.HMAC_SHA.contains(rpConfig.getSignatureAlgorithm()) == false
            && opConfig.getJwkSetPath().startsWith("https://")) {
            ((ReloadableJWKSource) ((JWSVerificationKeySelector) idTokenValidator.get().getJWSKeySelector()).getJWKSource())
                .triggerReload(ActionListener.wrap(v -> {
                    // Retry exactly once with the freshly fetched JWK set (shouldRetry = false).
                    getUserClaims(accessToken, idToken, expectedNonce, false, claimsListener);
                }, ex -> {
                    LOGGER.trace("Attempted and failed to refresh JWK cache upon token validation failure", e);
                    claimsListener.onFailure(ex);
                }));
        } else {
            claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e));
        }
    } catch (com.nimbusds.oauth2.sdk.ParseException | ParseException | JOSEException e) {
        claimsListener.onFailure(new ElasticsearchSecurityException("Failed to parse or validate the ID Token", e));
    }
}
/**
* Validates an access token according to the
* <a href="https://openid.net/specs/openid-connect-core-1_0.html#ImplicitTokenValidation">specification</a>.
* <p>
* When using the authorization code flow the OP might not provide the at_hash parameter in the
* Id Token as allowed in the specification. In such a case we can't validate the access token
* but this is considered safe as it was received in a back channel communication that was protected
* by TLS. Also when using the implicit flow with the response type set to "id_token", no Access
* Token will be returned from the OP
*
* @param accessToken The Access Token to validate. Can be null when the configured response type is "id_token"
* @param idToken The Id Token that was received in the same response
*/
private void validateAccessToken(AccessToken accessToken, JWT idToken) {
    try {
        if (rpConfig.getResponseType().equals(ResponseType.parse("id_token token")) ||
            rpConfig.getResponseType().equals(ResponseType.parse("code"))) {
            assert (accessToken != null) : "Access Token cannot be null for Response Type " + rpConfig.getResponseType().toString();
            // In the code flow the OP may legitimately omit at_hash (the token arrived over a TLS back channel).
            final boolean isValidationOptional = rpConfig.getResponseType().equals(ResponseType.parse("code"));
            // only "Bearer" is defined in the specification but check just in case
            if (accessToken.getType().toString().equals("Bearer") == false) {
                throw new ElasticsearchSecurityException("Invalid access token type [{}], while [Bearer] was expected",
                    accessToken.getType());
            }
            String atHashValue = idToken.getJWTClaimsSet().getStringClaim("at_hash");
            if (Strings.hasText(atHashValue) == false) {
                if (isValidationOptional == false) {
                    throw new ElasticsearchSecurityException("Failed to verify access token. ID Token doesn't contain at_hash claim ");
                }
            } else {
                AccessTokenHash atHash = new AccessTokenHash(atHashValue);
                JWSAlgorithm jwsAlgorithm = JWSAlgorithm.parse(idToken.getHeader().getAlgorithm().getName());
                AccessTokenValidator.validate(accessToken, jwsAlgorithm, atHash);
            }
        } else if (rpConfig.getResponseType().equals(ResponseType.parse("id_token")) && accessToken != null) {
            // This should NOT happen and indicates a misconfigured OP. Warn the user but do not fail
            LOGGER.warn("Access Token incorrectly returned from the OpenId Connect Provider while using \"id_token\" response type.");
        }
    } catch (ElasticsearchSecurityException e) {
        // Bug fix: don't double-wrap our own, already-descriptive security exceptions
        // (mirrors the pattern used in authenticate()).
        throw e;
    } catch (Exception e) {
        throw new ElasticsearchSecurityException("Failed to verify access token.", e);
    }
}
/**
* Reads and parses a JWKSet from a file
*
* @param jwkSetPath The path to the file that contains the JWKs as a string.
* @return the parsed {@link JWKSet}
* @throws ParseException if the file cannot be parsed
* @throws IOException if the file cannot be read
*/
private JWKSet readJwkSetFromFile(String jwkSetPath) throws IOException, ParseException {
    final Path path = realmConfig.env().configFile().resolve(jwkSetPath);
    // JWKSet.load(File) does not close its FileInputStream internally, so read the contents ourselves.
    final String contents = Files.readString(path, StandardCharsets.UTF_8);
    return JWKSet.parse(contents);
}
/**
* Validate that the response we received corresponds to the response type we requested
*
* @param response The {@link AuthenticationSuccessResponse} we received
* @throws ElasticsearchSecurityException if the response is not the expected one for the configured response type
*/
/**
 * Verifies that the response we received matches the response type this RP is configured for.
 *
 * @param response the successful authentication response to check
 * @throws ElasticsearchSecurityException if the implied response type differs from the configured one
 */
private void validateResponseType(AuthenticationSuccessResponse response) {
    final var actualType = response.impliedResponseType();
    final var expectedType = rpConfig.getResponseType();
    if (expectedType.equals(actualType) == false) {
        throw new ElasticsearchSecurityException("Unexpected response type [{}], while [{}] is configured",
            actualType, expectedType);
    }
}
/**
* Validate that the state parameter the response contained corresponds to the one that we generated in the
* beginning of this authentication attempt and was stored with the user's session at the facilitator
*
* @param expectedState The state that was originally generated
* @param state The state that was contained in the response
*/
/**
 * Verifies that the state in the response equals the state we generated for this attempt
 * (guard-clause style: each failure mode is rejected in turn).
 *
 * @param expectedState the state generated at the start of the authentication attempt
 * @param state         the state contained in the OP's response
 */
private void validateState(State expectedState, State state) {
    if (state == null) {
        throw new ElasticsearchSecurityException("Failed to validate the response, the response did not contain a state parameter");
    }
    if (expectedState == null) {
        throw new ElasticsearchSecurityException("Failed to validate the response, the user's session did not contain a state parameter");
    }
    if (expectedState.equals(state) == false) {
        throw new ElasticsearchSecurityException("Invalid state parameter [{}], while [{}] was expected", state, expectedState);
    }
}
/**
* Attempts to make a request to the UserInfo Endpoint of the OpenID Connect provider
*/
private void getAndCombineUserInfoClaims(AccessToken accessToken, JWTClaimsSet verifiedIdTokenClaims,
                                         ActionListener<JWTClaimsSet> claimsListener) {
    try {
        final HttpGet httpGet = new HttpGet(opConfig.getUserinfoEndpoint());
        httpGet.setHeader("Authorization", "Bearer " + accessToken.getValue());
        // Consistency fix: call SpecialPermission.check() before entering the privileged block,
        // exactly as exchangeCodeForToken() does for its HTTP call.
        SpecialPermission.check();
        AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
            httpClient.execute(httpGet, new FutureCallback<HttpResponse>() {
                @Override
                public void completed(HttpResponse result) {
                    handleUserinfoResponse(result, verifiedIdTokenClaims, claimsListener);
                }

                @Override
                public void failed(Exception ex) {
                    claimsListener.onFailure(new ElasticsearchSecurityException("Failed to get claims from the Userinfo Endpoint.",
                        ex));
                }

                @Override
                public void cancelled() {
                    claimsListener.onFailure(
                        new ElasticsearchSecurityException("Failed to get claims from the Userinfo Endpoint. Request was cancelled"));
                }
            });
            return null;
        });
    } catch (Exception e) {
        claimsListener.onFailure(new ElasticsearchSecurityException("Failed to get user information from the UserInfo endpoint.", e));
    }
}
/**
* Handle the UserInfo Response from the OpenID Connect Provider. If successful, merge the returned claims with the claims
* of the Id Token and call the provided listener.
*/
private void handleUserinfoResponse(HttpResponse httpResponse, JWTClaimsSet verifiedIdTokenClaims,
                                    ActionListener<JWTClaimsSet> claimsListener) {
    try {
        final HttpEntity entity = httpResponse.getEntity();
        final Header encodingHeader = entity.getContentEncoding();
        // Fall back to UTF-8 when the OP does not declare a content encoding.
        final Charset encoding = encodingHeader == null ? StandardCharsets.UTF_8 : Charsets.toCharset(encodingHeader.getValue());
        final Header contentHeader = entity.getContentType();
        final String contentAsString = EntityUtils.toString(entity, encoding);
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Received UserInfo Response from OP with status [{}] and content [{}] ",
                httpResponse.getStatusLine().getStatusCode(), contentAsString);
        }
        if (httpResponse.getStatusLine().getStatusCode() == 200) {
            if (ContentType.parse(contentHeader.getValue()).getMimeType().equals("application/json")) {
                final JWTClaimsSet userInfoClaims = JWTClaimsSet.parse(contentAsString);
                // NOTE(review): validateUserInfoResponse notifies the listener itself on a sub mismatch, yet execution
                // continues and onResponse is also invoked below — confirm listeners tolerate both callbacks.
                validateUserInfoResponse(userInfoClaims, verifiedIdTokenClaims.getSubject(), claimsListener);
                if (LOGGER.isTraceEnabled()) {
                    LOGGER.trace("Successfully retrieved user information: [{}]", userInfoClaims);
                }
                final Map<String, Object> combinedClaims = verifiedIdTokenClaims.toJSONObject();
                mergeObjects(combinedClaims, userInfoClaims.toJSONObject());
                claimsListener.onResponse(JWTClaimsSet.parse(combinedClaims));
            } else if (ContentType.parse(contentHeader.getValue()).getMimeType().equals("application/jwt")) {
                //TODO Handle validating possibly signed responses
                // Message fix: previously the two literals ran together ("arenot").
                claimsListener.onFailure(new IllegalStateException("Unable to parse Userinfo Response. Signed/encrypted JWTs are " +
                    "not currently supported"));
            } else {
                // Message fix: "appliation/jwt" -> "application/jwt".
                claimsListener.onFailure(new IllegalStateException("Unable to parse Userinfo Response. Content type was expected to " +
                    "be [application/json] or [application/jwt] but was [" + contentHeader.getValue() + "]"));
            }
        } else {
            final Header wwwAuthenticateHeader = httpResponse.getFirstHeader("WWW-Authenticate");
            // Bug fix: getFirstHeader returns null when the header is absent; previously this dereference threw an NPE
            // that was swallowed into the generic catch below, obscuring the real status code.
            if (wwwAuthenticateHeader != null && Strings.hasText(wwwAuthenticateHeader.getValue())) {
                BearerTokenError error = BearerTokenError.parse(wwwAuthenticateHeader.getValue());
                claimsListener.onFailure(
                    new ElasticsearchSecurityException("Failed to get user information from the UserInfo endpoint. Code=[{}], " +
                        "Description=[{}]", error.getCode(), error.getDescription()));
            } else {
                claimsListener.onFailure(
                    new ElasticsearchSecurityException("Failed to get user information from the UserInfo endpoint. Code=[{}], " +
                        "Description=[{}]", httpResponse.getStatusLine().getStatusCode(),
                        httpResponse.getStatusLine().getReasonPhrase()));
            }
        }
    } catch (Exception e) {
        claimsListener.onFailure(new ElasticsearchSecurityException("Failed to get user information from the UserInfo endpoint.",
            e));
    }
}
/**
* Validates that the userinfo response contains a sub Claim and that this claim value is the same as the one returned in the ID Token
*/
private void validateUserInfoResponse(JWTClaimsSet userInfoClaims, String expectedSub, ActionListener<JWTClaimsSet> claimsListener) {
    final String sub = userInfoClaims.getSubject();
    // Bug fix: getSubject() returns null when the claim is absent, so the previous
    // getSubject().isEmpty() call threw an NPE instead of reporting a missing sub claim.
    if (Strings.hasText(sub) == false) {
        claimsListener.onFailure(new ElasticsearchSecurityException("Userinfo Response did not contain a sub Claim"));
    } else if (sub.equals(expectedSub) == false) {
        claimsListener.onFailure(new ElasticsearchSecurityException("Userinfo Response is not valid as it is for " +
            "subject [{}] while the ID Token was for subject [{}]", sub,
            expectedSub));
    }
}
/**
* Attempts to make a request to the Token Endpoint of the OpenID Connect provider in order to exchange an
* authorization code for an Id Token (and potentially an Access Token)
*/
private void exchangeCodeForToken(AuthorizationCode code, ActionListener<Tuple<AccessToken, JWT>> tokensListener) {
    try {
        final AuthorizationCodeGrant codeGrant = new AuthorizationCodeGrant(code, rpConfig.getRedirectUri());
        final HttpPost httpPost = new HttpPost(opConfig.getTokenEndpoint());
        httpPost.setHeader("Content-type", "application/x-www-form-urlencoded");
        final List<NameValuePair> params = new ArrayList<>();
        for (Map.Entry<String, List<String>> entry : codeGrant.toParameters().entrySet()) {
            // All parameters of AuthorizationCodeGrant are singleton lists
            params.add(new BasicNameValuePair(entry.getKey(), entry.getValue().get(0)));
        }
        if (rpConfig.getClientAuthenticationMethod().equals(ClientAuthenticationMethod.CLIENT_SECRET_BASIC)) {
            UsernamePasswordCredentials creds =
                new UsernamePasswordCredentials(URLEncoder.encode(rpConfig.getClientId().getValue(), StandardCharsets.UTF_8),
                    URLEncoder.encode(rpConfig.getClientSecret().toString(), StandardCharsets.UTF_8));
            httpPost.addHeader(new BasicScheme().authenticate(creds, httpPost, null));
        } else if (rpConfig.getClientAuthenticationMethod().equals(ClientAuthenticationMethod.CLIENT_SECRET_POST)) {
            params.add(new BasicNameValuePair("client_id", rpConfig.getClientId().getValue()));
            params.add(new BasicNameValuePair("client_secret", rpConfig.getClientSecret().toString()));
        } else if (rpConfig.getClientAuthenticationMethod().equals(ClientAuthenticationMethod.CLIENT_SECRET_JWT)) {
            ClientSecretJWT clientSecretJWT = new ClientSecretJWT(rpConfig.getClientId(), opConfig.getTokenEndpoint(),
                rpConfig.getClientAuthenticationJwtAlgorithm(), new Secret(rpConfig.getClientSecret().toString()));
            for (Map.Entry<String, List<String>> entry : clientSecretJWT.toParameters().entrySet()) {
                // Both client_assertion and client_assertion_type are singleton lists
                params.add(new BasicNameValuePair(entry.getKey(), entry.getValue().get(0)));
            }
        } else {
            // Message fix: the two literals previously ran together ("Endpoint.Expected").
            tokensListener.onFailure(new ElasticsearchSecurityException("Failed to exchange code for Id Token using Token Endpoint. " +
                "Expected client authentication method to be one of " + OpenIdConnectRealmSettings.CLIENT_AUTH_METHODS
                + " but was [" + rpConfig.getClientAuthenticationMethod() + "]"));
            // Bug fix: previously fell through and still sent the token request, so the listener
            // could be notified a second time with the response of an unauthenticated request.
            return;
        }
        httpPost.setEntity(new UrlEncodedFormEntity(params));
        SpecialPermission.check();
        AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
            httpClient.execute(httpPost, new FutureCallback<HttpResponse>() {
                @Override
                public void completed(HttpResponse result) {
                    handleTokenResponse(result, tokensListener);
                }

                @Override
                public void failed(Exception ex) {
                    tokensListener.onFailure(
                        new ElasticsearchSecurityException("Failed to exchange code for Id Token using the Token Endpoint.", ex));
                }

                @Override
                public void cancelled() {
                    final String message = "Failed to exchange code for Id Token using the Token Endpoint. Request was cancelled";
                    tokensListener.onFailure(new ElasticsearchSecurityException(message));
                }
            });
            return null;
        });
    } catch (AuthenticationException | UnsupportedEncodingException | JOSEException e) {
        tokensListener.onFailure(
            new ElasticsearchSecurityException("Failed to exchange code for Id Token using the Token Endpoint.", e));
    }
}
/**
* Handle the Token Response from the OpenID Connect Provider. If successful, extract the (yet not validated) Id Token
* and access token and call the provided listener.
*/
private void handleTokenResponse(HttpResponse httpResponse, ActionListener<Tuple<AccessToken, JWT>> tokensListener) {
    try {
        final HttpEntity entity = httpResponse.getEntity();
        final Header encodingHeader = entity.getContentEncoding();
        final Header contentHeader = entity.getContentType();
        // Bug fix: getContentType() may return null; previously that caused an NPE which was
        // reported as a generic "Unable to parse Token Response" failure.
        if (contentHeader == null || ContentType.parse(contentHeader.getValue()).getMimeType().equals("application/json") == false) {
            tokensListener.onFailure(new IllegalStateException("Unable to parse Token Response. Content type was expected to be " +
                "[application/json] but was [" + (contentHeader == null ? "not set" : contentHeader.getValue()) + "]"));
            return;
        }
        // Fall back to UTF-8 when the OP does not declare a content encoding.
        final Charset encoding = encodingHeader == null ? StandardCharsets.UTF_8 : Charsets.toCharset(encodingHeader.getValue());
        final RestStatus responseStatus = RestStatus.fromCode(httpResponse.getStatusLine().getStatusCode());
        if (RestStatus.OK != responseStatus) {
            final String json = EntityUtils.toString(entity, encoding);
            LOGGER.warn("Received Token Response from OP with status [{}] and content [{}]", responseStatus, json);
            if (RestStatus.BAD_REQUEST == responseStatus) {
                // A 400 carries a structured OAuth2 error object we can surface to the user.
                final TokenErrorResponse tokenErrorResponse = TokenErrorResponse.parse(JSONObjectUtils.parse(json));
                tokensListener.onFailure(
                    new ElasticsearchSecurityException("Failed to exchange code for Id Token. Code=[{}], Description=[{}]",
                        tokenErrorResponse.getErrorObject().getCode(), tokenErrorResponse.getErrorObject().getDescription()));
            } else {
                tokensListener.onFailure(new ElasticsearchSecurityException("Failed to exchange code for Id Token"));
            }
        } else {
            final OIDCTokenResponse oidcTokenResponse = OIDCTokenResponse.parse(
                JSONObjectUtils.parse(EntityUtils.toString(entity, encoding)));
            final OIDCTokens oidcTokens = oidcTokenResponse.getOIDCTokens();
            final AccessToken accessToken = oidcTokens.getAccessToken();
            final JWT idToken = oidcTokens.getIDToken();
            if (LOGGER.isTraceEnabled()) {
                // Only ever log a redacted form of the access token.
                LOGGER.trace("Successfully exchanged code for ID Token [{}] and Access Token [{}]", idToken,
                    truncateToken(accessToken.toString()));
            }
            if (idToken == null) {
                tokensListener.onFailure(
                    new ElasticsearchSecurityException("Token Response did not contain an ID Token or parsing of the JWT failed."));
                return;
            }
            tokensListener.onResponse(new Tuple<>(accessToken, idToken));
        }
    } catch (Exception e) {
        tokensListener.onFailure(
            new ElasticsearchSecurityException("Failed to exchange code for Id Token using the Token Endpoint. " +
                "Unable to parse Token Response", e));
    }
}
/**
 * Redacts a token for logging: keeps the first and last two characters and replaces the
 * middle with {@code ***}. Blank/null input or tokens of four characters or fewer are
 * returned unchanged.
 */
private static String truncateToken(String input) {
    if (Strings.hasText(input) == false || input.length() <= 4) {
        return input;
    }
    final int len = input.length();
    return new StringBuilder(7)
        .append(input, 0, 2)
        .append("***")
        .append(input, len - 2, len)
        .toString();
}
/**
 * Creates a {@link CloseableHttpAsyncClient} that uses a {@link PoolingNHttpClientConnectionManager}.
 *
 * <p>The client is configured from the realm settings: the TLS context and hostname verifier,
 * per-route and total connection limits, the connect/connection-request/socket timeouts and an
 * optional HTTP proxy. The returned client is already started; it is released via {@link #close()}.
 *
 * @return a started async HTTP client
 * @throws IllegalStateException if the client cannot be constructed inside the privileged block
 */
private CloseableHttpAsyncClient createHttpClient() {
  try {
    SpecialPermission.check();
    return AccessController.doPrivileged(
        (PrivilegedExceptionAction<CloseableHttpAsyncClient>) () -> {
          ConnectingIOReactor ioReactor = new DefaultConnectingIOReactor();
          final String sslKey = RealmSettings.realmSslPrefix(realmConfig.identifier());
          final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration(sslKey);
          final SSLContext clientContext = sslService.sslContext(sslConfiguration);
          final HostnameVerifier verifier = SSLService.getHostnameVerifier(sslConfiguration);
          Registry<SchemeIOSessionStrategy> registry = RegistryBuilder.<SchemeIOSessionStrategy>create()
              .register("http", NoopIOSessionStrategy.INSTANCE)
              .register("https", new SSLIOSessionStrategy(clientContext, verifier))
              .build();
          PoolingNHttpClientConnectionManager connectionManager = new PoolingNHttpClientConnectionManager(ioReactor, registry);
          connectionManager.setDefaultMaxPerRoute(realmConfig.getSetting(HTTP_MAX_ENDPOINT_CONNECTIONS));
          connectionManager.setMaxTotal(realmConfig.getSetting(HTTP_MAX_CONNECTIONS));
          // BUGFIX: RequestConfig timeouts are expressed in milliseconds. The previous code passed
          // TimeValue#getSeconds() for the connection-request timeout (unlike the two sibling calls
          // that use getMillis()), silently shrinking e.g. a 5s setting down to a 5ms timeout.
          final RequestConfig requestConfig = RequestConfig.custom()
              .setConnectTimeout(Math.toIntExact(realmConfig.getSetting(HTTP_CONNECT_TIMEOUT).getMillis()))
              .setConnectionRequestTimeout(Math.toIntExact(realmConfig.getSetting(HTTP_CONNECTION_READ_TIMEOUT).getMillis()))
              .setSocketTimeout(Math.toIntExact(realmConfig.getSetting(HTTP_SOCKET_TIMEOUT).getMillis())).build();
          HttpAsyncClientBuilder httpAsyncClientBuilder = HttpAsyncClients.custom()
              .setConnectionManager(connectionManager)
              .setDefaultRequestConfig(requestConfig);
          if (realmConfig.hasSetting(HTTP_PROXY_HOST)) {
            // Route all OP requests through the configured proxy.
            httpAsyncClientBuilder.setProxy(new HttpHost(realmConfig.getSetting(HTTP_PROXY_HOST),
                realmConfig.getSetting(HTTP_PROXY_PORT), realmConfig.getSetting(HTTP_PROXY_SCHEME)));
          }
          CloseableHttpAsyncClient httpAsyncClient = httpAsyncClientBuilder.build();
          httpAsyncClient.start();
          return httpAsyncClient;
        });
  } catch (PrivilegedActionException e) {
    throw new IllegalStateException("Unable to create a HttpAsyncClient instance", e);
  }
}
/**
 * Creates an {@link IDTokenValidator} based on the current Relying Party configuration.
 *
 * <p>The key material used for signature verification comes from one of three sources:
 * <ul>
 *   <li>an HMAC-family algorithm uses the client secret as the shared key,</li>
 *   <li>an {@code https} JWK set URL is wrapped in a {@link ReloadableJWKSource} so keys can be
 *       re-fetched if the OP rotates them ({@code http} URLs are rejected as insecure),</li>
 *   <li>anything else is treated as a local JWK set file, read once and optionally watched.</li>
 * </ul>
 *
 * @param addFileWatcherIfRequired whether to register a file watcher for a file-based JWK set;
 *                                 {@code false} when invoked from the watcher callback itself,
 *                                 so that a reload does not register a duplicate watcher
 * @return a configured validator with the realm's allowed clock skew applied
 * @throws IllegalStateException if reading or parsing the configuration/JWK material fails
 */
IDTokenValidator createIdTokenValidator(boolean addFileWatcherIfRequired) {
  try {
    final JWSAlgorithm requestedAlgorithm = rpConfig.getSignatureAlgorithm();
    final int allowedClockSkew = Math.toIntExact(realmConfig.getSetting(ALLOWED_CLOCK_SKEW).getMillis());
    final IDTokenValidator idTokenValidator;
    if (JWSAlgorithm.Family.HMAC_SHA.contains(requestedAlgorithm)) {
      // Symmetric algorithms (HS256/384/512): the shared secret is the client secret itself.
      final Secret clientSecret = new Secret(rpConfig.getClientSecret().toString());
      idTokenValidator =
          new IDTokenValidator(opConfig.getIssuer(), rpConfig.getClientId(), requestedAlgorithm, clientSecret);
    } else {
      String jwkSetPath = opConfig.getJwkSetPath();
      if (jwkSetPath.startsWith("http://")) {
        throw new IllegalArgumentException("The [http] protocol is not supported as it is insecure. Use [https] instead");
      } else if (jwkSetPath.startsWith("https://")) {
        // Remote JWK set: selection runs against a cached copy that can be reloaded on demand.
        final JWSVerificationKeySelector keySelector = new JWSVerificationKeySelector(requestedAlgorithm,
            new ReloadableJWKSource(new URL(jwkSetPath)));
        idTokenValidator = new IDTokenValidator(opConfig.getIssuer(), rpConfig.getClientId(), keySelector, null);
      } else {
        // Local file path (resolved relative to the config directory by the helpers below).
        if (addFileWatcherIfRequired) {
          setMetadataFileWatcher(jwkSetPath);
        }
        final JWKSet jwkSet = readJwkSetFromFile(jwkSetPath);
        idTokenValidator = new IDTokenValidator(opConfig.getIssuer(), rpConfig.getClientId(), requestedAlgorithm, jwkSet);
      }
    }
    idTokenValidator.setMaxClockSkew(allowedClockSkew);
    return idTokenValidator;
  } catch (IOException | ParseException e) {
    throw new IllegalStateException("Unable to create a IDTokenValidator instance", e);
  }
}
/**
 * Registers a file watcher on the configured JWK set file so the ID token validator
 * is rebuilt whenever the file changes on disk.
 *
 * @param jwkSetPath path of the JWK set file, relative to the config directory
 * @throws IOException if the watcher cannot be registered
 */
private void setMetadataFileWatcher(String jwkSetPath) throws IOException {
  final Path jwkSetFile = realmConfig.env().configFile().resolve(jwkSetPath);
  final FileWatcher jwkSetWatcher = new FileWatcher(jwkSetFile);
  // Rebuild the validator on change; pass false so the rebuild does not register another watcher.
  jwkSetWatcher.addListener(new FileListener(LOGGER, () -> this.idTokenValidator.set(createIdTokenValidator(false))));
  watcherService.add(jwkSetWatcher, ResourceWatcherService.Frequency.MEDIUM);
}
/**
 * Merges the Map with the claims of the ID Token with the Map with the claims of the UserInfo response.
 * The merging is performed based on the following rules:
 * <ul>
 * <li>If the values for a given claim are primitives (of the same type), the value from the ID Token is retained</li>
 * <li>If the values for a given claim are Objects, the values are merged</li>
 * <li>If the values for a given claim are Arrays, the values are merged without removing duplicates</li>
 * <li>If the values for a given claim are of different types, an exception is thrown</li>
 * </ul>
 *
 * <p>Note: the merge is performed in place — {@code idToken} is mutated and returned.
 *
 * @param idToken  The Map with the ID Token claims
 * @param userInfo The Map with the UserInfo response claims
 * @return the merged Map (the same instance that was passed as {@code idToken})
 */
// pkg protected for testing
static Map<String, Object> mergeObjects(Map<String, Object> idToken, Map<String, Object> userInfo) {
  for (Map.Entry<String, Object> entry : idToken.entrySet()) {
    Object value1 = entry.getValue();
    Object value2 = userInfo.get(entry.getKey());
    if (value2 == null) {
      // Claim only present in the ID Token: nothing to merge.
      continue;
    }
    if (value1 instanceof JSONArray) {
      idToken.put(entry.getKey(), mergeArrays((JSONArray) value1, value2));
    } else if (value1 instanceof Map) {
      idToken.put(entry.getKey(), mergeObjects((Map<String, Object>) value1, value2));
    } else if (value1.getClass().equals(value2.getClass()) == false) {
      // A special handling for certain OPs that mix the usage of true and "true"
      if (value1 instanceof Boolean && value2 instanceof String && String.valueOf(value1).equals(value2)) {
        idToken.put(entry.getKey(), value1);
      } else if (value2 instanceof Boolean && value1 instanceof String && String.valueOf(value2).equals(value1)) {
        idToken.put(entry.getKey(), value2);
      } else {
        throw new IllegalStateException("Error merging ID token and userinfo claim value for claim [" + entry.getKey() + "]. " +
            "Cannot merge [" + value1.getClass().getName() + "] with [" + value2.getClass().getName() + "]");
      }
    }
  }
  // Claims present only in the UserInfo response are copied over as-is.
  for (Map.Entry<String, Object> entry : userInfo.entrySet()) {
    if (idToken.containsKey(entry.getKey()) == false) {
      idToken.put(entry.getKey(), entry.getValue());
    }
  }
  return idToken;
}
/**
 * Merges an untyped claim value into {@code base}: {@code null} is a no-op and a
 * {@code Map} is merged recursively; any other type cannot be merged into a Map.
 */
private static Map<String, Object> mergeObjects(Map<String, Object> base, Object other) {
  if (other == null) {
    return base;
  }
  if (other instanceof Map == false) {
    throw new IllegalStateException("Error while merging ID token and userinfo claims. " +
        "Cannot merge a Map with a [" + other.getClass().getName() + "]");
  }
  return mergeObjects(base, (Map<String, Object>) other);
}
/**
 * Merges an untyped claim value into {@code target}: {@code null} is a no-op, another
 * array is concatenated, a bare String is appended as a single element, and any other
 * type is silently ignored (the original array is returned unchanged).
 */
private static JSONArray mergeArrays(JSONArray target, Object other) {
  if (other == null) {
    return target;
  }
  if (other instanceof JSONArray) {
    return mergeArrays(target, (JSONArray) other);
  }
  if (other instanceof String) {
    target.add(other);
  }
  return target;
}
// Concatenates the second array onto the first (duplicates are retained) and returns the first.
private static JSONArray mergeArrays(JSONArray target, JSONArray additions) {
  target.addAll(additions);
  return target;
}
/**
 * Releases the realm's async HTTP client. A failure to close is logged at debug
 * level and otherwise ignored, since it happens during shutdown.
 */
protected void close() {
  try {
    httpClient.close();
  } catch (IOException e) {
    LOGGER.debug("Unable to close the HttpAsyncClient", e);
  }
}
/**
 * A {@link FileChangesListener} that treats creation, deletion and modification identically:
 * every event runs the supplied action, logging (but never propagating) any failure.
 */
private static class FileListener implements FileChangesListener {

  private final Logger logger;
  private final CheckedRunnable<Exception> action;

  private FileListener(Logger logger, CheckedRunnable<Exception> action) {
    this.logger = logger;
    this.action = action;
  }

  @Override
  public void onFileCreated(Path file) {
    // Creation is handled the same way as modification.
    onFileChanged(file);
  }

  @Override
  public void onFileDeleted(Path file) {
    // Deletion is handled the same way as modification.
    onFileChanged(file);
  }

  @Override
  public void onFileChanged(Path file) {
    try {
      action.run();
    } catch (Exception e) {
      logger.warn(new ParameterizedMessage("An error occurred while reloading file {}", file), e);
    }
  }
}
/**
 * Remote JSON Web Key source specified by a JWKSet URL. The retrieved JWK set is cached to
 * avoid unnecessary http requests. A single attempt to update the cached set is made
 * (with {@link ReloadableJWKSource#triggerReload}) when the {@link IDTokenValidator} fails
 * to validate an ID Token (because of an unknown key) as this might mean that the OpenID
 * Connect Provider has rotated the signing keys.
 */
class ReloadableJWKSource<C extends SecurityContext> implements JWKSource<C> {

  // Latest successfully fetched key set. Starts empty, so key selection simply finds no
  // keys until the first reload completes.
  private volatile JWKSet cachedJwkSet = new JWKSet();
  // The in-flight reload, if any. null means no reload is running; the compare-and-set in
  // triggerReload ensures at most one HTTP fetch is in progress at a time.
  private final AtomicReference<ListenableFuture<Void>> reloadFutureRef = new AtomicReference<>();
  private final URL jwkSetPath;

  private ReloadableJWKSource(URL jwkSetPath) {
    this.jwkSetPath = jwkSetPath;
    // Eagerly warm the cache. Failures are only traced: a reload will be re-attempted
    // when validation first fails against the (still empty) cached set.
    triggerReload(ActionListener.wrap(success -> LOGGER.trace("Successfully loaded and cached remote JWKSet on startup"),
        failure -> LOGGER.trace("Failed to load and cache remote JWKSet on startup", failure)));
  }

  @Override
  public List<JWK> get(JWKSelector jwkSelector, C context) {
    // Selection always runs against the cached snapshot; it never blocks on a fetch.
    return jwkSelector.select(cachedJwkSet);
  }

  /**
   * Notifies {@code toNotify} when the current (or a newly started) reload completes.
   * Concurrent callers share a single reload via compare-and-set on {@link #reloadFutureRef}.
   */
  void triggerReload(ActionListener<Void> toNotify) {
    ListenableFuture<Void> future = reloadFutureRef.get();
    while (future == null) {
      future = new ListenableFuture<>();
      if (reloadFutureRef.compareAndSet(null, future)) {
        // This thread won the race and performs the actual fetch.
        reloadAsync(future);
      } else {
        // Another thread started a reload first; piggy-back on its future.
        future = reloadFutureRef.get();
      }
    }
    future.addListener(toNotify);
  }

  // Performs the asynchronous HTTP fetch of the JWK set and completes `future` with the
  // outcome. In every terminal path reloadFutureRef is reset to null so a later validation
  // failure can trigger a fresh reload.
  void reloadAsync(final ListenableFuture<Void> future) {
    try {
      final HttpGet httpGet = new HttpGet(jwkSetPath.toURI());
      AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
        httpClient.execute(httpGet, new FutureCallback<HttpResponse>() {
          @Override
          public void completed(HttpResponse result) {
            try {
              cachedJwkSet = JWKSet.parse(IOUtils.readInputStreamToString(result.getEntity().getContent(),
                  StandardCharsets.UTF_8));
              // NOTE(review): the in-flight marker is cleared before listeners are notified,
              // presumably so a listener can immediately start a fresh reload — confirm.
              reloadFutureRef.set(null);
              LOGGER.trace("Successfully refreshed and cached remote JWKSet");
              future.onResponse(null);
            } catch (Exception e) {
              // Parsing/reading errors are routed through the failure path below.
              failed(e);
            }
          }

          @Override
          public void failed(Exception ex) {
            future.onFailure(new ElasticsearchSecurityException("Failed to retrieve remote JWK set.", ex));
            reloadFutureRef.set(null);
          }

          @Override
          public void cancelled() {
            future.onFailure(
                new ElasticsearchSecurityException("Failed to retrieve remote JWK set. Request was cancelled."));
            reloadFutureRef.set(null);
          }
        });
        return null;
      });
    } catch (Exception e) {
      // URI/privilege failures before the request was even issued.
      future.onFailure(e);
      reloadFutureRef.set(null);
    }
  }
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.logging.v2;
import static com.google.cloud.logging.v2.LoggingClient.ListLogEntriesPagedResponse;
import static com.google.cloud.logging.v2.LoggingClient.ListLogsPagedResponse;
import static com.google.cloud.logging.v2.LoggingClient.ListMonitoredResourceDescriptorsPagedResponse;
import com.google.api.MonitoredResource;
import com.google.api.MonitoredResourceDescriptor;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.grpc.testing.MockStreamObserver;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiStreamObserver;
import com.google.api.gax.rpc.BidiStreamingCallable;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.common.collect.Lists;
import com.google.logging.v2.BillingAccountName;
import com.google.logging.v2.DeleteLogRequest;
import com.google.logging.v2.FolderName;
import com.google.logging.v2.ListLogEntriesRequest;
import com.google.logging.v2.ListLogEntriesResponse;
import com.google.logging.v2.ListLogsRequest;
import com.google.logging.v2.ListLogsResponse;
import com.google.logging.v2.ListMonitoredResourceDescriptorsRequest;
import com.google.logging.v2.ListMonitoredResourceDescriptorsResponse;
import com.google.logging.v2.LogEntry;
import com.google.logging.v2.LogName;
import com.google.logging.v2.OrganizationName;
import com.google.logging.v2.ProjectName;
import com.google.logging.v2.TailLogEntriesRequest;
import com.google.logging.v2.TailLogEntriesResponse;
import com.google.logging.v2.WriteLogEntriesRequest;
import com.google.logging.v2.WriteLogEntriesResponse;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Duration;
import com.google.protobuf.Empty;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Unit tests for {@link LoggingClient}, exercising each RPC against an in-process mock
 * gRPC service. Every RPC gets a success test (verifying the outgoing request fields and
 * the routing header) and a failure test (verifying that an INVALID_ARGUMENT status
 * surfaces as {@link InvalidArgumentException}); overloads get numbered variants.
 *
 * <p>NOTE(review): this class is {@code @Generated} by gapic-generator-java — manual
 * edits beyond comments are expected to be overwritten on regeneration.
 */
@Generated("by gapic-generator-java")
public class LoggingClientTest {
  // Mock service and in-process server shared by the whole test class.
  private static MockLoggingServiceV2 mockLoggingServiceV2;
  private static MockServiceHelper mockServiceHelper;
  // Per-test channel and client, recreated in setUp().
  private LocalChannelProvider channelProvider;
  private LoggingClient client;

  // Starts a single in-process mock server for all tests in the class.
  @BeforeClass
  public static void startStaticServer() {
    mockLoggingServiceV2 = new MockLoggingServiceV2();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockLoggingServiceV2));
    mockServiceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  // Resets the mock and builds a fresh credential-less client over the local channel.
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    LoggingSettings settings =
        LoggingSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = LoggingClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  // Success-test pattern: enqueue a canned response, call the client, then verify the
  // request the mock received and that the API client header was sent.
  @Test
  public void deleteLogTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    LogName logName = LogName.ofProjectLogName("[PROJECT]", "[LOG]");

    client.deleteLog(logName);

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteLogRequest actualRequest = ((DeleteLogRequest) actualRequests.get(0));

    Assert.assertEquals(logName.toString(), actualRequest.getLogName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteLogExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      LogName logName = LogName.ofProjectLogName("[PROJECT]", "[LOG]");
      client.deleteLog(logName);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void deleteLogTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    String logName = "logName341528559";

    client.deleteLog(logName);

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteLogRequest actualRequest = ((DeleteLogRequest) actualRequests.get(0));

    Assert.assertEquals(logName, actualRequest.getLogName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteLogExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      String logName = "logName341528559";
      client.deleteLog(logName);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void writeLogEntriesTest() throws Exception {
    WriteLogEntriesResponse expectedResponse = WriteLogEntriesResponse.newBuilder().build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    LogName logName = LogName.ofProjectLogName("[PROJECT]", "[LOG]");
    MonitoredResource resource = MonitoredResource.newBuilder().build();
    Map<String, String> labels = new HashMap<>();
    List<LogEntry> entries = new ArrayList<>();

    WriteLogEntriesResponse actualResponse =
        client.writeLogEntries(logName, resource, labels, entries);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    WriteLogEntriesRequest actualRequest = ((WriteLogEntriesRequest) actualRequests.get(0));

    Assert.assertEquals(logName.toString(), actualRequest.getLogName());
    Assert.assertEquals(resource, actualRequest.getResource());
    Assert.assertEquals(labels, actualRequest.getLabelsMap());
    Assert.assertEquals(entries, actualRequest.getEntriesList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void writeLogEntriesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      LogName logName = LogName.ofProjectLogName("[PROJECT]", "[LOG]");
      MonitoredResource resource = MonitoredResource.newBuilder().build();
      Map<String, String> labels = new HashMap<>();
      List<LogEntry> entries = new ArrayList<>();
      client.writeLogEntries(logName, resource, labels, entries);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void writeLogEntriesTest2() throws Exception {
    WriteLogEntriesResponse expectedResponse = WriteLogEntriesResponse.newBuilder().build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    String logName = "logName341528559";
    MonitoredResource resource = MonitoredResource.newBuilder().build();
    Map<String, String> labels = new HashMap<>();
    List<LogEntry> entries = new ArrayList<>();

    WriteLogEntriesResponse actualResponse =
        client.writeLogEntries(logName, resource, labels, entries);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    WriteLogEntriesRequest actualRequest = ((WriteLogEntriesRequest) actualRequests.get(0));

    Assert.assertEquals(logName, actualRequest.getLogName());
    Assert.assertEquals(resource, actualRequest.getResource());
    Assert.assertEquals(labels, actualRequest.getLabelsMap());
    Assert.assertEquals(entries, actualRequest.getEntriesList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void writeLogEntriesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      String logName = "logName341528559";
      MonitoredResource resource = MonitoredResource.newBuilder().build();
      Map<String, String> labels = new HashMap<>();
      List<LogEntry> entries = new ArrayList<>();
      client.writeLogEntries(logName, resource, labels, entries);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Paged-RPC pattern: a single-element page with an empty next-page token, verified via
  // iterateAll().
  @Test
  public void listLogEntriesTest() throws Exception {
    LogEntry responsesElement = LogEntry.newBuilder().build();
    ListLogEntriesResponse expectedResponse =
        ListLogEntriesResponse.newBuilder()
            .setNextPageToken("")
            .addAllEntries(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    List<String> resourceNames = new ArrayList<>();
    String filter = "filter-1274492040";
    String orderBy = "orderBy-1207110587";

    ListLogEntriesPagedResponse pagedListResponse =
        client.listLogEntries(resourceNames, filter, orderBy);

    List<LogEntry> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getEntriesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLogEntriesRequest actualRequest = ((ListLogEntriesRequest) actualRequests.get(0));

    Assert.assertEquals(resourceNames, actualRequest.getResourceNamesList());
    Assert.assertEquals(filter, actualRequest.getFilter());
    Assert.assertEquals(orderBy, actualRequest.getOrderBy());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLogEntriesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      List<String> resourceNames = new ArrayList<>();
      String filter = "filter-1274492040";
      String orderBy = "orderBy-1207110587";
      client.listLogEntries(resourceNames, filter, orderBy);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listMonitoredResourceDescriptorsTest() throws Exception {
    MonitoredResourceDescriptor responsesElement = MonitoredResourceDescriptor.newBuilder().build();
    ListMonitoredResourceDescriptorsResponse expectedResponse =
        ListMonitoredResourceDescriptorsResponse.newBuilder()
            .setNextPageToken("")
            .addAllResourceDescriptors(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    ListMonitoredResourceDescriptorsRequest request =
        ListMonitoredResourceDescriptorsRequest.newBuilder()
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();

    ListMonitoredResourceDescriptorsPagedResponse pagedListResponse =
        client.listMonitoredResourceDescriptors(request);

    List<MonitoredResourceDescriptor> resources =
        Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getResourceDescriptorsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListMonitoredResourceDescriptorsRequest actualRequest =
        ((ListMonitoredResourceDescriptorsRequest) actualRequests.get(0));

    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listMonitoredResourceDescriptorsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      ListMonitoredResourceDescriptorsRequest request =
          ListMonitoredResourceDescriptorsRequest.newBuilder()
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listMonitoredResourceDescriptors(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // listLogs has five overloads (billing account, folder, organization, project, raw
  // string); each gets its own numbered success/failure pair below.
  @Test
  public void listLogsTest() throws Exception {
    String responsesElement = "responsesElement-318365110";
    ListLogsResponse expectedResponse =
        ListLogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLogNames(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    BillingAccountName parent = BillingAccountName.of("[BILLING_ACCOUNT]");

    ListLogsPagedResponse pagedListResponse = client.listLogs(parent);

    List<String> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLogNamesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLogsRequest actualRequest = ((ListLogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLogsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      BillingAccountName parent = BillingAccountName.of("[BILLING_ACCOUNT]");
      client.listLogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listLogsTest2() throws Exception {
    String responsesElement = "responsesElement-318365110";
    ListLogsResponse expectedResponse =
        ListLogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLogNames(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    FolderName parent = FolderName.of("[FOLDER]");

    ListLogsPagedResponse pagedListResponse = client.listLogs(parent);

    List<String> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLogNamesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLogsRequest actualRequest = ((ListLogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLogsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      FolderName parent = FolderName.of("[FOLDER]");
      client.listLogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listLogsTest3() throws Exception {
    String responsesElement = "responsesElement-318365110";
    ListLogsResponse expectedResponse =
        ListLogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLogNames(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    OrganizationName parent = OrganizationName.of("[ORGANIZATION]");

    ListLogsPagedResponse pagedListResponse = client.listLogs(parent);

    List<String> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLogNamesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLogsRequest actualRequest = ((ListLogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLogsExceptionTest3() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      OrganizationName parent = OrganizationName.of("[ORGANIZATION]");
      client.listLogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listLogsTest4() throws Exception {
    String responsesElement = "responsesElement-318365110";
    ListLogsResponse expectedResponse =
        ListLogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLogNames(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    ProjectName parent = ProjectName.of("[PROJECT]");

    ListLogsPagedResponse pagedListResponse = client.listLogs(parent);

    List<String> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLogNamesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLogsRequest actualRequest = ((ListLogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLogsExceptionTest4() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      ProjectName parent = ProjectName.of("[PROJECT]");
      client.listLogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listLogsTest5() throws Exception {
    String responsesElement = "responsesElement-318365110";
    ListLogsResponse expectedResponse =
        ListLogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLogNames(Arrays.asList(responsesElement))
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListLogsPagedResponse pagedListResponse = client.listLogs(parent);

    List<String> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLogNamesList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLoggingServiceV2.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLogsRequest actualRequest = ((ListLogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLogsExceptionTest5() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listLogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // Bidi-streaming pattern: drive the callable with a MockStreamObserver and collect the
  // streamed responses from its future.
  @Test
  public void tailLogEntriesTest() throws Exception {
    TailLogEntriesResponse expectedResponse =
        TailLogEntriesResponse.newBuilder()
            .addAllEntries(new ArrayList<LogEntry>())
            .addAllSuppressionInfo(new ArrayList<TailLogEntriesResponse.SuppressionInfo>())
            .build();
    mockLoggingServiceV2.addResponse(expectedResponse);
    TailLogEntriesRequest request =
        TailLogEntriesRequest.newBuilder()
            .addAllResourceNames(new ArrayList<String>())
            .setFilter("filter-1274492040")
            .setBufferWindow(Duration.newBuilder().build())
            .build();

    MockStreamObserver<TailLogEntriesResponse> responseObserver = new MockStreamObserver<>();

    BidiStreamingCallable<TailLogEntriesRequest, TailLogEntriesResponse> callable =
        client.tailLogEntriesCallable();
    ApiStreamObserver<TailLogEntriesRequest> requestObserver =
        callable.bidiStreamingCall(responseObserver);

    requestObserver.onNext(request);
    requestObserver.onCompleted();

    List<TailLogEntriesResponse> actualResponses = responseObserver.future().get();
    Assert.assertEquals(1, actualResponses.size());
    Assert.assertEquals(expectedResponse, actualResponses.get(0));
  }

  @Test
  public void tailLogEntriesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLoggingServiceV2.addException(exception);
    TailLogEntriesRequest request =
        TailLogEntriesRequest.newBuilder()
            .addAllResourceNames(new ArrayList<String>())
            .setFilter("filter-1274492040")
            .setBufferWindow(Duration.newBuilder().build())
            .build();

    MockStreamObserver<TailLogEntriesResponse> responseObserver = new MockStreamObserver<>();

    BidiStreamingCallable<TailLogEntriesRequest, TailLogEntriesResponse> callable =
        client.tailLogEntriesCallable();
    ApiStreamObserver<TailLogEntriesRequest> requestObserver =
        callable.bidiStreamingCall(responseObserver);

    requestObserver.onNext(request);

    try {
      List<TailLogEntriesResponse> actualResponses = responseObserver.future().get();
      Assert.fail("No exception thrown");
    } catch (ExecutionException e) {
      // Streaming errors surface through the future's ExecutionException; unwrap the cause.
      Assert.assertTrue(e.getCause() instanceof InvalidArgumentException);
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.openshift;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.fabric8.api.CreateContainerBasicOptions;
import io.fabric8.api.CreateRemoteContainerOptions;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
 * Options for creating a fabric container on OpenShift. Extends the basic
 * container options with the OpenShift broker URL, login credentials, target
 * domain (namespace), gear profile, environment variables, and fallback Maven
 * repositories.
 *
 * <p>Instances are immutable; construct them via {@link #builder()}.
 */
public class CreateOpenshiftContainerOptions extends CreateContainerBasicOptions<CreateOpenshiftContainerOptions> implements CreateRemoteContainerOptions {
    private static final long serialVersionUID = 4489740280396972109L;
    // Names of the environment variables consulted for builder defaults.
    // Declared final: these are lookup keys, not mutable configuration
    // (previously non-final public statics, which any code could reassign).
    public static final String OPENSHIFT_BROKER_HOST = "OPENSHIFT_BROKER_HOST";
    public static final String OPENSHIFT_NAMESPACE = "OPENSHIFT_NAMESPACE";

    /**
     * Builder for {@link CreateOpenshiftContainerOptions}. Server URL and
     * domain default to the {@code OPENSHIFT_BROKER_HOST} and
     * {@code OPENSHIFT_NAMESPACE} environment variables; gear profile
     * defaults to {@code "small"}.
     */
    public static class Builder extends CreateContainerBasicOptions.Builder<Builder> {
        @JsonProperty
        private String serverUrl = System.getenv(OPENSHIFT_BROKER_HOST);
        @JsonProperty
        private String login;
        @JsonProperty
        private String password;
        @JsonProperty
        private String domain = System.getenv(OPENSHIFT_NAMESPACE);
        @JsonProperty
        private String gearProfile = "small";
        @JsonProperty
        private Map<String, String> environmentalVariables = new HashMap<String, String>();
        @JsonProperty
        private List<String> fallbackRepositories = new ArrayList<String>();

        // --- fluent setters -------------------------------------------------
        public Builder serverUrl(final String serverUrl) {
            this.serverUrl = serverUrl;
            return this;
        }
        public Builder login(final String login) {
            this.login = login;
            return this;
        }
        public Builder password(final String password) {
            this.password = password;
            return this;
        }
        public Builder domain(final String domain) {
            this.domain = domain;
            return this;
        }
        public Builder gearProfile(final String gearProfile) {
            this.gearProfile = gearProfile;
            return this;
        }
        public Builder environmentalVariables(final Map<String, String> environmentalVariables) {
            this.environmentalVariables = environmentalVariables;
            return this;
        }
        public Builder fallbackRepositories(final List<String> fallbackRepositories) {
            this.fallbackRepositories = fallbackRepositories;
            return this;
        }

        // --- bean-style accessors (used by JSON (de)serialization) ----------
        public String getServerUrl() {
            return serverUrl;
        }
        public void setServerUrl(String serverUrl) {
            this.serverUrl = serverUrl;
        }
        public String getLogin() {
            return login;
        }
        public void setLogin(String login) {
            this.login = login;
        }
        public String getPassword() {
            return password;
        }
        public void setPassword(String password) {
            this.password = password;
        }
        public String getDomain() {
            return domain;
        }
        public void setDomain(String domain) {
            this.domain = domain;
        }
        // Added for consistency: every other builder field already exposed a
        // getter/setter pair; gearProfile was the only one missing them.
        public String getGearProfile() {
            return gearProfile;
        }
        public void setGearProfile(String gearProfile) {
            this.gearProfile = gearProfile;
        }
        public Map<String, String> getEnvironmentalVariables() {
            return environmentalVariables;
        }
        public void setEnvironmentalVariables(Map<String, String> environmentalVariables) {
            this.environmentalVariables = environmentalVariables;
        }
        public List<String> getFallbackRepositories() {
            return fallbackRepositories;
        }
        public void setFallbackRepositories(List<String> fallbackRepositories) {
            this.fallbackRepositories = fallbackRepositories;
        }

        /**
         * Builds an immutable {@link CreateOpenshiftContainerOptions} with the
         * provider type fixed to {@code "openshift"}.
         */
        public CreateOpenshiftContainerOptions build() {
            return new CreateOpenshiftContainerOptions(getBindAddress(), getResolver(), getGlobalResolver(), getManualIp(), getMinimumPort(),
                    getMaximumPort(), getProfiles(), getVersion(), getDataStoreProperties(), getZooKeeperServerPort(), getZooKeeperServerConnectionPort(), getZookeeperPassword(), isEnsembleStart(), isAgentEnabled(), isAutoImportEnabled(),
                    getImportPath(), getUsers(), getName(), getParent(), "openshift", isEnsembleServer(), getPreferredAddress(), getSystemProperties(),
                    getNumber(), getProxyUri(), getZookeeperUrl(), getJvmOpts(), isAdminAccess(), isClean(),
                    serverUrl, login, password, domain, gearProfile, environmentalVariables, fallbackRepositories);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    @JsonProperty
    private final String serverUrl;
    @JsonProperty
    private final String login;
    @JsonProperty
    private final String password;
    @JsonProperty
    private final String domain;
    @JsonProperty
    private final String gearProfile;
    @JsonProperty
    private final Map<String, String> environmentalVariables;
    @JsonProperty
    private final List<String> fallbackRepositories;

    public CreateOpenshiftContainerOptions(String bindAddress, String resolver, String globalResolver, String manualIp, int minimumPort, int maximumPort, Set<String> profiles, String version, Map<String, String> dataStoreProperties, int zooKeeperServerPort, int zooKeeperServerConnectionPort, String zookeeperPassword, boolean ensembleStart, boolean agentEnabled, boolean autoImportEnabled, String importPath, Map<String, String> users, String name, String parent, String providerType, boolean ensembleServer, String preferredAddress, Map<String, Properties> systemProperties, Integer number, URI proxyUri, String zookeeperUrl, String jvmOpts, boolean adminAccess, boolean clean, String serverUrl, String login, String password, String domain, String gearProfile, Map<String, String> environmentalVariables, List<String> fallbackRepositories) {
        // The literal `false, 0` pair fills the super-class parameters that
        // this options type does not expose (matches the original call).
        super(bindAddress, resolver, globalResolver, manualIp, minimumPort, maximumPort, profiles, version, dataStoreProperties, zooKeeperServerPort, zooKeeperServerConnectionPort, zookeeperPassword, ensembleStart, agentEnabled, false, 0, autoImportEnabled, importPath, users, name, parent, providerType, ensembleServer, preferredAddress, systemProperties, number, proxyUri, zookeeperUrl, jvmOpts, adminAccess, clean);
        this.serverUrl = serverUrl;
        this.login = login;
        this.password = password;
        this.domain = domain;
        this.gearProfile = gearProfile;
        this.environmentalVariables = environmentalVariables;
        this.fallbackRepositories = fallbackRepositories;
    }

    /**
     * Returns a copy of these options with the login replaced by {@code user}.
     *
     * NOTE(review): the {@code credential} parameter is ignored — the copy
     * keeps the existing {@code password}. This may be intentional (OpenShift
     * broker credentials differ from container credentials) but should be
     * confirmed; behavior is preserved here.
     */
    @Override
    public CreateOpenshiftContainerOptions updateCredentials(String user, String credential) {
        return new CreateOpenshiftContainerOptions(getBindAddress(), getResolver(), getGlobalResolver(), getManualIp(), getMinimumPort(),
                getMaximumPort(), getProfiles(), getVersion(), getDataStoreProperties(), getZooKeeperServerPort(), getZooKeeperServerConnectionPort(), getZookeeperPassword(), isEnsembleStart(), isAgentEnabled(), isAutoImportEnabled(),
                getImportPath(), getUsers(), getName(), getParent(), "openshift", isEnsembleServer(), getPreferredAddress(), getSystemProperties(),
                getNumber(), getProxyUri(), getZookeeperUrl(), getJvmOpts(), isAdminAccess(), isClean(),
                serverUrl, user, password, domain, gearProfile, environmentalVariables, fallbackRepositories);
    }

    public String getServerUrl() {
        return serverUrl;
    }
    public String getLogin() {
        return login;
    }
    public String getPassword() {
        return password;
    }
    public String getDomain() {
        return domain;
    }
    public String getGearProfile() {
        return gearProfile;
    }
    @Override
    public String getHostNameContext() {
        return "openshift";
    }
    @Override
    public String getPath() {
        return "~/";
    }
    // NOTE(review): returns the internal map directly (no defensive copy);
    // callers could mutate it. Left unchanged to preserve behavior.
    @Override
    public Map<String, String> getEnvironmentalVariables() {
        return environmentalVariables;
    }
    @Override
    public Boolean doUploadDistribution() {
        return false;
    }
    public List<String> getFallbackRepositories() {
        return fallbackRepositories;
    }
}
| |
/**
* Copyright 2014 Brandon Arp
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arpnetworking.tsdcore.sinks;
import com.arpnetworking.logback.annotations.LogValue;
import com.arpnetworking.steno.LogValueMapFactory;
import com.arpnetworking.steno.Logger;
import com.arpnetworking.steno.LoggerFactory;
import com.arpnetworking.tsdcore.model.AggregatedData;
import com.arpnetworking.tsdcore.model.Condition;
import com.google.common.collect.Lists;
import net.sf.oval.constraint.NotNull;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;
import java.io.IOException;
import java.net.URI;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicLong;
/**
* Publishes to an HTTP endpoint. This class is thread safe.
*
* @author Brandon Arp (barp at groupon dot com)
*/
public abstract class HttpPostSink extends BaseSink {
    /**
     * {@inheritDoc}
     */
    @Override
    public void recordAggregateData(final Collection<AggregatedData> data, final Collection<Condition> conditions) {
        LOGGER.debug()
                .setMessage("Writing aggregated data")
                .addData("sink", getName())
                .addData("dataSize", data.size())
                .addData("conditionsSize", conditions.size())
                .addData("uri", _uri)
                .log();
        if (!data.isEmpty() || !conditions.isEmpty()) {
            // TODO(vkoskela): Support parallel post requests [MAI-97]
            // Requests are posted sequentially on the caller's thread; a
            // failed post is logged and dropped (no retry).
            for (final HttpUriRequest request : createRequests(data, conditions)) {
                HttpEntity responseEntity = null;
                try {
                    // TODO(vkoskela): Add logging to client [MAI-89]
                    // TODO(vkoskela): Add instrumentation to client [MAI-90]
                    final HttpResponse result = CLIENT.execute(request);
                    responseEntity = result.getEntity();
                    final int responseStatusCode = result.getStatusLine().getStatusCode();
                    // NOTE(review): only 200 OK counts as accepted; any other
                    // 2xx (e.g. 202) is logged as rejected. Confirm this is
                    // intended for the target endpoints.
                    if (responseStatusCode == HttpStatus.SC_OK) {
                        LOGGER.debug()
                                .setMessage("Post accepted")
                                .addData("sink", getName())
                                .addData("uri", _uri)
                                .addData("status", responseStatusCode)
                                .log();
                    } else {
                        LOGGER.warn()
                                .setMessage("Post rejected")
                                .addData("sink", getName())
                                .addData("uri", _uri)
                                .addData("status", responseStatusCode)
                                .addData("requestSize", getContentLength(request))
                                .log();
                    }
                    // Counts attempted posts, successful or rejected (but not
                    // ones that threw IOException).
                    _postRequests.incrementAndGet();
                } catch (final IOException e) {
                    LOGGER.error()
                            .setMessage("Post error")
                            .addData("sink", getName())
                            .addData("uri", _uri)
                            .addData("requestSize", getContentLength(request))
                            .setThrowable(e)
                            .log();
                } finally {
                    // Close the response content stream so the pooled
                    // connection can be released back to CONNECTION_MANAGER.
                    if (responseEntity != null) {
                        try {
                            responseEntity.getContent().close();
                            // CHECKSTYLE.OFF: IllegalCatch - Catch all exceptions
                        } catch (final Exception e) {
                            // CHECKSTYLE.ON: IllegalCatch
                            LOGGER.warn()
                                    .setMessage("Error closing response content stream")
                                    .addData("sink", getName())
                                    .addData("uri", _uri)
                                    .addData("requestSize", getContentLength(request))
                                    .setThrowable(e)
                                    .log();
                        }
                    }
                }
            }
        }
    }
    /**
     * {@inheritDoc}
     */
    // Only logs shutdown; the shared static CLIENT/CONNECTION_MANAGER are
    // intentionally never shut down here since they are shared by all
    // HttpPostSink instances in the process.
    @Override
    public void close() {
        LOGGER.info()
                .setMessage("Closing sink")
                .addData("sink", getName())
                .addData("recordsWritten", _postRequests)
                .addData("uri", _uri)
                .log();
    }
    /**
     * Generate a Steno log compatible representation.
     *
     * @return Steno log compatible representation.
     */
    @LogValue
    @Override
    public Object toLogValue() {
        return LogValueMapFactory.of(
                "super", super.toLogValue(),
                "Uri", _uri,
                "PostRequests", _postRequests);
    }
    /**
     * Creates an HTTP request from a serialized data entry. Default is an <code>HttpPost</code> containing
     * serializedData as the body with content type of application/json
     * @param serializedData The serialized data.
     * @return <code>HttpRequest</code> to execute
     */
    protected HttpUriRequest createRequest(final String serializedData) {
        final StringEntity requestEntity = new StringEntity(serializedData, ContentType.APPLICATION_JSON);
        final HttpPost request = new HttpPost(_uri);
        request.setEntity(requestEntity);
        return request;
    }
    /**
     * Create HTTP requests for each serialized data entry. The list is
     * guaranteed to be non-empty.
     *
     * @param data The <code>List</code> of <code>AggregatedData</code> to be
     * serialized.
     * @param conditions The <code>List</code> of <code>Condition</code>
     * instances to be published
     * @return The <code>HttpRequest</code> instance to execute.
     */
    protected Collection<HttpUriRequest> createRequests(
            final Collection<AggregatedData> data,
            final Collection<Condition> conditions) {
        // One request per serialized chunk produced by the subclass.
        final Collection<HttpUriRequest> requests = Lists.newArrayList();
        for (final String serializedData : serialize(data, conditions)) {
            requests.add(createRequest(serializedData));
        }
        return requests;
    }
    /**
     * Accessor for the <code>URI</code>.
     *
     * @return The <code>URI</code>.
     */
    protected URI getUri() {
        return _uri;
    }
    /**
     * Serialize the <code>AggregatedData</code> and <code>Condition</code> instances
     * for posting.
     *
     * @param data The <code>List</code> of <code>AggregatedData</code> to be
     * serialized.
     * @param conditions The <code>List</code> of <code>Condition</code>
     * instances to be published
     * @return The serialized representation of <code>AggregatedData</code>.
     */
    protected abstract Collection<String> serialize(
            final Collection<AggregatedData> data,
            final Collection<Condition> conditions);
    // Returns the entity size in bytes for logging, or -1 when the request
    // has no entity (or the length is unknown).
    private static long getContentLength(final HttpUriRequest request) {
        if (request instanceof HttpEntityEnclosingRequestBase) {
            final HttpEntityEnclosingRequestBase entityRequest = (HttpEntityEnclosingRequestBase) request;
            final HttpEntity entity = entityRequest.getEntity();
            if (entity != null) {
                return entity.getContentLength();
            }
        }
        return -1;
    }
    /**
     * Protected constructor.
     *
     * @param builder Instance of <code>Builder</code>.
     */
    protected HttpPostSink(final Builder<?, ?> builder) {
        super(builder);
        _uri = builder._uri;
    }
    private final URI _uri;
    private final AtomicLong _postRequests = new AtomicLong(0);
    // NOTE(review): PoolingClientConnectionManager/DefaultHttpClient are the
    // deprecated HttpClient 4.2 APIs; shared statically by all sink instances.
    private static final ClientConnectionManager CONNECTION_MANAGER = new PoolingClientConnectionManager();
    private static final HttpClient CLIENT = new DefaultHttpClient(CONNECTION_MANAGER);
    private static final Logger LOGGER = LoggerFactory.getLogger(HttpPostSink.class);
    private static final int CONNECTION_TIMEOUT_IN_MILLISECONDS = 3000;
    static {
        // Sets only the connect timeout; no socket (read) timeout is
        // configured, so a stalled server could block a post indefinitely —
        // TODO confirm whether that is acceptable.
        final HttpParams params = CLIENT.getParams();
        params.setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, CONNECTION_TIMEOUT_IN_MILLISECONDS);
    }
    /**
     * Implementation of abstract builder pattern for <code>HttpPostSink</code>.
     *
     * @author Ville Koskela (vkoskela at groupon dot com)
     */
    public abstract static class Builder<B extends BaseSink.Builder<B, S>, S extends HttpPostSink> extends BaseSink.Builder<B, S> {
        /**
         * The <code>URI</code> to post the aggregated data to. Cannot be null.
         *
         * @param value The <code>URI</code> to post the aggregated data to.
         * @return This instance of <code>Builder</code>.
         */
        public B setUri(final URI value) {
            _uri = value;
            return self();
        }
        /**
         * Protected constructor for subclasses.
         *
         * @param targetClass The concrete type to be created by the builder of
         * <code>AggregatedDataSink</code> implementation.
         */
        protected Builder(final Class<S> targetClass) {
            super(targetClass);
        }
        @NotNull
        private URI _uri;
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/admin/v1/iam.proto
package com.google.iam.admin.v1;
/**
* <pre>
* Represents a service account key.
* A service account has two sets of key-pairs: user-managed, and
* system-managed.
* User-managed key-pairs can be created and deleted by users. Users are
* responsible for rotating these keys periodically to ensure security of
* their service accounts. Users retain the private key of these key-pairs,
* and Google retains ONLY the public key.
* System-managed key-pairs are managed automatically by Google, and rotated
* daily without user intervention. The private key never leaves Google's
* servers to maximize security.
* Public keys for all service accounts are also published at the OAuth2
* Service Account API.
* </pre>
*
* Protobuf type {@code google.iam.admin.v1.ServiceAccountKey}
*/
public final class ServiceAccountKey extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.iam.admin.v1.ServiceAccountKey)
ServiceAccountKeyOrBuilder {
  // Use ServiceAccountKey.newBuilder() to construct.
  private ServiceAccountKey(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes every field to its proto3
  // default (empty string, 0 enum value, empty bytes).
  private ServiceAccountKey() {
    name_ = "";
    privateKeyType_ = 0;
    keyAlgorithm_ = 0;
    privateKeyData_ = com.google.protobuf.ByteString.EMPTY;
    publicKeyData_ = com.google.protobuf.ByteString.EMPTY;
  }
  // This generated class does not retain unknown fields; always returns the
  // empty set.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  /**
   * Wire-format parsing constructor (invoked via PARSER): reads tagged fields
   * from {@code input} until tag 0 (end of message), skipping unknown fields.
   * Note the generated switch lists {@code default} before the field cases;
   * that ordering is legal in Java and does not affect dispatch.
   */
  private ServiceAccountKey(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks the end of the message stream.
            done = true;
            break;
          default: {
            // Unknown field: skip it (unknown fields are not retained here).
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1 (name), wire type 2: length-delimited UTF-8 string.
            java.lang.String s = input.readStringRequireUtf8();
            name_ = s;
            break;
          }
          case 16: {
            // Field 2 (private_key_type), wire type 0: enum kept as raw int.
            int rawValue = input.readEnum();
            privateKeyType_ = rawValue;
            break;
          }
          case 26: {
            // Field 3 (private_key_data), wire type 2: bytes.
            privateKeyData_ = input.readBytes();
            break;
          }
          case 34: {
            // Field 4 (valid_after_time): merge with any previously parsed value.
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (validAfterTime_ != null) {
              subBuilder = validAfterTime_.toBuilder();
            }
            validAfterTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(validAfterTime_);
              validAfterTime_ = subBuilder.buildPartial();
            }
            break;
          }
          case 42: {
            // Field 5 (valid_before_time): merge with any previously parsed value.
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (validBeforeTime_ != null) {
              subBuilder = validBeforeTime_.toBuilder();
            }
            validBeforeTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(validBeforeTime_);
              validBeforeTime_ = subBuilder.buildPartial();
            }
            break;
          }
          case 58: {
            // Field 7 (public_key_data), wire type 2: bytes.
            publicKeyData_ = input.readBytes();
            break;
          }
          case 64: {
            // Field 8 (key_algorithm), wire type 0: enum kept as raw int.
            int rawValue = input.readEnum();
            keyAlgorithm_ = rawValue;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Attach the partially parsed message for callers that want it.
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  // Returns the proto descriptor for google.iam.admin.v1.ServiceAccountKey.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.iam.admin.v1.IamProto.internal_static_google_iam_admin_v1_ServiceAccountKey_descriptor;
  }
  // Wires field accessors to the descriptor for reflection-based access.
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.iam.admin.v1.IamProto.internal_static_google_iam_admin_v1_ServiceAccountKey_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.iam.admin.v1.ServiceAccountKey.class, com.google.iam.admin.v1.ServiceAccountKey.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Lazily decoded: holds either a String or the raw ByteString from the wire
  // (see getName() for the decode-and-cache logic).
  private volatile java.lang.Object name_;
  /**
   * <pre>
   * The resource name of the service account key in the following format
   * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode UTF-8 bytes once and cache the result.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The resource name of the service account key in the following format
   * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
   * </pre>
   *
   * <code>optional string name = 1;</code>
   */
  public com.google.protobuf.ByteString
      getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Mirror of getName(): encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PRIVATE_KEY_TYPE_FIELD_NUMBER = 2;
  // Stored as the raw wire int so unrecognized enum values survive round-trips.
  private int privateKeyType_;
  /**
   * <pre>
   * The output format for the private key.
   * Only provided in `CreateServiceAccountKey` responses, not
   * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
   * Google never exposes system-managed private keys, and never retains
   * user-managed private keys.
   * </pre>
   *
   * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
   */
  public int getPrivateKeyTypeValue() {
    return privateKeyType_;
  }
  /**
   * <pre>
   * The output format for the private key.
   * Only provided in `CreateServiceAccountKey` responses, not
   * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
   * Google never exposes system-managed private keys, and never retains
   * user-managed private keys.
   * </pre>
   *
   * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
   */
  public com.google.iam.admin.v1.ServiceAccountPrivateKeyType getPrivateKeyType() {
    // valueOf returns null for wire values not in this enum version; map
    // those to UNRECOGNIZED.
    com.google.iam.admin.v1.ServiceAccountPrivateKeyType result = com.google.iam.admin.v1.ServiceAccountPrivateKeyType.valueOf(privateKeyType_);
    return result == null ? com.google.iam.admin.v1.ServiceAccountPrivateKeyType.UNRECOGNIZED : result;
  }
  public static final int KEY_ALGORITHM_FIELD_NUMBER = 8;
  // Stored as the raw wire int so unrecognized enum values survive round-trips.
  private int keyAlgorithm_;
  /**
   * <pre>
   * Specifies the algorithm (and possibly key size) for the key.
   * </pre>
   *
   * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
   */
  public int getKeyAlgorithmValue() {
    return keyAlgorithm_;
  }
  /**
   * <pre>
   * Specifies the algorithm (and possibly key size) for the key.
   * </pre>
   *
   * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
   */
  public com.google.iam.admin.v1.ServiceAccountKeyAlgorithm getKeyAlgorithm() {
    // valueOf returns null for wire values not in this enum version; map
    // those to UNRECOGNIZED.
    com.google.iam.admin.v1.ServiceAccountKeyAlgorithm result = com.google.iam.admin.v1.ServiceAccountKeyAlgorithm.valueOf(keyAlgorithm_);
    return result == null ? com.google.iam.admin.v1.ServiceAccountKeyAlgorithm.UNRECOGNIZED : result;
  }
  public static final int PRIVATE_KEY_DATA_FIELD_NUMBER = 3;
  private com.google.protobuf.ByteString privateKeyData_;
  /**
   * <pre>
   * The private key data. Only provided in `CreateServiceAccountKey`
   * responses.
   * </pre>
   *
   * <code>optional bytes private_key_data = 3;</code>
   */
  public com.google.protobuf.ByteString getPrivateKeyData() {
    return privateKeyData_;
  }
  public static final int PUBLIC_KEY_DATA_FIELD_NUMBER = 7;
  private com.google.protobuf.ByteString publicKeyData_;
  /**
   * <pre>
   * The public key data. Only provided in `GetServiceAccountKey` responses.
   * </pre>
   *
   * <code>optional bytes public_key_data = 7;</code>
   */
  public com.google.protobuf.ByteString getPublicKeyData() {
    return publicKeyData_;
  }
  public static final int VALID_AFTER_TIME_FIELD_NUMBER = 4;
  // null means the field is unset; accessors substitute the default instance.
  private com.google.protobuf.Timestamp validAfterTime_;
  /**
   * <pre>
   * The key can be used after this timestamp.
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
   */
  public boolean hasValidAfterTime() {
    return validAfterTime_ != null;
  }
  /**
   * <pre>
   * The key can be used after this timestamp.
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
   */
  public com.google.protobuf.Timestamp getValidAfterTime() {
    return validAfterTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : validAfterTime_;
  }
  /**
   * <pre>
   * The key can be used after this timestamp.
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getValidAfterTimeOrBuilder() {
    return getValidAfterTime();
  }
  public static final int VALID_BEFORE_TIME_FIELD_NUMBER = 5;
  // null means the field is unset; accessors substitute the default instance.
  private com.google.protobuf.Timestamp validBeforeTime_;
  /**
   * <pre>
   * The key can be used before this timestamp.
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
   */
  public boolean hasValidBeforeTime() {
    return validBeforeTime_ != null;
  }
  /**
   * <pre>
   * The key can be used before this timestamp.
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
   */
  public com.google.protobuf.Timestamp getValidBeforeTime() {
    return validBeforeTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : validBeforeTime_;
  }
  /**
   * <pre>
   * The key can be used before this timestamp.
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getValidBeforeTimeOrBuilder() {
    return getValidBeforeTime();
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true. This message
  // has no required fields, so it is always initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes to the wire format; fields at their proto3 default value
  // (empty string/bytes, 0 enum, null message) are omitted entirely.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (privateKeyType_ != com.google.iam.admin.v1.ServiceAccountPrivateKeyType.TYPE_UNSPECIFIED.getNumber()) {
      output.writeEnum(2, privateKeyType_);
    }
    if (!privateKeyData_.isEmpty()) {
      output.writeBytes(3, privateKeyData_);
    }
    if (validAfterTime_ != null) {
      output.writeMessage(4, getValidAfterTime());
    }
    if (validBeforeTime_ != null) {
      output.writeMessage(5, getValidBeforeTime());
    }
    if (!publicKeyData_.isEmpty()) {
      output.writeBytes(7, publicKeyData_);
    }
    if (keyAlgorithm_ != com.google.iam.admin.v1.ServiceAccountKeyAlgorithm.KEY_ALG_UNSPECIFIED.getNumber()) {
      output.writeEnum(8, keyAlgorithm_);
    }
  }
  // Computes (and memoizes) the serialized size; mirrors the field-skipping
  // logic in writeTo exactly.
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (privateKeyType_ != com.google.iam.admin.v1.ServiceAccountPrivateKeyType.TYPE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(2, privateKeyType_);
    }
    if (!privateKeyData_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(3, privateKeyData_);
    }
    if (validAfterTime_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(4, getValidAfterTime());
    }
    if (validBeforeTime_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(5, getValidBeforeTime());
    }
    if (!publicKeyData_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(7, publicKeyData_);
    }
    if (keyAlgorithm_ != com.google.iam.admin.v1.ServiceAccountKeyAlgorithm.KEY_ALG_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(8, keyAlgorithm_);
    }
    memoizedSize = size;
    return size;
  }
  private static final long serialVersionUID = 0L;
  // Field-by-field structural equality; message-typed fields compare presence
  // first, then value. Enum fields compare raw wire ints.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.iam.admin.v1.ServiceAccountKey)) {
      return super.equals(obj);
    }
    com.google.iam.admin.v1.ServiceAccountKey other = (com.google.iam.admin.v1.ServiceAccountKey) obj;
    boolean result = true;
    result = result && getName()
        .equals(other.getName());
    result = result && privateKeyType_ == other.privateKeyType_;
    result = result && keyAlgorithm_ == other.keyAlgorithm_;
    result = result && getPrivateKeyData()
        .equals(other.getPrivateKeyData());
    result = result && getPublicKeyData()
        .equals(other.getPublicKeyData());
    result = result && (hasValidAfterTime() == other.hasValidAfterTime());
    if (hasValidAfterTime()) {
      result = result && getValidAfterTime()
          .equals(other.getValidAfterTime());
    }
    result = result && (hasValidBeforeTime() == other.hasValidBeforeTime());
    if (hasValidBeforeTime()) {
      result = result && getValidBeforeTime()
          .equals(other.getValidBeforeTime());
    }
    return result;
  }
  // Memoized hash over the same fields equals() compares, keyed by field
  // number, so equal messages hash equally.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + PRIVATE_KEY_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + privateKeyType_;
    hash = (37 * hash) + KEY_ALGORITHM_FIELD_NUMBER;
    hash = (53 * hash) + keyAlgorithm_;
    hash = (37 * hash) + PRIVATE_KEY_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getPrivateKeyData().hashCode();
    hash = (37 * hash) + PUBLIC_KEY_DATA_FIELD_NUMBER;
    hash = (53 * hash) + getPublicKeyData().hashCode();
    if (hasValidAfterTime()) {
      hash = (37 * hash) + VALID_AFTER_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getValidAfterTime().hashCode();
    }
    if (hasValidBeforeTime()) {
      hash = (37 * hash) + VALID_BEFORE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getValidBeforeTime().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
// Generated static parser entry points. Each overload decodes a serialized
// ServiceAccountKey from a different input source; the ExtensionRegistryLite
// variants allow registered extensions to be recognized while parsing.
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream variants surface read failures as IOException rather than
// InvalidProtocolBufferException.
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix followed by that many bytes
// (the wire layout produced by writeDelimitedTo).
public static com.google.iam.admin.v1.ServiceAccountKey parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.iam.admin.v1.ServiceAccountKey parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods generated for every message type.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the fields of the given prototype.
public static Builder newBuilder(com.google.iam.admin.v1.ServiceAccountKey prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
// The default instance yields a fresh empty builder; any other instance is
// copied field-by-field via mergeFrom.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * Represents a service account key.
 * A service account has two sets of key-pairs: user-managed, and
 * system-managed.
 * User-managed key-pairs can be created and deleted by users. Users are
 * responsible for rotating these keys periodically to ensure security of
 * their service accounts. Users retain the private key of these key-pairs,
 * and Google retains ONLY the public key.
 * System-managed key-pairs are managed automatically by Google, and rotated
 * daily without user intervention. The private key never leaves Google's
 * servers to maximize security.
 * Public keys for all service accounts are also published at the OAuth2
 * Service Account API.
 * </pre>
 *
 * Protobuf type {@code google.iam.admin.v1.ServiceAccountKey}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.iam.admin.v1.ServiceAccountKey)
com.google.iam.admin.v1.ServiceAccountKeyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.iam.admin.v1.IamProto.internal_static_google_iam_admin_v1_ServiceAccountKey_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.iam.admin.v1.IamProto.internal_static_google_iam_admin_v1_ServiceAccountKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.iam.admin.v1.ServiceAccountKey.class, com.google.iam.admin.v1.ServiceAccountKey.Builder.class);
}
// Construct using com.google.iam.admin.v1.ServiceAccountKey.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No repeated/nested field builders require eager initialization for this
// message, so the body is intentionally empty.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
// Resets every field to its proto3 default and drops any nested
// timestamp sub-builders.
public Builder clear() {
super.clear();
name_ = "";
privateKeyType_ = 0;
keyAlgorithm_ = 0;
privateKeyData_ = com.google.protobuf.ByteString.EMPTY;
publicKeyData_ = com.google.protobuf.ByteString.EMPTY;
if (validAfterTimeBuilder_ == null) {
validAfterTime_ = null;
} else {
validAfterTime_ = null;
validAfterTimeBuilder_ = null;
}
if (validBeforeTimeBuilder_ == null) {
validBeforeTime_ = null;
} else {
validBeforeTime_ = null;
validBeforeTimeBuilder_ = null;
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.iam.admin.v1.IamProto.internal_static_google_iam_admin_v1_ServiceAccountKey_descriptor;
}
public com.google.iam.admin.v1.ServiceAccountKey getDefaultInstanceForType() {
return com.google.iam.admin.v1.ServiceAccountKey.getDefaultInstance();
}
public com.google.iam.admin.v1.ServiceAccountKey build() {
com.google.iam.admin.v1.ServiceAccountKey result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies the current builder state into a new message without the
// isInitialized() check performed by build().
public com.google.iam.admin.v1.ServiceAccountKey buildPartial() {
com.google.iam.admin.v1.ServiceAccountKey result = new com.google.iam.admin.v1.ServiceAccountKey(this);
result.name_ = name_;
result.privateKeyType_ = privateKeyType_;
result.keyAlgorithm_ = keyAlgorithm_;
result.privateKeyData_ = privateKeyData_;
result.publicKeyData_ = publicKeyData_;
if (validAfterTimeBuilder_ == null) {
result.validAfterTime_ = validAfterTime_;
} else {
result.validAfterTime_ = validAfterTimeBuilder_.build();
}
if (validBeforeTimeBuilder_ == null) {
result.validBeforeTime_ = validBeforeTime_;
} else {
result.validBeforeTime_ = validBeforeTimeBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed overload when possible, otherwise falls back to
// the reflective Message merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.iam.admin.v1.ServiceAccountKey) {
return mergeFrom((com.google.iam.admin.v1.ServiceAccountKey)other);
} else {
super.mergeFrom(other);
return this;
}
}
// proto3 merge semantics: scalar/enum/bytes fields are copied only when
// they hold a non-default value in `other`; the Timestamp sub-messages are
// merged recursively.
public Builder mergeFrom(com.google.iam.admin.v1.ServiceAccountKey other) {
if (other == com.google.iam.admin.v1.ServiceAccountKey.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.privateKeyType_ != 0) {
setPrivateKeyTypeValue(other.getPrivateKeyTypeValue());
}
if (other.keyAlgorithm_ != 0) {
setKeyAlgorithmValue(other.getKeyAlgorithmValue());
}
if (other.getPrivateKeyData() != com.google.protobuf.ByteString.EMPTY) {
setPrivateKeyData(other.getPrivateKeyData());
}
if (other.getPublicKeyData() != com.google.protobuf.ByteString.EMPTY) {
setPublicKeyData(other.getPublicKeyData());
}
if (other.hasValidAfterTime()) {
mergeValidAfterTime(other.getValidAfterTime());
}
if (other.hasValidBeforeTime()) {
mergeValidBeforeTime(other.getValidBeforeTime());
}
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
// On a parse failure the partially-read message (if any) is still merged in
// via the finally block before the exception is rethrown, so callers can
// inspect what was decoded up to the failure point.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.iam.admin.v1.ServiceAccountKey parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.iam.admin.v1.ServiceAccountKey) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Lazily converted between String and ByteString; see getName/getNameBytes.
private java.lang.Object name_ = "";
/**
 * <pre>
 * The resource name of the service account key in the following format
 * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
 * </pre>
 *
 * <code>optional string name = 1;</code>
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls avoid re-decoding.
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <pre>
 * The resource name of the service account key in the following format
 * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
 * </pre>
 *
 * <code>optional string name = 1;</code>
 */
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <pre>
 * The resource name of the service account key in the following format
 * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
 * </pre>
 *
 * <code>optional string name = 1;</code>
 */
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The resource name of the service account key in the following format
 * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
 * </pre>
 *
 * <code>optional string name = 1;</code>
 */
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 * <pre>
 * The resource name of the service account key in the following format
 * `projects/{project}/serviceAccounts/{account}/keys/{key}`.
 * </pre>
 *
 * <code>optional string name = 1;</code>
 */
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private int privateKeyType_ = 0;
/**
 * <pre>
 * The output format for the private key.
 * Only provided in `CreateServiceAccountKey` responses, not
 * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
 * Google never exposes system-managed private keys, and never retains
 * user-managed private keys.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
 */
public int getPrivateKeyTypeValue() {
return privateKeyType_;
}
/**
 * <pre>
 * The output format for the private key.
 * Only provided in `CreateServiceAccountKey` responses, not
 * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
 * Google never exposes system-managed private keys, and never retains
 * user-managed private keys.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
 */
public Builder setPrivateKeyTypeValue(int value) {
privateKeyType_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The output format for the private key.
 * Only provided in `CreateServiceAccountKey` responses, not
 * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
 * Google never exposes system-managed private keys, and never retains
 * user-managed private keys.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
 */
public com.google.iam.admin.v1.ServiceAccountPrivateKeyType getPrivateKeyType() {
com.google.iam.admin.v1.ServiceAccountPrivateKeyType result = com.google.iam.admin.v1.ServiceAccountPrivateKeyType.valueOf(privateKeyType_);
return result == null ? com.google.iam.admin.v1.ServiceAccountPrivateKeyType.UNRECOGNIZED : result;
}
/**
 * <pre>
 * The output format for the private key.
 * Only provided in `CreateServiceAccountKey` responses, not
 * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
 * Google never exposes system-managed private keys, and never retains
 * user-managed private keys.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
 */
public Builder setPrivateKeyType(com.google.iam.admin.v1.ServiceAccountPrivateKeyType value) {
if (value == null) {
throw new NullPointerException();
}
privateKeyType_ = value.getNumber();
onChanged();
return this;
}
/**
 * <pre>
 * The output format for the private key.
 * Only provided in `CreateServiceAccountKey` responses, not
 * in `GetServiceAccountKey` or `ListServiceAccountKey` responses.
 * Google never exposes system-managed private keys, and never retains
 * user-managed private keys.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountPrivateKeyType private_key_type = 2;</code>
 */
public Builder clearPrivateKeyType() {
privateKeyType_ = 0;
onChanged();
return this;
}
private int keyAlgorithm_ = 0;
/**
 * <pre>
 * Specifies the algorithm (and possibly key size) for the key.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
 */
public int getKeyAlgorithmValue() {
return keyAlgorithm_;
}
/**
 * <pre>
 * Specifies the algorithm (and possibly key size) for the key.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
 */
public Builder setKeyAlgorithmValue(int value) {
keyAlgorithm_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * Specifies the algorithm (and possibly key size) for the key.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
 */
public com.google.iam.admin.v1.ServiceAccountKeyAlgorithm getKeyAlgorithm() {
com.google.iam.admin.v1.ServiceAccountKeyAlgorithm result = com.google.iam.admin.v1.ServiceAccountKeyAlgorithm.valueOf(keyAlgorithm_);
return result == null ? com.google.iam.admin.v1.ServiceAccountKeyAlgorithm.UNRECOGNIZED : result;
}
/**
 * <pre>
 * Specifies the algorithm (and possibly key size) for the key.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
 */
public Builder setKeyAlgorithm(com.google.iam.admin.v1.ServiceAccountKeyAlgorithm value) {
if (value == null) {
throw new NullPointerException();
}
keyAlgorithm_ = value.getNumber();
onChanged();
return this;
}
/**
 * <pre>
 * Specifies the algorithm (and possibly key size) for the key.
 * </pre>
 *
 * <code>optional .google.iam.admin.v1.ServiceAccountKeyAlgorithm key_algorithm = 8;</code>
 */
public Builder clearKeyAlgorithm() {
keyAlgorithm_ = 0;
onChanged();
return this;
}
private com.google.protobuf.ByteString privateKeyData_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <pre>
 * The private key data. Only provided in `CreateServiceAccountKey`
 * responses.
 * </pre>
 *
 * <code>optional bytes private_key_data = 3;</code>
 */
public com.google.protobuf.ByteString getPrivateKeyData() {
return privateKeyData_;
}
/**
 * <pre>
 * The private key data. Only provided in `CreateServiceAccountKey`
 * responses.
 * </pre>
 *
 * <code>optional bytes private_key_data = 3;</code>
 */
public Builder setPrivateKeyData(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
privateKeyData_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The private key data. Only provided in `CreateServiceAccountKey`
 * responses.
 * </pre>
 *
 * <code>optional bytes private_key_data = 3;</code>
 */
public Builder clearPrivateKeyData() {
privateKeyData_ = getDefaultInstance().getPrivateKeyData();
onChanged();
return this;
}
private com.google.protobuf.ByteString publicKeyData_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <pre>
 * The public key data. Only provided in `GetServiceAccountKey` responses.
 * </pre>
 *
 * <code>optional bytes public_key_data = 7;</code>
 */
public com.google.protobuf.ByteString getPublicKeyData() {
return publicKeyData_;
}
/**
 * <pre>
 * The public key data. Only provided in `GetServiceAccountKey` responses.
 * </pre>
 *
 * <code>optional bytes public_key_data = 7;</code>
 */
public Builder setPublicKeyData(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
publicKeyData_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * The public key data. Only provided in `GetServiceAccountKey` responses.
 * </pre>
 *
 * <code>optional bytes public_key_data = 7;</code>
 */
public Builder clearPublicKeyData() {
publicKeyData_ = getDefaultInstance().getPublicKeyData();
onChanged();
return this;
}
// While validAfterTimeBuilder_ is non-null it owns the field state and
// validAfterTime_ is kept null; the accessors below maintain that invariant.
private com.google.protobuf.Timestamp validAfterTime_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> validAfterTimeBuilder_;
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public boolean hasValidAfterTime() {
return validAfterTimeBuilder_ != null || validAfterTime_ != null;
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public com.google.protobuf.Timestamp getValidAfterTime() {
if (validAfterTimeBuilder_ == null) {
return validAfterTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : validAfterTime_;
} else {
return validAfterTimeBuilder_.getMessage();
}
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public Builder setValidAfterTime(com.google.protobuf.Timestamp value) {
if (validAfterTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
validAfterTime_ = value;
onChanged();
} else {
validAfterTimeBuilder_.setMessage(value);
}
return this;
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public Builder setValidAfterTime(
com.google.protobuf.Timestamp.Builder builderForValue) {
if (validAfterTimeBuilder_ == null) {
validAfterTime_ = builderForValue.build();
onChanged();
} else {
validAfterTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public Builder mergeValidAfterTime(com.google.protobuf.Timestamp value) {
if (validAfterTimeBuilder_ == null) {
if (validAfterTime_ != null) {
validAfterTime_ =
com.google.protobuf.Timestamp.newBuilder(validAfterTime_).mergeFrom(value).buildPartial();
} else {
validAfterTime_ = value;
}
onChanged();
} else {
validAfterTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public Builder clearValidAfterTime() {
if (validAfterTimeBuilder_ == null) {
validAfterTime_ = null;
onChanged();
} else {
validAfterTime_ = null;
validAfterTimeBuilder_ = null;
}
return this;
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public com.google.protobuf.Timestamp.Builder getValidAfterTimeBuilder() {
onChanged();
return getValidAfterTimeFieldBuilder().getBuilder();
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
public com.google.protobuf.TimestampOrBuilder getValidAfterTimeOrBuilder() {
if (validAfterTimeBuilder_ != null) {
return validAfterTimeBuilder_.getMessageOrBuilder();
} else {
return validAfterTime_ == null ?
com.google.protobuf.Timestamp.getDefaultInstance() : validAfterTime_;
}
}
/**
 * <pre>
 * The key can be used after this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_after_time = 4;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>
getValidAfterTimeFieldBuilder() {
if (validAfterTimeBuilder_ == null) {
validAfterTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
getValidAfterTime(),
getParentForChildren(),
isClean());
validAfterTime_ = null;
}
return validAfterTimeBuilder_;
}
// Same ownership invariant as validAfterTime_/validAfterTimeBuilder_.
private com.google.protobuf.Timestamp validBeforeTime_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> validBeforeTimeBuilder_;
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public boolean hasValidBeforeTime() {
return validBeforeTimeBuilder_ != null || validBeforeTime_ != null;
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public com.google.protobuf.Timestamp getValidBeforeTime() {
if (validBeforeTimeBuilder_ == null) {
return validBeforeTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : validBeforeTime_;
} else {
return validBeforeTimeBuilder_.getMessage();
}
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public Builder setValidBeforeTime(com.google.protobuf.Timestamp value) {
if (validBeforeTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
validBeforeTime_ = value;
onChanged();
} else {
validBeforeTimeBuilder_.setMessage(value);
}
return this;
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public Builder setValidBeforeTime(
com.google.protobuf.Timestamp.Builder builderForValue) {
if (validBeforeTimeBuilder_ == null) {
validBeforeTime_ = builderForValue.build();
onChanged();
} else {
validBeforeTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public Builder mergeValidBeforeTime(com.google.protobuf.Timestamp value) {
if (validBeforeTimeBuilder_ == null) {
if (validBeforeTime_ != null) {
validBeforeTime_ =
com.google.protobuf.Timestamp.newBuilder(validBeforeTime_).mergeFrom(value).buildPartial();
} else {
validBeforeTime_ = value;
}
onChanged();
} else {
validBeforeTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public Builder clearValidBeforeTime() {
if (validBeforeTimeBuilder_ == null) {
validBeforeTime_ = null;
onChanged();
} else {
validBeforeTime_ = null;
validBeforeTimeBuilder_ = null;
}
return this;
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public com.google.protobuf.Timestamp.Builder getValidBeforeTimeBuilder() {
onChanged();
return getValidBeforeTimeFieldBuilder().getBuilder();
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
public com.google.protobuf.TimestampOrBuilder getValidBeforeTimeOrBuilder() {
if (validBeforeTimeBuilder_ != null) {
return validBeforeTimeBuilder_.getMessageOrBuilder();
} else {
return validBeforeTime_ == null ?
com.google.protobuf.Timestamp.getDefaultInstance() : validBeforeTime_;
}
}
/**
 * <pre>
 * The key can be used before this timestamp.
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp valid_before_time = 5;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>
getValidBeforeTimeFieldBuilder() {
if (validBeforeTimeBuilder_ == null) {
validBeforeTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
getValidBeforeTime(),
getParentForChildren(),
isClean());
validBeforeTime_ = null;
}
return validBeforeTimeBuilder_;
}
// This generator version discards unknown fields for proto3 builders:
// both operations are intentional no-ops.
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.iam.admin.v1.ServiceAccountKey)
}
// @@protoc_insertion_point(class_scope:google.iam.admin.v1.ServiceAccountKey)
// Shared singleton representing the empty/default message.
private static final com.google.iam.admin.v1.ServiceAccountKey DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.iam.admin.v1.ServiceAccountKey();
}
public static com.google.iam.admin.v1.ServiceAccountKey getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser shared by all the static parseFrom entry points.
private static final com.google.protobuf.Parser<ServiceAccountKey>
PARSER = new com.google.protobuf.AbstractParser<ServiceAccountKey>() {
public ServiceAccountKey parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ServiceAccountKey(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ServiceAccountKey> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ServiceAccountKey> getParserForType() {
return PARSER;
}
public com.google.iam.admin.v1.ServiceAccountKey getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.idm.engine.test;
import java.util.ArrayList;
import java.util.List;
import org.flowable.idm.api.IdmIdentityService;
import org.flowable.idm.engine.IdmEngine;
import org.flowable.idm.engine.IdmEngineConfiguration;
import org.junit.internal.AssumptionViolatedException;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runners.model.MultipleFailureException;
import org.junit.runners.model.Statement;
/**
* Convenience for IdmEngine and services initialization in the form of a JUnit rule.
*
* <p>
* Usage:
* </p>
*
* <pre>
* public class YourTest {
*
* @Rule
* public FlowableIdmRule flowableIdmRule = new FlowableIdmRule();
*
* ...
* }
* </pre>
*
* <p>
* The IdmEngine and the services will be made available to the test class through the getters of the FlowableIdmRule. The idmEngine will be initialized by default with the flowable.idm.cfg.xml resource
* on the classpath. To specify a different configuration file, pass the resource location in {@link #FlowableIdmRule(String) the appropriate constructor}. Idm engines will be cached statically.
* Right before the first time the setUp is called for a given configuration resource, the idm engine will be constructed.
* </p>
*
* <p>
* You can declare a deployment with a deployment annotation. This base class will make sure that this deployment gets deployed before the setUp and
* cascade deleted after the tearDown.
* </p>
*
* @author Tijs Rademakers
*/
public class FlowableIdmRule implements TestRule {
// Classpath resource used to build the engine when one was not injected.
protected String configurationResource = "flowable.idm.cfg.xml";
// NOTE(review): not referenced anywhere in this class; presumably kept for
// deployment-aware subclasses — TODO confirm before removing.
protected String deploymentId;
protected IdmEngineConfiguration idmEngineConfiguration;
protected IdmEngine idmEngine;
protected IdmIdentityService identityService;
public FlowableIdmRule() {
}
// Use a different engine configuration resource than the default.
public FlowableIdmRule(String configurationResource) {
this.configurationResource = configurationResource;
}
// Reuse an already-built engine instead of constructing one from a resource.
public FlowableIdmRule(IdmEngine idmEngine) {
setIdmEngine(idmEngine);
}
/**
 * Implementation based on {@link TestWatcher}.
 * Wraps the test statement so that starting/succeeded/failed/skipped hooks
 * run around it; any throwable from the test or a hook is collected and
 * rethrown together via {@link MultipleFailureException#assertEmpty(List)}.
 */
@Override
public Statement apply(final Statement base, final Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
List<Throwable> errors = new ArrayList<>();
startingQuietly(description, errors);
try {
base.evaluate();
succeededQuietly(description, errors);
} catch (AssumptionViolatedException e) {
// Mirrors TestWatcher: the assumption violation itself is recorded.
errors.add(e);
skippedQuietly(e, description, errors);
} catch (Throwable t) {
errors.add(t);
failedQuietly(t, description, errors);
}
MultipleFailureException.assertEmpty(errors);
}
};
}
// The *Quietly helpers invoke a hook and record (rather than propagate)
// anything it throws, so a broken hook cannot mask the test result.
private void succeededQuietly(Description description, List<Throwable> errors) {
try {
succeeded(description);
} catch (Throwable t) {
errors.add(t);
}
}
private void failedQuietly(Throwable t, Description description, List<Throwable> errors) {
try {
failed(t, description);
} catch (Throwable t1) {
errors.add(t1);
}
}
private void skippedQuietly(AssumptionViolatedException e, Description description, List<Throwable> errors) {
try {
skipped(e, description);
} catch (Throwable t) {
errors.add(t);
}
}
private void startingQuietly(Description description, List<Throwable> errors) {
try {
starting(description);
} catch (Throwable t) {
errors.add(t);
}
}
/**
 * Invoked when a test succeeds
 */
protected void succeeded(Description description) {
}
/**
 * Invoked when a test fails
 */
protected void failed(Throwable e, Description description) {
}
/**
 * Invoked when a test is skipped due to a failed assumption.
 */
protected void skipped(AssumptionViolatedException e, Description description) {
}
// Lazily builds the engine and its services before each test, then gives
// subclasses a chance to tweak the engine via configureIdmEngine().
protected void starting(Description description) {
if (idmEngine == null) {
initializeIdmEngine();
}
if (idmEngineConfiguration == null) {
initializeServices();
}
configureIdmEngine();
}
protected void initializeIdmEngine() {
// IdmTestHelper caches engines per configuration resource.
idmEngine = IdmTestHelper.getIdmEngine(configurationResource);
}
protected void initializeServices() {
idmEngineConfiguration = idmEngine.getIdmEngineConfiguration();
identityService = idmEngine.getIdmIdentityService();
}
// Hook for subclasses; intentionally empty here.
protected void configureIdmEngine() {
/* meant to be overridden */
}
public String getConfigurationResource() {
return configurationResource;
}
public void setConfigurationResource(String configurationResource) {
this.configurationResource = configurationResource;
}
public IdmEngine getIdmEngine() {
return idmEngine;
}
// Also re-resolves the configuration and identity service from the engine.
public void setIdmEngine(IdmEngine idmEngine) {
this.idmEngine = idmEngine;
initializeServices();
}
public IdmIdentityService getIdentityService() {
return identityService;
}
public void setIdentityService(IdmIdentityService identityService) {
this.identityService = identityService;
}
public void setIdmEngineConfiguration(IdmEngineConfiguration idmEngineConfiguration) {
this.idmEngineConfiguration = idmEngineConfiguration;
}
}
| |
package utils.tablegen;
/*
* Extremely Compiler Collection
* Copyright (c) 2015-2020, Jianping Zeng.
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import backend.codegen.MVT;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.set.hash.TIntHashSet;
import tools.Util;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.function.BiPredicate;
import java.util.function.Predicate;
import static tools.Util.anyOf;
import static tools.Util.noneOf;
import static utils.tablegen.CodeGenHwModes.DefaultMode;
/**
* @author Jianping Zeng.
* @version 0.4
*/
/**
 * Performs type inference over {@link TypeSetByHwMode} sets while matching
 * TableGen DAG patterns. Each {@code enforce*} method constrains one or more
 * type sets and returns whether anything changed; contradictions are reported
 * through {@link TreePattern#error}. This is a port of LLVM's TypeInfer
 * (utils/TableGen/CodeGenDAGPatterns).
 *
 * @author Jianping Zeng.
 * @version 0.4
 */
public class TypeInfer {
  /** Pattern currently being inferred; used for error reporting. */
  private TreePattern tp;
  public boolean forceMode;
  /**
   * Set during code generation phase.
   */
  public boolean codeGen;
  /** True once {@link #legalCache} has been populated. */
  public boolean isLegalTypeCached;
  /** Lazily-built, default-mode-only set of every legal value type. */
  private TypeSetByHwMode legalCache;

  public TypeInfer(TreePattern tp) {
    this.tp = tp;
    forceMode = false;
    codeGen = false;
    isLegalTypeCached = false;
    legalCache = new TypeSetByHwMode();
  }

  /** Returns true if {@code vts} denotes a single concrete type per mode. */
  public boolean isConcrete(TypeSetByHwMode vts,
                            boolean allowEmpty) {
    return vts.isValueTypeByHwMode(allowEmpty);
  }

  /**
   * Returns the single concrete type held by {@code vts}; asserts that the
   * set is in fact concrete.
   */
  public ValueTypeByHwMode getConcrete(TypeSetByHwMode vts,
                                       boolean allowEmpty) {
    Util.assertion(vts.isValueTypeByHwMode(allowEmpty));
    return vts.getValueTypeByHwMode();
  }

  /**
   * Intersects the type information of {@code in} into {@code out}.
   * An empty intersection of two non-empty sets is a type contradiction
   * and is reported via {@link TreePattern#error}.
   *
   * @return true if {@code out} was changed.
   */
  public boolean mergeInTypeInfo(TypeSetByHwMode out, TypeSetByHwMode in) {
    if (in.isEmpty() || out.equals(in) || tp.hasError())
      return false;
    if (out.isEmpty())
      return out.insert(in);

    boolean changed = out.constrain(in);
    if (changed && out.isEmpty())
      tp.error("Type inference contradiction");
    return changed;
  }

  /** Convenience overload taking a simple value-type id. */
  public boolean mergeInTypeInfo(TypeSetByHwMode out, int simpleVT) {
    return mergeInTypeInfo(out, new TypeSetByHwMode(simpleVT));
  }

  /** Convenience overload taking a single per-mode value type. */
  public boolean mergeInTypeInfo(TypeSetByHwMode out, ValueTypeByHwMode in) {
    return mergeInTypeInfo(out, new TypeSetByHwMode(in));
  }

  /**
   * To reduce the set {@code out} has at most one VT for each mode.
   *
   * @param out
   * @return Return true if the {@code out} has been changed.
   */
  public boolean forceArbitrary(TypeSetByHwMode out) {
    if (tp.hasError()) return false;
    boolean changed = false;
    for (Map.Entry<Integer, MachineValueTypeSet> itr : out.map.entrySet()) {
      if (itr.getValue().size() <= 1)
        continue;
      // Arbitrarily keep only the first type recorded for this mode.
      MVT vt = itr.getValue().getFirstSetBit();
      Util.assertion(vt != null);
      itr.getValue().clear();
      itr.getValue().insert(vt);
      changed = true;
    }
    return changed;
  }

  public static final Predicate<MVT> IsIntegerOrPtr = vt ->
      vt.isInteger() || vt.simpleVT == MVT.iPTR;
  public static final Predicate<MVT> IsFloatingPoint = MVT::isFloatingPoint;
  public static final Predicate<MVT> IsVector = MVT::isVector;
  public static final Predicate<MVT> IsScalar = vt -> !vt.isVector();

  /**
   * Constrains {@code out} to the types satisfying {@code pred}; an empty
   * {@code out} is first seeded with all legal types.
   */
  private boolean enforceByPredicate(TypeSetByHwMode out, Predicate<MVT> pred) {
    if (tp.hasError())
      return false;
    if (!out.isEmpty())
      return out.constrain(pred);
    return out.assignIf(getLegalType(), pred);
  }

  /**
   * Remove any non-integer type in this set.
   *
   * @param out
   * @return
   */
  public boolean enforceInteger(TypeSetByHwMode out) {
    return enforceByPredicate(out, IsIntegerOrPtr);
  }

  /** Remove any non-floating-point type in this set. */
  public boolean enforceFloatingPoint(TypeSetByHwMode out) {
    return enforceByPredicate(out, IsFloatingPoint);
  }

  /** Remove any vector type in this set. */
  public boolean enforceScalar(TypeSetByHwMode out) {
    return enforceByPredicate(out, IsScalar);
  }

  /** Remove any non-vector type in this set. */
  public boolean enforceVector(TypeSetByHwMode out) {
    return enforceByPredicate(out, IsVector);
  }

  /** Seed an empty set with all legal types; no-op on a non-empty set. */
  public boolean enforceAny(TypeSetByHwMode out) {
    if (tp.hasError() || !out.isEmpty())
      return false;
    return out.insert(getLegalType());
  }

  /**
   * Returns the union of the modes present in {@code setA} and {@code setB},
   * with {@code DefaultMode} (if present) moved to the end of the list.
   */
  private <T> TIntArrayList unionMode(InfoByHwMode<T> setA,
                                      InfoByHwMode<T> setB) {
    TIntArrayList res = new TIntArrayList();
    TIntHashSet unique = new TIntHashSet();
    unique.addAll(setA.map.keySet());
    unique.addAll(setB.map.keySet());
    boolean hasDefault = unique.contains(DefaultMode);
    unique.forEach(m -> {
      if (m != DefaultMode)
        res.add(m);
      return true;
    });
    // DefaultMode is deliberately processed last.
    if (hasDefault)
      res.add(DefaultMode);
    return res;
  }

  /**
   * Make sure that for each type in {@code small} set, there exists a larger type in
   * {@code big} set.
   *
   * @param small
   * @param big
   * @return Return true if given set has changed.
   */
  public boolean enforceSmallerThan(TypeSetByHwMode small, TypeSetByHwMode big) {
    if (tp.hasError()) return false;
    boolean changed = false;
    if (small.isEmpty())
      changed |= enforceAny(small);
    if (big.isEmpty())
      changed |= enforceAny(big);

    Util.assertion(small.hasDefault() && big.hasDefault());
    TIntArrayList modes = unionMode(small, big);
    // First, restrict both sets to a common kind (integer or FP) per mode.
    for (int i = 0, e = modes.size(); i < e; i++) {
      int m = modes.get(i);
      MachineValueTypeSet s = small.get(m);
      MachineValueTypeSet b = big.get(m);
      // BUGFIX: the second operand must inspect b, not s (copy-paste error
      // previously tested s.iterator() twice).
      if (anyOf(s.iterator(), IsIntegerOrPtr) &&
          anyOf(b.iterator(), IsIntegerOrPtr)) {
        Predicate<MVT> NotInt = vt -> !IsIntegerOrPtr.test(vt);
        changed |= s.eraseIf(NotInt) | b.eraseIf(NotInt);
      } else if (anyOf(s.iterator(), IsFloatingPoint) &&
          anyOf(b.iterator(), IsFloatingPoint)) {
        Predicate<MVT> NotFP = vt -> !IsFloatingPoint.test(vt);
        changed |= s.eraseIf(NotFP) | b.eraseIf(NotFP);
      } else if (s.isEmpty() || b.isEmpty()) {
        changed = !s.isEmpty() || !b.isEmpty();
        s.clear();
        b.clear();
      } else {
        tp.error("Incompatible types");
        return changed;
      }
      // If one side has no vectors, vectors on the other side can never
      // participate in a smaller-than relation — drop them.
      if (noneOf(s.iterator(), IsVector) ||
          noneOf(b.iterator(), IsVector)) {
        changed |= s.eraseIf(IsVector) | b.eraseIf(IsVector);
      }
    }

    // Strict ordering by (scalar size, total size).
    BiPredicate<MVT, MVT> lt = (a, b) -> {
      return a.getScalarSizeInBits() < b.getScalarSizeInBits() ||
          (a.getScalarSizeInBits() == b.getScalarSizeInBits() &&
              a.getSizeInBits() < b.getSizeInBits());
    };
    BiPredicate<MVT, MVT> le = (a, b) -> {
      // This function is used when removing elements: when a vector is compared
      // to a non-vector, it should return false (to avoid removal).
      if (a.isVector() != b.isVector())
        return false;
      // BUGFIX: le must be the non-strict companion of lt. The previous form
      // ("aScalar <= bScalar || aSize < bSize") was not consistent with lt
      // (cf. LLVM TypeInfer::EnforceSmallerThan) and could erase types that
      // are in fact larger.
      return lt.test(a, b) ||
          (a.getScalarSizeInBits() == b.getScalarSizeInBits() &&
              a.getSizeInBits() == b.getSizeInBits());
    };

    for (int i = 0, e = modes.size(); i < e; i++) {
      int m = modes.get(i);
      MachineValueTypeSet s = small.get(m);
      MachineValueTypeSet b = big.get(m);
      // Remove any element less or equal than minS in set b.
      MVT minS = Util.minIf(s.iterator(), IsScalar, lt);
      if (minS != null)
        changed |= b.eraseIf((vt) -> {
          return le.test(vt, minS);
        });
      // Remove any element greater or equal than maxS (max scalar in b) from s.
      MVT maxS = Util.maxIf(b.iterator(), IsScalar, lt);
      if (maxS != null)
        changed |= s.eraseIf((vt) -> {
          return le.test(maxS, vt);
        });
      // MinV = min vector in Small, remove all vectors from Big that are
      // smaller-or-equal than MinV.
      MVT minV = Util.minIf(s.iterator(), IsVector, lt);
      if (minV != null)
        changed |= b.eraseIf(vt -> {
          return le.test(vt, minV);
        });
      // MaxV = max vector in Big, remove all vectors from Small that are
      // larger-or-equal than MaxV.
      MVT maxV = Util.maxIf(b.iterator(), IsVector, lt);
      if (maxV != null)
        changed |= s.eraseIf(vt -> {
          return le.test(maxV, vt);
        });
    }
    return changed;
  }

  /**
   * 1. Ensure that for each type T in vec, T is a vector type, and that
   * for each type U in elt, U is a scalar type.
   * 2. Ensure that for each scalar type U in elt, there exits a vector
   * type T in vec, such that U is the element type of T.
   *
   * @param vec
   * @param elt
   * @return
   */
  public boolean enforceVectorEltTypeIs(TypeSetByHwMode vec,
                                        TypeSetByHwMode elt) {
    if (tp.hasError())
      return false;
    boolean changed = false;
    if (vec.isEmpty())
      changed |= enforceVector(vec);
    if (elt.isEmpty())
      changed |= enforceScalar(elt);

    TIntArrayList list = unionMode(vec, elt);
    for (int i = 0, sz = list.size(); i < sz; i++) {
      int m = list.get(i);
      MachineValueTypeSet vs = vec.get(m), es = elt.get(m);
      changed |= vs.eraseIf(IsScalar);
      changed |= es.eraseIf(IsVector);
      Util.assertion(!vs.isEmpty() && !es.isEmpty());

      // Keep only vectors whose element type occurs in es, and only scalars
      // that are the element type of some vector in vs.
      HashSet<MVT> an = new HashSet<>(), bn = new HashSet<>();
      vs.forEach(vt -> an.add(vt.getVectorElementType()));
      es.forEach(vt -> bn.add(vt));

      changed |= vs.eraseIf(vt -> !bn.contains(vt.getVectorElementType()));
      changed |= es.eraseIf(vt -> !an.contains(vt));
    }
    return changed;
  }

  /** Convenience overload constraining against a single per-mode type. */
  public boolean enforceVectorEltTypeIs(TypeSetByHwMode vec,
                                        ValueTypeByHwMode elt) {
    TypeSetByHwMode set = new TypeSetByHwMode(elt);
    return enforceVectorEltTypeIs(vec, set);
  }

  /**
   * Ensure that for each vector type T in {@code vec} there is a proper
   * sub-vector type in {@code sub} (same element type, same scalability,
   * fewer elements), and vice versa.
   */
  public boolean enforceVectorSubVectorTypesIs(TypeSetByHwMode vec,
                                               TypeSetByHwMode sub) {
    if (tp.hasError())
      return false;

    // IsSubVec(b, p): p is a proper sub-vector of b.
    BiPredicate<MVT, MVT> IsSubVec = (b, p) -> {
      if (!b.isVector() || !p.isVector())
        return false;
      if (b.isScalableVector() != p.isScalableVector())
        return false;
      if (!b.getVectorElementType().equals(p.getVectorElementType()))
        return false;
      return b.getVectorNumElements() < p.getVectorNumElements();
    };

    // NoSubV(s, t): no element of s is a sub-vector of t.
    BiPredicate<MachineValueTypeSet, MVT> NoSubV = (s, t) -> {
      for (Iterator<MVT> itr = s.iterator(); itr.hasNext(); )
        if (IsSubVec.test(t, itr.next()))
          return false;
      return true;
    };

    // NoSupV(s, t): no element of s is a super-vector of t.
    BiPredicate<MachineValueTypeSet, MVT> NoSupV = (s, t) -> {
      for (Iterator<MVT> itr = s.iterator(); itr.hasNext(); )
        if (IsSubVec.test(itr.next(), t))
          return false;
      return true;
    };

    boolean changed = false;
    if (vec.isEmpty())
      changed |= enforceVector(vec);
    if (sub.isEmpty())
      changed |= enforceVector(sub);

    TIntArrayList list = unionMode(vec, sub);
    for (int i = 0, e = list.size(); i < e; i++) {
      int m = list.get(i);
      MachineValueTypeSet vecSet = vec.get(m), subSet = sub.get(m);
      changed |= vecSet.eraseIf(IsScalar);
      changed |= subSet.eraseIf(vt -> NoSubV.test(vecSet, vt));
      changed |= vecSet.eraseIf(vt -> NoSupV.test(subSet, vt));
    }
    return changed;
  }

  /**
   * Constrain {@code v} and {@code w} so that for each type in one set there
   * is a type with the same number of vector elements in the other (scalars
   * count as length 0).
   */
  public boolean enforceSameNumElts(TypeSetByHwMode v,
                                    TypeSetByHwMode w) {
    if (tp.hasError())
      return false;

    boolean changed = false;
    if (v.isEmpty())
      changed |= enforceAny(v);
    if (w.isEmpty())
      changed |= enforceAny(w);

    // NoLength(lengths, vt): vt's element count does not occur in lengths.
    BiPredicate<TIntHashSet, MVT> NoLength = (lengths, vt) ->
        !lengths.contains(vt.isVector() ? vt.getVectorNumElements() : 0);

    TIntArrayList list = unionMode(v, w);
    for (int i = 0, e = list.size(); i < e; i++) {
      int m = list.get(i);
      MachineValueTypeSet vs = v.get(m), ws = w.get(m);
      TIntHashSet an = new TIntHashSet(), bn = new TIntHashSet();
      vs.forEach(vt -> an.add(vt.isVector() ? vt.getVectorNumElements() : 0));
      // BUGFIX: ws must populate bn (was an), otherwise bn stayed empty and
      // every element of vs was erased.
      ws.forEach(vt -> bn.add(vt.isVector() ? vt.getVectorNumElements() : 0));

      changed |= vs.eraseIf(vt -> NoLength.test(bn, vt));
      changed |= ws.eraseIf(vt -> NoLength.test(an, vt));
    }
    return changed;
  }

  /**
   * Constrain {@code a} and {@code b} so that for each type in one set there
   * is a type of the same bit width in the other.
   */
  public boolean enforceSameSize(TypeSetByHwMode a, TypeSetByHwMode b) {
    if (tp.hasError())
      return false;

    boolean changed = false;
    if (a.isEmpty())
      changed |= enforceAny(a);
    if (b.isEmpty())
      changed |= enforceAny(b);

    BiPredicate<TIntHashSet, MVT> NoSize = (size, vt) -> {
      return !size.contains(vt.getSizeInBits());
    };

    TIntArrayList list = unionMode(a, b);
    for (int i = 0, e = list.size(); i < e; i++) {
      int m = list.get(i);
      MachineValueTypeSet as = a.get(m), bs = b.get(m);
      TIntHashSet an = new TIntHashSet(), bn = new TIntHashSet();
      as.forEach(vt -> an.add(vt.getSizeInBits()));
      // BUGFIX: bs must populate bn (was an), otherwise bn stayed empty and
      // every element of as was erased.
      bs.forEach(vt -> bn.add(vt.getSizeInBits()));

      changed |= as.eraseIf(vt -> NoSize.test(bn, vt));
      changed |= bs.eraseIf(vt -> NoSize.test(an, vt));
    }
    return changed;
  }

  /**
   * Replaces each overloaded placeholder type (iAny, fAny, vAny, iPTRAny)
   * in every mode of {@code vts} with the legal types it stands for.
   */
  public void expandOverloads(TypeSetByHwMode vts) {
    TypeSetByHwMode legalSet = getLegalType();
    Util.assertion(legalSet.isDefaultOnly());
    MachineValueTypeSet legalTypes = legalSet.get(DefaultMode);
    for (Map.Entry<Integer, MachineValueTypeSet> itr : vts.map.entrySet()) {
      MachineValueTypeSet vt = itr.getValue();
      expandOverloads(vt, legalTypes);
    }
  }

  /**
   * Single-mode worker for {@link #expandOverloads(TypeSetByHwMode)}:
   * replaces overloaded placeholders in {@code out} with the matching
   * members of {@code legal}.
   */
  public void expandOverloads(MachineValueTypeSet out,
                              MachineValueTypeSet legal) {
    // Collect and remove the placeholders first; iterating and erasing in
    // two phases avoids mutating out while walking its iterator.
    HashSet<MVT> ovs = new HashSet<>();
    for (Iterator<MVT> itr = out.iterator(); itr.hasNext(); ) {
      MVT vt = itr.next();
      if (!vt.isOverloaded())
        continue;
      ovs.add(vt);
      out.erase(vt);
    }
    for (MVT ov : ovs) {
      switch (ov.simpleVT) {
        case MVT.iPTRAny:
          out.insert(new MVT(MVT.iPTR));
          // BUGFIX: iPTRAny expands to iPTR only. Previously control fell
          // through into the iAny case and also inserted every legal integer
          // type (cf. LLVM TypeInfer::expandOverloads, which returns here).
          return;
        case MVT.iAny:
          for (int i = MVT.FIRST_INTEGER_VALUETYPE;
               i < MVT.LAST_INTEGER_VALUETYPE; i++) {
            if (legal.count(new MVT(i)))
              out.insert(new MVT(i));
          }
          for (int i = MVT.FIRST_INTEGER_VECTOR_VALUETYPE;
               i < MVT.LAST_INTEGER_VECTOR_VALUETYPE; i++) {
            if (legal.count(new MVT(i)))
              out.insert(new MVT(i));
          }
          return;
        case MVT.fAny:
          for (int i = MVT.FIRST_FP_VALUETYPE;
               i < MVT.LAST_FP_VALUETYPE; i++) {
            if (legal.count(new MVT(i)))
              out.insert(new MVT(i));
          }
          for (int i = MVT.FIRST_FP_VECTOR_VALUETYPE;
               i < MVT.LAST_FP_VECTOR_VALUETYPE; i++) {
            if (legal.count(new MVT(i)))
              out.insert(new MVT(i));
          }
          return;
        case MVT.vAny:
          for (int i = MVT.FIRST_VECTOR_VALUETYPE;
               i < MVT.LAST_VECTOR_VALUETYPE; i++) {
            if (legal.count(new MVT(i)))
              out.insert(new MVT(i));
          }
          return;
        default:
          break;
      }
    }
  }

  /**
   * Returns the (cached) default-mode-only set of all legal value types,
   * flattening per-mode legal types into the default mode on first use.
   */
  private TypeSetByHwMode getLegalType() {
    if (!isLegalTypeCached) {
      TypeSetByHwMode set = tp.getDAGPatterns().getLegalValueTypes();
      MachineValueTypeSet vts = legalCache.getOrCreate(DefaultMode);
      for (Map.Entry<Integer, MachineValueTypeSet> pair : set.map.entrySet()) {
        vts.insert(pair.getValue());
      }
      isLegalTypeCached = true;
    }
    Util.assertion(legalCache.isDefaultOnly(), "Default-only allowed!");
    return legalCache;
  }
}
| |
/**
* Copyright (c) 2012, Ben Fortuna
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* o Neither the name of Ben Fortuna nor the names of any other contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.fortuna.ical4j.model;
import java.util.TimeZone;
import java.util.Date;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import net.fortuna.ical4j.model.CalendarDateFormatFactory;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* $Id$ [06-Apr-2004]
*
*/
/**
 * Parameterized JUnit 3 test comparing {@code CalendarDateFormatFactory}'s
 * specialized formats against a plain {@link SimpleDateFormat} configured with
 * the same pattern, leniency and time zone. Each suite entry runs one test
 * method against one pattern/leniency/value-set combination.
 */
public class CalendarDateFormatFactoryTest extends TestCase {

  // Leniency flags, named for readability in suite().
  private static final boolean STRICT = false;
  private static final boolean LENIENT = true;

  // Parameters for the test method selected by the constructor.
  private String pattern;
  private boolean lenient;
  private java.util.TimeZone[] timeZones;  // null for testParseFailure entries
  private String[] values;

  /**
   * @param testMethod
   * @param pattern
   * @param lenient
   * @param values
   */
  public CalendarDateFormatFactoryTest(String testMethod, String pattern, boolean lenient, String[] values) {
    this(testMethod, pattern, lenient, null, values);
  }

  /**
   * @param pattern
   * @param lenient
   * @param timeZones
   * @param values
   */
  public CalendarDateFormatFactoryTest(String testMethod, String pattern, boolean lenient, java.util.TimeZone[] timeZones,
                                       String[] values) {
    super(testMethod);
    this.pattern = pattern;
    this.lenient = lenient;
    this.timeZones = timeZones;
    this.values = values;
  }

  /**
   * An unsupported pattern must yield a plain SimpleDateFormat rather than a
   * factory-specialized format.
   */
  public void testFallbackToSimpleDateFormat() throws Exception {
    SimpleDateFormat f = new SimpleDateFormat("HH");
    assertEquals(f, CalendarDateFormatFactory.getInstance("HH"));
  }

  /**
   * Fetches the factory format for {@code pattern} and asserts it is one of
   * the factory's own specialized implementations (detected by class name
   * prefix), not a fallback SimpleDateFormat.
   */
  private DateFormat getCalendarFormatForPattern(String pattern) {
    DateFormat cdf = CalendarDateFormatFactory.getInstance(pattern);
    assertTrue("didn't get calendar format for pattern: " + pattern,
        cdf.getClass().getName().startsWith(CalendarDateFormatFactory.class.getName()));
    return cdf;
  }

  /**
   * Parses and re-formats every value in every configured time zone, asserting
   * the factory format (and a clone of it) agrees with SimpleDateFormat.
   *
   * @throws ParseException
   */
  public void testParseSuccess() throws ParseException {
    SimpleDateFormat sdf = new SimpleDateFormat(pattern);
    // Date instances are always in UTC..
    sdf.setTimeZone(TimeZone.getTimeZone("Etc/UTC"));
    DateFormat cdf = getCalendarFormatForPattern(pattern);
    sdf.setLenient(lenient);
    cdf.setLenient(lenient);
    for (int i = 0; i < timeZones.length; i++) {
      assertNotNull(timeZones[i]);
      // Keep both formats on the same zone; sdf mirrors whatever cdf reports.
      cdf.setTimeZone(timeZones[i]);
      sdf.setTimeZone(cdf.getTimeZone());
      // The clone must behave identically to the original.
      DateFormat clone = (DateFormat) cdf.clone();
      for (int j = 0; j < values.length; j++) {
        Date cdfResult = cdf.parse(values[j]);
        Date sdfResult = sdf.parse(values[j]);
        Date cloneResult = clone.parse(values[j]);
        assertEquals(sdfResult, cdfResult);
        assertEquals(sdfResult, cloneResult);
        // also test the formatter!
        Date d = sdf.parse(values[j]);
        assertEquals(sdf.format(d), cdf.format(d));
        assertEquals(sdf.format(d), clone.format(d));
      }
    }
  }

  /**
   * Asserts every value fails to parse with the factory format and its clone;
   * when SimpleDateFormat also rejects the value, the exception types must
   * match.
   */
  public void testParseFailure() {
    SimpleDateFormat sdf = new SimpleDateFormat(pattern);
    DateFormat cdf = getCalendarFormatForPattern(pattern);
    sdf.setLenient(lenient);
    cdf.setLenient(lenient);
    DateFormat clone = (DateFormat) cdf.clone();
    for (int i = 0; i < values.length; i++) {
      Exception sdfException = null;
      try {
        // sanity check, make sure simple date formatter fails too
        sdf.parse(values[i]);
        // CalendarDateFormats are a bit more strict than SimpleDateFormat..
        // fail("bad test -- expected simple date formatter to fail for value: " + values[i]);
      } catch (Exception e) {
        sdfException = e;
      }
      try {
        cdf.parse(values[i]);
        fail("expected a parse exception for value: " + values[i]);
      } catch (Exception e) {
        if (sdfException != null) {
          assertEquals(sdfException.getClass().getName(), e.getClass().getName());
        }
      }
      try {
        clone.parse(values[i]);
        fail("expected a parse exception for value: " + values[i]);
      } catch (Exception e) {
        if (sdfException != null) {
          assertEquals(sdfException.getClass().getName(), e.getClass().getName());
        }
      }
    }
  }

  /**
   * Builds the parameterized suite: strict entries exercise rejection of
   * malformed values, lenient entries exercise rollover parsing.
   *
   * @return
   */
  public static TestSuite suite() {
    TestSuite suite = new TestSuite();
    java.util.TimeZone[] tz = {TimeZone.getDefault(), TimeZone.getTimeZone("GMT"), TimeZone.getTimeZone("US/Eastern"), TimeZone.getTimeZone("US/Pacific")};
    suite.addTest(new CalendarDateFormatFactoryTest("testParseFailure", "yyyyMMdd'T'HHmmss", STRICT, new String[] {"1", "20081201T231370", "20081601T000000"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "yyyyMMdd'T'HHmmss", LENIENT, tz, new String[] {"20081201T231370", "20081601T000000", "20081201T000000xyz"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "yyyyMMdd'T'HHmmss", STRICT, tz, new String[] {"00010215T023456", "20081201T000000"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseFailure", "yyyyMMdd'T'HHmmss'Z'", STRICT, new String[] {"1", "20081201T000000", "20083101T000000Z"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "yyyyMMdd'T'HHmmss'Z'", LENIENT, tz, new String[] {"20083101T000000Z", "20081201T000000Zxyz"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "yyyyMMdd'T'HHmmss'Z'", STRICT, tz, new String[] {"20081201T000000Z"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseFailure", "yyyyMMdd", STRICT, new String[] {"1", "20081301"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "yyyyMMdd", LENIENT, tz, new String[] {"20081301", "20081201xyz"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseFailure", "HHmmss", STRICT, new String[] {"1", "260000"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "HHmmss", LENIENT, tz, new String[] {"260000"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "HHmmss", STRICT, tz, new String[] {"021234", "233456"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseFailure", "HHmmss'Z'", STRICT, new String[] {"1", "123456", "261234Z"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "HHmmss'Z'", LENIENT, tz, new String[] {"261234Z"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "HHmmss'Z'", STRICT, tz, new String[] {"021234Z"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseFailure", "HHmmss'Z'", STRICT, tz, new String[] {"233456Zzxy"}));
    suite.addTest(new CalendarDateFormatFactoryTest("testParseSuccess", "HHmmss'Z'", LENIENT, tz, new String[] {"233456Zzxy"}));
    return suite;
  }
}
| |
/*
* MIT License
*
* Copyright (c) 2020 Choko (choko@curioswitch.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.curioswitch.curiostack.aws.sdk.core;
import com.linecorp.armeria.client.ClientOptions;
import com.linecorp.armeria.client.WebClient;
import com.linecorp.armeria.common.HttpData;
import com.linecorp.armeria.common.HttpHeaderNames;
import com.linecorp.armeria.common.HttpHeaders;
import com.linecorp.armeria.common.HttpMethod;
import com.linecorp.armeria.common.HttpObject;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.RequestHeaders;
import com.linecorp.armeria.common.RequestHeadersBuilder;
import com.linecorp.armeria.common.ResponseHeaders;
import io.netty.buffer.Unpooled;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nullable;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import software.amazon.awssdk.http.SdkHttpMethod;
import software.amazon.awssdk.http.SdkHttpRequest;
import software.amazon.awssdk.http.SdkHttpResponse;
import software.amazon.awssdk.http.async.AsyncExecuteRequest;
import software.amazon.awssdk.http.async.SdkAsyncHttpClient;
import software.amazon.awssdk.http.async.SdkAsyncHttpResponseHandler;
import software.amazon.awssdk.utils.AttributeMap;
import software.amazon.awssdk.utils.async.DelegatingSubscriber;
/**
 * An AWS SDK v2 {@link SdkAsyncHttpClient} that executes requests through an
 * Armeria {@link WebClient}, adapting between the SDK's Reactive Streams
 * request/response model and Armeria's {@link HttpObject} streams.
 */
public class ArmeriaSdkHttpClient implements SdkAsyncHttpClient {

  private final WebClient client;

  ArmeriaSdkHttpClient(WebClient client) {
    this.client = client;
  }

  /**
   * Converts the SDK request to an Armeria {@link HttpRequest}, executes it,
   * and bridges the response back to the SDK's
   * {@link SdkAsyncHttpResponseHandler}.
   *
   * @return a future completed when the response stream finishes (or fails)
   */
  @Override
  public CompletableFuture<Void> execute(AsyncExecuteRequest executeRequest) {
    SdkHttpRequest httpRequest = executeRequest.request();
    SdkAsyncHttpResponseHandler handler = executeRequest.responseHandler();

    RequestHeadersBuilder headersBuilder =
        RequestHeaders.builder(convert(httpRequest.method()), httpRequest.getUri().toString());
    // Propagate the content length when the SDK knows it up front.
    executeRequest
        .requestContentPublisher()
        .contentLength()
        .ifPresent(
            contentLength ->
                headersBuilder.add(HttpHeaderNames.CONTENT_LENGTH, contentLength.toString()));
    for (Map.Entry<String, List<String>> header : httpRequest.headers().entrySet()) {
      headersBuilder.add(header.getKey(), header.getValue());
    }

    // Adapt the SDK's ByteBuffer publisher into an Armeria HttpData stream.
    Publisher<HttpData> requestStream =
        delegate ->
            executeRequest
                .requestContentPublisher()
                .subscribe(new SdkToHttpDataSubscriber(delegate));

    HttpRequest request = HttpRequest.of(headersBuilder.build(), requestStream);
    HttpResponse response = client.execute(request);
    response.subscribe(new ResponseSubscriber(handler));
    CompletableFuture<Void> completionFuture = response.whenComplete();
    completionFuture.whenComplete(
        (unused, t) -> {
          if (t != null) {
            // Subscriber.onError, SdkAsyncHttpResponseHandler.onError, the returned future, and any
            // thrown exception are all ways of communicating errors to the SDK. This seems like two
            // too many but cover all the bases just in case.
            handler.onError(t);
          }
        });
    return completionFuture;
  }

  @Override
  public String clientName() {
    return "ArmeriaAsync";
  }

  /** No resources of our own to release; the WebClient is managed externally. */
  @Override
  public void close() {}

  // TODO(choko): Implement
  public static class Builder implements SdkAsyncHttpClient.Builder<ArmeriaSdkHttpClient.Builder> {

    @Nullable private ClientOptions options;

    @Override
    public SdkAsyncHttpClient buildWithDefaults(AttributeMap serviceDefaults) {
      // NOTE(review): options and serviceDefaults are currently ignored — see
      // the TODO above.
      return new ArmeriaSdkHttpClient(WebClient.builder().build());
    }
  }

  /**
   * Maps an SDK HTTP method onto the Armeria equivalent, falling back to a
   * name-based lookup for any method added to the SDK after this switch was
   * written.
   */
  private static HttpMethod convert(SdkHttpMethod method) {
    switch (method) {
      case GET:
        return HttpMethod.GET;
      case POST:
        return HttpMethod.POST;
      case PUT:
        return HttpMethod.PUT;
      case DELETE:
        return HttpMethod.DELETE;
      case HEAD:
        return HttpMethod.HEAD;
      case PATCH:
        return HttpMethod.PATCH;
      case OPTIONS:
        return HttpMethod.OPTIONS;
      default:
        try {
          return HttpMethod.valueOf(method.name());
        } catch (IllegalArgumentException unused) {
          throw new IllegalArgumentException(
              "Unknown SdkHttpMethod: "
                  + method
                  + ". Cannot convert to an Armeria request. This could only practically happen if "
                  + "the HTTP standard has new methods added and is very unlikely.");
        }
    }
  }

  /** Converts Armeria response headers (status + headers) to the SDK form. */
  private static SdkHttpResponse convert(ResponseHeaders headers) {
    SdkHttpResponse.Builder builder =
        SdkHttpResponse.builder()
            .statusCode(headers.status().code())
            .statusText(headers.status().reasonPhrase());
    fillHeaders(headers, builder);
    return builder.build();
  }

  /** Copies every Armeria header entry into the SDK response builder. */
  private static void fillHeaders(HttpHeaders headers, SdkHttpResponse.Builder builder) {
    headers.forEach((name, value) -> builder.appendHeader(name.toString(), value));
  }

  /** Wraps SDK request ByteBuffers as Armeria HttpData on the way out. */
  private static class SdkToHttpDataSubscriber extends DelegatingSubscriber<ByteBuffer, HttpData> {

    private SdkToHttpDataSubscriber(Subscriber<? super HttpData> delegate) {
      super(delegate);
    }

    @Override
    public void onNext(ByteBuffer byteBuffer) {
      subscriber.onNext(HttpData.wrap(Unpooled.wrappedBuffer(byteBuffer)));
    }
  }

  /**
   * Subscribes to the Armeria response stream, delivering the headers to the
   * SDK handler and then re-publishing the body as ByteBuffers once the SDK
   * subscribes via {@link SdkAsyncHttpResponseHandler#onStream}.
   */
  private static class ResponseSubscriber implements Subscriber<HttpObject>, Publisher<ByteBuffer> {

    private final SdkAsyncHttpResponseHandler handler;

    private boolean startedStream;
    @MonotonicNonNull private Subscription subscription;
    @MonotonicNonNull private Subscriber<? super ByteBuffer> sdkSubscriber;

    private ResponseSubscriber(SdkAsyncHttpResponseHandler handler) {
      this.handler = handler;
    }

    @Override
    public void onSubscribe(Subscription subscription) {
      this.subscription = subscription;
      subscription.request(1);
    }

    @Override
    public void onNext(HttpObject obj) {
      if (!startedStream) {
        assert obj instanceof ResponseHeaders;
        startedStream = true;
        handler.onHeaders(convert((ResponseHeaders) obj));
        // We've only requested one object, the headers so far. No more objects will be signaled
        // until handler subscribes and requests more objects so we don't have to worry about
        // buffering.
        handler.onStream(this);
      } else {
        notifyObject(obj);
      }
    }

    @Override
    public void onError(Throwable t) {
      // BUGFIX: the response can fail before any headers arrive, in which case
      // handler.onStream was never invoked and sdkSubscriber is still null;
      // dereferencing it unguarded threw NPE. The completion future returned
      // from execute() still reports the error to the SDK in that case.
      if (sdkSubscriber != null) {
        sdkSubscriber.onError(t);
      }
    }

    @Override
    public void onComplete() {
      // Guard for the same reason as onError: completion without a subscriber
      // (e.g. a cancelled stream) must not NPE.
      if (sdkSubscriber != null) {
        sdkSubscriber.onComplete();
      }
    }

    @Override
    public void subscribe(Subscriber<? super ByteBuffer> subscriber) {
      this.sdkSubscriber = subscriber;
      // The SDK subscriber will request objects, controlling the stream from here.
      subscriber.onSubscribe(subscription);
    }

    private void notifyObject(HttpObject obj) {
      if (obj instanceof HttpData) {
        HttpData data = (HttpData) obj;
        // We can't subscribe with pooled objects since there is no SDK callback that would let us
        // release them so can just wrap the array here.
        sdkSubscriber.onNext(ByteBuffer.wrap(data.array()));
      } else {
        // Trailers. Documentation doesn't make clear whether the SDK actually can handle trailers
        // but it also doesn't say the callback can only be called once so just try calling it again
        // with the trailers.
        assert obj instanceof HttpHeaders;
        SdkHttpResponse.Builder builder = SdkHttpResponse.builder();
        fillHeaders((HttpHeaders) obj, builder);
        handler.onHeaders(builder.build());
      }
    }
  }
}
| |
/*
* Copyright (c) 2003-2016, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.framework;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Proxy;
import java.net.URL;
import java.security.ProtectionDomain;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Vector;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleException;
import org.osgi.framework.BundleReference;
/**
* Classloader for bundle JAR files.
*
* @author Jan Stein, Philippe Laporte, Mats-Ola Persson, Gunna Ekolin
* @author Vilmos Nebehaj (Android application support)
*/
final public class BundleClassLoader extends ClassLoader implements BundleReference {
final static int ONLY_FIRST = 1;
final static int LIST = 2;
final static int ONLY_RECURSE = 4;
final static int RECURSE = 256;
final static int LOCAL = 512;
/**
* Framework class loader
*/
final FrameworkContext fwCtx;
/**
* Handle to secure operations.
*/
final PermissionOps secure;
/**
* Bundle classloader protect domain.
*/
final ProtectionDomain protectionDomain;
/**
* Archive that we load code from.
*/
BundleArchive archive;
/**
* Imported and Exported java packages.
*/
BundlePackages bpkgs;
/**
* Bundle class path for this classloader.
*/
final private BundleClassPath classPath;
// Array of bundles for which a classload is triggering activation.
private static ThreadLocal<ArrayList<BundleImpl>> tlBundlesToActivate = new ThreadLocal<ArrayList<BundleImpl>>();
Debug debug;
/**
* Create class loader for specified bundle.
*/
/**
 * Creates the class loader for one bundle generation, wiring in the framework
 * context, permission handler, protection domain, package wiring and archive.
 *
 * @param gen the bundle generation this loader serves
 * @throws BundleException if the bundle class path cannot be constructed
 */
BundleClassLoader(final BundleGeneration gen) throws BundleException {
  // otherwise getResource will bypass OUR parent
  super(gen.bundle.fwCtx.parentClassLoader);
  fwCtx = gen.bundle.fwCtx;
  debug = fwCtx.debug;
  secure = fwCtx.perm;
  protectionDomain = gen.getProtectionDomain();
  bpkgs = gen.bpkgs;
  archive = gen.archive;
  classPath = new BundleClassPath(archive, gen);
  if (debug.classLoader) {
    debug.println(this + " Created new classloader");
  }
}
/**
* Find bundle class to load. First check if this load comes from an imported
* package. Otherwise load class from our bundle.
*
* @see java.lang.ClassLoader#findClass
*/
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
  // java.* classes must always come from the parent loader; a bundle is
  // never allowed to define them.
  if (name.startsWith("java.")) {
    return fwCtx.parentClassLoader.loadClass(name);
  }
  // Honor the framework's boot delegation configuration.
  if (fwCtx.isBootDelegated(name)) {
    try {
      final Class<?> bootDelegationCls = fwCtx.parentClassLoader.loadClass(name);
      if (debug.classLoader && bootDelegationCls != null) {
        debug.println(this + " findClass: " + name + " boot delegation: " + bootDelegationCls);
      }
      return bootDelegationCls;
    } catch (final ClassNotFoundException e) {
      // Deliberately ignored: fall through and search the bundle instead.
    }
  }
  // Split the class name into its resource path and package name.
  String path;
  String pkg;
  final int pos = name.lastIndexOf('.');
  if (pos != -1) {
    path = name.replace('.', '/');
    pkg = name.substring(0, pos);
  } else {
    path = name;
    pkg = null;
  }
  // Search imported packages and this bundle's own class path.
  Class<?> res = (Class<?>) secure.callSearchFor(this, name, pkg, path + ".class",
      classSearch, ONLY_FIRST, this, null);
  if (res != null) {
    return res;
  }
  // Last resort (unless strict boot classloading is configured): classes
  // that appear to originate from the boot class path are retried on the
  // parent loader.
  if (!fwCtx.props.STRICTBOOTCLASSLOADING) {
    if (isBootClassContext(name)) {
      if (debug.classLoader) {
        debug.println(this + " trying parent loader for class=" + name
            + ", since it was loaded on the system loader itself");
      }
      res = fwCtx.parentClassLoader.loadClass(name);
      if (res != null) {
        if (debug.classLoader) {
          debug.println(this + " loaded " + name + " from " + fwCtx.parentClassLoader);
        }
      }
      return res;
    }
  }
  throw new ClassNotFoundException(name);
}
/**
* Find native library code to load.
*
* @see java.lang.ClassLoader#findLibrary
*/
@Override
protected String findLibrary(String name) {
final String res = secure.callFindLibrary0(this, name);
if (debug.classLoader) {
debug.println(this + " Find library: " + name + (res != null ? " OK" : " FAIL"));
}
return res;
}
/**
* Returns an Enumeration of all the resources with the given name.
*
* @see java.lang.ClassLoader#findResources
*/
@Override
protected Enumeration<URL> findResources(String name) {
// Step 1 and 2 are done by getResources
return getBundleResources(name, false);
}
/**
* Finds the resource with the given name.
*
* @see java.lang.ClassLoader#findResource
*/
@Override
protected URL findResource(String name) {
final Enumeration<URL> res = getBundleResources(name, true);
if (res != null) {
return res.nextElement();
} else {
return null;
}
}
/**
* Wrapper class around SecurityManager which exposes the getClassLoader()
* method.
*/
static class SecurityManagerExposer extends SecurityManager {
@Override
public Class<?>[] getClassContext() {
return super.getClassContext();
}
}
static protected SecurityManagerExposer smex = new SecurityManagerExposer();
/**
* @return <code>true</code> if the given class is not loaded by a bundle
* class loader, <code>false</false> otherwise.
*/
private boolean isNonBundleClass(Class<?> cls) {
return (this.getClass().getClassLoader() != cls.getClassLoader())
&& !ClassLoader.class.isAssignableFrom(cls) && !Class.class.equals(cls)
&& !Proxy.class.equals(cls);
}
/**
* Check if the current call is made from a class loaded on the boot class
* path (or rather, on a class loaded from something else than a bundle class
* loader)
*
* @param name
* The name of the class to load.
*/
public boolean isBootClassContext(String name) {
Class<?>[] classStack = smex.getClassContext();
if (classStack == null) { // Android 4.0 returns null
// TODO: Find a cheaper and better solution
try {
final StackTraceElement[] classNames = new Throwable().getStackTrace();
classStack = new Class[classNames.length];
for (int i = 1; i < classNames.length; i++)
classStack[i] = Class.forName(classNames[i].getClassName());
} catch (final ClassNotFoundException e) {
return false;
}
}
for (int i = 1; i < classStack.length; i++) {
final Class<?> currentCls = classStack[i];
if (isNonBundleClass(currentCls)) {
final ClassLoader currentCL = currentCls.getClassLoader();
// If any of the classloaders for the caller's class is
// a BundleClassLoader, we're not in a VM class context
// ANDROID FIX, android-7/8 unexpectedly returns
// java.lang.BootClassLoader as the ClassLoader for the
// BootClassLoader Class other jvm's return null
for (ClassLoader cl = currentCL; cl != null && cl != cl.getClass().getClassLoader(); cl = cl.getClass()
.getClassLoader()) {
if (BundleClassLoader.class.isInstance(cl)) {
return false;
}
}
return !Bundle.class.isInstance(classStack[i - 1]);
}
}
return false;
}
/**
* Find Class and load it. This function is abstract in PJava 1.2 so we define
* it here to work as closely as it can to Java 2. Should work okey if we
* don't use the Java 2 stuff.
*
* @param name
* the name of the class
* @param resolve
* if <code>true</code> then resolve the class
* @return the resulting <code>Class</code> object
* @exception ClassNotFoundException
* if the class could not be found
* @see java.lang.ClassLoader#loadClass
*/
@Override
protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
Class<?> c = findLoadedClass(name);
if (c == null) {
c = findClass(name);
} else if (secure.getClassLoaderOf(c) == this) {
// Handle bundles that are lazely started after having been
// stopped. In this case the triggering classes will already
// be loaded. Only consider classes loaded by this classloader
final BundleImpl b = (BundleImpl) getBundle();
if (b.triggersActivationCls(name)) {
if (debug.lazy_activation) {
debug.println(this + " lazy activation of #" + b.id + " triggered by loadClass("
+ name + ")");
}
final ArrayList<BundleImpl> bundlesToActivate = tlBundlesToActivate.get();
if (null == bundlesToActivate) {
// Not part of a load chain; activate bundle here.
if (debug.lazy_activation) {
debug.println(this + " requesting lazy activation of #" + b.id);
}
try {
secure.callFinalizeActivation(b);
} catch (final BundleException e) {
fwCtx.frameworkError(b, e);
}
} else {
// add bundle to list of bundles to activate when the
// initiator class has been loaded.
boolean bundlePresent = false;
for (int i = 0, size = bundlesToActivate.size(); i < size; i++) {
final BundleImpl tmp = bundlesToActivate.get(i);
if (tmp.id == b.id) {
bundlePresent = true;
break;
}
}
if (!bundlePresent) {
bundlesToActivate.add(b);
if (debug.lazy_activation) {
debug.println(this + " added #" + b.id + " to list of bundles to be activated.");
}
}
}
}
}
if (resolve) {
resolveClass(c);
}
return c;
}
/**
* Finds the resource with the given name. This is defined a little different
* in PJava 1.2 versus Java 2. So we first try to use the super() version and
* if it fails we try to find it in the local bundle.
*
* @param name
* resource name
* @return an URL to resource, or <code>null</code> if the resource could not
* be found or the caller doesn't have adequate privileges to get the
* resource.
* @see java.lang.ClassLoader#getResource
*/
@Override
public URL getResource(String name) {
if (debug.classLoader) {
debug.println(this + " getResource: " + name);
}
URL res = null;
if (name.startsWith("java/")) {
res = fwCtx.parentClassLoader.getResource(name);
if (debug.classLoader) {
debug.println(this + " getResource: " + name + " file in java pkg: " + res);
}
return res;
}
if (fwCtx.isBootDelegatedResource(name)) {
res = fwCtx.parentClassLoader.getResource(name);
if (res != null) {
if (debug.classLoader) {
debug.println(this + " getResource: " + name + " boot delegation: " + res);
}
return res;
}
}
res = findResource(name);
if (debug.classLoader) {
debug.println(this + " getResource: " + name + " bundle space: " + res);
}
return res;
}
// We would like to use the following implementation of
// getResources() but that method is final in JDK 1.4
// thus we can not redefine it here.
/**
* Finds all the resources with the given name. A resource is some data
* (images, audio, text, etc) that can be accessed by class code in a way that
* is independent of the location of the code.
*
* <p>
* The name of a resource is a <tt>/</tt>-separated path name that identifies
* the resource.
*
* @param name
* resource name
* @return An enumeration of {@link java.net.URL <tt>URL</tt>} objects for the
* resource. If no resources could be found, the enumeration will be
* empty. Resources that the class loader doesn't have access to will
* not be in the enumeration.
*
* @see java.lang.ClassLoader#getResources
* @see org.osgi.framework.Bundle#getResources(String name)
*
*/
public Enumeration<URL> getResourcesOSGi(String name) throws IOException {
if (debug.classLoader) {
debug.println(this + " getResources: " + name);
}
final int start = name.startsWith("/") ? 1 : 0;
if (name.substring(start).startsWith("java/")) {
return fwCtx.parentClassLoader.getResources(name);
}
Enumeration<URL> res = null;
if (fwCtx.isBootDelegatedResource(name)) {
res = fwCtx.parentClassLoader.getResources(name);
}
if (res == null || !res.hasMoreElements()) {
res = findResources(name);
}
return res;
}
/**
* Finds the resource with the given name and returns the InputStream. The
* method is overridden to make sure it does the right thing.
*
* @param name
* resource name
* @return an InputStream to resource, or <code>null</code> if the resource
* could not be found or the caller doesn't have adequate privileges
* to get the resource.
* @see java.lang.ClassLoader#getResourceAsStream
*/
@Override
public InputStream getResourceAsStream(String name) {
try {
final URL url = getResource(name);
if (url != null) {
return url.openStream();
}
} catch (final IOException ignore) {
}
return null;
}
/**
* Return a string representing this object
*
* @return A message string.
*/
@Override
public String toString() {
return "BundleClassLoader("
// +"fw=" +bpkgs.bundle.fwCtx.hashCode()
+ "id=" + bpkgs.bg.bundle.id + ",gen=" + bpkgs.bg.generation + ")";
}
// Implements BundleReference
public Bundle getBundle() {
return bpkgs.bg.bundle;
}
//
// BundleClassLoader specific
//
/**
* Close down this classloader. We don't give out any new classes. Perhaps we
* should block all classloads.
*/
void close() {
archive = null;
if (debug.classLoader) {
debug.println(this + " Cleared archives");
}
}
/**
* Get all the resources with the given name in this bundle.
*
*/
Enumeration<URL> getBundleResources(String name, boolean onlyFirst) {
if (debug.classLoader) {
debug.println(this + " Find bundle resource" + (onlyFirst ? "" : "s") + ": " + name);
}
String pkg = null;
final int pos = name.lastIndexOf('/');
if (pos > 0) {
final int start = name.startsWith("/") ? 1 : 0;
pkg = name.substring(start, pos).replace('/', '.');
} else {
pkg = null;
}
@SuppressWarnings("unchecked")
final Enumeration<URL> res = (Enumeration<URL>) secure.callSearchFor(this, null, pkg, name,
resourceSearch,
onlyFirst ? ONLY_FIRST
: 0, this,
null);
return res;
}
/**
* Get bundle package handler.
*
*/
BundlePackages getBpkgs() {
return bpkgs;
}
/**
* Attach fragment to classloader.
*
* @throws BundleException
*
*/
void attachFragment(BundleGeneration gen) throws BundleException {
if (debug.classLoader) {
debug.println(this + " fragment attached update classpath");
}
classPath.attachFragment(gen);
}
Collection<String> listResources(String path, String filePattern, int options) {
if (debug.classLoader) {
debug.println(this + " List bundle resources: " + path + ", pattern=" + filePattern);
}
if (path.startsWith("/")) {
path = path.substring(1);
}
if (path.endsWith("/")) {
path = path.substring(0, path.length() - 1);
}
// TODO handle . within path
String pkg = path.replace('/', '.');
@SuppressWarnings("unchecked")
final Set<String> res = (Set<String>)
secure.callSearchFor(this, filePattern, pkg, path, listSearch,
(options << 8) | LIST, this, null);
return res;
}
//
// Private
//
/**
* Searches for and loads classes and resources according to OSGi search
* order. When lazy activation of bundles are used this method will detect and
* perform the activation. The actual searching and loading is done in
* {@link #searchFor0()}
*
* @param name
* Name of class or pattern we are looking for, null if we look for a
* resource
* @param pkg
* Package name for item
* @param path
* File path to item searched ("/" separated)
* @param action
* Action to be taken when item is found
* @param options
* Options controlling what should be included in search result.
*
* @return Object returned from action class.
*/
Object searchFor(String name, String pkg, String path, SearchAction action, int options,
BundleClassLoader requestor, HashSet<BundleClassLoader> visited) {
try {
final BundleImpl b = (BundleImpl) getBundle();
boolean initiator = false;
ArrayList<BundleImpl> bundlesToActivate = null;
if (action == classSearch) {
boolean bundlePresent = false;
bundlesToActivate = tlBundlesToActivate.get();
initiator = bundlesToActivate == null;
if (initiator) {
bundlesToActivate = new ArrayList<BundleImpl>();
tlBundlesToActivate.set(bundlesToActivate);
} else {
bundlePresent = bundlesToActivate.contains(b);
}
if (!bundlePresent && b.triggersActivationPkg(pkg)) {
bundlesToActivate.add(b);
if (debug.lazy_activation) {
debug.println(this + " lazy activation of #" + b.id + " triggered by searchFor("
+ name + ")");
}
}
}
final Object res = searchFor0(name, pkg, path, action, options, requestor, visited);
if (initiator) {
tlBundlesToActivate.set(null);
for (int i = bundlesToActivate.size() - 1; i >= 0; i--) {
final BundleImpl tmp = bundlesToActivate.get(i);
if (debug.lazy_activation) {
debug.println(this + " requesting lazy activation of #" + tmp.id);
}
try {
tmp.finalizeActivation();
} catch (final BundleException e) {
fwCtx.frameworkError(tmp, e);
}
}
}
return res;
} catch (final Error te) {
tlBundlesToActivate.set(null);
throw te;
}
}
/**
* Search for classloader to use according to OSGi search order.
*
* 3 If the class or resource is in a package that is imported using
* Import-Package or was imported dynamically in a previous load, then the
* request is delegated to the exporting bundles class loader; otherwise the
* search continues with the next step. If the request is delegated to an
* exporting class loader and the class or resource is not found, then the
* search terminates and the request fails.
*
* 4 If the class or resource is in a package that is imported from one or
* more other bundles using Require-Bundle, the request is delegated to the
* class loaders of the other bundles, in the order in which they are
* specified in this bundles manifest. If the class or resource is not found,
* then the search continues with the next step.
*
* 5 The bundles own internal bundle class path is searched. If the class or
* resource is not found, then the search continues with the next step.
*
* 6 Each attached fragment's internal bundle class path is searched. The
* fragments are searched in ascending bundle ID order. If the class or
* resource is not found, then the search continues with the next step.
*
* 7 If the class or resource is in a package that is exported by the bundle
* or the package is imported by the bundle (using Import-Package or
* Require-Bundle), then the search ends and the class or resource is not
* found.
*
* 8 Otherwise, if the class or resource is in a package that is imported
* using DynamicImport-Package, then a dynamic import of the package is now
* attempted. An exporter must conform to any implied package constraints. If
* an appropriate exporter is found, a wire is established so that future
* loads of the package are handled in Step 3. If a dynamic wire is not
* established, then the request fails.
*
* 9 If the dynamic import of the package is established, the request is
* delegated to the exporting bundle's class loader. If the request is
* delegated to an exporting class loader and the class or resource is not
* found, then the search terminates and the request fails.
*
* @param name
* Name of class or null if we look for a resource
* @param pkg
* Package name for item
* @param path
* File path to item searched ("/" separated)
* @param action
* Action to be taken when item is found
* @param onlyFirst
* Stop search when first matching item is found.
*
* @return Object returned from action class.
*/
Object searchFor0(String name, String pkg, String path, SearchAction action, int options,
BundleClassLoader requestor, HashSet<BundleClassLoader> visited) {
BundlePackages pbp;
Iterator<ExportPkg> ep;
// TODO, Should this be an action method
if (action == classSearch && requestor != this) {
final Class<?> c = findLoadedClass(name);
if (c != null) {
return c;
}
}
final boolean list = (options & LIST) != 0;
final boolean local = (options & LOCAL) != 0;
final boolean recurse = (options & RECURSE) != 0;
Object answer = null;
if (debug.classLoader) {
debug.println(this + " Search for: " + path);
}
/* 3 */
if (pkg != null) {
pbp = bpkgs.getProviderBundlePackages(pkg);
if (pbp != null) {
final ClassLoader cl = pbp.getClassLoader();
if (!local || cl == this) {
if (isSystemBundle(pbp.bg.bundle)) {
answer = frameworkSearchFor(cl, name, path, action);
if (!recurse) {
return answer;
}
} else {
final BundleClassLoader bcl = (BundleClassLoader) cl;
// Second check avoids a loop when a required bundle imports a
// package from its requiring host that it self should
// provide contents for to the requiring bundle.
if (bcl != this && (visited == null || (bcl != null && !visited.contains(bcl)))) {
if (bcl != null) {
if (debug.classLoader) {
debug.println(this + " Import search: " + path + " from #" + pbp.bg.bundle.id);
}
answer = secure.callSearchFor(bcl, name, pkg, path, action, options & ~RECURSE,
requestor, visited);
} else {
if (debug.classLoader) {
debug.println(this + " No import found: " + path);
}
}
if (!recurse) {
return answer;
}
}
}
}
if (cl != this) {
// Import checked we don't need to list any more in this directory.
options |= ONLY_RECURSE;
}
} else if (!local) {
/* 4 */
final ArrayList<BundleGeneration> pl = bpkgs.getRequiredBundleGenerations(pkg);
if (pl != null) {
if (visited == null) {
visited = new HashSet<BundleClassLoader>();
}
visited.add(this);
for (final BundleGeneration pbg : pl) {
final ClassLoader cl = pbg.getClassLoader();
if (cl instanceof BundleClassLoader) {
final BundleClassLoader bcl = (BundleClassLoader)cl;
if (bcl != null && !visited.contains(bcl)) {
if (debug.classLoader) {
debug.println(this + " Required bundle search: " + path + " from #"
+ pbg.bundle.id);
}
answer = secure.callSearchFor(bcl, name, pkg, path, action, options,
requestor, visited);
}
} else {
answer = frameworkSearchFor(cl, name, path, action);
}
if (answer != null) {
if (list || recurse) {
break;
} else {
return answer;
}
}
}
if (debug.classLoader && answer == null) {
debug.println(this + " Required bundle search: "
+ "Not found, continuing with local search.");
}
}
}
ep = bpkgs.getExports(pkg);
} else {
ep = null;
}
/* 5 + 6 */
if (this != requestor && ep != null) {
// TODO should we block resources?
if (action == classSearch) {
boolean blocked = true;
while (ep.hasNext()) {
if (ep.next().checkFilter(name)) {
blocked = false;
break;
}
}
if (blocked) {
if (debug.classLoader) {
debug.println(this + " Filter check blocked search for: " + name);
}
return null;
}
}
}
final Vector<FileArchive> av = classPath.componentExists(path, (options & ONLY_FIRST) != 0,
(options & LIST) != 0);
if (av != null || recurse) {
try {
Object res = action.get(av, path, name, pkg, options, requestor, this);
if (answer != null) {
if (res != null) {
@SuppressWarnings("unchecked")
Collection<Object> ca = (Collection<Object>) answer;
@SuppressWarnings("unchecked")
Collection<Object> cr = (Collection<Object>) res;
ca.addAll(cr);
}
} else {
answer = res;
}
return answer;
} catch (final ClassFormatError cfe) {
// TODO: OSGI43 WeavingHook CT has some specific demands that
// ClassFormatErrors are thrown that doesn't seem to be in the spec
throw cfe;
} catch (final IOException ioe) {
fwCtx.frameworkError(bpkgs.bg.bundle, ioe);
return null;
}
}
/* 7 */
if (ep != null || (options & LIST) != 0) {
return null;
}
/* 8 */
if (pkg != null) {
pbp = bpkgs.getDynamicProviderBundlePackages(pkg);
if (pbp != null) {
/* 9 */
if (isSystemBundle(pbp.bg.bundle)) {
try {
return fwCtx.systemBundle.getClassLoader().loadClass(name);
} catch (final ClassNotFoundException e) {
// continue
}
} else {
final BundleClassLoader cl = (BundleClassLoader) pbp.getClassLoader();
if (cl != null) {
if (debug.classLoader) {
debug.println(this + " Dynamic import search: " + path + " from #"
+ pbp.bg.bundle.id);
}
return secure.callSearchFor(cl, name, pkg, path, action, options, requestor,
visited);
}
}
}
if (debug.classLoader) {
debug.println(this + " No dynamic import: " + path);
}
}
return null;
}
/**
* Get resources/classes from the framework.
* Rewrite this since this solution will leak
* resources that aren't boot delegated.
*
* @param cl
* @param name
* @param path
* @param action
* @return
*/
private Object frameworkSearchFor(final ClassLoader cl, String name, String path,
SearchAction action) {
if (action == classSearch) {
try {
return cl.loadClass(name);
} catch (final ClassNotFoundException e) {
}
} else if (action == resourceSearch) {
try {
return cl.getResources(path);
} catch (IOException e) {
}
} else if (action == listSearch) {
// TODO, listSearch
throw new UnsupportedOperationException("listResources not available on system bundle");
}
return null;
}
private static boolean isSystemBundle(BundleImpl bundle) {
return bundle == bundle.fwCtx.systemBundle;
}
/**
* Search action
*/
interface SearchAction {
public abstract Object get(Vector<FileArchive> items, String path, String name, String pkg,
int options, BundleClassLoader requestor, BundleClassLoader cl)
throws IOException;
}
/**
* Search action for class searching
*/
static final SearchAction classSearch = new SearchAction() {
public Object get(Vector<FileArchive> items, String path, String name, String pkg,
int options, BundleClassLoader requestor, BundleClassLoader cl)
throws IOException {
byte[] bytes = items.get(0).getClassBytes(path);
if (bytes != null) {
if (cl.debug.classLoader) {
cl.debug.println("classLoader(#" + cl.bpkgs.bg.bundle.id + ") - load class: " + name);
}
synchronized (cl) {
Class<?> c = cl.findLoadedClass(name);
if (c == null) {
if (pkg != null) {
if (cl.getPackage(pkg) == null) {
cl.definePackage(pkg, null, null, null, null, null, null, null);
}
}
WeavingHooks.WovenClassImpl wc = null;
if (cl != null && cl.bpkgs != null && cl.bpkgs.bg != null
&& cl.bpkgs.bg.bundle != null) {
wc = new WeavingHooks.WovenClassImpl(cl.bpkgs.bg.bundle, name, bytes);
try {
cl.fwCtx.weavingHooks.callHooks(wc);
if (wc.hasAdditionalDynamicImports()) {
cl.bpkgs.parseDynamicImports(wc.getDynamicImportsAsString());
}
bytes = wc.getBytes();
} catch (final ClassFormatError cfe) {
throw cfe;
} catch (final Throwable t) {
final ClassFormatError cfe =
new ClassFormatError("Failed to call WeavingHooks for " + name);
cfe.initCause(t);
throw cfe;
}
}
try {
if (cl.protectionDomain == null) {
// Kaffe can't handle null protectiondomain
c = cl.defineClass(name, bytes, 0, bytes.length);
} else {
c = cl.defineClass(name, bytes, 0, bytes.length, cl.protectionDomain);
}
} finally {
if (wc != null) {
wc.setDefinedClass(c);
}
}
}
return c;
}
} else {
return items.get(0).loadClassBytes(name, cl);
}
}
};
/**
* Search action for resource searching
*/
static final SearchAction resourceSearch = new SearchAction() {
public Object get(Vector<FileArchive> items, String path, String name, String pkg,
int options, BundleClassLoader requestor, BundleClassLoader cl)
throws IOException {
final Vector<URL> answer = new Vector<URL>();
for (int i = 0; i < items.size(); i++) {
final FileArchive fa = items.elementAt(i);
final URL url = fa.getBundleGeneration().getURL(fa.getSubId(), path);
if (url != null) {
if (cl.debug.classLoader) {
cl.debug.println("classLoader(#" + cl.bpkgs.bg.bundle.id + ") - found: " + path
+ " -> " + url);
}
answer.addElement(url);
} else {
return null;
}
}
return answer.elements();
}
};
/**
* Search action for listResource searching
*/
static final SearchAction listSearch = new SearchAction() {
public Object get(Vector<FileArchive> items, String path, String name, String pkg,
int options, BundleClassLoader requestor, BundleClassLoader cl)
throws IOException
{
Set<String> answer = new HashSet<String>();
boolean onlyRecurse = (options & ONLY_RECURSE) != 0;
HashSet<String> scanned = new HashSet<String>();
for (String subPkg : cl.bpkgs.getSubProvider(pkg)) {
if ((options & RECURSE) != 0) {
String next = path.length() > 0 ? path + "/" + subPkg : subPkg;
@SuppressWarnings("unchecked")
Set<String> subAnswer = (Set<String>) cl.searchFor(name, next.replace('/', '.'),
next, listSearch,
options & ~ONLY_RECURSE,
requestor, null);
if (subAnswer != null) {
answer.addAll(subAnswer);
}
}
if (!onlyRecurse && (name == null || Util.filterMatch(name, subPkg))) {
answer.add(path + "/" + subPkg);
}
scanned.add(subPkg + "/");
}
if (items != null) {
for (FileArchive fa : items) {
for (String e : fa.listDir(path)) {
if (scanned.contains(e)) {
if (cl.debug.classLoader) {
cl.debug.println("classLoader(#" + cl.bpkgs.bg.bundle.id + ") - list search skip: " + e);
}
continue;
} else if (cl.debug.classLoader) {
cl.debug.println("classLoader(#" + cl.bpkgs.bg.bundle.id +
") - list search check: " + e + (onlyRecurse ? " (scan)" : ""));
}
if (e.endsWith("/")) {
e = e.substring(0, e.length() - 1);
if ((options & RECURSE) != 0) {
String next = path.length() > 0 ? path + "/" + e : e;
@SuppressWarnings("unchecked")
Set<String> subAnswer = (Set<String>) cl.searchFor(name,
next.replace('/', '.'),
next, listSearch,
options & ~ONLY_RECURSE,
requestor, null);
if (subAnswer != null) {
answer.addAll(subAnswer);
}
}
}
if (!onlyRecurse && (name == null || Util.filterMatch(name, e))) {
answer.add(path + "/" + e);
if (cl.debug.classLoader) {
cl.debug.println("classLoader(#" + cl.bpkgs.bg.bundle.id + ") - list search match: " + e);
}
}
}
}
}
return answer;
}
};
/**
* Find native library code to load. This method is called from
* findLibrary(name) within a doPriviledged-block via the secure object.
*
*/
String findLibrary0(final String name) {
return classPath.getNativeLibrary(name);
}
/**
* Check if we have native code
*/
Set<BundleGeneration> hasNativeRequirements() {
return classPath.hasNativeRequirements();
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.slicer;
import com.intellij.codeInspection.dataFlow.JavaMethodContractUtil;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.MethodSignatureUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.ObjectUtils;
import com.intellij.util.Processor;
import com.siyeh.ig.psiutils.ExpressionUtils;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
/**
* @author cdr
*/
class SliceForwardUtil {
  /**
   * Forward-slice from {@code element}: feeds every place the value flows TO
   * into {@code processor}. Returns false as soon as the processor aborts.
   */
  static boolean processUsagesFlownFromThe(@NotNull PsiElement element,
                                           @NotNull final JavaSliceUsage parent,
                                           @NotNull final Processor<? super SliceUsage> processor) {
    // If this element is the value a contract says the enclosing call returns,
    // the value flows into the call expression itself.
    PsiExpression expression = getMethodCallTarget(element);
    if (expression != null &&
        !SliceUtil.createAndProcessSliceUsage(expression, parent, parent.getSubstitutor(), parent.indexNesting, "", processor)) {
      return false;
    }
    Pair<PsiElement, PsiSubstitutor> pair = getAssignmentTarget(element, parent);
    if (pair != null) {
      PsiElement target = pair.getFirst();
      final PsiSubstitutor substitutor = pair.getSecond();
      if (target instanceof PsiParameter) {
        PsiParameter parameter = (PsiParameter)target;
        PsiElement declarationScope = parameter.getDeclarationScope();
        if (declarationScope instanceof PsiMethod) {
          // The value flows into a method parameter: process the matching
          // parameter of the method and of every override within scope.
          final PsiMethod method = (PsiMethod)declarationScope;
          final int parameterIndex = method.getParameterList().getParameterIndex(parameter);
          Processor<PsiMethod> myProcessor = override -> {
            if (!parent.getScope().contains(override)) return true;
            final PsiSubstitutor superSubstitutor = method == override
                                                    ? substitutor
                                                    : MethodSignatureUtil.getSuperMethodSignatureSubstitutor(method.getSignature(substitutor),
                                                                                                             override.getSignature(substitutor));
            PsiParameter[] parameters = override.getParameterList().getParameters();
            if (parameters.length <= parameterIndex) return true;
            PsiParameter actualParam = parameters[parameterIndex];
            return SliceUtil.createAndProcessSliceUsage(actualParam, parent, superSubstitutor, parent.indexNesting, "", processor);
          };
          if (!myProcessor.process(method)) return false;
          return OverridingMethodsSearch.search(method, parent.getScope().toSearchScope(), true).forEach(myProcessor);
        }
      }
      return SliceUtil.createAndProcessSliceUsage(target, parent, parent.getSubstitutor(),parent.indexNesting, "", processor);
    }
    if (element instanceof PsiReferenceExpression) {
      // A read of a variable: continue the slice from the variable itself.
      PsiReferenceExpression ref = (PsiReferenceExpression)element;
      PsiElement resolved = ref.resolve();
      if (!(resolved instanceof PsiVariable)) return true;
      final PsiVariable variable = (PsiVariable)resolved;
      return processAssignedFrom(variable, ref, parent, processor);
    }
    if (element instanceof PsiVariable) {
      return processAssignedFrom(element, element, parent, processor);
    }
    if (element instanceof PsiMethod) {
      return processAssignedFrom(element, element, parent, processor);
    }
    return true;
  }
  /**
   * Processes every location that receives a value FROM {@code from}
   * (a local, parameter, field, or method result), dispatching references
   * to {@link #processAssignmentTarget}.
   *
   * @param context for locals: only references at or after this element's
   *                text offset are considered
   */
  private static boolean processAssignedFrom(@NotNull PsiElement from,
                                             @NotNull PsiElement context,
                                             @NotNull JavaSliceUsage parent,
                                             @NotNull final Processor<? super SliceUsage> processor) {
    if (from instanceof PsiLocalVariable) {
      return searchReferencesAndProcessAssignmentTarget(from, context, parent, processor);
    }
    if (from instanceof PsiParameter) {
      PsiParameter parameter = (PsiParameter)from;
      PsiElement scope = parameter.getDeclarationScope();
      Collection<PsiParameter> parametersToAnalyze = new THashSet<>();
      if (scope instanceof PsiMethod) {
        // For a method parameter, the value can flow through the same
        // parameter slot of the deepest super methods and every override.
        final PsiMethod method = (PsiMethod)scope;
        int index = method.getParameterList().getParameterIndex(parameter);
        Collection<PsiMethod> superMethods = new THashSet<>(Arrays.asList(method.findDeepestSuperMethods()));
        superMethods.add(method);
        // Drop super methods outside the analysis scope.
        for (Iterator<PsiMethod> iterator = superMethods.iterator(); iterator.hasNext(); ) {
          ProgressManager.checkCanceled();
          PsiMethod superMethod = iterator.next();
          if (!parent.params.scope.contains(superMethod)) {
            iterator.remove();
          }
        }
        final THashSet<PsiMethod> implementors = new THashSet<>(superMethods);
        for (PsiMethod superMethod : superMethods) {
          ProgressManager.checkCanceled();
          if (!OverridingMethodsSearch.search(superMethod, parent.getScope().toSearchScope(), true).forEach(sub -> {
            ProgressManager.checkCanceled();
            implementors.add(sub);
            return true;
          })) return false;
        }
        for (PsiMethod implementor : implementors) {
          ProgressManager.checkCanceled();
          if (!parent.params.scope.contains(implementor)) continue;
          // Work on source, not stubs, so text offsets are meaningful.
          if (implementor instanceof PsiCompiledElement) implementor = (PsiMethod)implementor.getNavigationElement();
          PsiParameter[] parameters = implementor.getParameterList().getParameters();
          if (index != -1 && index < parameters.length) {
            parametersToAnalyze.add(parameters[index]);
          }
        }
      }
      else {
        parametersToAnalyze.add(parameter);
      }
      for (final PsiParameter psiParameter : parametersToAnalyze) {
        ProgressManager.checkCanceled();
        if (!searchReferencesAndProcessAssignmentTarget(psiParameter, null, parent, processor)) return false;
      }
      return true;
    }
    if (from instanceof PsiField) {
      return searchReferencesAndProcessAssignmentTarget(from, null, parent, processor);
    }
    if (from instanceof PsiMethod) {
      PsiMethod method = (PsiMethod)from;
      Collection<PsiMethod> superMethods = new THashSet<>(Arrays.asList(method.findDeepestSuperMethods()));
      superMethods.add(method);
      final Set<PsiReference> processed = new THashSet<>(); //usages of super method and overridden method can overlap
      for (final PsiMethod containingMethod : superMethods) {
        if (!MethodReferencesSearch.search(containingMethod, parent.getScope().toSearchScope(), true).forEach(reference -> {
          ProgressManager.checkCanceled();
          // The search may report from multiple threads; guard the dedup set.
          synchronized (processed) {
            if (!processed.add(reference)) return true;
          }
          PsiElement element = reference.getElement().getParent();
          return processAssignmentTarget(element, parent, processor);
        })) {
          return false;
        }
      }
    }
    return true;
  }
/**
 * Finds every reference to {@code element} and feeds each one through
 * {@link #processAssignmentTarget}. References that occur textually before
 * {@code context} (when given) are skipped.
 *
 * @return false as soon as a processor rejects a usage, true otherwise
 */
private static boolean searchReferencesAndProcessAssignmentTarget(@NotNull PsiElement element,
                                                                  @Nullable final PsiElement context,
                                                                  @NotNull JavaSliceUsage parent,
                                                                  @NotNull Processor<? super SliceUsage> processor) {
  return ReferencesSearch.search(element).forEach(ref -> {
    PsiElement refElement = ref.getElement();
    // A reference located before the context element cannot be affected by it.
    boolean precedesContext = context != null && refElement.getTextOffset() < context.getTextOffset();
    return precedesContext || processAssignmentTarget(refElement, parent, processor);
  });
}
/**
 * Processes one potential dataflow target for the slice: creates a slice
 * usage for assignment targets, and (optionally) a dereference usage when
 * the element is used as a qualifier.
 *
 * @return false if the processor asked to stop, true to continue
 */
private static boolean processAssignmentTarget(@NotNull PsiElement element,
                                               @NotNull JavaSliceUsage parent,
                                               @NotNull Processor<? super SliceUsage> processor) {
  // Elements outside the analysis scope are ignored but do not stop the walk.
  if (!parent.params.scope.contains(element)) return true;
  PsiElement target = element instanceof PsiCompiledElement ? element.getNavigationElement() : element;
  // Non-Java elements are handed off with an empty substitutor.
  if (target.getLanguage() != JavaLanguage.INSTANCE) {
    return SliceUtil.createAndProcessSliceUsage(target, parent, EmptySubstitutor.getInstance(), parent.indexNesting, "", processor);
  }
  Pair<PsiElement, PsiSubstitutor> assignment = getAssignmentTarget(target, parent);
  if (assignment != null) {
    return SliceUtil.createAndProcessSliceUsage(target, parent, assignment.getSecond(), parent.indexNesting, "", processor);
  }
  if (parent.params.showInstanceDereferences && isDereferenced(target)) {
    return processor.process(new JavaSliceDereferenceUsage(target.getParent(), parent, parent.getSubstitutor()));
  }
  return true;
}
/**
 * If {@code element} is an argument whose value is the contract-declared
 * return value of the surrounding call, returns that call; otherwise null.
 */
private static PsiExpression getMethodCallTarget(PsiElement element) {
  PsiElement expr = complexify(element);
  PsiElement exprParent = expr.getParent();
  PsiMethodCallExpression call = null;
  if (exprParent instanceof PsiExpressionList) {
    call = ObjectUtils.tryCast(exprParent.getParent(), PsiMethodCallExpression.class);
  }
  // findReturnedValue tolerates a null call; null result never matches expr.
  return JavaMethodContractUtil.findReturnedValue(call) == expr ? call : null;
}
/**
 * True when {@code element} is a reference expression used as the qualifier
 * of another reference expression (i.e. it is dereferenced as {@code element.x}).
 */
private static boolean isDereferenced(@NotNull PsiElement element) {
  if (!(element instanceof PsiReferenceExpression)) return false;
  PsiElement parent = element.getParent();
  return parent instanceof PsiReferenceExpression
         && ((PsiReferenceExpression)parent).getQualifierExpression() == element;
}
/**
 * Determines where the value of {@code element} flows: the variable it is
 * assigned to, the parameter it is passed as, the method it is returned
 * from, or the call it qualifies (when contracts say the call returns the
 * qualifier). Returns the target paired with the applicable substitutor,
 * or null when the element is not a dataflow source in any of these forms.
 */
private static Pair<PsiElement,PsiSubstitutor> getAssignmentTarget(@NotNull PsiElement element, @NotNull JavaSliceUsage parentUsage) {
  // Climb out of enclosing parentheses/casts so the parent checks below
  // see the actual consumer of the value.
  element = complexify(element);
  PsiElement target = null;
  PsiSubstitutor substitutor = parentUsage.getSubstitutor();
  //assignment: "x = element" -> value flows into x's resolved declaration
  PsiElement parent = element.getParent();
  if (parent instanceof PsiAssignmentExpression) {
    PsiAssignmentExpression assignment = (PsiAssignmentExpression)parent;
    if (element.equals(assignment.getRExpression())) {
      PsiElement left = assignment.getLExpression();
      if (left instanceof PsiReferenceExpression) {
        // advancedResolve keeps the substitutor for generic fields/variables
        JavaResolveResult result = ((PsiReferenceExpression)left).advancedResolve(false);
        target = result.getElement();
        substitutor = result.getSubstitutor();
      }
    }
  }
  // variable initializer: "T x = element" -> value flows into x
  else if (parent instanceof PsiVariable) {
    PsiVariable variable = (PsiVariable)parent;
    PsiElement initializer = variable.getInitializer();
    if (element.equals(initializer)) {
      target = variable;
    }
  }
  //method call: "m(..., element, ...)" -> value flows into the matching parameter
  else if (parent instanceof PsiExpressionList && parent.getParent() instanceof PsiCallExpression) {
    PsiExpression[] expressions = ((PsiExpressionList)parent).getExpressions();
    int index = ArrayUtilRt.find(expressions, element);
    PsiCallExpression methodCall = (PsiCallExpression)parent.getParent();
    JavaResolveResult result = methodCall.resolveMethodGenerics();
    PsiMethod method = (PsiMethod)result.getElement();
    if (index != -1 && method != null) {
      PsiParameter[] parameters = method.getParameterList().getParameters();
      // index may exceed the parameter count for varargs/malformed calls;
      // such arguments are simply not tracked here
      if (index < parameters.length) {
        target = parameters[index];
        substitutor = result.getSubstitutor();
      }
    }
  }
  // return statement: "return element" -> value flows out of the enclosing method
  else if (parent instanceof PsiReturnStatement) {
    PsiReturnStatement statement = (PsiReturnStatement)parent;
    if (element.equals(statement.getReturnValue())) {
      target = PsiTreeUtil.getParentOfType(statement, PsiMethod.class);
    }
  }
  // qualifier of a call whose contract returns the qualifier: "element.m()"
  else if (element instanceof PsiExpression){
    PsiMethodCallExpression call = ExpressionUtils.getCallForQualifier((PsiExpression)element);
    PsiExpression maybeQualifier = JavaMethodContractUtil.findReturnedValue(call);
    if (maybeQualifier == element) {
      target = call;
    }
  }
  return target == null ? null : Pair.create(target, substitutor);
}
/**
 * Walks upward through any enclosing parentheses and type casts that wrap
 * {@code element}, returning the outermost such expression (or the element
 * itself when it is not wrapped).
 */
@NotNull
static PsiElement complexify(@NotNull PsiElement element) {
  PsiElement current = element;
  while (true) {
    PsiElement parent = current.getParent();
    boolean wrappedInParens = parent instanceof PsiParenthesizedExpression
        && current.equals(((PsiParenthesizedExpression)parent).getExpression());
    boolean wrappedInCast = parent instanceof PsiTypeCastExpression
        && current.equals(((PsiTypeCastExpression)parent).getOperand());
    if (!wrappedInParens && !wrappedInCast) {
      return current;
    }
    current = parent;
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller;
import com.google.common.util.concurrent.ServiceManager;
import com.google.gson.Gson;
import com.google.inject.*;
import com.google.inject.name.Named;
import com.google.inject.persist.Transactional;
import com.sun.jersey.spi.container.servlet.ServletContainer;
import org.apache.ambari.eventdb.webservice.WorkflowJsonService;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.StateRecoveryManager;
import org.apache.ambari.server.StaticallyInject;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
import org.apache.ambari.server.api.AmbariErrorHandler;
import org.apache.ambari.server.api.AmbariPersistFilter;
import org.apache.ambari.server.api.MethodOverrideFilter;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.api.services.KeyService;
import org.apache.ambari.server.api.services.PersistKeyValueImpl;
import org.apache.ambari.server.api.services.PersistKeyValueService;
import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.internal.*;
import org.apache.ambari.server.controller.utilities.DatabaseChecker;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.PersistenceType;
import org.apache.ambari.server.orm.dao.*;
import org.apache.ambari.server.orm.entities.MetainfoEntity;
import org.apache.ambari.server.resources.ResourceManager;
import org.apache.ambari.server.resources.api.rest.GetResource;
import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
import org.apache.ambari.server.security.CertificateManager;
import org.apache.ambari.server.security.SecurityFilter;
import org.apache.ambari.server.security.SecurityHeaderFilter;
import org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter;
import org.apache.ambari.server.security.authorization.AmbariLdapAuthenticationProvider;
import org.apache.ambari.server.security.authorization.AmbariLocalUserDetailsService;
import org.apache.ambari.server.security.authorization.Users;
import org.apache.ambari.server.security.authorization.internal.AmbariInternalAuthenticationProvider;
import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator;
import org.apache.ambari.server.security.unsecured.rest.CertificateDownload;
import org.apache.ambari.server.security.unsecured.rest.CertificateSign;
import org.apache.ambari.server.security.unsecured.rest.ConnectionInfo;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.topology.AmbariContext;
import org.apache.ambari.server.topology.BlueprintFactory;
import org.apache.ambari.server.topology.TopologyManager;
import org.apache.ambari.server.topology.TopologyRequestFactoryImpl;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.view.ViewRegistry;
import org.apache.velocity.app.Velocity;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlets.GzipFilter;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.request.RequestContextListener;
import org.springframework.web.context.support.GenericWebApplicationContext;
import org.springframework.web.filter.DelegatingFilterProxy;
import javax.crypto.BadPaddingException;
import javax.servlet.DispatcherType;
import java.io.File;
import java.net.Authenticator;
import java.net.BindException;
import java.net.PasswordAuthentication;
import java.util.EnumSet;
import java.util.Map;
@Singleton
public class DemoServer {
  // NOTE(review): should be "private static final Logger" per SLF4J convention;
  // left as-is because a doc-only pass may not change code.
  private static Logger LOG = LoggerFactory.getLogger(DemoServer.class);
  // Log4j category name used to route Velocity's internal logging.
  protected static final String VELOCITY_LOG_CATEGORY = "VelocityLogger";
  /**
   * Dispatcher types for webAppContext.addFilter.
   */
  public static final EnumSet<DispatcherType> DISPATCHER_TYPES = EnumSet
      .of(DispatcherType.REQUEST);
  static {
    // Route Velocity logging through the dedicated log4j category above.
    Velocity.setProperty("runtime.log.logsystem.log4j.logger", VELOCITY_LOG_CATEGORY);
  }
  // The embedded Jetty server; created in run(), may be null before that.
  private Server server = null;
  // true while controller runs — not modified in this class; presumably
  // toggled/read elsewhere — TODO confirm.
  public volatile boolean running = true;
  final String CONTEXT_PATH = "/";
  // final String SPRING_CONTEXT_LOCATION = "classpath:/webapp/WEB-INF/spring-security.xml";
  final String SPRING_CONTEXT_LOCATION = "webapp/WEB-INF/spring-security.xml";
  // Regex used to split pipe-separated disabled cipher/protocol lists.
  final String DISABLED_ENTRIES_SPLITTER = "\\|";
  // Server configuration (ports, keystores, feature flags).
  @Inject
  Configuration configs;
  // Manages the server's root certificate and agent certificates.
  @Inject
  CertificateManager certMan;
  // The Guice injector, used for lookups throughout startup.
  @Inject
  Injector injector;
  // Stack/service metadata access.
  @Inject
  AmbariMetaInfo ambariMetaInfo;
  // DAO for the metainfo table (schema version record, etc.).
  @Inject
  MetainfoDAO metainfoDAO;
  // Whether default DB data must be created on startup (see initDB()).
  @Inject
  @Named("dbInitNeeded")
  boolean dbInitNeeded;
  /**
   * Guava service manager singleton (bound with {@link
   * Scopes#SINGLETON}).
   */
  @Inject
  private ServiceManager serviceManager;
  /**
   * The singleton view registry.
   */
  @Inject
  ViewRegistry viewRegistry;
  /**
   * The handler list for deployed web apps.
   */
  @Inject
  AmbariHandlerList handlerList;
  /**
   * Session manager.
   */
  @Inject
  SessionManager sessionManager;
  /**
   * Session ID manager.
   */
  @Inject
  SessionIdManager sessionIdManager;
  // Spring Security filter chain, installed on /api/* when enabled.
  @Inject
  DelegatingFilterProxy springSecurityFilter;
  /**
   * Returns the server OS type string from configuration
   * (see {@code Configuration.OS_VERSION_KEY}); run() fails fast when it is unset.
   */
  public String getServerOsType() {
    return configs.getServerOsType();
  }
  // Process-wide handle to the management controller, set once by run().
  private static AmbariManagementController clusterController = null;
  /**
   * Returns the controller instance created during {@link #run()},
   * or null before the server has started.
   */
  public static AmbariManagementController getController() {
    return clusterController;
  }
  /**
   * Boots the server: performs static injection and DB seeding, builds the
   * Spring parent/child application contexts on top of the Guice injector,
   * configures the client-facing Jetty server and the agent-facing Jetty
   * server (SSL one-way and two-way connectors), registers the Jersey REST
   * servlets and filters, then starts everything and joins the client server.
   * Statement order matters throughout — contexts, filters and connectors
   * must exist before start().
   *
   * @throws BadPaddingException when the keystore/private-key password is wrong
   * @throws BindException when a server port is already in use
   */
  @SuppressWarnings("deprecation")
  public void run() throws Exception {
    performStaticInjection();
    initDB();
    // Set jetty thread pools: one for agent traffic, one for clients.
    QueuedThreadPool qtp1 = new QueuedThreadPool(configs.getAgentThreadPoolSize());
    qtp1.setName("qtp-ambari-agent");
    QueuedThreadPool qtp2 = new QueuedThreadPool(configs.getClientThreadPoolSize());
    qtp2.setName("qtp-client");
    server = new Server(qtp2);
    server.setSessionIdManager(sessionIdManager);
    Server serverForAgent = new Server(qtp1);
    DatabaseChecker.checkDBVersion();
    DatabaseChecker.checkDBConsistency();
    try {
      // Parent Spring context exposing Guice-managed beans that the
      // Spring Security XML config depends on.
      ClassPathXmlApplicationContext parentSpringAppContext = new ClassPathXmlApplicationContext();
      parentSpringAppContext.refresh();
      ConfigurableListableBeanFactory factory = parentSpringAppContext.getBeanFactory();
      factory.registerSingleton("guiceInjector", injector);
      factory.registerSingleton("passwordEncoder", injector.getInstance(PasswordEncoder.class));
      factory.registerSingleton("ambariLocalUserService", injector.getInstance(AmbariLocalUserDetailsService.class));
      factory.registerSingleton("ambariLdapAuthenticationProvider", injector.getInstance(AmbariLdapAuthenticationProvider.class));
      factory.registerSingleton("ambariLdapDataPopulator", injector.getInstance(AmbariLdapDataPopulator.class));
      factory.registerSingleton("ambariAuthorizationFilter", injector.getInstance(AmbariAuthorizationFilter.class));
      factory.registerSingleton("ambariInternalAuthenticationProvider", injector.getInstance(AmbariInternalAuthenticationProvider.class));
      //Spring Security xml config depends on this Bean
      String[] contextLocations = {SPRING_CONTEXT_LOCATION};
      ClassPathXmlApplicationContext springAppContext = new ClassPathXmlApplicationContext(contextLocations, parentSpringAppContext);
      //setting ambari web context (sessions + security enabled)
      ServletContextHandler root = new ServletContextHandler(
          ServletContextHandler.SECURITY |
          ServletContextHandler.SESSIONS);
      configureRootHandler(root);
      configureSessionManager(sessionManager);
      root.getSessionHandler().setSessionManager(sessionManager);
      // Bridge the Spring context into the servlet context so Spring MVC /
      // Security machinery can find it.
      GenericWebApplicationContext springWebAppContext = new GenericWebApplicationContext();
      springWebAppContext.setServletContext(root.getServletContext());
      springWebAppContext.setParent(springAppContext);
      root.getServletContext().setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, springWebAppContext);
      certMan.initRootCert();
      // the agent communication (heartbeats, registration, etc)
      // is stateless and does not use sessions.
      ServletContextHandler agentroot = new ServletContextHandler(serverForAgent, "/", ServletContextHandler.NO_SESSIONS);
      if (configs.isAgentApiGzipped()) {
        configureHandlerCompression(agentroot);
      }
      ServletHolder rootServlet = root.addServlet(DefaultServlet.class, "/");
      rootServlet.setInitParameter("dirAllowed", "false");
      rootServlet.setInitOrder(1);
      /* Configure default servlet for agent server */
      rootServlet = agentroot.addServlet(DefaultServlet.class, "/");
      rootServlet.setInitOrder(1);
      // Conditionally adds security-related headers to all HTTP responses.
      root.addFilter(new FilterHolder(injector.getInstance(SecurityHeaderFilter.class)), "/*", DISPATCHER_TYPES);
      //session-per-request strategy for api and agents
      root.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/api/*", DISPATCHER_TYPES);
      // root.addFilter(new FilterHolder(injector.getInstance
      // (AmbariPersistFilter.class)), "/proxy/*", DISPATCHER_TYPES);
      root.addFilter(new FilterHolder(new MethodOverrideFilter()), "/api/*", DISPATCHER_TYPES);
      // root.addFilter(new FilterHolder(new MethodOverrideFilter
      // ()), "/proxy/*", DISPATCHER_TYPES);
      // register listener to capture request context
      root.addEventListener(new RequestContextListener());
      agentroot.addFilter(new FilterHolder(injector.getInstance(AmbariPersistFilter.class)), "/agent/*", DISPATCHER_TYPES);
      agentroot.addFilter(SecurityFilter.class, "/*", DISPATCHER_TYPES);
      if (configs.getApiAuthentication()) {
        root.addFilter(new FilterHolder(springSecurityFilter), "/api/*", DISPATCHER_TYPES);
        // root.addFilter(new FilterHolder(springSecurityFilter)
        // , "/proxy/*", DISPATCHER_TYPES);
      }
      //Secured connector for 2-way auth (agent mutual TLS)
      SslContextFactory contextFactoryTwoWay = new SslContextFactory();
      disableInsecureProtocols(contextFactoryTwoWay);
      // SslSelectChannelConnector sslConnectorTwoWay =
      // new SslSelectChannelConnector(contextFactoryTwoWay);
      // sslConnectorTwoWay.setPort(configs.getTwoWayAuthPort());
      Map<String, String> configsMap = configs.getConfigsMap();
      String keystore = configsMap.get(Configuration.SRVR_KSTR_DIR_KEY) +
          File.separator +
          configsMap.get(Configuration.KSTR_NAME_KEY);
      String truststore = configsMap.get(Configuration.SRVR_KSTR_DIR_KEY) +
          File.separator +
          configsMap.get(Configuration.TSTR_NAME_KEY);
      String srvrCrtPass = configsMap.get(Configuration.SRVR_CRT_PASS_KEY);
      contextFactoryTwoWay.setKeyStorePath(keystore);
      contextFactoryTwoWay.setKeyStorePassword(srvrCrtPass);
      contextFactoryTwoWay.setKeyManagerPassword(srvrCrtPass);
      contextFactoryTwoWay.setTrustStorePath(truststore);
      contextFactoryTwoWay.setTrustStorePassword(srvrCrtPass);
      contextFactoryTwoWay.setKeyStoreType(configsMap.get(Configuration.KSTR_TYPE_KEY));
      contextFactoryTwoWay.setTrustStoreType(configsMap.get(Configuration.TSTR_TYPE_KEY));
      contextFactoryTwoWay.setNeedClientAuth(configs.getTwoWaySsl());
      ServerConnector sslConnectorTwoWay = new ServerConnector(serverForAgent, contextFactoryTwoWay);
      sslConnectorTwoWay.setPort(configs.getTwoWayAuthPort());
      //SSL Context Factory for the 1-way (server-auth-only) connector
      SslContextFactory contextFactoryOneWay = new SslContextFactory(true);
      contextFactoryOneWay.setKeyStorePath(keystore);
      contextFactoryOneWay.setTrustStorePath(truststore);
      contextFactoryOneWay.setKeyStorePassword(srvrCrtPass);
      contextFactoryOneWay.setKeyManagerPassword(srvrCrtPass);
      contextFactoryOneWay.setTrustStorePassword(srvrCrtPass);
      contextFactoryOneWay.setKeyStoreType(configsMap.get(Configuration.KSTR_TYPE_KEY));
      contextFactoryOneWay.setTrustStoreType(configsMap.get(Configuration.TSTR_TYPE_KEY));
      contextFactoryOneWay.setNeedClientAuth(false);
      disableInsecureProtocols(contextFactoryOneWay);
      //Secured connector for 1-way auth
      ServerConnector sslConnectorOneWay = new ServerConnector(serverForAgent, contextFactoryOneWay);
      sslConnectorOneWay.setPort(configs.getOneWayAuthPort());
      sslConnectorOneWay.setAcceptQueueSize(2);
      sslConnectorTwoWay.setAcceptQueueSize(2);
      serverForAgent.setConnectors(new Connector[]{sslConnectorOneWay, sslConnectorTwoWay});
      // Main REST API servlet (Jersey, package-scanned) on /api/v1/*.
      ServletHolder sh = new ServletHolder(ServletContainer.class);
      sh.setInitParameter("com.sun.jersey.config.property" +
          ".resourceConfigClass",
          "com.sun.jersey.api.core" + ".PackagesResourceConfig");
      sh.setInitParameter("com.sun.jersey.config.property.packages",
          "org.apache.ambari.server.api.rest;" +
          "org.apache.ambari.server.api.services;" +
          "org.apache.ambari.eventdb.webservice;" +
          "org.apache.ambari.server.api");
      sh.setInitParameter("com.sun.jersey.api.json" +
          ".POJOMappingFeature", "true");
      root.addServlet(sh, "/api/v1/*");
      sh.setInitOrder(2);
      SecurityContextHolder.setStrategyName(SecurityContextHolder.MODE_INHERITABLETHREADLOCAL);
      viewRegistry.readViewArchives();
      handlerList.addHandler(root);
      server.setHandler(handlerList);
      // Agent REST servlet on /agent/v1/* of the agent server.
      ServletHolder agent = new ServletHolder(ServletContainer.class);
      agent.setInitParameter("com.sun.jersey.config.property" +
          ".resourceConfigClass",
          "com.sun.jersey.api.core" + ".PackagesResourceConfig");
      agent.setInitParameter(
          "com.sun.jersey.config.property" + ".packages",
          "org.apache.ambari.server.agent.rest;" +
          "org.apache.ambari.server.api");
      agent.setInitParameter("com.sun.jersey.api.json" +
          ".POJOMappingFeature", "true");
      agentroot.addServlet(agent, "/agent/v1/*");
      agent.setInitOrder(3);
      // Unsecured certificate endpoints on the agent server's root.
      ServletHolder cert = new ServletHolder(ServletContainer.class);
      cert.setInitParameter("com.sun.jersey.config.property" +
          ".resourceConfigClass",
          "com.sun.jersey.api.core" + ".PackagesResourceConfig");
      cert.setInitParameter(
          "com.sun.jersey.config.property" + ".packages",
          "org.apache.ambari.server.security.unsecured.rest;" +
          "org.apache.ambari.server.api");
      cert.setInitParameter("com.sun.jersey.api.json" +
          ".POJOMappingFeature", "true");
      agentroot.addServlet(cert, "/*");
      cert.setInitOrder(4);
      /*
      ServletHolder proxy = new ServletHolder(ServletContainer.class);
      proxy.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
          "com.sun.jersey.api.core.PackagesResourceConfig");
      proxy.setInitParameter("com.sun.jersey.config.property.packages",
          "org.apache.ambari.server.proxy");
      proxy.setInitParameter("com.sun.jersey.api.json.POJOMappingFeature", "true");
      root.addServlet(proxy, "/proxy/*");
      proxy.setInitOrder(5);
      */
      // Static resources servlet on /resources/*.
      ServletHolder resources = new ServletHolder(ServletContainer.class);
      resources.setInitParameter("com.sun.jersey.config.property" +
          ".resourceConfigClass",
          "com.sun.jersey.api" + ".core.PackagesResourceConfig");
      resources.setInitParameter(
          "com.sun.jersey.config.property" + ".packages",
          "org.apache.ambari.server" + ".resources.api.rest;");
      root.addServlet(resources, "/resources/*");
      resources.setInitOrder(5);
      if (configs.csrfProtectionEnabled()) {
        sh.setInitParameter("com.sun.jersey.spi.container" +
            ".ContainerRequestFilters", "org.apache.ambari" +
            ".server.api.AmbariCsrfProtectionFilter");
        /* proxy.setInitParameter("com.sun.jersey.spi.container
        .ContainerRequestFilters",
            "org.apache.ambari.server.api.AmbariCsrfProtectionFilter"); */
      }
      /* Configure the API server to use the NIO connectors */
      ServerConnector apiConnector;
      if (configs.getApiSSLAuthentication()) {
        String httpsKeystore = configsMap.get(Configuration.CLIENT_API_SSL_KSTR_DIR_NAME_KEY) +
            File.separator + configsMap.get(Configuration.CLIENT_API_SSL_KSTR_NAME_KEY);
        String httpsTruststore = configsMap.get(Configuration.CLIENT_API_SSL_KSTR_DIR_NAME_KEY) +
            File.separator + configsMap.get(Configuration.CLIENT_API_SSL_TSTR_NAME_KEY);
        LOG.info("API SSL Authentication is turned on. Keystore -" +
            " " +
            httpsKeystore);
        String httpsCrtPass = configsMap.get(Configuration.CLIENT_API_SSL_CRT_PASS_KEY);
        SslContextFactory contextFactoryApi = new SslContextFactory();
        disableInsecureProtocols(contextFactoryApi);
        contextFactoryApi.setKeyStorePath(httpsKeystore);
        contextFactoryApi.setTrustStorePath(httpsTruststore);
        contextFactoryApi.setKeyStorePassword(httpsCrtPass);
        contextFactoryApi.setKeyManagerPassword(httpsCrtPass);
        contextFactoryApi.setTrustStorePassword(httpsCrtPass);
        contextFactoryApi.setKeyStoreType(configsMap.get(Configuration.CLIENT_API_SSL_KSTR_TYPE_KEY));
        // NOTE(review): trust store type is read from the *keystore* type key
        // here — presumably intentional, but verify against the config schema.
        contextFactoryApi.setTrustStoreType(configsMap.get(Configuration.CLIENT_API_SSL_KSTR_TYPE_KEY));
        ServerConnector sapiConnector = new ServerConnector(server, contextFactoryApi);
        sapiConnector.setIdleTimeout(configs.getConnectionMaxIdleTime());
        sapiConnector.setPort(configs.getClientSSLApiPort());
        apiConnector = sapiConnector;
      } else {
        apiConnector = new ServerConnector(server);
        apiConnector.setPort(configs.getClientApiPort());
        apiConnector.setIdleTimeout(configs.getConnectionMaxIdleTime());
      }
      server.addConnector(apiConnector);
      server.setStopAtShutdown(true);
      serverForAgent.setStopAtShutdown(true);
      springAppContext.start();
      String osType = getServerOsType();
      if (osType == null || osType.isEmpty()) {
        throw new RuntimeException(
            Configuration.OS_VERSION_KEY + " is not " +
            " set in the ambari.properties file");
      }
      //Start action scheduler
      LOG.info("********* Initializing Clusters **********");
      Clusters clusters = injector.getInstance(Clusters.class);
      StringBuilder clusterDump = new StringBuilder();
      clusters.debugDump(clusterDump);
      LOG.info("********* Current Clusters State *********");
      LOG.info(clusterDump.toString());
      // LOG.info("********* Reconciling Alert Definitions **********");
      // ambariMetaInfo.reconcileAlertDefinitions(clusters);
      LOG.info("********* Initializing ActionManager **********");
      ActionManager manager = injector.getInstance(ActionManager.class);
      LOG.info("********* Initializing Controller **********");
      AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
      LOG.info("********* Initializing Scheduled Request Manager " +
          "**********");
      ExecutionScheduleManager executionScheduleManager = injector.getInstance(ExecutionScheduleManager.class);
      clusterController = controller;
      StateRecoveryManager recoveryManager = injector.getInstance(StateRecoveryManager.class);
      recoveryManager.doWork();
      /*
       * Start the server after controller state is recovered.
       */
      server.start();
      // serverForAgent.start();
      LOG.info("********* Started Server **********");
      manager.start();
      LOG.info("********* Started ActionManager **********");
      executionScheduleManager.start();
      LOG.info("********* Started Scheduled Request Manager " +
          "**********");
      serviceManager.startAsync();
      LOG.info("********* Started Services **********");
      // Blocks until the client-facing server shuts down.
      server.join();
      LOG.info("Joined the Server");
    } catch (BadPaddingException bpe) {
      LOG.error("Bad keystore or private key password. " +
          "HTTPS certificate re-importing may be required.");
      throw bpe;
    } catch (BindException bindException) {
      LOG.error("Could not bind to server port - instance may " +
          "already be running. " +
          "Terminating this instance.", bindException);
      throw bindException;
    }
  }
/**
* Disables insecure protocols and cipher suites (exact list is defined
* at server properties)
*/
private void disableInsecureProtocols(SslContextFactory factory) {
if (!configs.getSrvrDisabledCiphers().isEmpty()) {
String[] masks = configs.getSrvrDisabledCiphers()
.split(DISABLED_ENTRIES_SPLITTER);
factory.setExcludeCipherSuites(masks);
}
if (!configs.getSrvrDisabledProtocols().isEmpty()) {
String[] masks = configs.getSrvrDisabledProtocols()
.split(DISABLED_ENTRIES_SPLITTER);
factory.setExcludeProtocols(masks);
}
}
/**
* Performs basic configuration of root handler with static values
* and values
* from configuration file.
*
* @param root root handler
*/
protected void configureRootHandler(ServletContextHandler root) {
configureHandlerCompression(root);
root.setContextPath(CONTEXT_PATH);
root.setErrorHandler(injector
.getInstance(AmbariErrorHandler.class));
root.setMaxFormContentSize(-1);
/* Configure web app context */
root.setResourceBase(configs.getWebAppDir());
}
/**
* Performs GZIP compression configuration of the context handler
* with static values and values from configuration file
*
* @param context handler
*/
protected void configureHandlerCompression(
ServletContextHandler context) {
if (configs.isApiGzipped()) {
FilterHolder gzipFilter = context
.addFilter(GzipFilter.class, "/*", EnumSet
.of(DispatcherType.REQUEST));
gzipFilter.setInitParameter("methods", "GET,POST,PUT,DELETE");
gzipFilter.setInitParameter("mimeTypes",
"text/html,text/plain,text/xml,text/css," +
"application/x-javascript," +
"application/xml," +
"application/x-www-form-urlencoded," +
"application/javascript,application/json");
gzipFilter.setInitParameter("minGzipSize", configs
.getApiGzipMinSize());
}
}
/**
* Performs basic configuration of session manager with static
* values and values from
* configuration file.
*
* @param sessionManager session manager
*/
protected void configureSessionManager(SessionManager sessionManager) {
// use AMBARISESSIONID instead of JSESSIONID to avoid conflicts
// with
// other services (like HDFS) that run on the same context but a
// different
// port
sessionManager.getSessionCookieConfig().setName("AMBARISESSIONID");
sessionManager.getSessionCookieConfig().setHttpOnly(true);
if (configs.getApiSSLAuthentication()) {
sessionManager.getSessionCookieConfig().setSecure(true);
}
// each request that does not use AMBARISESSIONID will create a new
// HashedSession in Jetty; these MUST be reaped after inactivity
// in order
// to prevent a memory leak
int sessionInactivityTimeout = configs
.getHttpSessionInactiveTimeout();
sessionManager.setMaxInactiveInterval(sessionInactivityTimeout);
}
/**
* Creates default users if in-memory database is used
*/
@Transactional
protected void initDB() throws AmbariException {
if (configs.getPersistenceType() == PersistenceType.IN_MEMORY ||
dbInitNeeded) {
LOG.info("Database init needed - creating default data");
Users users = injector.getInstance(Users.class);
users.createUser("admin", "admin");
users.createUser("user", "user");
MetainfoEntity schemaVersion = new MetainfoEntity();
schemaVersion
.setMetainfoName(Configuration.SERVER_VERSION_KEY);
schemaVersion
.setMetainfoValue(ambariMetaInfo.getServerVersion());
metainfoDAO.create(schemaVersion);
}
}
public void stop() throws Exception {
try {
server.stop();
} catch (Exception e) {
LOG.error("Error stopping the server", e);
}
}
/**
* Sets up proxy authentication. This must be done before the
* server is
* initialized since <code>AmbariMetaInfo</code> requires potential URL
* lookups that may need the proxy.
*/
static void setupProxyAuth() {
final String proxyUser = System.getProperty("http.proxyUser");
final String proxyPass = System.getProperty("http.proxyPassword");
// to skip some hosts from proxy, pipe-separate names using, i.e.:
// -Dhttp.nonProxyHosts=*.domain.com|host.internal.net
if (null != proxyUser && null != proxyPass) {
LOG.info("Proxy authentication enabled");
Authenticator.setDefault(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(proxyUser, proxyPass
.toCharArray());
}
});
} else {
LOG.debug("Proxy authentication not specified");
}
}
  /**
   * Deprecated. Instead, use {@link StaticallyInject}.
   * <p/>
   * Static injection replacement to wait Persistence Service start.
   * Pushes Guice-managed singletons into classes that rely on static
   * initialization; must run before the servlets/providers are used.
   *
   * @see StaticallyInject
   */
  @Deprecated
  public void performStaticInjection() {
    //AgentResource.init(injector.getInstance(HeartBeatHandler.class));
    // Unsecured certificate REST endpoints.
    CertificateDownload.init(injector.getInstance(CertificateManager.class));
    ConnectionInfo.init(injector.getInstance(Configuration.class));
    CertificateSign.init(injector.getInstance(CertificateManager.class));
    GetResource.init(injector.getInstance(ResourceManager.class));
    // Key/value persistence services.
    PersistKeyValueService.init(injector.getInstance(PersistKeyValueImpl.class));
    KeyService.init(injector.getInstance(PersistKeyValueImpl.class));
    // BootStrapResource.init(injector.getInstance(BootStrapImpl.class));
    // StackAdvisorResourceProvider
    // .init(injector.getInstance(StackAdvisorHelper.class));
    StageUtils.setGson(injector.getInstance(Gson.class));
    StageUtils.setTopologyManager(injector.getInstance(TopologyManager.class));
    WorkflowJsonService.setDBProperties(injector.getInstance(Configuration.class));
    SecurityFilter.init(injector.getInstance(Configuration.class));
    // StackDefinedPropertyProvider.init(injector);
    // Resource providers for the REST API.
    AbstractControllerResourceProvider.init(injector.getInstance(ResourceProviderFactory.class));
    BlueprintResourceProvider.init(injector
        .getInstance(BlueprintFactory.class), injector
        .getInstance(BlueprintDAO.class), injector
        .getInstance(Gson.class));
    // StackDependencyResourceProvider.init(ambariMetaInfo);
    ClusterResourceProvider.init(injector.getInstance(TopologyManager.class), injector
        .getInstance(TopologyRequestFactoryImpl.class));
    HostResourceProvider.setTopologyManager(injector.getInstance(TopologyManager.class));
    BlueprintFactory.init(injector.getInstance(BlueprintDAO.class));
    BaseClusterRequest.init(injector.getInstance(BlueprintFactory.class));
    AmbariContext.init(injector.getInstance(HostRoleCommandFactory.class));
    // Permission/privilege providers.
    PermissionResourceProvider.init(injector.getInstance(PermissionDAO.class));
    ViewPermissionResourceProvider.init(injector.getInstance(PermissionDAO.class));
    PrivilegeResourceProvider.init(injector.getInstance(PrivilegeDAO.class), injector
        .getInstance(UserDAO.class), injector
        .getInstance(GroupDAO.class), injector
        .getInstance(PrincipalDAO.class), injector
        .getInstance(PermissionDAO.class), injector
        .getInstance(ResourceDAO.class));
    UserPrivilegeResourceProvider.init(injector.getInstance(UserDAO.class), injector
        .getInstance(ClusterDAO.class), injector
        .getInstance(GroupDAO.class), injector
        .getInstance(ViewInstanceDAO.class));
    ClusterPrivilegeResourceProvider.init(injector.getInstance(ClusterDAO.class));
    AmbariPrivilegeResourceProvider.init(injector.getInstance(ClusterDAO.class));
    ActionManager.setTopologyManager(injector.getInstance(TopologyManager.class));
  }
    /**
     * Entry point: wires up Guice, initializes persistence, certificates and the
     * view registry, then runs the embedded server.
     *
     * <p>On any startup failure the server is stopped (if it got that far) and
     * the JVM exits with status -1.
     */
    public static void main(String[] args) throws Exception {
        Injector injector = Guice.createInjector(new ControllerModule());
        DemoServer server = null;
        try {
            LOG.info("Getting the controller");
            // Presumably installs proxy credentials for outbound HTTP -- confirm in setupProxyAuth().
            setupProxyAuth();
            // Requesting the instance triggers JPA/persistence initialization as a side effect.
            injector.getInstance(GuiceJpaInitializer.class);
            server = injector.getInstance(DemoServer.class);
            CertificateManager certMan = injector
                    .getInstance(CertificateManager.class);
            certMan.initRootCert();
            ViewRegistry.initInstance(server.viewRegistry);
            ComponentSSLConfiguration.instance().init(server.configs);
            // Blocks until the server shuts down.
            server.run();
        } catch (Throwable t) {
            LOG.error("Failed to run the Ambari Server", t);
            if (server != null) {
                server.stop();
            }
            System.exit(-1);
        }
    }
}
| |
package com.bazaarvoice.emodb.sor.client;
import com.bazaarvoice.emodb.auth.apikey.ApiKeyRequest;
import com.bazaarvoice.emodb.auth.proxy.Credential;
import com.bazaarvoice.emodb.client.EmoClient;
import com.bazaarvoice.emodb.client.EmoClientException;
import com.bazaarvoice.emodb.client.EmoResponse;
import com.bazaarvoice.emodb.client.uri.EmoUriBuilder;
import com.bazaarvoice.emodb.common.api.ServiceUnavailableException;
import com.bazaarvoice.emodb.common.api.Ttls;
import com.bazaarvoice.emodb.common.api.UnauthorizedException;
import com.bazaarvoice.emodb.common.json.JsonStreamProcessingException;
import com.bazaarvoice.emodb.common.json.RisonHelper;
import com.bazaarvoice.emodb.common.uuid.TimeUUIDs;
import com.bazaarvoice.emodb.sor.api.Audit;
import com.bazaarvoice.emodb.sor.api.AuditSizeLimitException;
import com.bazaarvoice.emodb.sor.api.AuthDataStore;
import com.bazaarvoice.emodb.sor.api.Change;
import com.bazaarvoice.emodb.sor.api.Coordinate;
import com.bazaarvoice.emodb.sor.api.DeltaSizeLimitException;
import com.bazaarvoice.emodb.sor.api.FacadeOptions;
import com.bazaarvoice.emodb.sor.api.ReadConsistency;
import com.bazaarvoice.emodb.sor.api.StashNotAvailableException;
import com.bazaarvoice.emodb.sor.api.Table;
import com.bazaarvoice.emodb.sor.api.TableExistsException;
import com.bazaarvoice.emodb.sor.api.TableOptions;
import com.bazaarvoice.emodb.sor.api.UnknownPlacementException;
import com.bazaarvoice.emodb.sor.api.UnknownTableException;
import com.bazaarvoice.emodb.sor.api.UnpublishedDatabusEvent;
import com.bazaarvoice.emodb.sor.api.Update;
import com.bazaarvoice.emodb.sor.api.WriteConsistency;
import com.bazaarvoice.emodb.sor.delta.Delta;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.PeekingIterator;
import org.apache.commons.codec.binary.Base64;
import javax.annotation.Nullable;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import java.net.URI;
import java.time.Duration;
import java.time.Instant;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Data store client implementation that routes System of Record API calls to the EmoDB service. The actual HTTP
* communication is managed by the {@link EmoClient} implementation to allow for flexible usage by variety of HTTP client
* implementations, such as Jersey.
*/
public class DataStoreClient implements AuthDataStore {
/** Must match the service name in the EmoService class. */
/*package*/ static final String BASE_SERVICE_NAME = "emodb-sor-1";
/** Must match the @Path annotation on the DataStoreResource class. */
public static final String SERVICE_PATH = "/sor/1";
private static final MediaType APPLICATION_X_JSON_DELTA_TYPE = new MediaType("application", "x.json-delta");
private static final Duration UPDATE_ALL_REQUEST_DURATION = Duration.ofSeconds(1);
private final EmoClient _client;
private final UriBuilder _dataStore;
public DataStoreClient(URI endPoint, EmoClient client) {
_client = checkNotNull(client, "client");
_dataStore = EmoUriBuilder.fromUri(endPoint);
}
@Override
public Iterator<Table> listTables(String apiKey, @Nullable String fromTableExclusive, long limit) {
checkArgument(limit > 0, "Limit must be >0");
try {
URI uri = _dataStore.clone()
.segment("_table")
.queryParam("from", optional(fromTableExclusive))
.queryParam("limit", limit)
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<Iterator<Table>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
@Override
public Iterator<UnpublishedDatabusEvent> listUnpublishedDatabusEvents(String apiKey, @Nullable Date fromInclusive, @Nullable Date toExclusive) {
try {
URI uri = _dataStore.clone()
.segment("_unpublishedevents")
.queryParam("from", optional(fromInclusive))
.queryParam("to", optional(toExclusive))
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<Iterator<UnpublishedDatabusEvent>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
    /**
     * Creates a table with the given options, initial template and audit record.
     * <p>
     * Creation may need to execute in a different (system) data center; the server
     * signals this with a 301 redirect, which is followed up to 5 times.
     *
     * @throws TableExistsException if a table with this name already exists
     */
    @Override
    public void createTable(String apiKey, String table, TableOptions options, Map<String, ?> template, Audit audit) throws TableExistsException {
        checkNotNull(table, "table");
        checkNotNull(options, "options");
        checkNotNull(template, "template");
        checkNotNull(audit, "audit");
        URI uri = _dataStore.clone()
                .segment("_table", table)
                .queryParam("options", RisonHelper.asORison(options))
                .queryParam("audit", RisonHelper.asORison(audit))
                .build();
        for (int attempt = 0; ; attempt++) {
            try {
                _client.resource(uri)
                        .type(MediaType.APPLICATION_JSON_TYPE)
                        .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                        .put(template);
                return;
            } catch (EmoClientException e) {
                // The SoR returns a 301 response when we need to make this request against a different data center.
                // Follow the redirect a few times but don't loop forever.
                if (e.getResponse().getStatus() == Response.Status.MOVED_PERMANENTLY.getStatusCode() && attempt < 5) {
                    uri = e.getResponse().getLocation();
                    continue;
                }
                throw convertException(e);
            }
        }
    }
@Override
public void dropTable(String apiKey, String table, Audit audit) throws UnknownTableException {
checkNotNull(table, "table");
checkNotNull(audit, "audit");
URI uri = _dataStore.clone()
.segment("_table", table)
.build();
EmoResponse response = _client.resource(uri)
.queryParam("audit", RisonHelper.asORison(audit))
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.delete(EmoResponse.class);
if (response.getStatus() != Response.Status.OK.getStatusCode()) {
throw convertException(new EmoClientException(response));
}
}
@Override
public void purgeTableUnsafe(String apiKey, String table, Audit audit) {
throw new UnsupportedOperationException("Purging a table requires administrator privileges.");
}
    /**
     * Returns true if the named table exists.
     * <p>
     * Implemented as a HEAD request: 200 means the table exists; a 404 whose
     * "X-BV-Exception" header names {@link UnknownTableException} means it does
     * not; anything else is converted to a client-side exception.
     */
    @Override
    public boolean getTableExists(String apiKey, String table) {
        checkNotNull(table, "table");
        URI uri = _dataStore.clone()
                .segment("_table", table)
                .build();
        EmoResponse response = _client.resource(uri)
                .accept(MediaType.APPLICATION_JSON_TYPE)
                .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                .head();
        if (response.getStatus() == Response.Status.OK.getStatusCode()) {
            return true;
        } else if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode() &&
                UnknownTableException.class.getName().equals(response.getFirstHeader("X-BV-Exception"))) {
            return false;
        } else {
            throw convertException(new EmoClientException(response));
        }
    }
    /**
     * Returns true if the table's metadata reports a non-null availability
     * (i.e. storage is provisioned where this request is served).
     */
    public boolean isTableAvailable(String apiKey, String table) {
        checkNotNull(table, "table");
        return getTableMetadata(apiKey, table).getAvailability() != null;
    }
@Override
public Table getTableMetadata(String apiKey, String table) {
checkNotNull(table, "table");
try {
URI uri = _dataStore.clone()
.segment("_table", table, "metadata")
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(Table.class);
} catch (EmoClientException e) {
throw convertException(e);
}
}
@Override
public Map<String, Object> getTableTemplate(String apiKey, String table) {
checkNotNull(table, "table");
try {
URI uri = _dataStore.clone()
.segment("_table", table)
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<Map<String,Object>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
    /**
     * Replaces the template of an existing table, recording the supplied audit entry.
     * <p>
     * Like table creation, this may need to execute in a different data center;
     * the server signals that with a 301 redirect, which is followed up to 5 times.
     */
    @Override
    public void setTableTemplate(String apiKey, String table, Map<String, ?> template, Audit audit) {
        checkNotNull(table, "table");
        checkNotNull(template, "template");
        checkNotNull(audit, "audit");
        URI uri = _dataStore.clone()
                .segment("_table", table, "template")
                .queryParam("audit", RisonHelper.asORison(audit))
                .build();
        for (int attempt = 0; ; attempt++) {
            try {
                _client.resource(uri)
                        .type(MediaType.APPLICATION_JSON_TYPE)
                        .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                        .put(template);
                return;
            } catch (EmoClientException e) {
                // The SoR returns a 301 response when we need to make this request against a different data center.
                // Follow the redirect a few times but don't loop forever.
                if (e.getResponse().getStatus() == Response.Status.MOVED_PERMANENTLY.getStatusCode() && attempt < 5) {
                    uri = e.getResponse().getLocation();
                    continue;
                }
                throw convertException(e);
            }
        }
    }
@Override
public TableOptions getTableOptions(String apiKey, String table) {
checkNotNull(table, "table");
try {
URI uri = _dataStore.clone()
.segment("_table", table, "options")
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(TableOptions.class);
} catch (EmoClientException e) {
throw convertException(e);
}
}
@Override
public long getTableApproximateSize(String apiKey, String table) {
checkNotNull(table, "table");
try {
URI uri = _dataStore.clone()
.segment("_table", table, "size")
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(Long.class);
} catch (EmoClientException e) {
throw convertException(e);
}
}
@Override
public long getTableApproximateSize(String apiKey, String table, int limit) throws UnknownTableException {
checkNotNull(table, "table");
checkNotNull(limit);
checkArgument(limit > 0, "limit must be greater than 0");
try {
URI uri = _dataStore.clone()
.segment("_table", table, "size")
.queryParam("limit", limit)
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(Long.class);
} catch (EmoClientException e) {
throw convertException(e);
}
}
@Override
public Map<String, Object> get(String apiKey, String table, String key) {
return get(apiKey, table, key, ReadConsistency.STRONG);
}
@Override
public Map<String, Object> get(String apiKey, String table, String key, ReadConsistency consistency) {
checkNotNull(table, "table");
checkNotNull(key, "key");
checkNotNull(consistency, "consistency");
try {
URI uri = _dataStore.clone()
.segment(table, key)
.queryParam("consistency", consistency)
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<Map<String,Object>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
    /**
     * Streams the change history of a single record, optionally bounded by a
     * [start, end] range of time-UUID change ids.
     *
     * @param includeContentData      include the delta content of each change
     * @param includeAuditInformation include the audit record of each change
     * @param reversed                iterate newest-to-oldest; when both bounds are
     *                                given, requires start &gt;= end (forward
     *                                iteration requires start &lt;= end)
     */
    @Override
    public Iterator<Change> getTimeline(String apiKey, String table, String key, boolean includeContentData, boolean includeAuditInformation,
                                        @Nullable UUID start, @Nullable UUID end, boolean reversed, long limit, ReadConsistency consistency) {
        checkNotNull(table, "table");
        checkNotNull(key, "key");
        if (start != null && end != null) {
            if (reversed) {
                checkArgument(TimeUUIDs.compare(start, end) >= 0, "Start must be >=End for reversed ranges");
            } else {
                checkArgument(TimeUUIDs.compare(start, end) <= 0, "Start must be <=End");
            }
        }
        checkArgument(limit > 0, "Limit must be >0");
        checkNotNull(consistency, "consistency");
        try {
            URI uri = _dataStore.clone()
                    .segment(table, key, "timeline")
                    .queryParam("data", includeContentData)
                    .queryParam("audit", includeAuditInformation)
                    .queryParam("start", optional(start))
                    .queryParam("end", optional(end))
                    .queryParam("reversed", reversed)
                    .queryParam("limit", limit)
                    .queryParam("consistency", consistency)
                    .build();
            return _client.resource(uri)
                    .accept(MediaType.APPLICATION_JSON_TYPE)
                    .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                    .get(new TypeReference<Iterator<Change>>(){});
        } catch (EmoClientException e) {
            throw convertException(e);
        }
    }
@Override
public Iterator<Map<String, Object>> scan(String apiKey, String table, @Nullable String fromKeyExclusive,
long limit, boolean includeDeletes, ReadConsistency consistency) {
checkNotNull(table, "table");
checkArgument(limit > 0, "Limit must be >0");
checkNotNull(consistency, "consistency");
try {
URI uri = _dataStore.clone()
.segment(table)
.queryParam("from", optional(fromKeyExclusive))
.queryParam("limit", limit)
.queryParam("includeDeletes", includeDeletes)
.queryParam("consistency", consistency)
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<Iterator<Map<String,Object>>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
    /**
     * Returns opaque split identifiers that partition the table into chunks of
     * approximately {@code desiredRecordsPerSplit} records each, for parallel scanning.
     */
    @Override
    public Collection<String> getSplits(String apiKey, String table, int desiredRecordsPerSplit) {
        checkNotNull(table, "table");
        checkArgument(desiredRecordsPerSplit > 0, "DesiredRecordsPerSplit must be >0");
        try {
            URI uri = _dataStore.clone()
                    .segment("_split", table)
                    .queryParam("size", desiredRecordsPerSplit)
                    .build();
            return _client.resource(uri)
                    .accept(MediaType.APPLICATION_JSON_TYPE)
                    .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                    .get(new TypeReference<List<String>>(){});
        } catch (EmoClientException e) {
            throw convertException(e);
        }
    }
    /**
     * Streams the records of one split (as returned by {@link #getSplits}),
     * resuming after {@code fromKeyExclusive} when it is provided.
     */
    @Override
    public Iterator<Map<String, Object>> getSplit(String apiKey, String table, String split, @Nullable String fromKeyExclusive,
                                                  long limit, boolean includeDeletes, ReadConsistency consistency) {
        checkNotNull(table, "table");
        checkNotNull(split, "split");
        checkArgument(limit > 0, "Limit must be >0");
        checkNotNull(consistency, "consistency");
        try {
            URI uri = _dataStore.clone()
                    .segment("_split", table, split)
                    .queryParam("from", optional(fromKeyExclusive))
                    .queryParam("limit", limit)
                    .queryParam("includeDeletes", includeDeletes)
                    .queryParam("consistency", consistency)
                    .build();
            return _client.resource(uri)
                    .accept(MediaType.APPLICATION_JSON_TYPE)
                    .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                    .get(new TypeReference<Iterator<Map<String,Object>>>(){});
        } catch (EmoClientException e) {
            throw convertException(e);
        }
    }
@Override
public Iterator<Map<String, Object>> multiGet(String apiKey, List<Coordinate> coordinates) {
return multiGet(apiKey, coordinates, ReadConsistency.STRONG);
}
@Override
public Iterator<Map<String, Object>> multiGet(String apiKey, final List<Coordinate> coordinates, ReadConsistency consistency) {
checkNotNull(coordinates, "coordinates");
checkNotNull(consistency, "consistency");
try {
UriBuilder uriBuilder = _dataStore.clone().segment("_multiget").queryParam("consistency", consistency);
for(Coordinate coordinate : coordinates) {
uriBuilder.queryParam("id", coordinate.toString());
}
URI uri = uriBuilder.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<Iterator<Map<String,Object>>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
    /** Writes a delta to a record using {@link WriteConsistency#STRONG}. */
    @Override
    public void update(String apiKey, String table, String key, UUID changeId, Delta delta, Audit audit) {
        update(apiKey, table, key, changeId, delta, audit, WriteConsistency.STRONG);
    }
    /** Writes a delta to a record at the requested write consistency. */
    @Override
    public void update(String apiKey, String table, String key, UUID changeId, Delta delta, Audit audit, WriteConsistency consistency) {
        update(apiKey, table, key, changeId, delta, audit, consistency, false, ImmutableSet.<String>of());
    }
    /**
     * Shared implementation for the public {@code update} methods.
     * A null {@code changeId} is replaced with a newly generated time-UUID.
     *
     * @param facade true to write through the "_facade" endpoint instead of the regular one
     */
    private void update(String apiKey, String table, String key, UUID changeId, Delta delta, Audit audit, WriteConsistency consistency,
                        boolean facade, Set<String> tags) {
        checkNotNull(table, "table");
        checkNotNull(key, "key");
        checkNotNull(delta, "delta");
        checkNotNull(audit, "audit");
        checkNotNull(consistency, "consistency");
        try {
            // NOTE(review): for non-facade writes the first segment is "" -- this
            // relies on EmoUriBuilder eliding empty segments; confirm before changing.
            UriBuilder uriBuilder = _dataStore.clone()
                    .segment(facade ? "_facade" : "", table, key)
                    .queryParam("changeId", (changeId != null) ? changeId : TimeUUIDs.newUUID())
                    .queryParam("audit", RisonHelper.asORison(audit))
                    .queryParam("consistency", consistency);
            for(String tag : tags) {
                uriBuilder.queryParam("tag", tag);
            }
            // The delta is sent as the request body in the x.json-delta media type.
            _client.resource(uriBuilder.build())
                    .type(APPLICATION_X_JSON_DELTA_TYPE)
                    .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                    .post(delta.toString());
        } catch (EmoClientException e) {
            throw convertException(e);
        }
    }
@Override
public void updateAll(String apiKey, Iterable<Update> updates) {
updateAll(apiKey, updates, false, ImmutableSet.<String>of());
}
@Override
public void updateAll(String apiKey, Iterable<Update> updates, Set<String> tags) {
updateAll(apiKey, updates, false, tags);
}
private void updateAll(String apiKey, Iterable<Update> updates, boolean facade, Set<String> tags) {
// This method takes an Iterable instead of an Iterator so it can be retried (by Ostrich etc.) if it fails.
// If just one update, use the slightly more compact single record REST api.
if (updates instanceof Collection && ((Collection) updates).size() == 1) {
Update update = Iterables.getOnlyElement(updates);
update(apiKey, update.getTable(), update.getKey(), update.getChangeId(), update.getDelta(), update.getAudit(),
update.getConsistency(), facade, tags);
return;
}
// Otherwise, use the streaming API to send multiple updates per HTTP request. Break the updates into batches
// such that this makes approximately one HTTP request per second. The goal is to make requests big enough to
// get the performance benefits of batching while being small enough that they show up with regularity in the
// request logs--don't want an hour long POST that doesn't show up in the request log until the end of the hour.
Iterator<Update> updatesIter = updates.iterator();
for (long batchIdx = 0; updatesIter.hasNext(); batchIdx++) {
PeekingIterator<Update> batchIter = TimeLimitedIterator.create(updatesIter, UPDATE_ALL_REQUEST_DURATION, 1);
// Grab the first update, assume it's representative (but note it may not be) and copy some of its
// attributes into the URL query parameters for the *sole* purpose of making the server request logs easier
// to read. The server ignores the query parameters--only the body of the POST actually matters.
Update first = batchIter.peek();
try {
UriBuilder uriBuilder = _dataStore.clone()
.segment(facade ? "_facade" : "", "_stream")
.queryParam("batch", batchIdx)
.queryParam("table", first.getTable())
.queryParam("key", first.getKey())
.queryParam("audit", RisonHelper.asORison(first.getAudit()))
.queryParam("consistency", first.getConsistency());
for(String tag : tags) {
uriBuilder.queryParam("tag", tag);
}
_client.resource(uriBuilder.build())
.type(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.post(batchIter);
} catch (EmoClientException e) {
throw convertException(e);
}
}
}
    /**
     * Creates a facade for the named table with the given options and audit record.
     * <p>
     * Like table creation, this may need to execute in a different data center;
     * the server signals that with a 301 redirect, which is followed up to 5 times.
     *
     * @throws TableExistsException if a conflicting facade/table already exists
     */
    @Override
    public void createFacade(String apiKey, String table, FacadeOptions options, Audit audit)
            throws TableExistsException {
        checkNotNull(table, "table");
        checkNotNull(options, "options");
        checkNotNull(audit, "audit");
        URI uri = _dataStore.clone()
                .segment("_facade", table)
                .queryParam("options", RisonHelper.asORison(options))
                .queryParam("audit", RisonHelper.asORison(audit))
                .build();
        for (int attempt = 0; ; attempt++) {
            try {
                _client.resource(uri)
                        .type(MediaType.APPLICATION_JSON_TYPE)
                        .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                        .put();
                return;
            } catch (EmoClientException e) {
                // The SoR returns a 301 response when we need to make this request against a different data center.
                // Follow the redirect a few times but don't loop forever.
                if (e.getResponse().getStatus() == Response.Status.MOVED_PERMANENTLY.getStatusCode() && attempt < 5) {
                    uri = e.getResponse().getLocation();
                    continue;
                }
                throw convertException(e);
            }
        }
    }
@Override
public void dropFacade(String apiKey, String table, String dataCenter, Audit audit)
throws UnknownTableException {
throw new UnsupportedOperationException("Dropping a facade requires administrator privileges.");
}
@Override
public void updateAllForFacade(String apiKey, Iterable<Update> updates) {
updateAll(apiKey, updates, true, ImmutableSet.<String>of());
}
@Override
public void updateAllForFacade(@Credential String apiKey, Iterable<Update> updates, Set<String> tags) {
updateAll(apiKey, updates, true, tags);
}
    /**
     * Requests compaction of a record's delta history.
     *
     * @param ttlOverride optional override for history retention; converted to
     *                    whole seconds clamped to [0, Integer.MAX_VALUE], or
     *                    omitted from the request entirely when null
     */
    @Override
    public void compact(String apiKey, String table, String key, @Nullable Duration ttlOverride, ReadConsistency readConsistency, WriteConsistency writeConsistency) {
        checkNotNull(table, "table");
        checkNotNull(key, "key");
        checkNotNull(readConsistency, "readConsistency");
        checkNotNull(writeConsistency, "writeConsistency");
        try {
            Integer ttlOverrideSeconds = (ttlOverride != null) ? Ttls.toSeconds(ttlOverride, 0, Integer.MAX_VALUE) : null;
            // An empty varargs array causes UriBuilder to omit the "ttl" parameter.
            URI uri = _dataStore.clone()
                    .segment(table, key, "compact")
                    .queryParam("ttl", (ttlOverrideSeconds != null) ? new Object[]{ttlOverrideSeconds} : new Object[0])
                    .queryParam("readConsistency", readConsistency)
                    .queryParam("writeConsistency", writeConsistency)
                    .build();
            _client.resource(uri)
                    .header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
                    .post();
        } catch (EmoClientException e) {
            throw convertException(e);
        }
    }
@Override
public Collection<String> getTablePlacements(String apiKey) {
try {
URI uri = _dataStore.clone()
.segment("_tableplacement")
.build();
return _client.resource(uri)
.accept(MediaType.APPLICATION_JSON_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(new TypeReference<List<String>>(){});
} catch (EmoClientException e) {
throw convertException(e);
}
}
@Override
public URI getStashRoot(String apiKey)
throws StashNotAvailableException {
try {
URI uri = _dataStore.clone()
.segment("_stashroot")
.build();
String stashRoot = _client.resource(uri)
.accept(MediaType.TEXT_PLAIN_TYPE)
.header(ApiKeyRequest.AUTHENTICATION_HEADER, apiKey)
.get(String.class);
return URI.create(stashRoot);
} catch (EmoClientException e) {
throw convertException(e);
}
}
    /**
     * Translates an HTTP-level {@link EmoClientException} into the specific exception
     * type advertised by the server via the "X-BV-Exception" response header, keyed on
     * the combination of HTTP status code and exception class name. The original
     * exception becomes the cause where possible. Falls back to returning the original
     * exception unchanged when no mapping applies.
     * <p>
     * This method returns (rather than throws) so callers can write
     * {@code throw convertException(e);} and keep the compiler's flow analysis happy.
     */
    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
    private RuntimeException convertException(EmoClientException e) {
        EmoResponse response = e.getResponse();
        String exceptionType = response.getFirstHeader("X-BV-Exception");
        if (response.getStatus() == Response.Status.BAD_REQUEST.getStatusCode()) {
            if (IllegalArgumentException.class.getName().equals(exceptionType)) {
                return new IllegalArgumentException(response.getEntity(String.class), e);
            } else if (JsonStreamProcessingException.class.getName().equals(exceptionType)) {
                return new JsonStreamProcessingException(response.getEntity(String.class));
            } else if (DeltaSizeLimitException.class.getName().equals(exceptionType)) {
                return response.getEntity(DeltaSizeLimitException.class);
            } else if (AuditSizeLimitException.class.getName().equals(exceptionType)) {
                return response.getEntity(AuditSizeLimitException.class);
            }
        } else if (response.getStatus() == Response.Status.CONFLICT.getStatusCode() &&
                TableExistsException.class.getName().equals(exceptionType)) {
            // For typed exceptions below: deserialize the server's exception body when
            // present, otherwise fall back to a no-arg instance, always chaining 'e'.
            if (response.hasEntity()) {
                return (RuntimeException) response.getEntity(TableExistsException.class).initCause(e);
            } else {
                return (RuntimeException) new TableExistsException().initCause(e);
            }
        } else if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode() &&
                UnknownTableException.class.getName().equals(exceptionType)) {
            if (response.hasEntity()) {
                return (RuntimeException) response.getEntity(UnknownTableException.class).initCause(e);
            } else {
                return (RuntimeException) new UnknownTableException().initCause(e);
            }
        } else if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode() &&
                UnknownPlacementException.class.getName().equals(exceptionType)) {
            if (response.hasEntity()) {
                return (RuntimeException) response.getEntity(UnknownPlacementException.class).initCause(e);
            } else {
                return (RuntimeException) new UnknownPlacementException().initCause(e);
            }
        } else if (response.getStatus() == Response.Status.NOT_FOUND.getStatusCode() &&
                StashNotAvailableException.class.getName().equals(exceptionType)) {
            if (response.hasEntity()) {
                return (RuntimeException) response.getEntity(StashNotAvailableException.class).initCause(e);
            } else {
                return (RuntimeException) new StashNotAvailableException().initCause(e);
            }
        } else if (response.getStatus() == Response.Status.MOVED_PERMANENTLY.getStatusCode() &&
                UnsupportedOperationException.class.getName().equals(exceptionType)) {
            return new UnsupportedOperationException("Permanent redirect: " + response.getLocation(), e);
        } else if (response.getStatus() == Response.Status.FORBIDDEN.getStatusCode() &&
                UnauthorizedException.class.getName().equals(exceptionType)) {
            if (response.hasEntity()) {
                return (RuntimeException) response.getEntity(UnauthorizedException.class).initCause(e);
            } else {
                return (RuntimeException) new UnauthorizedException().initCause(e);
            }
        } else if (response.getStatus() == Response.Status.SERVICE_UNAVAILABLE.getStatusCode() &&
                ServiceUnavailableException.class.getName().equals(exceptionType)) {
            if (response.hasEntity()) {
                return (RuntimeException) response.getEntity(ServiceUnavailableException.class).initCause(e);
            } else {
                return (RuntimeException) new ServiceUnavailableException().initCause(e);
            }
        }
        // No mapping matched: surface the raw client exception.
        return e;
    }
private String basicAuthCredentials(String credentials) {
return String.format("Basic %s", Base64.encodeBase64String(credentials.getBytes(Charsets.UTF_8)));
}
private Object[] optional(Object queryArg) {
return (queryArg != null) ? new Object[]{queryArg} : new Object[0];
}
}
| |
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.cli.porcelain;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import jline.Terminal;
import jline.console.ConsoleReader;
import org.fusesource.jansi.Ansi;
import org.fusesource.jansi.Ansi.Color;
import org.geogit.api.GeoGIT;
import org.geogit.api.ObjectId;
import org.geogit.api.Platform;
import org.geogit.api.Ref;
import org.geogit.api.RevCommit;
import org.geogit.api.RevPerson;
import org.geogit.api.SymRef;
import org.geogit.api.plumbing.ForEachRef;
import org.geogit.api.plumbing.ParseTimestamp;
import org.geogit.api.plumbing.RefParse;
import org.geogit.api.plumbing.RevParse;
import org.geogit.api.plumbing.diff.DiffEntry;
import org.geogit.api.porcelain.DiffOp;
import org.geogit.api.porcelain.LogOp;
import org.geogit.cli.AbstractCommand;
import org.geogit.cli.AnsiDecorator;
import org.geogit.cli.CLICommand;
import org.geogit.cli.GeogitCLI;
import org.geotools.util.Range;
import com.beust.jcommander.Parameters;
import com.beust.jcommander.ParametersDelegate;
import com.beust.jcommander.internal.Lists;
import com.beust.jcommander.internal.Maps;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
/**
* Shows the commit logs.
* <p>
* CLI proxy for {@link org.geogit.api.porcelain.LogOp}
* <p>
* Usage:
* <ul>
* <li> {@code geogit log [<options>]}
* </ul>
*
* @see org.geogit.api.porcelain.LogOp
*/
@Parameters(commandNames = "log", commandDescription = "Show commit logs")
public class Log extends AbstractCommand implements CLICommand {
public enum LOG_DETAIL {
SUMMARY, NAMES_ONLY, STATS, NOTHING
};
    // Parsed command-line options for "geogit log".
    @ParametersDelegate
    public final LogArgs args = new LogArgs();
    // Commit id -> comma-separated ref names pointing at it (populated for --decoration).
    private Map<ObjectId, String> refs;
    private GeoGIT geogit;
    private ConsoleReader console;
    // Whether ANSI color output is enabled (resolved from --color and terminal capabilities).
    private boolean useColor;
/**
* Executes the log command using the provided options.
*
* @param cli
* @throws IOException
* @see org.geogit.cli.AbstractCommand#runInternal(org.geogit.cli.GeogitCLI)
*/
@Override
public void runInternal(GeogitCLI cli) throws Exception {
final Platform platform = cli.getPlatform();
Preconditions.checkState(cli.getGeogit() != null, "Not a geogit repository: "
+ platform.pwd().getAbsolutePath());
Preconditions.checkArgument(!(args.summary && args.oneline),
"--summary and --oneline cannot be used together");
Preconditions.checkArgument(!(args.stats && args.oneline),
"--stats and --oneline cannot be used together");
Preconditions.checkArgument(!(args.stats && args.oneline),
"--name-only and --oneline cannot be used together");
geogit = cli.getGeogit();
LogOp op = geogit.command(LogOp.class).setTopoOrder(args.topo)
.setFirstParentOnly(args.firstParent);
refs = Maps.newHashMap();
if (args.decoration) {
Optional<Ref> head = geogit.command(RefParse.class).setName(Ref.HEAD).call();
refs.put(head.get().getObjectId(), Ref.HEAD);
ImmutableSet<Ref> set = geogit.command(ForEachRef.class).call();
for (Ref ref : set) {
ObjectId id = ref.getObjectId();
if (refs.containsKey(id)) {
refs.put(id, refs.get(id) + ", " + ref.getName());
} else {
refs.put(id, ref.getName());
}
}
}
if (args.all) {
ImmutableSet<Ref> refs = geogit.command(ForEachRef.class).call();
List<ObjectId> list = Lists.newArrayList();
for (Ref ref : refs) {
list.add(ref.getObjectId());
}
Optional<Ref> head = geogit.command(RefParse.class).setName(Ref.HEAD).call();
if (head.isPresent()) {
Ref ref = head.get();
if (ref instanceof SymRef) {
ObjectId id = ref.getObjectId();
list.remove(id);
list.add(id);// put the HEAD ref in the last position, to give it preference
}
}
for (ObjectId id : list) {
op.addCommit(id);
}
} else if (args.branch != null) {
Optional<Ref> obj = geogit.command(RefParse.class).setName(args.branch).call();
Preconditions.checkArgument(obj.isPresent(), "Wrong branch name: " + args.branch);
op.addCommit(obj.get().getObjectId());
}
if (args.author != null && !args.author.isEmpty()) {
op.setAuthor(args.author);
}
if (args.committer != null && !args.committer.isEmpty()) {
op.setCommiter(args.committer);
}
if (args.skip != null) {
op.setSkip(args.skip.intValue());
}
if (args.limit != null) {
op.setLimit(args.limit.intValue());
}
if (args.since != null || args.until != null) {
Date since = new Date(0);
Date until = new Date();
if (args.since != null) {
since = new Date(geogit.command(ParseTimestamp.class).setString(args.since).call());
}
if (args.until != null) {
until = new Date(geogit.command(ParseTimestamp.class).setString(args.until).call());
if (args.all) {
throw new IllegalStateException(
"Cannot specify 'until' commit when listing all branches");
}
}
op.setTimeRange(new Range<Date>(Date.class, since, until));
}
if (!args.sinceUntilPaths.isEmpty()) {
List<String> sinceUntil = ImmutableList.copyOf((Splitter.on("..")
.split(args.sinceUntilPaths.get(0))));
Preconditions.checkArgument(sinceUntil.size() == 1 || sinceUntil.size() == 2,
"Invalid refSpec format, expected [<until>]|[<since>..<until>]: %s",
args.sinceUntilPaths.get(0));
String sinceRefSpec;
String untilRefSpec;
if (sinceUntil.size() == 1) {
// just until was given
sinceRefSpec = null;
untilRefSpec = sinceUntil.get(0);
} else {
sinceRefSpec = sinceUntil.get(0);
untilRefSpec = sinceUntil.get(1);
}
if (sinceRefSpec != null) {
Optional<ObjectId> since;
since = geogit.command(RevParse.class).setRefSpec(sinceRefSpec).call();
Preconditions.checkArgument(since.isPresent(), "Object not found '%s'",
sinceRefSpec);
op.setSince(since.get());
}
if (untilRefSpec != null) {
if (args.all) {
throw new IllegalStateException(
"Cannot specify 'until' commit when listing all branches");
}
Optional<ObjectId> until;
until = geogit.command(RevParse.class).setRefSpec(untilRefSpec).call();
Preconditions.checkArgument(until.isPresent(), "Object not found '%s'",
sinceRefSpec);
op.setUntil(until.get());
}
}
if (!args.pathNames.isEmpty()) {
for (String s : args.pathNames) {
op.addPath(s);
}
}
Iterator<RevCommit> log = op.call();
console = cli.getConsole();
Terminal terminal = console.getTerminal();
switch (args.color) {
case never:
useColor = false;
break;
case always:
useColor = true;
break;
default:
useColor = terminal.isAnsiSupported();
}
if (!log.hasNext()) {
console.println("No commits to show");
console.flush();
return;
}
LogEntryPrinter printer;
if (args.oneline) {
printer = new OneLineConverter();
} else {
LOG_DETAIL detail;
if (args.summary) {
detail = LOG_DETAIL.SUMMARY;
} else if (args.names) {
detail = LOG_DETAIL.NAMES_ONLY;
} else if (args.stats) {
detail = LOG_DETAIL.STATS;
} else {
detail = LOG_DETAIL.NOTHING;
}
printer = new StandardConverter(detail, geogit.getPlatform());
}
while (log.hasNext()) {
printer.print(log.next());
console.flush();
}
}
/**
 * Strategy interface for rendering a single commit to the console; implementations
 * choose the output format (one-line vs. the standard multi-line layout).
 */
interface LogEntryPrinter {
    /**
     * Prints the given commit using the console owned by the enclosing command.
     *
     * @param commit the commit to render
     * @throws IOException if writing to the console fails
     */
    void print(RevCommit commit) throws IOException;
}
/**
 * Renders each commit on a single line: the (possibly abbreviated and decorated)
 * commit id in yellow, followed by the first line of the commit message.
 */
private class OneLineConverter implements LogEntryPrinter {
    @Override
    public void print(RevCommit commit) throws IOException {
        // Only the first line of the message is shown in one-line mode.
        String fullMessage = Strings.nullToEmpty(commit.getMessage());
        String firstLine = Splitter.on('\n').split(fullMessage).iterator().next();
        Ansi out = AnsiDecorator.newAnsi(useColor);
        out.fg(Color.YELLOW).a(getIdAsString(commit.getId())).reset();
        out.a(" ").a(firstLine);
        console.println(out.toString());
    }
}
/**
 * Renders commits in the standard multi-line format (id, merge parents, author,
 * date, subject), optionally followed by per-commit detail — affected paths,
 * diff statistics, or a full summary — depending on the configured {@code LOG_DETAIL}.
 */
private class StandardConverter implements LogEntryPrinter {
    // Reused for every commit; its time zone is shifted per commit in print() below.
    private SimpleDateFormat DATE_FORMAT;
    // Reference time captured once so "N units ago" is stable across the whole listing.
    private long now;
    private LOG_DETAIL detail;
    public StandardConverter(final LOG_DETAIL detail, final Platform platform) {
        now = platform.currentTimeMillis();
        DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z");
        this.detail = detail;
    }
    @Override
    public void print(RevCommit commit) throws IOException {
        Ansi ansi = AnsiDecorator.newAnsi(useColor);
        ansi.a("Commit: ").fg(Color.YELLOW).a(getIdAsString(commit.getId())).reset().newline();
        if (commit.getParentIds().size() > 1) {
            // Multiple parents mean a merge commit; list abbreviated parent ids.
            ansi.a("Merge: ");
            for (ObjectId parent : commit.getParentIds()) {
                ansi.a(parent.toString().substring(0, 7)).a(" ");
            }
            ansi.newline();
        }
        ansi.a("Author: ").fg(Color.GREEN).a(formatPerson(commit.getAuthor())).reset()
                .newline();
        final long timestamp = commit.getAuthor().getTimestamp();
        final int timeZoneOffset = commit.getAuthor().getTimeZoneOffset();
        String friendlyString = estimateSince(now, timestamp);
        // Shift the shared formatter into the author's time zone before formatting.
        DATE_FORMAT.getCalendar().getTimeZone().setRawOffset(timeZoneOffset);
        String formattedDate = DATE_FORMAT.format(timestamp);
        ansi.a("Date: (").fg(Color.RED).a(friendlyString).reset().a(") ").a(formattedDate)
                .newline();
        ansi.a("Subject: ").a(commit.getMessage()).newline();
        // Detail sections below only apply to commits with exactly one parent; a simple
        // old-vs-new diff is not computed for merge (or parentless) commits here.
        if ((detail.equals(LOG_DETAIL.NAMES_ONLY)) && commit.getParentIds().size() == 1) {
            ansi.a("Affected paths:").newline();
            Iterator<DiffEntry> diff = geogit.command(DiffOp.class)
                    .setOldVersion(commit.parentN(0).get()).setNewVersion(commit.getId())
                    .call();
            DiffEntry diffEntry;
            while (diff.hasNext()) {
                diffEntry = diff.next();
                ansi.a("\t" + diffEntry.newPath()).newline();
            }
        }
        if (detail.equals(LOG_DETAIL.STATS) && commit.getParentIds().size() == 1) {
            Iterator<DiffEntry> diff = geogit.command(DiffOp.class)
                    .setOldVersion(commit.parentN(0).get()).setNewVersion(commit.getId())
                    .call();
            // Tally the diff entries by change type for the stats line.
            int adds = 0, deletes = 0, changes = 0;
            DiffEntry diffEntry;
            while (diff.hasNext()) {
                diffEntry = diff.next();
                switch (diffEntry.changeType()) {
                case ADDED:
                    ++adds;
                    break;
                case REMOVED:
                    ++deletes;
                    break;
                case MODIFIED:
                    ++changes;
                    break;
                }
            }
            ansi.a("Changes:");
            ansi.fg(Color.GREEN).a(adds).reset().a(" features added, ").fg(Color.YELLOW)
                    .a(changes).reset().a(" changed, ").fg(Color.RED).a(deletes).reset()
                    .a(" deleted.").reset().newline();
        }
        console.println(ansi.toString());
        // NOTE(review): in the SUMMARY branch below, "Changes:" is appended to "ansi"
        // AFTER ansi has already been printed above, and ansi is never printed again —
        // the header appears lost while the per-entry diffs go straight to the console.
        // Confirm whether this is intentional.
        if (detail.equals(LOG_DETAIL.SUMMARY) && commit.getParentIds().size() == 1) {
            ansi.a("Changes:").newline();
            Iterator<DiffEntry> diff = geogit.command(DiffOp.class)
                    .setOldVersion(commit.parentN(0).get()).setNewVersion(commit.getId())
                    .call();
            DiffEntry diffEntry;
            while (diff.hasNext()) {
                diffEntry = diff.next();
                // The enclosing guard already ensures SUMMARY; this inner check is redundant.
                if (detail.equals(LOG_DETAIL.SUMMARY)) {
                    new FullDiffPrinter(true, false).print(geogit, console, diffEntry);
                }
            }
        }
    }
}
/**
 * Formats a {@link RevPerson} as {@code "Name <email>"}, substituting a
 * placeholder when the name is absent and omitting the email when not present.
 *
 * @param person the person to format
 * @return the human-readable representation
 * @see RevPerson
 */
private String formatPerson(RevPerson person) {
    String result = person.getName().or("<name not set>");
    if (person.getEmail().isPresent()) {
        result += " <" + person.getEmail().get() + ">";
    }
    return result;
}
/**
 * Produces a rough, human-readable description of how long ago {@code timestamp}
 * occurred relative to {@code now} (e.g. {@code "3 weeks ago"}, {@code "just now"}).
 *
 * @param now       the reference time, in milliseconds
 * @param timestamp the earlier time, in milliseconds
 * @return the approximate elapsed-time description
 */
private String estimateSince(final long now, long timestamp) {
    final long seconds = 1000;
    final long minutes = seconds * 60;
    final long hours = minutes * 60;
    final long days = hours * 24;
    // Largest unit first; the first unit strictly smaller than the elapsed time wins.
    final long[] thresholds = { days * 365, days * 30, days * 7, days, hours, minutes, seconds };
    final String[] labels = { "years", "months", "weeks", "days", "hours", "minutes", "seconds" };
    final long diff = now - timestamp;
    for (int i = 0; i < thresholds.length; i++) {
        if (diff > thresholds[i]) {
            return diff / thresholds[i] + " " + labels[i] + " ago";
        }
    }
    return "just now";
}
/**
 * Renders an {@link ObjectId} as text, abbreviating it to seven characters when
 * requested via the command arguments, and appending in parentheses any ref
 * names known to point at it.
 *
 * @param id the object id to render
 * @return the decorated (and possibly abbreviated) id string
 */
private String getIdAsString(ObjectId id) {
    String text = args.abbrev ? id.toString().substring(0, 7) : id.toString();
    if (refs.containsKey(id)) {
        text += " (" + refs.get(id) + ")";
    }
    return text;
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.sql.avatica;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.Inject;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.logger.Logger;
import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.PlannerFactory;
import org.apache.calcite.avatica.MetaImpl;
import org.apache.calcite.avatica.MissingResultsException;
import org.apache.calcite.avatica.NoSuchConnectionException;
import org.apache.calcite.avatica.NoSuchStatementException;
import org.apache.calcite.avatica.QueryState;
import org.apache.calcite.avatica.remote.TypedValue;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Druid's server-side implementation of Avatica's {@link MetaImpl}: it backs the Avatica
 * JDBC endpoint by opening logical connections, planning SQL through a
 * {@link PlannerFactory}, serving result frames, and answering JDBC metadata queries via
 * INFORMATION_SCHEMA. Connection state lives in a concurrent map; idle connections are
 * expired by a single-threaded daemon scheduler. Thread-safe.
 */
public class DruidMeta extends MetaImpl
{
  private static final Logger log = new Logger(DruidMeta.class);
  // Client connection properties that must NOT be copied into the connection context
  // (credentials).
  private static final Set<String> SKIP_PROPERTIES = ImmutableSet.of("user", "password");
  private final PlannerFactory plannerFactory;
  // Schedules idle-timeout callbacks for connections; single daemon thread.
  private final ScheduledExecutorService exec;
  private final AvaticaServerConfig config;
  // Used to track logical connections.
  private final Map<String, DruidConnection> connections = new ConcurrentHashMap<>();
  // Number of connections reserved in "connections". May be higher than the actual number of connections at times,
  // such as when we're reserving space to open a new one.
  private final AtomicInteger connectionCount = new AtomicInteger();

  @Inject
  public DruidMeta(final PlannerFactory plannerFactory, final AvaticaServerConfig config)
  {
    super(null);
    this.plannerFactory = Preconditions.checkNotNull(plannerFactory, "plannerFactory");
    this.config = config;
    this.exec = Executors.newSingleThreadScheduledExecutor(
        new ThreadFactoryBuilder()
            .setNameFormat(String.format("DruidMeta@%s-ScheduledExecutor", Integer.toHexString(hashCode())))
            .setDaemon(true)
            .build()
    );
  }

  /**
   * Opens a logical connection, copying every client property except credentials into the
   * connection context.
   */
  @Override
  public void openConnection(final ConnectionHandle ch, final Map<String, String> info)
  {
    // Build connection context.
    final ImmutableMap.Builder<String, Object> context = ImmutableMap.builder();
    for (Map.Entry<String, String> entry : info.entrySet()) {
      if (!SKIP_PROPERTIES.contains(entry.getKey())) {
        context.put(entry);
      }
    }
    openDruidConnection(ch.id, context.build());
  }

  /** Closes and forgets the connection; silently does nothing for an unknown id. */
  @Override
  public void closeConnection(final ConnectionHandle ch)
  {
    final DruidConnection druidConnection = connections.remove(ch.id);
    if (druidConnection != null) {
      // Only decrement for a connection we actually removed, keeping the reservation
      // counter consistent with the map.
      connectionCount.decrementAndGet();
      druidConnection.close();
    }
  }

  /** Echoes the proposed properties back; the lookup itself refreshes the idle timeout. */
  @Override
  public ConnectionProperties connectionSync(final ConnectionHandle ch, final ConnectionProperties connProps)
  {
    // getDruidConnection re-syncs it.
    getDruidConnection(ch.id);
    return connProps;
  }

  /** Creates a new statement on the given connection. */
  @Override
  public StatementHandle createStatement(final ConnectionHandle ch)
  {
    final DruidStatement druidStatement = getDruidConnection(ch.id).createStatement();
    return new StatementHandle(ch.id, druidStatement.getStatementId(), null);
  }

  /** Creates and prepares (plans) a statement for the given SQL without executing it. */
  @Override
  public StatementHandle prepare(
      final ConnectionHandle ch,
      final String sql,
      final long maxRowCount
  )
  {
    final StatementHandle statement = createStatement(ch);
    final DruidStatement druidStatement = getDruidStatement(statement);
    statement.signature = druidStatement.prepare(plannerFactory, sql, maxRowCount).getSignature();
    return statement;
  }

  @Deprecated
  @Override
  public ExecuteResult prepareAndExecute(
      final StatementHandle h,
      final String sql,
      final long maxRowCount,
      final PrepareCallback callback
  ) throws NoSuchStatementException
  {
    // Avatica doesn't call this.
    throw new UnsupportedOperationException("Deprecated");
  }

  /**
   * Prepares and executes a statement in one round trip, returning the first frame of
   * results (sized by {@link #getEffectiveMaxRowsPerFrame(int)}).
   */
  @Override
  public ExecuteResult prepareAndExecute(
      final StatementHandle statement,
      final String sql,
      final long maxRowCount,
      final int maxRowsInFirstFrame,
      final PrepareCallback callback
  ) throws NoSuchStatementException
  {
    // Ignore "callback", this class is designed for use with LocalService which doesn't use it.
    final DruidStatement druidStatement = getDruidStatement(statement);
    final Signature signature = druidStatement.prepare(plannerFactory, sql, maxRowCount).getSignature();
    final Frame firstFrame = druidStatement.execute()
                                           .nextFrame(
                                               DruidStatement.START_OFFSET,
                                               getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame)
                                           );
    return new ExecuteResult(
        ImmutableList.of(
            MetaResultSet.create(
                statement.connectionId,
                statement.id,
                false,
                signature,
                firstFrame
            )
        )
    );
  }

  @Override
  public ExecuteBatchResult prepareAndExecuteBatch(
      final StatementHandle statement,
      final List<String> sqlCommands
  ) throws NoSuchStatementException
  {
    // Batch statements are used for bulk updates, but we don't support updates.
    throw new UnsupportedOperationException("Batch statements not supported");
  }

  @Override
  public ExecuteBatchResult executeBatch(
      final StatementHandle statement,
      final List<List<TypedValue>> parameterValues
  ) throws NoSuchStatementException
  {
    // Batch statements are used for bulk updates, but we don't support updates.
    throw new UnsupportedOperationException("Batch statements not supported");
  }

  /** Fetches the next frame of rows starting at {@code offset}. */
  @Override
  public Frame fetch(
      final StatementHandle statement,
      final long offset,
      final int fetchMaxRowCount
  ) throws NoSuchStatementException, MissingResultsException
  {
    return getDruidStatement(statement).nextFrame(offset, getEffectiveMaxRowsPerFrame(fetchMaxRowCount));
  }

  @Deprecated
  @Override
  public ExecuteResult execute(
      final StatementHandle statement,
      final List<TypedValue> parameterValues,
      final long maxRowCount
  ) throws NoSuchStatementException
  {
    // Avatica doesn't call this.
    throw new UnsupportedOperationException("Deprecated");
  }

  /**
   * Executes a previously prepared statement. Bind parameters are not supported, so
   * {@code parameterValues} must be empty.
   */
  @Override
  public ExecuteResult execute(
      final StatementHandle statement,
      final List<TypedValue> parameterValues,
      final int maxRowsInFirstFrame
  ) throws NoSuchStatementException
  {
    Preconditions.checkArgument(parameterValues.isEmpty(), "Expected parameterValues to be empty");
    final DruidStatement druidStatement = getDruidStatement(statement);
    final Signature signature = druidStatement.getSignature();
    final Frame firstFrame = druidStatement.execute()
                                           .nextFrame(
                                               DruidStatement.START_OFFSET,
                                               getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame)
                                           );
    return new ExecuteResult(
        ImmutableList.of(
            MetaResultSet.create(
                statement.connectionId,
                statement.id,
                false,
                signature,
                firstFrame
            )
        )
    );
  }

  @Override
  public Iterable<Object> createIterable(
      final StatementHandle statement,
      final QueryState state,
      final Signature signature,
      final List<TypedValue> parameterValues,
      final Frame firstFrame
  )
  {
    // Avatica calls this but ignores the return value.
    return null;
  }

  /** Closes a statement if it exists; nonexistent connections/statements are ignored. */
  @Override
  public void closeStatement(final StatementHandle h)
  {
    // connections.get, not getDruidConnection, since we want to silently ignore nonexistent statements
    final DruidConnection druidConnection = connections.get(h.connectionId);
    if (druidConnection != null) {
      final DruidStatement druidStatement = druidConnection.getStatement(h.id);
      if (druidStatement != null) {
        druidStatement.close();
      }
    }
  }

  /**
   * Verifies the client's view of a statement's cursor position matches ours.
   * Returns true when more results remain.
   */
  @Override
  public boolean syncResults(
      final StatementHandle sh,
      final QueryState state,
      final long offset
  ) throws NoSuchStatementException
  {
    final DruidStatement druidStatement = getDruidStatement(sh);
    final boolean isDone = druidStatement.isDone();
    final long currentOffset = druidStatement.getCurrentOffset();
    if (currentOffset != offset) {
      throw new ISE("Requested offset[%,d] does not match currentOffset[%,d]", offset, currentOffset);
    }
    return !isDone;
  }

  @Override
  public void commit(final ConnectionHandle ch)
  {
    // We don't support writes, just ignore commits.
  }

  @Override
  public void rollback(final ConnectionHandle ch)
  {
    // We don't support writes, just ignore rollbacks.
  }

  @Override
  public Map<DatabaseProperty, Object> getDatabaseProperties(final ConnectionHandle ch)
  {
    return ImmutableMap.of();
  }

  /** JDBC catalog listing, served from INFORMATION_SCHEMA.SCHEMATA. */
  @Override
  public MetaResultSet getCatalogs(final ConnectionHandle ch)
  {
    final String sql = "SELECT\n"
                       + "  DISTINCT CATALOG_NAME AS TABLE_CAT\n"
                       + "FROM\n"
                       + "  INFORMATION_SCHEMA.SCHEMATA\n"
                       + "ORDER BY\n"
                       + "  TABLE_CAT\n";
    return sqlResultSet(ch, sql);
  }

  /**
   * JDBC schema listing. Filter values are escaped with
   * {@link Calcites#escapeStringLiteral} before being embedded in the generated SQL.
   */
  @Override
  public MetaResultSet getSchemas(
      final ConnectionHandle ch,
      final String catalog,
      final Pat schemaPattern
  )
  {
    final List<String> whereBuilder = new ArrayList<>();
    if (catalog != null) {
      whereBuilder.add("SCHEMATA.CATALOG_NAME = " + Calcites.escapeStringLiteral(catalog));
    }
    if (schemaPattern.s != null) {
      whereBuilder.add("SCHEMATA.SCHEMA_NAME LIKE " + Calcites.escapeStringLiteral(schemaPattern.s));
    }
    final String where = whereBuilder.isEmpty() ? "" : "WHERE " + Joiner.on(" AND ").join(whereBuilder);
    final String sql = "SELECT\n"
                       + "  SCHEMA_NAME AS TABLE_SCHEM,\n"
                       + "  CATALOG_NAME AS TABLE_CATALOG\n"
                       + "FROM\n"
                       + "  INFORMATION_SCHEMA.SCHEMATA\n"
                       + where + "\n"
                       + "ORDER BY\n"
                       + "  TABLE_CATALOG, TABLE_SCHEM\n";
    return sqlResultSet(ch, sql);
  }

  /** JDBC table listing; many JDBC columns have no Druid equivalent and are NULL. */
  @Override
  public MetaResultSet getTables(
      final ConnectionHandle ch,
      final String catalog,
      final Pat schemaPattern,
      final Pat tableNamePattern,
      final List<String> typeList
  )
  {
    final List<String> whereBuilder = new ArrayList<>();
    if (catalog != null) {
      whereBuilder.add("TABLES.TABLE_CATALOG = " + Calcites.escapeStringLiteral(catalog));
    }
    if (schemaPattern.s != null) {
      whereBuilder.add("TABLES.TABLE_SCHEMA LIKE " + Calcites.escapeStringLiteral(schemaPattern.s));
    }
    if (tableNamePattern.s != null) {
      whereBuilder.add("TABLES.TABLE_NAME LIKE " + Calcites.escapeStringLiteral(tableNamePattern.s));
    }
    if (typeList != null) {
      final List<String> escapedTypes = new ArrayList<>();
      for (String type : typeList) {
        escapedTypes.add(Calcites.escapeStringLiteral(type));
      }
      whereBuilder.add("TABLES.TABLE_TYPE IN (" + Joiner.on(", ").join(escapedTypes) + ")");
    }
    final String where = whereBuilder.isEmpty() ? "" : "WHERE " + Joiner.on(" AND ").join(whereBuilder);
    final String sql = "SELECT\n"
                       + "  TABLE_CATALOG AS TABLE_CAT,\n"
                       + "  TABLE_SCHEMA AS TABLE_SCHEM,\n"
                       + "  TABLE_NAME AS TABLE_NAME,\n"
                       + "  TABLE_TYPE AS TABLE_TYPE,\n"
                       + "  CAST(NULL AS VARCHAR) AS REMARKS,\n"
                       + "  CAST(NULL AS VARCHAR) AS TYPE_CAT,\n"
                       + "  CAST(NULL AS VARCHAR) AS TYPE_SCHEM,\n"
                       + "  CAST(NULL AS VARCHAR) AS TYPE_NAME,\n"
                       + "  CAST(NULL AS VARCHAR) AS SELF_REFERENCING_COL_NAME,\n"
                       + "  CAST(NULL AS VARCHAR) AS REF_GENERATION\n"
                       + "FROM\n"
                       + "  INFORMATION_SCHEMA.TABLES\n"
                       + where + "\n"
                       + "ORDER BY\n"
                       + "  TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME\n";
    return sqlResultSet(ch, sql);
  }

  /** JDBC column listing; sizes/precisions Druid doesn't track are reported as -1. */
  @Override
  public MetaResultSet getColumns(
      final ConnectionHandle ch,
      final String catalog,
      final Pat schemaPattern,
      final Pat tableNamePattern,
      final Pat columnNamePattern
  )
  {
    final List<String> whereBuilder = new ArrayList<>();
    if (catalog != null) {
      whereBuilder.add("COLUMNS.TABLE_CATALOG = " + Calcites.escapeStringLiteral(catalog));
    }
    if (schemaPattern.s != null) {
      whereBuilder.add("COLUMNS.TABLE_SCHEMA LIKE " + Calcites.escapeStringLiteral(schemaPattern.s));
    }
    if (tableNamePattern.s != null) {
      whereBuilder.add("COLUMNS.TABLE_NAME LIKE " + Calcites.escapeStringLiteral(tableNamePattern.s));
    }
    if (columnNamePattern.s != null) {
      whereBuilder.add("COLUMNS.COLUMN_NAME LIKE " + Calcites.escapeStringLiteral(columnNamePattern.s));
    }
    final String where = whereBuilder.isEmpty() ? "" : "WHERE " + Joiner.on(" AND ").join(whereBuilder);
    final String sql = "SELECT\n"
                       + "  TABLE_CATALOG AS TABLE_CAT,\n"
                       + "  TABLE_SCHEMA AS TABLE_SCHEM,\n"
                       + "  TABLE_NAME AS TABLE_NAME,\n"
                       + "  COLUMN_NAME AS COLUMN_NAME,\n"
                       + "  CAST(JDBC_TYPE AS INTEGER) AS DATA_TYPE,\n"
                       + "  DATA_TYPE AS TYPE_NAME,\n"
                       + "  -1 AS COLUMN_SIZE,\n"
                       + "  -1 AS BUFFER_LENGTH,\n"
                       + "  -1 AS DECIMAL_DIGITS,\n"
                       + "  -1 AS NUM_PREC_RADIX,\n"
                       + "  CASE IS_NULLABLE WHEN 'YES' THEN 1 ELSE 0 END AS NULLABLE,\n"
                       + "  CAST(NULL AS VARCHAR) AS REMARKS,\n"
                       + "  COLUMN_DEFAULT AS COLUMN_DEF,\n"
                       + "  -1 AS SQL_DATA_TYPE,\n"
                       + "  -1 AS SQL_DATETIME_SUB,\n"
                       + "  -1 AS CHAR_OCTET_LENGTH,\n"
                       + "  CAST(ORDINAL_POSITION AS INTEGER) AS ORDINAL_POSITION,\n"
                       + "  IS_NULLABLE AS IS_NULLABLE,\n"
                       + "  CAST(NULL AS VARCHAR) AS SCOPE_CATALOG,\n"
                       + "  CAST(NULL AS VARCHAR) AS SCOPE_SCHEMA,\n"
                       + "  CAST(NULL AS VARCHAR) AS SCOPE_TABLE,\n"
                       + "  -1 AS SOURCE_DATA_TYPE,\n"
                       + "  'NO' AS IS_AUTOINCREMENT,\n"
                       + "  'NO' AS IS_GENERATEDCOLUMN\n"
                       + "FROM\n"
                       + "  INFORMATION_SCHEMA.COLUMNS\n"
                       + where + "\n"
                       + "ORDER BY\n"
                       + "  TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION\n";
    return sqlResultSet(ch, sql);
  }

  /** JDBC table-type listing. */
  @Override
  public MetaResultSet getTableTypes(final ConnectionHandle ch)
  {
    final String sql = "SELECT\n"
                       + "  DISTINCT TABLE_TYPE AS TABLE_TYPE\n"
                       + "FROM\n"
                       + "  INFORMATION_SCHEMA.TABLES\n"
                       + "ORDER BY\n"
                       + "  TABLE_TYPE\n";
    return sqlResultSet(ch, sql);
  }

  @VisibleForTesting
  void closeAllConnections()
  {
    // Snapshot the key set so closeConnection's removals don't race the iteration.
    for (String connectionId : ImmutableSet.copyOf(connections.keySet())) {
      closeConnection(new ConnectionHandle(connectionId));
    }
  }

  /**
   * Reserves a slot, opens a new connection, and starts its idle-timeout timer. If the
   * reservation exceeds the limit, one empty connection may be evicted to make room;
   * the reservation is rolled back (counter decremented) on every failure path.
   *
   * @throws ISE if the connection limit is reached or the id is already open
   */
  private DruidConnection openDruidConnection(final String connectionId, final Map<String, Object> context)
  {
    if (connectionCount.incrementAndGet() > config.getMaxConnections()) {
      // O(connections) but we don't expect this to happen often (it's a last-ditch effort to clear out
      // abandoned connections) or to have too many connections.
      final Iterator<Map.Entry<String, DruidConnection>> entryIterator = connections.entrySet().iterator();
      while (entryIterator.hasNext()) {
        final Map.Entry<String, DruidConnection> entry = entryIterator.next();
        if (entry.getValue().closeIfEmpty()) {
          entryIterator.remove();
          // Removed a connection, decrement the counter.
          connectionCount.decrementAndGet();
          break;
        }
      }
      if (connectionCount.get() > config.getMaxConnections()) {
        // We aren't going to make a connection after all.
        connectionCount.decrementAndGet();
        throw new ISE("Too many connections, limit is[%,d]", config.getMaxConnections());
      }
    }
    final DruidConnection putResult = connections.putIfAbsent(
        connectionId,
        new DruidConnection(connectionId, config.getMaxStatementsPerConnection(), context)
    );
    if (putResult != null) {
      // Didn't actually insert the connection.
      connectionCount.decrementAndGet();
      throw new ISE("Connection[%s] already open.", connectionId);
    }
    log.debug("Connection[%s] opened.", connectionId);
    // Call getDruidConnection to start the timeout timer.
    return getDruidConnection(connectionId);
  }

  /**
   * Get a connection, or throw an exception if it doesn't exist. Also refreshes the timeout timer.
   *
   * @param connectionId connection id
   *
   * @return the connection
   *
   * @throws NoSuchConnectionException if the connection id doesn't exist
   */
  @Nonnull
  private DruidConnection getDruidConnection(final String connectionId)
  {
    final DruidConnection connection = connections.get(connectionId);
    if (connection == null) {
      throw new NoSuchConnectionException(connectionId);
    }
    // sync() replaces the connection's pending timeout task with this fresh one.
    return connection.sync(
        exec.schedule(
            () -> {
              log.debug("Connection[%s] timed out.", connectionId);
              closeConnection(new ConnectionHandle(connectionId));
            },
            new Interval(new DateTime(), config.getConnectionIdleTimeout()).toDurationMillis(),
            TimeUnit.MILLISECONDS
        )
    );
  }

  /**
   * Gets a statement, refreshing its connection's timeout timer.
   *
   * @throws NoSuchConnectionException if the connection doesn't exist
   * @throws IllegalStateException     if the statement doesn't exist on that connection
   */
  @Nonnull
  private DruidStatement getDruidStatement(final StatementHandle statement)
  {
    final DruidConnection connection = getDruidConnection(statement.connectionId);
    final DruidStatement druidStatement = connection.getStatement(statement.id);
    Preconditions.checkState(druidStatement != null, "Statement[%s] does not exist", statement.id);
    return druidStatement;
  }

  /**
   * Runs metadata SQL on a throwaway statement and returns its single result set, which
   * must fit in one frame. The statement is always closed.
   */
  private MetaResultSet sqlResultSet(final ConnectionHandle ch, final String sql)
  {
    final StatementHandle statement = createStatement(ch);
    try {
      final ExecuteResult result = prepareAndExecute(statement, sql, -1, -1, null);
      final MetaResultSet metaResultSet = Iterables.getOnlyElement(result.resultSets);
      if (!metaResultSet.firstFrame.done) {
        throw new ISE("Expected all results to be in a single frame!");
      }
      return metaResultSet;
    }
    catch (Exception e) {
      throw Throwables.propagate(e);
    }
    finally {
      closeStatement(statement);
    }
  }

  /**
   * Reconciles the client-requested frame size with the server-configured cap; a negative
   * value on either side means "no limit" from that side.
   */
  private int getEffectiveMaxRowsPerFrame(int clientMaxRowsPerFrame)
  {
    // no configured row limit, use the client provided limit
    if (config.getMaxRowsPerFrame() < 0) {
      return clientMaxRowsPerFrame;
    }
    // client provided no row limit, use the configured row limit
    if (clientMaxRowsPerFrame < 0) {
      return config.getMaxRowsPerFrame();
    }
    return Math.min(clientMaxRowsPerFrame, config.getMaxRowsPerFrame());
  }
}
| |
/*******************************************************************************
* Copyright 2012-present Pixate, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.pixate.freestyle.styling.parsing;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import java.util.Stack;
import android.content.Context;
import com.pixate.freestyle.cg.math.PXDimension;
import com.pixate.freestyle.parsing.Lexeme;
import com.pixate.freestyle.parsing.PXParserBase;
import com.pixate.freestyle.styling.PXDeclaration;
import com.pixate.freestyle.styling.PXRuleSet;
import com.pixate.freestyle.styling.PXStylesheet;
import com.pixate.freestyle.styling.PXStylesheet.PXStyleSheetOrigin;
import com.pixate.freestyle.styling.animation.PXKeyframe;
import com.pixate.freestyle.styling.animation.PXKeyframeBlock;
import com.pixate.freestyle.styling.combinators.PXAdjacentSiblingCombinator;
import com.pixate.freestyle.styling.combinators.PXChildCombinator;
import com.pixate.freestyle.styling.combinators.PXCombinatorBase;
import com.pixate.freestyle.styling.combinators.PXDescendantCombinator;
import com.pixate.freestyle.styling.combinators.PXSiblingCombinator;
import com.pixate.freestyle.styling.media.PXMediaExpression;
import com.pixate.freestyle.styling.media.PXMediaExpressionGroup;
import com.pixate.freestyle.styling.media.PXNamedMediaExpression;
import com.pixate.freestyle.styling.selectors.PXAttributeSelector;
import com.pixate.freestyle.styling.selectors.PXAttributeSelectorOperator;
import com.pixate.freestyle.styling.selectors.PXClassSelector;
import com.pixate.freestyle.styling.selectors.PXIdSelector;
import com.pixate.freestyle.styling.selectors.PXNotPseudoClass;
import com.pixate.freestyle.styling.selectors.PXPseudoClassFunction;
import com.pixate.freestyle.styling.selectors.PXPseudoClassPredicate;
import com.pixate.freestyle.styling.selectors.PXPseudoClassSelector;
import com.pixate.freestyle.styling.selectors.PXSelector;
import com.pixate.freestyle.styling.selectors.PXTypeSelector;
import com.pixate.freestyle.styling.selectors.PXAttributeSelectorOperator.PXAttributeSelectorOperatorType;
import com.pixate.freestyle.styling.selectors.PXPseudoClassFunction.PXPseudoClassFunctionType;
import com.pixate.freestyle.styling.selectors.PXPseudoClassPredicate.PXPseudoClassPredicateType;
import com.pixate.freestyle.util.CollectionUtil;
import com.pixate.freestyle.util.IOUtil;
import com.pixate.freestyle.util.PXLog;
import com.pixate.freestyle.util.StringUtil;
/**
* Pixate stylesheet parser.
*/
public class PXStylesheetParser extends PXParserBase<PXStylesheetTokenType> {
private static String TAG = PXStylesheetParser.class.getSimpleName();
private static EnumSet<PXStylesheetTokenType> SELECTOR_SEQUENCE_SET;
private static EnumSet<PXStylesheetTokenType> SELECTOR_OPERATOR_SET;
private static EnumSet<PXStylesheetTokenType> SELECTOR_SET;
private static EnumSet<PXStylesheetTokenType> TYPE_SELECTOR_SET;
private static EnumSet<PXStylesheetTokenType> SELECTOR_EXPRESSION_SET;
private static EnumSet<PXStylesheetTokenType> TYPE_NAME_SET;
private static EnumSet<PXStylesheetTokenType> ATTRIBUTE_OPERATOR_SET;
private static EnumSet<PXStylesheetTokenType> DECLARATION_DELIMITER_SET;
private static EnumSet<PXStylesheetTokenType> KEYFRAME_SELECTOR_SET;
private static EnumSet<PXStylesheetTokenType> NAMESPACE_SET;
private static EnumSet<PXStylesheetTokenType> IMPORT_SET;
private static EnumSet<PXStylesheetTokenType> QUERY_VALUE_SET;
private static EnumSet<PXStylesheetTokenType> ARCHAIC_PSEUDO_ELEMENTS_SET;
//@formatter:off
// Token-type sets consulted by the recursive-descent parser. Built once at class load;
// sets that are unions of other sets are composed via addAll below.
static {
    // Tokens that can start a type name in a selector.
    TYPE_NAME_SET = EnumSet.of(
            PXStylesheetTokenType.IDENTIFIER,
            PXStylesheetTokenType.STAR);
    // Type selectors: a type name or a namespace pipe.
    TYPE_SELECTOR_SET = EnumSet.of(PXStylesheetTokenType.PIPE);
    TYPE_SELECTOR_SET.addAll(TYPE_NAME_SET);
    // Tokens that can start a selector expression (id, class, attribute,
    // or any of the supported pseudo-classes).
    SELECTOR_EXPRESSION_SET = EnumSet.of(
            PXStylesheetTokenType.ID,
            PXStylesheetTokenType.CLASS,
            PXStylesheetTokenType.LBRACKET,
            PXStylesheetTokenType.COLON,
            PXStylesheetTokenType.NOT_PSEUDO_CLASS,
            PXStylesheetTokenType.LINK_PSEUDO_CLASS,
            PXStylesheetTokenType.VISITED_PSEUDO_CLASS,
            PXStylesheetTokenType.HOVER_PSEUDO_CLASS,
            PXStylesheetTokenType.ACTIVE_PSEUDO_CLASS,
            PXStylesheetTokenType.FOCUS_PSEUDO_CLASS,
            PXStylesheetTokenType.TARGET_PSEUDO_CLASS,
            PXStylesheetTokenType.LANG_PSEUDO_CLASS,
            PXStylesheetTokenType.ENABLED_PSEUDO_CLASS,
            PXStylesheetTokenType.CHECKED_PSEUDO_CLASS,
            PXStylesheetTokenType.INDETERMINATE_PSEUDO_CLASS,
            PXStylesheetTokenType.ROOT_PSEUDO_CLASS,
            PXStylesheetTokenType.NTH_CHILD_PSEUDO_CLASS,
            PXStylesheetTokenType.NTH_LAST_CHILD_PSEUDO_CLASS,
            PXStylesheetTokenType.NTH_OF_TYPE_PSEUDO_CLASS,
            PXStylesheetTokenType.NTH_LAST_OF_TYPE_PSEUDO_CLASS,
            PXStylesheetTokenType.FIRST_CHILD_PSEUDO_CLASS,
            PXStylesheetTokenType.LAST_CHILD_PSEUDO_CLASS,
            PXStylesheetTokenType.FIRST_OF_TYPE_PSEUDO_CLASS,
            PXStylesheetTokenType.LAST_OF_TYPE_PSEUDO_CLASS,
            PXStylesheetTokenType.ONLY_CHILD_PSEUDO_CLASS,
            PXStylesheetTokenType.ONLY_OF_TYPE_PSEUDO_CLASS,
            PXStylesheetTokenType.EMPTY_PSEUDO_CLASS);
    // CSS combinators: adjacent sibling (+), child (>), general sibling (~).
    SELECTOR_OPERATOR_SET = EnumSet.of(
            PXStylesheetTokenType.PLUS,
            PXStylesheetTokenType.GREATER_THAN,
            PXStylesheetTokenType.TILDE);
    // Any token that may continue a selector sequence.
    SELECTOR_SEQUENCE_SET = EnumSet.of(
            PXStylesheetTokenType.PIPE,
            PXStylesheetTokenType.IDENTIFIER,
            PXStylesheetTokenType.STAR);
    SELECTOR_SEQUENCE_SET.addAll(SELECTOR_EXPRESSION_SET);
    SELECTOR_SEQUENCE_SET.addAll(SELECTOR_OPERATOR_SET);
    // Any token that may start a selector.
    SELECTOR_SET = EnumSet.of(PXStylesheetTokenType.PIPE);
    SELECTOR_SET.addAll(TYPE_NAME_SET);
    SELECTOR_SET.addAll(SELECTOR_EXPRESSION_SET);
    // Attribute selector operators: ^=, $=, *=, =, ~=, |=.
    ATTRIBUTE_OPERATOR_SET = EnumSet.of(
            PXStylesheetTokenType.STARTS_WITH,
            PXStylesheetTokenType.ENDS_WITH,
            PXStylesheetTokenType.CONTAINS,
            PXStylesheetTokenType.EQUAL,
            PXStylesheetTokenType.LIST_CONTAINS,
            PXStylesheetTokenType.EQUALS_WITH_HYPHEN);
    // Tokens that terminate a declaration.
    DECLARATION_DELIMITER_SET = EnumSet.of(
            PXStylesheetTokenType.SEMICOLON,
            PXStylesheetTokenType.RCURLY);
    // Keyframe selectors: "from"/"to" identifiers or percentages.
    KEYFRAME_SELECTOR_SET = EnumSet.of(
            PXStylesheetTokenType.IDENTIFIER,
            PXStylesheetTokenType.PERCENTAGE);
    // @namespace and @import both accept a string or a url().
    NAMESPACE_SET = EnumSet.of(
            PXStylesheetTokenType.STRING,
            PXStylesheetTokenType.URL);
    IMPORT_SET = EnumSet.of(
            PXStylesheetTokenType.STRING,
            PXStylesheetTokenType.URL);
    // Values allowed in @media query expressions.
    QUERY_VALUE_SET = EnumSet.of(
            PXStylesheetTokenType.IDENTIFIER,
            PXStylesheetTokenType.NUMBER,
            PXStylesheetTokenType.LENGTH,
            PXStylesheetTokenType.STRING);
    // Legacy single-colon pseudo-elements from CSS2.
    ARCHAIC_PSEUDO_ELEMENTS_SET = EnumSet.of(
            PXStylesheetTokenType.FIRST_LINE_PSEUDO_ELEMENT,
            PXStylesheetTokenType.FIRST_LETTER_PSEUDO_ELEMENT,
            PXStylesheetTokenType.BEFORE_PSEUDO_ELEMENT,
            PXStylesheetTokenType.AFTER_PSEUDO_ELEMENT);
}
//@formatter:on
private PXStylesheetLexer lexer;
private PXStylesheet currentStyleSheet;
private Stack<String> activeImports;
private Stack<PXStylesheetLexer> lexerStack;
// Application context
private Context context;
/**
 * Constructs a new parser with no Android {@link Context}; one may be supplied
 * later via {@link #setContext(Context)}.
 */
public PXStylesheetParser() {
    this(null);
}
/**
 * Constructs a new parser bound to the given {@link Context} (e.g. the
 * application context).
 *
 * @param context the context this parser will use; may be {@code null}
 */
public PXStylesheetParser(Context context) {
    this.context = context;
    lexer = new PXStylesheetLexer();
}
/**
 * Sets the {@link Context} that will be used by this parser (e.g. the
 * application context), replacing any previously set one.
 *
 * @param context the new context
 */
public void setContext(Context context) {
    this.context = context;
}
/**
 * Returns the {@link Context} that is used by this parser.
 *
 * @return A {@link Context}; may be null when none was set
 */
public Context getContext() {
return context;
}
/**
 * Parses a style-sheet that was read from a file.
 *
 * @param source The CSS source text
 * @param origin The origin (e.g. application, user, inline) of the stylesheet
 * @param fileName The file the source came from; recorded on the result
 *            and registered up-front so an {@code @import} of the same
 *            file is detected as a cycle
 * @return The parsed {@link PXStylesheet}; never null — errors are
 *         collected on the parser rather than thrown
 */
public PXStylesheet parse(String source, PXStyleSheetOrigin origin, String fileName) {
// add the source file name to prevent @imports from importing it as
// well
addImportName(fileName);
// parse
PXStylesheet result = parse(source, origin);
// associate file path on resulting stylesheet
result.setFilePath(fileName);
return result;
}
/**
 * Parses style-sheet source into a {@link PXStylesheet}. Top-level
 * at-rules (@import, @namespace, @keyframes, @media, @font-face) are
 * dispatched to their own parsers; anything else is treated as a rule set.
 *
 * @param source The CSS source text
 * @param origin The origin of the stylesheet
 * @return The parsed {@link PXStylesheet}; never null — parse errors are
 *         collected via addError rather than thrown
 */
public PXStylesheet parse(String source, PXStyleSheetOrigin origin) {
// clear errors
clearErrors();
// create stylesheet
currentStyleSheet = new PXStylesheet(origin);
// setup lexer and prime it
lexer.setSource(source);
advance();
try {
while (currentLexeme != null && currentLexeme.getType() != PXStylesheetTokenType.EOF) {
switch (currentLexeme.getType()) {
case IMPORT:
parseImport();
break;
case NAMESPACE:
parseNamespace();
break;
case KEYFRAMES:
parseKeyframes();
break;
case MEDIA:
parseMedia();
break;
case FONT_FACE:
parseFontFace();
break;
default:
// TODO: check for valid tokens to error out sooner?
parseRuleSet();
break;
}
}
} catch (Exception e) {
addError(e.getMessage());
}
// clear out any import refs
activeImports = null;
return currentStyleSheet;
}
/**
 * Parses a string of bare declarations (e.g. the content of an inline
 * style attribute) into a stylesheet containing a single, selector-less
 * rule set.
 *
 * @param css The declarations to parse (no selectors, no braces)
 * @return A {@link PXStylesheet} with {@link PXStyleSheetOrigin#INLINE} origin
 */
public PXStylesheet parseInlineCSS(String css) {
// clear errors
clearErrors();
// create stylesheet
currentStyleSheet = new PXStylesheet(PXStyleSheetOrigin.INLINE);
// setup lexer and prime it
lexer.setSource(css);
advance();
try {
// build placeholder rule set
PXRuleSet ruleSet = new PXRuleSet();
// parse declarations
List<PXDeclaration> declarations = parseDeclarations();
// add declarations to rule set
for (PXDeclaration declaration : declarations) {
ruleSet.addDeclaration(declaration);
}
// save rule set
currentStyleSheet.addRuleSet(ruleSet);
} catch (Exception e) {
addError(e.getMessage());
}
return currentStyleSheet;
}
/**
 * Parses a single selector from the given source string. Currently unused;
 * consider deletion.
 *
 * @param source The selector source text
 * @return The parsed {@link PXSelector}, or null when parsing fails
 */
@SuppressWarnings("unused")
private PXSelector parseSelectorString(String source) {
// clear errors
clearErrors();
// setup lexer and prime it
lexer.setSource(source);
advance();
try {
return parseSelector();
} catch (Exception e) {
addError(e.getMessage());
}
return null;
}
// level 1
/**
 * Parses an {@code @font-face} at-rule. The declaration block is consumed,
 * but actually loading the font is currently disabled (see FIXME below).
 */
private void parseFontFace() {
assertTypeAndAdvance(PXStylesheetTokenType.FONT_FACE);
// process declaration block
if (isType(PXStylesheetTokenType.LCURLY)) {
List<PXDeclaration> declarations = parseDeclarationBlock();
// TODO: we probably shouldn't load font right here
for (PXDeclaration declaration : declarations) {
if ("src".equals(declaration.getName())) {
// Load a font and hold it in the fonts registry
// Shalom FIXME - We need access to the application's
// AssetManager!
// PXFontRegistry.getTypeface(declaration.getURLValue());
}
}
}
}
/**
 * Parses an {@code @import} statement. When an application context is
 * available, the imported asset is opened and its source is lexed inline
 * at the current position (via pushSource). Import cycles are detected
 * through the activeImports stack and reported as errors.
 */
private void parseImport() {
    assertTypeAndAdvance(PXStylesheetTokenType.IMPORT);
    assertTypeInSet(IMPORT_SET);
    String path = null;
    switch (currentLexeme.getType()) {
        case STRING: {
            String string = currentLexeme.getValue().toString();
            if (string.length() > 2) {
                // Strip the surrounding quotes. NOTE: this previously used
                // substring(1, length - 2), which silently dropped the last
                // character of the path; length - 1 matches the quote
                // stripping in parseNamespace and parseAttributeSelector.
                path = string.substring(1, string.length() - 1);
            }
            break;
        }
        case URL:
            path = currentLexeme.getValue().toString();
            break;
        default:
            break;
    }
    if (path != null) {
        // advance over @import argument
        advance();
        if (context == null) {
            addError("Error parsing an import. The application context is null.");
            advance();
        } else if (!activeImports.contains(path)) {
            // we need to go ahead and process the trailing semicolon so we
            // have the current lexeme in case we push it below
            advance();
            addImportName(path);
            // Note: We always take the import css from the assets.
            String source = null;
            try {
                source = IOUtil.read(context.getAssets().open(path));
            } catch (IOException e) {
                PXLog.e(TAG, e, e.getMessage());
            }
            if (!StringUtil.isEmpty(source)) {
                // remember the lexeme that follows the @import, switch the
                // lexer over to the imported source, and prime its first lexeme
                lexer.pushLexeme((PXStylesheetLexeme) currentLexeme);
                pushSource(source);
                advance();
            }
        } else {
            String message = String.format(
                    "import cycle detected trying to import '%s':\n%s ->\n%s", path,
                    CollectionUtil.toString(activeImports, " ->\n"), path);
            addError(message);
            // NOTE: we do this here so we'll still have the current file on
            // the active imports stack. This handles the
            // case of a file ending with an @import statement, causing
            // advance to pop it from the active imports stack
            advance();
        }
    }
}
/**
 * Parses an {@code @media} at-rule: skips any media-type identifiers,
 * parses optional parenthesized expressions into the stylesheet's active
 * media query, then parses the enclosed rule sets. The active media query
 * is always reset to null once the body has been consumed.
 */
private void parseMedia() {
assertTypeAndAdvance(PXStylesheetTokenType.MEDIA);
// TODO: support media types, NOT, and ONLY. Skipping for now
while (isType(PXStylesheetTokenType.IDENTIFIER)) {
advance();
}
// 'and' may appear here
advanceIfIsType(PXStylesheetTokenType.AND);
// parse optional expressions
if (isType(PXStylesheetTokenType.LPAREN)) {
parseMediaExpressions();
}
// parse body
if (isType(PXStylesheetTokenType.LCURLY)) {
try {
advance();
while (currentLexeme != null
&& currentLexeme.getType() != PXStylesheetTokenType.EOF
&& !isType(PXStylesheetTokenType.RCURLY)) {
parseRuleSet();
}
advanceIfIsType(PXStylesheetTokenType.RCURLY,
"Expected @media body closing curly brace");
} finally {
// reset active media query to none
currentStyleSheet.setActiveMediaQuery(null);
}
}
}
/**
 * Parses a rule set (a selector group followed by a declaration block).
 * On selector errors the parser recovers by skipping to the opening curly
 * brace and emitting a single selector-less rule set, so the declarations
 * are still captured.
 */
private void parseRuleSet() {
List<PXSelector> selectors;
// parse selectors
try {
selectors = parseSelectorGroup();
} catch (Exception e) {
// emit error
addError(e.getMessage());
// use flag to indicate we have no selectors
selectors = null;
// advance to '{'
advanceToType(PXStylesheetTokenType.LCURLY);
}
// here for error recovery
if (!isType(PXStylesheetTokenType.LCURLY)) {
addError("Expected a left curly brace to begin a declaration block");
// advance to '{'
advanceToType(PXStylesheetTokenType.LCURLY);
}
// parse declaration block
if (isType(PXStylesheetTokenType.LCURLY)) {
List<PXDeclaration> declarations = parseDeclarationBlock();
if (selectors == null) {
// selector parsing failed: keep declarations in a selector-less rule set
PXRuleSet ruleSet = new PXRuleSet();
for (PXDeclaration declaration : declarations) {
ruleSet.addDeclaration(declaration);
}
// save rule set
currentStyleSheet.addRuleSet(ruleSet);
} else {
// one rule set per selector, each sharing the same declarations
for (PXSelector selector : selectors) {
// build rule set
PXRuleSet ruleSet = new PXRuleSet();
// add selector
if (selector != null) {
ruleSet.addSelector(selector);
}
for (PXDeclaration declaration : declarations) {
ruleSet.addDeclaration(declaration);
}
// save rule set
currentStyleSheet.addRuleSet(ruleSet);
}
}
}
}
/**
 * Parses an {@code @keyframes} at-rule. Each keyframe block may list
 * several comma-separated offsets; one {@link PXKeyframeBlock} is created
 * per offset, all sharing the same declarations.
 */
private void parseKeyframes() {
// advance over '@keyframes'
assertTypeAndAdvance(PXStylesheetTokenType.KEYFRAMES);
// grab keyframe name
assertType(PXStylesheetTokenType.IDENTIFIER);
PXKeyframe keyframe = new PXKeyframe(currentLexeme.getValue().toString());
advance();
// advance over '{'
assertTypeAndAdvance(PXStylesheetTokenType.LCURLY);
// process each block
while (isInTypeSet(KEYFRAME_SELECTOR_SET)) {
// grab all offsets
List<Number> offsets = new ArrayList<Number>();
offsets.add(parseOffset());
while (isType(PXStylesheetTokenType.COMMA)) {
// advance over ','
advance();
offsets.add(parseOffset());
}
// grab declarations
List<PXDeclaration> declarations = parseDeclarationBlock();
// create blocks, one for each offset, using the same declarations
// for each
for (Number number : offsets) {
float offset = number.floatValue();
// create keyframe block
PXKeyframeBlock block = new PXKeyframeBlock(offset);
// add declarations to it
for (PXDeclaration declaration : declarations) {
block.addDeclaration(declaration);
}
keyframe.addKeyframeBlock(block);
}
}
// add keyframe to current stylesheet
currentStyleSheet.addKeyframe(keyframe);
// advance over '}'
assertTypeAndAdvance(PXStylesheetTokenType.RCURLY);
}
/**
 * Parses a single keyframe selector into an offset in [0, 1]: 'to' maps
 * to 1.0, any other identifier (including 'from') maps to the default
 * 0.0, and a percentage is divided by 100 and clamped to [0, 1].
 *
 * @return The keyframe offset
 */
private float parseOffset() {
float offset = 0.0f;
assertTypeInSet(KEYFRAME_SELECTOR_SET);
switch (currentLexeme.getType()) {
case IDENTIFIER:
// NOTE: we only check for 'to' since 'from' and unrecognized
// values will use the default value of 0.0f
if ("to".equals(currentLexeme.getValue())) {
offset = 1.0f;
}
advance();
break;
case PERCENTAGE: {
PXDimension percentage = (PXDimension) currentLexeme.getValue();
offset = percentage.getNumber() / 100.0f;
// clamp to [0, 1]
offset = Math.min(1.0f, offset);
offset = Math.max(0.0f, offset);
advance();
break;
}
default: {
String message = String.format("Unrecognized keyframe selector type: %s",
currentLexeme);
errorWithMessage(message);
break;
}
}
return offset;
}
/**
 * Parses an {@code @namespace} at-rule: an optional prefix identifier
 * followed by a STRING or URL value, terminated by a semicolon. Registers
 * the URI under the prefix on the current stylesheet (a null prefix sets
 * the default namespace).
 */
private void parseNamespace() {
assertTypeAndAdvance(PXStylesheetTokenType.NAMESPACE);
String identifier = null;
String uri;
if (isType(PXStylesheetTokenType.IDENTIFIER)) {
identifier = currentLexeme.getValue().toString();
advance();
}
assertTypeInSet(NAMESPACE_SET);
// grab value
uri = currentLexeme.getValue().toString();
// trim string
if (isType(PXStylesheetTokenType.STRING)) {
// this will remove the URI double quotes.
uri = uri.substring(1, uri.length() - 1);
}
advance();
// set namespace on stylesheet (identifier is the namespace prefix)
currentStyleSheet.setURI(uri, identifier);
assertTypeAndAdvance(PXStylesheetTokenType.SEMICOLON);
}
// level 2
/**
 * Parses a comma-separated group of selector sequences.
 *
 * @return A non-empty list of selectors; raises an error when no selector
 *         could be parsed
 */
private List<PXSelector> parseSelectorGroup() {
List<PXSelector> selectors = new ArrayList<PXSelector>();
PXSelector selectorSequence = parseSelectorSequence();
if (selectorSequence != null) {
selectors.add(selectorSequence);
}
while (currentLexeme.getType() == PXStylesheetTokenType.COMMA) {
// advance over ','
advance();
// grab next selector
PXSelector nextSelector = parseSelectorSequence();
if (nextSelector == null) {
// We have a problem with this selectors group
errorWithMessage("Expected a Selector or Pseudo-element after a comma");
} else {
selectors.add(nextSelector);
}
}
if (selectors.size() == 0) {
errorWithMessage("Expected a Selector or Pseudo-element");
}
return selectors;
}
/**
 * Parses a curly-brace delimited declaration block.
 *
 * @return The declarations found between '{' and '}'
 */
private List<PXDeclaration> parseDeclarationBlock() {
assertTypeAndAdvance(PXStylesheetTokenType.LCURLY);
List<PXDeclaration> declarations = parseDeclarations();
assertTypeAndAdvance(PXStylesheetTokenType.RCURLY);
return declarations;
}
/**
 * Parses one or more 'and'-separated media expressions and installs the
 * result — a single expression or a {@link PXMediaExpressionGroup} — as
 * the current stylesheet's active media query.
 */
private void parseMediaExpressions() {
try {
// create container for zero-or-more expressions
List<PXMediaExpression> expressions = new ArrayList<PXMediaExpression>();
// add at least one expression
expressions.add(parseMediaExpression());
// and any others
while (isType(PXStylesheetTokenType.AND)) {
advance();
expressions.add(parseMediaExpression());
}
// create expression group or use single entry
if (expressions.size() == 1) {
currentStyleSheet.setActiveMediaQuery(expressions.get(0));
} else {
PXMediaExpressionGroup group = new PXMediaExpressionGroup();
for (PXMediaExpression expression : expressions) {
group.addExpression(expression);
}
currentStyleSheet.setActiveMediaQuery(group);
}
} catch (Exception e) {
addError(e.getMessage());
// TODO: error recovery
}
}
// level 3
/**
 * Parses a selector sequence, folding combinators left-to-right into a
 * combinator tree: '+' (adjacent sibling), '>' (child), '~' (general
 * sibling), or plain whitespace (descendant). Afterwards, an optional
 * pseudo-element — '::name' or one of the archaic single-colon forms — is
 * attached to the right-most type selector.
 *
 * @return The root of the parsed selector tree; may be null
 */
private PXSelector parseSelectorSequence() {
PXSelector root = parseSelector();
while (isInTypeSet(SELECTOR_SEQUENCE_SET)) {
Lexeme<PXStylesheetTokenType> operator = null;
// an explicit combinator token may sit between the two selectors
if (isInTypeSet(SELECTOR_OPERATOR_SET)) {
operator = currentLexeme;
advance();
}
PXSelector rhs = parseSelector();
if (operator != null) {
switch (operator.getType()) {
case PLUS:
root = new PXAdjacentSiblingCombinator(root, rhs);
break;
case GREATER_THAN:
root = new PXChildCombinator(root, rhs);
break;
case TILDE:
root = new PXSiblingCombinator(root, rhs);
break;
default:
errorWithMessage("Unsupported selector operator (combinator)");
}
} else {
// no operator token: whitespace between selectors means descendant
root = new PXDescendantCombinator(root, rhs);
// advance();
}
}
String pseudoElement = null;
// grab possible pseudo-element in new and old formats
if (isType(PXStylesheetTokenType.DOUBLE_COLON)) {
advance();
assertType(PXStylesheetTokenType.IDENTIFIER);
pseudoElement = currentLexeme.getValue().toString();
advance();
} else if (isInTypeSet(ARCHAIC_PSEUDO_ELEMENTS_SET)) {
// archaic form keeps its leading ':' in the lexeme value; strip it
String stringValue = currentLexeme.getValue().toString();
pseudoElement = stringValue.substring(1);
advance();
}
if (pseudoElement != null && pseudoElement.length() > 0) {
if (root == null) {
// a bare pseudo-element: synthesize a universal type selector for it
PXTypeSelector selector = new PXTypeSelector();
selector.setPseudoElement(pseudoElement);
root = selector;
} else {
if (root instanceof PXTypeSelector) {
PXTypeSelector selector = (PXTypeSelector) root;
selector.setPseudoElement(pseudoElement);
} else if (root instanceof PXCombinatorBase) {
// attach to the right-most selector of the combinator tree
PXCombinatorBase combinator = (PXCombinatorBase) root;
PXTypeSelector selector = (PXTypeSelector) combinator.getRhs();
selector.setPseudoElement(pseudoElement);
}
}
}
return root;
}
/**
 * Parses zero or more declarations until EOF or a closing curly brace.
 * When a declaration fails to parse, skips ahead to the next declaration
 * delimiter (';' or '}') to recover.
 *
 * @return The parsed declarations (possibly empty)
 */
private List<PXDeclaration> parseDeclarations() {
List<PXDeclaration> declarations = new ArrayList<PXDeclaration>();
// parse properties
while (currentLexeme != null && currentLexeme.getType() != PXStylesheetTokenType.EOF
&& currentLexeme.getType() != PXStylesheetTokenType.RCURLY) {
try {
PXDeclaration declaration = parseDeclaration();
declarations.add(declaration);
} catch (Exception e) {
addError(e.getMessage());
// TODO: parseDeclaration could do error recovery. If not, this
// should probably do the same recovery
while (currentLexeme != null
&& currentLexeme.getType() != PXStylesheetTokenType.EOF
&& !isInTypeSet(DECLARATION_DELIMITER_SET)) {
advance();
}
advanceIfIsType(PXStylesheetTokenType.SEMICOLON);
}
}
return declarations;
}
/**
 * Parses a single parenthesized media expression: '(name)' or
 * '(name: value)'. String values are lower-cased; a numeric value may use
 * ratio syntax 'number/number' (a zero denominator yields NaN, a zero
 * numerator yields the original boxed zero).
 *
 * @return A {@link PXNamedMediaExpression} with the parsed name and value
 */
private PXMediaExpression parseMediaExpression() {
assertTypeAndAdvance(PXStylesheetTokenType.LPAREN);
// grab name
assertType(PXStylesheetTokenType.IDENTIFIER);
String name = currentLexeme.getValue().toString().toLowerCase(Locale.US);
advance();
Object value = null;
// parse optional value
if (isType(PXStylesheetTokenType.COLON)) {
// advance over ':'
assertTypeAndAdvance(PXStylesheetTokenType.COLON);
// grab value
assertTypeInSet(QUERY_VALUE_SET);
value = currentLexeme.getValue();
boolean isNumber = currentLexeme.getType() == PXStylesheetTokenType.NUMBER;
advance();
// make string values lowercase to avoid doing it later
if (!isNumber && value instanceof String) {
value = ((String) value).toLowerCase(Locale.US);
}
// check for possible ratio syntax
else if (isNumber && isType(PXStylesheetTokenType.SLASH)) {
Float numerator = getFloatValue(value);
// advance over '/'
advance();
// grab denominator
assertType(PXStylesheetTokenType.NUMBER);
Float denom = getFloatValue(currentLexeme.getValue());
advance();
if (numerator.floatValue() == 0.0f) {
// do nothing, leave result as 0.0
} else if (denom.floatValue() == 0.0f) {
value = Double.NaN;
} else {
value = numerator.floatValue() / denom.floatValue();
}
}
}
advanceIfIsType(PXStylesheetTokenType.RPAREN, "Expected closing parenthesis in media query");
// create query expression and activate it in current stylesheet
return new PXNamedMediaExpression(name, value);
}
/**
 * Converts an arbitrary value into a {@link Float}. A {@link Number} is
 * simply narrowed via {@code floatValue()}; anything else is parsed from
 * its string form.
 *
 * @param value The value to convert; may be null
 * @return A {@link Float}, or <code>null</code> when the value is null or
 *         not parseable as a float
 */
private Float getFloatValue(Object value) {
    // instanceof is false for null, so the Number fast-path is null-safe
    if (value instanceof Number) {
        return ((Number) value).floatValue();
    }
    if (value == null) {
        return null;
    }
    try {
        return Float.valueOf(value.toString());
    } catch (NumberFormatException e) {
        // not a numeric string
        return null;
    }
}
// level 4
/**
 * Parses a simple selector: an explicit type selector, or an implicit
 * universal selector, followed by any attached expressions (#id, .class,
 * [attr], pseudo-classes).
 *
 * @return The parsed {@link PXTypeSelector}, or null when the current
 *         lexeme cannot start a selector (e.g. a bare pseudo-element
 *         follows — the caller handles that case)
 */
private PXSelector parseSelector() {
PXTypeSelector result = null;
if (isInTypeSet(SELECTOR_SET)) {
if (isInTypeSet(TYPE_SELECTOR_SET)) {
result = parseTypeSelector();
} else {
// match any element
result = new PXTypeSelector();
// clear whitespace flag, so first expression will not fail in
// this case
currentLexeme.clearFlag(Lexeme.FLAG_TYPE_FOLLOWS_WHITESPACE);
}
if (isInTypeSet(SELECTOR_EXPRESSION_SET)) {
for (PXSelector expression : parseSelectorExpressions()) {
result.addAttributeExpression(expression);
}
}
}
// else, fail silently in case a pseudo-element follows
return result;
}
/**
 * Parses a single declaration ("property: value ... ;"). Value lexemes
 * are collected until a delimiter; a missing semicolon is tolerated by
 * detecting the start of the next declaration (an IDENTIFIER directly
 * followed by a COLON) and rewinding one token. A trailing !important is
 * stripped and recorded on the declaration.
 *
 * @return The parsed {@link PXDeclaration} with its name, raw source text
 *         and value lexemes attached
 */
private PXDeclaration parseDeclaration() {
    // process property name
    assertType(PXStylesheetTokenType.IDENTIFIER);
    PXDeclaration declaration = new PXDeclaration(currentLexeme.getValue().toString());
    advance();
    // colon
    assertTypeAndAdvance(PXStylesheetTokenType.COLON);
    // collect values
    Stack<PXStylesheetLexeme> lexemes = new Stack<PXStylesheetLexeme>();
    while (currentLexeme != null && currentLexeme.getType() != PXStylesheetTokenType.EOF
            && !isInTypeSet(DECLARATION_DELIMITER_SET)) {
        if (!lexemes.isEmpty() && currentLexeme.getType() == PXStylesheetTokenType.COLON
                && lexemes.lastElement().getType() == PXStylesheetTokenType.IDENTIFIER) {
            // assume we've moved into a new declaration, so push last
            // lexeme back into the lexeme stream
            Lexeme<PXStylesheetTokenType> propertyName = lexemes.pop();
            // this pushes the colon back to the lexer and makes the
            // property name the current lexeme
            pushLexeme(propertyName);
            // signal end of this declaration
            break;
        } else {
            lexemes.add((PXStylesheetLexeme) currentLexeme);
            advance();
        }
    }
    // let semicolons be optional
    advanceIfIsType(PXStylesheetTokenType.SEMICOLON);
    // grab original source, for error messages and hashing
    String source;
    if (lexemes.size() > 0) {
        Lexeme<PXStylesheetTokenType> firstLexeme = lexemes.firstElement();
        // FIX: was lexemes.firstElement(), which truncated the captured
        // source to the first value lexeme only; the span must run from
        // the first lexeme's offset to the LAST lexeme's ending offset
        Lexeme<PXStylesheetTokenType> lastLexeme = lexemes.lastElement();
        int start = firstLexeme.getOffset();
        int end = lastLexeme.getEndingOffset();
        source = lexer.getSource().substring(start, end);
    } else {
        source = StringUtil.EMPTY;
    }
    // check for !important
    Lexeme<PXStylesheetTokenType> lastLexeme = lexemes.isEmpty() ? null : lexemes.lastElement();
    if (lastLexeme != null && lastLexeme.getType() == PXStylesheetTokenType.IMPORTANT) {
        // drop !important and tag declaration as important
        lexemes.pop();
        declaration.setImportant(true);
    }
    // associate lexemes with declaration
    declaration.setSource(source, getCurrentFilename(), new ArrayList<PXStylesheetLexeme>(
            lexemes));
    return declaration;
}
// level 5
/**
 * Parses a type selector of the form [namespace|]name where either part
 * may be '*'. Without an explicit namespace the universal namespace '*'
 * is assumed; an explicit prefix is resolved to its URI via the current
 * stylesheet.
 *
 * @return The parsed {@link PXTypeSelector}, or null on error
 */
private PXTypeSelector parseTypeSelector() {
PXTypeSelector result = null;
if (isInTypeSet(TYPE_SELECTOR_SET)) {
String namespace = null;
String name = null;
// namespace or type
if (isInTypeSet(TYPE_NAME_SET)) {
// assume we have a name only
name = currentLexeme.getValue().toString();
advance();
}
// if pipe, then we had a namespace, now process type
if (isType(PXStylesheetTokenType.PIPE)) {
namespace = name;
// advance over '|'
advance();
if (isInTypeSet(TYPE_NAME_SET)) {
// set name
name = currentLexeme.getValue().toString();
advance();
} else {
errorWithMessage("Expected IDENTIFIER or STAR");
}
} else {
// no explicit namespace: match elements in any namespace
namespace = "*";
}
// find namespace URI from namespace prefix
String namespaceURI = null;
if (namespace != null) {
if ("*".equals(namespace)) {
namespaceURI = namespace;
} else {
namespaceURI = currentStyleSheet.getNamespaceForPrefix(namespace);
}
}
result = new PXTypeSelector(namespaceURI, name);
} else {
errorWithMessage("Expected IDENTIFIER, STAR, or PIPE");
}
return result;
}
/**
 * Parses a run of selector expressions (#id, .class, [attr...], and
 * pseudo-classes) attached to the preceding type selector. The run stops
 * at the first lexeme preceded by whitespace, since whitespace acts as
 * the descendant combinator.
 *
 * @return The parsed expressions; raises an error when none were found
 *         and the stop was not caused by whitespace
 */
private List<PXSelector> parseSelectorExpressions() {
List<PXSelector> expressions = new ArrayList<PXSelector>();
while (!currentLexeme.isFlagSet(Lexeme.FLAG_TYPE_FOLLOWS_WHITESPACE)
&& isInTypeSet(SELECTOR_EXPRESSION_SET)) {
switch (currentLexeme.getType()) {
case ID: {
// lexeme value includes the leading '#'; strip it
String name = currentLexeme.getValue().toString().substring(1);
expressions.add(new PXIdSelector(name));
advance();
break;
}
case CLASS: {
// lexeme value includes the leading '.'; strip it
String name = currentLexeme.getValue().toString().substring(1);
expressions.add(new PXClassSelector(name));
advance();
break;
}
case LBRACKET:
expressions.add(parseAttributeSelector());
break;
case COLON:
expressions.add(parsePseudoClass());
break;
case NOT_PSEUDO_CLASS:
expressions.add(parseNotSelector());
break;
case ROOT_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_ROOT));
advance();
break;
case FIRST_CHILD_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_FIRST_CHILD));
advance();
break;
case LAST_CHILD_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_LAST_CHILD));
advance();
break;
case FIRST_OF_TYPE_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_FIRST_OF_TYPE));
advance();
break;
case LAST_OF_TYPE_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_LAST_OF_TYPE));
advance();
break;
case ONLY_CHILD_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_ONLY_CHILD));
advance();
break;
case ONLY_OF_TYPE_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_ONLY_OF_TYPE));
advance();
break;
case EMPTY_PSEUDO_CLASS:
expressions.add(new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_EMPTY));
advance();
break;
case NTH_CHILD_PSEUDO_CLASS:
case NTH_LAST_CHILD_PSEUDO_CLASS:
case NTH_OF_TYPE_PSEUDO_CLASS:
case NTH_LAST_OF_TYPE_PSEUDO_CLASS:
expressions.add(parsePseudoClassFunction());
assertTypeAndAdvance(PXStylesheetTokenType.RPAREN);
break;
// TODO: implement
case LINK_PSEUDO_CLASS:
case VISITED_PSEUDO_CLASS:
case HOVER_PSEUDO_CLASS:
case ACTIVE_PSEUDO_CLASS:
case FOCUS_PSEUDO_CLASS:
case TARGET_PSEUDO_CLASS:
case ENABLED_PSEUDO_CLASS:
case CHECKED_PSEUDO_CLASS:
case INDETERMINATE_PSEUDO_CLASS:
// dynamic pseudo-classes are captured by name only for now
String className = currentLexeme.getValue().toString();
if (className.startsWith(":")) {
className = className.substring(1);
}
expressions.add(new PXPseudoClassSelector(className));
advance();
break;
// TODO: implement
case LANG_PSEUDO_CLASS:
// :lang(...) argument is skipped up to the closing parenthesis
className = currentLexeme.getValue().toString();
if (className.startsWith(":")) {
className = className.substring(1);
}
expressions.add(new PXPseudoClassSelector(className));
advanceToType(PXStylesheetTokenType.RPAREN);
advance();
break;
default:
break;
}
}
if (expressions.size() == 0
&& !currentLexeme.isFlagSet(Lexeme.FLAG_TYPE_FOLLOWS_WHITESPACE)) {
errorWithMessage("Expected ID, CLASS, LBRACKET, or PseudoClass");
}
return expressions;
}
// level 6
/**
 * Parses the argument of an :nth-* pseudo-class function using the CSS
 * an+b notation ('2n+1', '-n', '3'), including the 'odd' and 'even'
 * keywords. The caller consumes the closing parenthesis.
 *
 * @return A {@link PXPseudoClassFunction} holding the function type, the
 *         modulus (a) and the remainder (b)
 */
private PXPseudoClassFunction parsePseudoClassFunction() {
// initialize to something to remove analyzer warnings, but the switch
// below has to cover all cases to prevent a
// bug here
PXPseudoClassFunctionType type = PXPseudoClassFunctionType.NTH_CHILD;
switch (currentLexeme.getType()) {
case NTH_CHILD_PSEUDO_CLASS:
type = PXPseudoClassFunctionType.NTH_CHILD;
break;
case NTH_LAST_CHILD_PSEUDO_CLASS:
type = PXPseudoClassFunctionType.NTH_LAST_CHILD;
break;
case NTH_OF_TYPE_PSEUDO_CLASS:
type = PXPseudoClassFunctionType.NTH_OF_TYPE;
break;
case NTH_LAST_OF_TYPE_PSEUDO_CLASS:
type = PXPseudoClassFunctionType.NTH_LAST_OF_TYPE;
break;
default:
break;
}
// advance over function name and left paren
advance();
int modulus = 0;
int remainder = 0;
// parse modulus
if (isType(PXStylesheetTokenType.NTH)) {
String numberString = currentLexeme.getValue().toString();
int length = numberString.length();
// extract modulus
if (length == 1) {
// we have 'n'
modulus = 1;
} else if (length == 2 && numberString.startsWith("-")) {
// we have '-n'
modulus = -1;
} else if (length == 2 && numberString.startsWith("+")) {
// we have '+n'
modulus = 1;
} else {
// a number precedes 'n'
modulus = Integer.parseInt(numberString.substring(0, numberString.length() - 1));
}
advance();
if (isType(PXStylesheetTokenType.PLUS)) {
advance();
// grab remainder
assertType(PXStylesheetTokenType.NUMBER);
Number remainderNumber = getFloatValue(currentLexeme.getValue());
remainder = remainderNumber.intValue();
advance();
} else if (isType(PXStylesheetTokenType.NUMBER)) {
// a signed remainder lexed together with its sign ('2n-1');
// re-read the raw source to check for the explicit sign
numberString = lexer.getSource().substring(currentLexeme.getOffset(),
currentLexeme.getEndingOffset());
if (numberString.startsWith("-") || numberString.startsWith("+")) {
Number remainderNumber = getFloatValue(currentLexeme.getValue());
remainder = remainderNumber.intValue();
advance();
} else {
errorWithMessage("Expected NUMBER with leading '-' or '+'");
}
}
} else if (isType(PXStylesheetTokenType.IDENTIFIER)) {
String stringValue = currentLexeme.getValue().toString();
if ("odd".equals(stringValue)) {
// odd == 2n+1
modulus = 2;
remainder = 1;
} else if ("even".equals(stringValue)) {
// even == 2n
modulus = 2;
} else {
errorWithMessage(String.format(
"Unrecognized identifier '%s'. Expected 'odd' or 'even'", stringValue));
}
advance();
} else if (isType(PXStylesheetTokenType.NUMBER)) {
// a bare number: match that exact position (1n is implied by a == 1? No —
// modulus 1 with remainder b matches all positions >= b; matches original intent — TODO confirm
modulus = 1;
Number remainderNumber = getFloatValue(currentLexeme.getValue());
remainder = remainderNumber.intValue();
advance();
} else {
errorWithMessage("Expected NTH, NUMBER, 'odd', or 'even'");
}
return new PXPseudoClassFunction(type, modulus, remainder);
}
/**
 * Parses an attribute selector '[ns|name]' or '[ns|name op value]', where
 * op is one of ^= $= *= = ~= |= and value is a quoted string or an
 * identifier.
 *
 * @return The parsed attribute selector, wrapped in a
 *         {@link PXAttributeSelectorOperator} when an operator is present
 */
private PXSelector parseAttributeSelector() {
PXSelector result = null;
assertTypeAndAdvance(PXStylesheetTokenType.LBRACKET);
result = parseAttributeTypeSelector();
if (isInTypeSet(ATTRIBUTE_OPERATOR_SET)) {
PXAttributeSelectorOperatorType operatorType = PXAttributeSelectorOperatorType.EQUAL; // make
// anaylzer
// happy
switch (currentLexeme.getType()) {
case STARTS_WITH:
operatorType = PXAttributeSelectorOperatorType.STARTS_WITH;
break;
case ENDS_WITH:
operatorType = PXAttributeSelectorOperatorType.ENDS_WITH;
break;
case CONTAINS:
operatorType = PXAttributeSelectorOperatorType.CONTAINS;
break;
case EQUAL:
operatorType = PXAttributeSelectorOperatorType.EQUAL;
break;
case LIST_CONTAINS:
operatorType = PXAttributeSelectorOperatorType.LIST_CONTAINS;
break;
case EQUALS_WITH_HYPHEN:
operatorType = PXAttributeSelectorOperatorType.EQUAL_WITH_HYPHEN;
break;
default:
errorWithMessage("Unsupported attribute operator type");
break;
}
advance();
if (isType(PXStylesheetTokenType.STRING)) {
String value = currentLexeme.getValue().toString();
// process string: strip the surrounding quotes
result = new PXAttributeSelectorOperator(operatorType,
(PXAttributeSelector) result, value.substring(1, value.length() - 1));
advance();
} else if (isType(PXStylesheetTokenType.IDENTIFIER)) {
// process string
result = new PXAttributeSelectorOperator(operatorType,
(PXAttributeSelector) result, currentLexeme.getValue().toString());
advance();
} else {
errorWithMessage("Expected STRING or IDENTIFIER");
}
}
assertTypeAndAdvance(PXStylesheetTokenType.RBRACKET);
return result;
}
/**
 * Parses a single-colon pseudo-class selector (':name').
 *
 * @return A {@link PXPseudoClassSelector}, or null on error
 */
private PXSelector parsePseudoClass() {
PXSelector result = null;
assertType(PXStylesheetTokenType.COLON);
advance();
if (isType(PXStylesheetTokenType.IDENTIFIER)) {
// process identifier
result = new PXPseudoClassSelector(currentLexeme.getValue().toString());
advance();
} else {
errorWithMessage("Expected IDENTIFIER");
}
// TODO: support an+b notation
return result;
}
/**
 * Parses a ':not(...)' pseudo-class and its single negation argument.
 *
 * @return A {@link PXNotPseudoClass} wrapping the parsed argument
 */
private PXSelector parseNotSelector() {
// advance over 'not'
assertType(PXStylesheetTokenType.NOT_PSEUDO_CLASS);
advance();
PXSelector result = new PXNotPseudoClass(parseNegationArgument());
// advance over ')'
assertTypeAndAdvance(PXStylesheetTokenType.RPAREN);
return result;
}
// level 7
/**
 * Parses the attribute-name part of an attribute selector, with an
 * optional namespace prefix ('ns|name'). Unlike type selectors, a missing
 * namespace stays null, meaning the attribute must have no namespace to
 * match (see NOTE below).
 *
 * @return The parsed {@link PXAttributeSelector}, or null on error
 */
private PXAttributeSelector parseAttributeTypeSelector() {
PXAttributeSelector result = null;
if (isInTypeSet(TYPE_SELECTOR_SET)) {
String namespace = null;
String name = null;
// namespace or type
if (isInTypeSet(TYPE_NAME_SET)) {
// assume we have a name only
name = currentLexeme.getValue().toString();
advance();
}
// if pipe, then we had a namespace, now process type
if (isType(PXStylesheetTokenType.PIPE)) {
namespace = name;
// advance over '|'
advance();
if (isInTypeSet(TYPE_NAME_SET)) {
// set name
name = currentLexeme.getValue().toString();
advance();
} else {
errorWithMessage("Expected IDENTIFIER or STAR");
}
}
// NOTE: default namepace is null indicating no namespace should
// exist when matching with this selector. This
// differs from the interpretation used on type selectors
// find namespace URI from namespace prefix
String namespaceURI = null;
if (namespace != null) {
if (namespace.equals("*")) {
namespaceURI = namespace;
} else {
namespaceURI = currentStyleSheet.getNamespaceForPrefix(namespace);
}
}
result = new PXAttributeSelector(namespaceURI, name);
} else {
errorWithMessage("Expected IDENTIFIER, STAR, or PIPE");
}
return result;
}
/**
 * Parses the single argument of ':not(...)': an id, class, attribute
 * selector, pseudo-class, type selector, or nothing (an empty ':not()').
 *
 * @return The parsed selector, or null for an empty argument or on error
 */
private PXSelector parseNegationArgument() {
PXSelector result = null;
switch (currentLexeme.getType()) {
case ID: {
// lexeme value includes the leading '#'; strip it
String name = currentLexeme.getValue().toString().substring(1);
result = new PXIdSelector(name);
advance();
break;
}
case CLASS: {
// lexeme value includes the leading '.'; strip it
String name = currentLexeme.getValue().toString().substring(1);
result = new PXClassSelector(name);
advance();
break;
}
case LBRACKET:
result = parseAttributeSelector();
break;
case COLON:
result = parsePseudoClass();
break;
case ROOT_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(PXPseudoClassPredicateType.PREDICATE_ROOT);
advance();
break;
case FIRST_CHILD_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_FIRST_CHILD);
advance();
break;
case LAST_CHILD_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(PXPseudoClassPredicateType.PREDICATE_LAST_CHILD);
advance();
break;
case FIRST_OF_TYPE_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_FIRST_OF_TYPE);
advance();
break;
case LAST_OF_TYPE_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_LAST_OF_TYPE);
advance();
break;
case ONLY_CHILD_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(PXPseudoClassPredicateType.PREDICATE_ONLY_CHILD);
advance();
break;
case ONLY_OF_TYPE_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(
PXPseudoClassPredicateType.PREDICATE_ONLY_OF_TYPE);
advance();
break;
case EMPTY_PSEUDO_CLASS:
result = new PXPseudoClassPredicate(PXPseudoClassPredicateType.PREDICATE_EMPTY);
advance();
break;
case NTH_CHILD_PSEUDO_CLASS:
case NTH_LAST_CHILD_PSEUDO_CLASS:
case NTH_OF_TYPE_PSEUDO_CLASS:
case NTH_LAST_OF_TYPE_PSEUDO_CLASS:
result = parsePseudoClassFunction();
assertTypeAndAdvance(PXStylesheetTokenType.RPAREN);
break;
// TODO: implement
case LINK_PSEUDO_CLASS:
case VISITED_PSEUDO_CLASS:
case HOVER_PSEUDO_CLASS:
case ACTIVE_PSEUDO_CLASS:
case FOCUS_PSEUDO_CLASS:
case TARGET_PSEUDO_CLASS:
case ENABLED_PSEUDO_CLASS:
case CHECKED_PSEUDO_CLASS:
case INDETERMINATE_PSEUDO_CLASS:
result = new PXPseudoClassSelector(currentLexeme.getValue().toString());
advance();
break;
// TODO: implement
case LANG_PSEUDO_CLASS:
// :lang(...) argument is skipped up to the closing parenthesis
result = new PXPseudoClassSelector(currentLexeme.getValue().toString());
advanceToType(PXStylesheetTokenType.RPAREN);
advance();
break;
case RPAREN:
// empty body
break;
default:
if (isInTypeSet(TYPE_SELECTOR_SET)) {
result = parseTypeSelector();
} else {
errorWithMessage("Expected ID, CLASS, AttributeSelector, PseudoClass, or TypeSelect as negation argument");
}
break;
}
return result;
}
/**
 * Called from advance() when an imported source has been fully consumed;
 * pops the corresponding entry from the active-imports stack so cycle
 * detection stays in sync with the lexer stack.
 */
private void lexerDidPopSource() {
if (activeImports.size() > 0) {
activeImports.pop();
} else {
PXLog.e(TAG, "Tried to pop an empty activeImports array");
}
}
/*
 * Overrides the super implementation: when the current lexer is exhausted
 * (nextLexeme() returns null), transparently resumes the enclosing source
 * by popping saved lexers pushed by pushSource(), notifying
 * lexerDidPopSource() for each pop. May return (and set currentLexeme to)
 * null when all sources are exhausted. (non-Javadoc)
 * @see com.pixate.freestyle.parsing.PXParserBase#advance()
 */
@Override
public Lexeme<PXStylesheetTokenType> advance() {
Lexeme<PXStylesheetTokenType> candidate = lexer.nextLexeme();
while (candidate == null && lexerStack != null && !lexerStack.isEmpty()) {
// pop lexer
lexer = lexerStack.pop();
// notify the parser that we've done so
lexerDidPopSource();
// try getting the next lexeme from the newly activated lexer
candidate = lexer.nextLexeme();
}
return currentLexeme = candidate;
}
// Helpers
/**
 * Records a file name on the active-imports stack (used for @import cycle
 * detection). Empty or null names are ignored; the stack is created
 * lazily on first use.
 *
 * @param name The imported file name; may be null or empty
 */
private void addImportName(String name) {
    if (StringUtil.isEmpty(name)) {
        return;
    }
    // lazily create the stack the first time a name is recorded
    if (activeImports == null) {
        activeImports = new Stack<String>();
    }
    activeImports.push(name);
}
/**
 * Skips lexemes until the requested type is current, stopping early at
 * EOF or when the input is exhausted. Used for error recovery.
 *
 * @param type The lexeme type to stop at
 */
private void advanceToType(PXStylesheetTokenType type) {
    Lexeme<PXStylesheetTokenType> lexeme = currentLexeme;
    while (lexeme != null && lexeme.getType() != type
            && lexeme.getType() != PXStylesheetTokenType.EOF) {
        // advance() returns the new currentLexeme
        lexeme = advance();
    }
}
/**
 * Rewinds the parser by one token: returns the current lexeme to the
 * lexer stream and makes the given lexeme current. On the next advance()
 * the pushed-back lexeme is delivered again.
 *
 * @param lexeme The lexeme to make current
 */
private void pushLexeme(Lexeme<PXStylesheetTokenType> lexeme) {
lexer.pushLexeme((PXStylesheetLexeme) currentLexeme);
currentLexeme = lexeme;
}
/**
 * Suspends the current lexer on the lexer stack and activates a fresh
 * lexer over the given (imported) source. advance() pops the stack again
 * once the new source is exhausted.
 *
 * @param source The imported source text to lex next
 */
private void pushSource(String source) {
    // lazily create the lexer stack on first import
    if (lexerStack == null) {
        lexerStack = new Stack<PXStylesheetLexer>();
    }
    // suspend the current lexer
    lexerStack.push(lexer);
    // activate a fresh lexer over the imported source
    PXStylesheetLexer importLexer = new PXStylesheetLexer();
    importLexer.setSource(source);
    lexer = importLexer;
}
private String getCurrentFilename() {
    // File name (without path) of the innermost @import currently being
    // processed, or null when no import is active.
    if (activeImports == null || activeImports.isEmpty()) {
        return null;
    }
    return new File(activeImports.lastElement()).getName();
}
@Override
public void addError(String error, String filename, String offset) {
    // Reports an error at the parser's CURRENT position: the caller-supplied
    // offset is deliberately replaced by the current lexeme's offset (or the
    // literal "EOF" once input is exhausted).
    // Fix: guard against currentLexeme == null, which advance() returns when
    // every lexer on the stack has run out of input — the old code threw an
    // NPE in that case. Also avoids reassigning the parameter.
    String position;
    if (currentLexeme != null && currentLexeme.getType() != PXStylesheetTokenType.EOF) {
        position = String.valueOf(currentLexeme.getOffset());
    } else {
        position = "EOF";
    }
    super.addError(error, filename, position);
}
}
| |
/*
* Copyright (c) 2008-2019 akquinet tech@spree GmbH
*
* This file is part of Hibersap.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this software except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibersap.mapping.model;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.TreeSet;
import org.hibersap.MappingException;
import org.hibersap.annotations.BapiStructure;
import org.hibersap.annotations.Parameter;
import org.hibersap.conversion.BooleanConverter;
import org.hibersap.conversion.ConverterCache;
import org.junit.Before;
import org.junit.Test;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code TableMapping}: verifies how the destination collection
 * type is derived from the mapped Java field type, and that SAP table rows
 * (maps or a JCA {@link ResultSet}) are converted into structure beans.
 */
public class TableMappingTest {

    private StructureMapping structureMapping;
    private TableMapping tableMapping;

    @Before
    public void setUp() {
        // A structure with a single Integer field mapped sapField -> javaField.
        structureMapping = new StructureMapping(TestStructureBean.class, "sapStructureName", "javaStructureName",
                null);
        structureMapping.addParameter(new FieldMapping(Integer.class, "sapField", "javaField", null));
    }

    @Test
    public void destinationTypeIsArrayListByDefaultWhenFieldTypeIsList() {
        tableMapping = new TableMapping(List.class, Object.class, "sapName", "javaName", structureMapping, null);
        Class<?> destinationType = tableMapping.getDestinationType();
        assertThat(destinationType).isSameAs(ArrayList.class);
    }

    @Test
    public void destinationTypeIsHashSetByDefaultWhenFieldTypeIsSet() {
        tableMapping = new TableMapping(Set.class, Object.class, "sapName", "javaName", structureMapping, null);
        Class<?> destinationType = tableMapping.getDestinationType();
        assertThat(destinationType).isSameAs(HashSet.class);
    }

    @Test
    public void destinationTypeIsArrayListByDefaultWhenFieldTypeIsCollection() {
        tableMapping = new TableMapping(Collection.class, Object.class, "sapName", "javaName", structureMapping,
                null);
        Class<?> destinationType = tableMapping.getDestinationType();
        assertThat(destinationType).isSameAs(ArrayList.class);
    }

    @Test
    public void destinationTypeIsSameAsFieldTypeWhenFieldTypeIsConcreteCollection() {
        tableMapping = new TableMapping(TreeSet.class, Object.class, "sapName", "javaName", structureMapping, null);
        Class<?> destinationType = tableMapping.getDestinationType();
        assertThat(destinationType).isSameAs(TreeSet.class);
    }

    @Test
    public void destinationTypeIsArrayListWhenFieldTypeIsArray() {
        tableMapping = new TableMapping(Object[].class, Object.class, "sapName", "javaName", structureMapping, null);
        Class<?> destinationType = tableMapping.getDestinationType();
        assertThat(destinationType).isSameAs(ArrayList.class);
    }

    @Test
    public void destinationTypeIsSameAsFieldTypeWhenFieldHasConverter() {
        // When a converter is attached the field type is kept as-is.
        tableMapping = new TableMapping(Boolean.class, Boolean.class, "sapName", "javaName", structureMapping,
                BooleanConverter.class);
        Class<?> destinationType = tableMapping.getDestinationType();
        assertThat(destinationType).isSameAs(Boolean.class);
    }

    @Test(expected = MappingException.class)
    public void constructorThrowsMappingExceptionWhenFieldTypeIsUnsupportedCollection() {
        new TableMapping(Queue.class, Object.class, "sapName", "javaName", structureMapping, null);
    }

    @Test(expected = MappingException.class)
    public void constructorThrowsMappingExceptionWhenFieldTypeIsNoCollectionOrArrayButConverterIsAttached() {
        new TableMapping(Object.class, Object.class, "sapName", "javaName", structureMapping, null);
    }

    @Test
    public void getUnconvertedValueReturnsListOfStructureBeansWithCorrectValues() {
        tableMapping = new TableMapping(List.class, Integer.class, "sapName", "javaName", structureMapping, null);
        List<Map<String, ?>> tableMap = new ArrayList<>();
        tableMap.add(singletonMap("sapField", 1));
        tableMap.add(singletonMap("sapField", 2));
        Object value = tableMapping.getUnconvertedValueToJava(tableMap, new ConverterCache());
        assertThat(value).isInstanceOf(ArrayList.class);
        @SuppressWarnings({"unchecked"})
        List<TestStructureBean> structureBeans = (List<TestStructureBean>) value;
        assertThat(structureBeans).hasSize(2);
        assertThat(structureBeans.get(0).javaField).isEqualTo(1);
        assertThat(structureBeans.get(1).javaField).isEqualTo(2);
    }

    @Test
    public void getUnconvertedValueReturnsArrayOfStructureBeansWhenDestinationTypeIsArray() {
        tableMapping = new TableMapping(TestStructureBean[].class, TestStructureBean.class, "sapName", "javaName",
                structureMapping, null);
        List<Map<String, Integer>> tableMap = new ArrayList<>();
        tableMap.add(singletonMap("sapField", 1));
        tableMap.add(singletonMap("sapField", 2));
        Object value = tableMapping.getUnconvertedValueToJava(tableMap, new ConverterCache());
        assertThat(value).isInstanceOf(TestStructureBean[].class);
        TestStructureBean[] structureBeans = (TestStructureBean[]) value;
        assertThat(structureBeans).hasSize(2);
        assertThat(structureBeans[0].javaField).isEqualTo(1);
        assertThat(structureBeans[1].javaField).isEqualTo(2);
    }

    @Test
    public void getUnconvertedValueReturnsListOfStructureBeansWhenTableParameterIsProvidedAsResultSetByRA() throws Exception {
        tableMapping = new TableMapping(List.class, Integer.class, "sapName", "javaName", structureMapping, null);
        String[] columnNames = {"sapField"};
        Object[][] data = {{1}, {2}};
        ResultSet resultSet = resultSet(columnNames, data);
        Object value = tableMapping.getUnconvertedValueToJava(resultSet, new ConverterCache());
        assertThat(value).isInstanceOf(ArrayList.class);
        @SuppressWarnings({"unchecked"})
        List<TestStructureBean> structureBeans = (List<TestStructureBean>) value;
        assertThat(structureBeans).hasSize(2);
        assertThat(structureBeans.get(0).javaField).isEqualTo(1);
        assertThat(structureBeans.get(1).javaField).isEqualTo(2);
    }

    /** Builds a minimal dynamic-proxy ResultSet backed by the given rows. */
    private ResultSet resultSet(String[] columnNames, Object[][] data) throws SQLException {
        return (ResultSet) Proxy.newProxyInstance(
                ResultSet.class.getClassLoader(),
                new Class[]{ResultSet.class},
                new ResultSetInvocationHandler(columnNames, data));
    }

    /** Target bean for the table rows; populated reflectively by the mapper. */
    @BapiStructure
    private static class TestStructureBean {
        @Parameter("sapField")
        private Integer javaField;

        private TestStructureBean() {
        }
    }

    /**
     * Needed to create a {@link ResultSet}: answers only {@code next},
     * {@code getObject(int)} and {@code getMetaData}; everything else
     * returns null.
     */
    private static class ResultSetInvocationHandler implements InvocationHandler {

        private final Object[][] data;
        private int currentRow = -1;
        // final: assigned once, shared by every getMetaData() answer
        private final ResultSetMetaData metaData = mock(ResultSetMetaData.class);

        private ResultSetInvocationHandler(String[] columnNames, Object[][] data) throws SQLException {
            when(metaData.getColumnCount()).thenReturn(columnNames.length);
            for (int i = 0; i < columnNames.length; i++) {
                // JDBC columns are 1-based
                when(metaData.getColumnName(i + 1)).thenReturn(columnNames[i]);
            }
            this.data = data;
        }

        @Override
        public Object invoke(Object proxy, Method method, Object[] args) {
            if ("next".equals(method.getName())) {
                return next();
            }
            if ("getObject".equals(method.getName())) {
                return getObject((Integer) args[0]);
            }
            if ("getMetaData".equals(method.getName())) {
                return metaData;
            }
            return null;
        }

        private Object getObject(int column) {
            return data[currentRow][column - 1];
        }

        private boolean next() {
            if (data.length > currentRow + 1) {
                currentRow++;
                return true;
            }
            return false;
        }
    }
}
| |
import java.applet.Applet;
import java.awt.AWTEvent;
import java.awt.BorderLayout;
import java.awt.Button;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FileDialog;
import java.awt.Frame;
import java.awt.Graphics;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Image;
import java.awt.Panel;
import java.awt.TextField;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import libsvm.svm;
import libsvm.svm_model;
import libsvm.svm_node;
import libsvm.svm_parameter;
import libsvm.svm_problem;
/**
 * Interactive libsvm demo applet: click to place labeled points, then train
 * an SVM and paint the decision regions (classification) or the regression
 * curve (SVR) into an off-screen buffer.
 */
public class svm_toy extends Applet
{
    /** Default training options: RBF kernel, C = 100. */
    private static final String DEFAULT_PARAM="-t 2 -c 100";
    /** Drawing-area width in pixels. */
    private int XLEN;
    /** Drawing-area height in pixels (window height minus the 50px control bar). */
    private int YLEN;

    // off-screen buffer
    private Image buffer;
    private Graphics buffer_gc;

    // pre-allocated colors: [0] background, [1..3] region colors,
    // [4..6] point colors (index = point value + 3)
    private static final Color colors[] =
    {
        new Color(0,0,0),
        new Color(0,120,120),
        new Color(120,120,0),
        new Color(120,0,120),
        new Color(0,200,200),
        new Color(200,200,0),
        new Color(200,0,200)
    };

    /** A training point in normalized [0,1) coordinates with a class label. */
    private static class point
    {
        point(double x, double y, byte value)
        {
            this.x = x;
            this.y = y;
            this.value = value;
        }
        double x, y;
        byte value;
    }

    private List<point> point_list = new ArrayList<point>();
    /** Label assigned to newly placed points; cycles 1..3 via "Change". */
    private byte current_value = 1;

    /** Builds the control bar (buttons + parameter field) and wires handlers. */
    public void init()
    {
        setSize(getSize());

        final Button button_change = new Button("Change");
        Button button_run = new Button("Run");
        Button button_clear = new Button("Clear");
        Button button_save = new Button("Save");
        Button button_load = new Button("Load");
        final TextField input_line = new TextField(DEFAULT_PARAM);

        BorderLayout layout = new BorderLayout();
        this.setLayout(layout);

        Panel p = new Panel();
        GridBagLayout gridbag = new GridBagLayout();
        p.setLayout(gridbag);

        GridBagConstraints c = new GridBagConstraints();
        c.fill = GridBagConstraints.HORIZONTAL;
        c.weightx = 1;
        c.gridwidth = 1;
        gridbag.setConstraints(button_change,c);
        gridbag.setConstraints(button_run,c);
        gridbag.setConstraints(button_clear,c);
        gridbag.setConstraints(button_save,c);
        gridbag.setConstraints(button_load,c);
        c.weightx = 5;
        c.gridwidth = 5;
        gridbag.setConstraints(input_line,c);

        button_change.setBackground(colors[current_value]);

        p.add(button_change);
        p.add(button_run);
        p.add(button_clear);
        p.add(button_save);
        p.add(button_load);
        p.add(input_line);
        this.add(p,BorderLayout.SOUTH);

        button_change.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
        { button_change_clicked(); button_change.setBackground(colors[current_value]); }});

        button_run.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
        { button_run_clicked(input_line.getText()); }});

        button_clear.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
        { button_clear_clicked(); }});

        button_save.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
        { button_save_clicked(input_line.getText()); }});

        button_load.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
        { button_load_clicked(); }});

        // pressing Enter in the text field also trains
        input_line.addActionListener(new ActionListener()
        { public void actionPerformed (ActionEvent e)
        { button_run_clicked(input_line.getText()); }});

        this.enableEvents(AWTEvent.MOUSE_EVENT_MASK);
    }

    /** Draws one point into both the off-screen buffer and the window. */
    void draw_point(point p)
    {
        Color c = colors[p.value+3];
        Graphics window_gc = getGraphics();
        buffer_gc.setColor(c);
        buffer_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
        window_gc.setColor(c);
        window_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
    }

    /** Removes all points and repaints the background. */
    void clear_all()
    {
        point_list.clear();
        if(buffer != null)
        {
            buffer_gc.setColor(colors[0]);
            buffer_gc.fillRect(0,0,XLEN,YLEN);
        }
        repaint();
    }

    void draw_all_points()
    {
        int n = point_list.size();
        for(int i=0;i<n;i++)
            draw_point(point_list.get(i));
    }

    /** Cycles the current label through 1, 2, 3. */
    void button_change_clicked()
    {
        ++current_value;
        if(current_value > 3) current_value = 1;
    }

    private static double atof(String s)
    {
        return Double.parseDouble(s);
    }

    private static int atoi(String s)
    {
        return Integer.parseInt(s);
    }

    /** Parses {@code args} as libsvm options, trains on the points, paints results. */
    void button_run_clicked(String args)
    {
        // nothing to train on
        if(point_list.isEmpty()) return;
        svm_parameter param = new svm_parameter();

        // default values
        param.svm_type = svm_parameter.C_SVC;
        param.kernel_type = svm_parameter.RBF;
        param.degree = 3;
        param.gamma = 0;
        param.coef0 = 0;
        param.nu = 0.5;
        param.cache_size = 40;
        param.C = 1;
        param.eps = 1e-3;
        param.p = 0.1;
        param.shrinking = 1;
        param.probability = 0;
        param.nr_weight = 0;
        param.weight_label = new int[0];
        param.weight = new double[0];

        // parse options: "-x value" pairs, libsvm command-line style
        StringTokenizer st = new StringTokenizer(args);
        String[] argv = new String[st.countTokens()];
        for(int i=0;i<argv.length;i++)
            argv[i] = st.nextToken();

        for(int i=0;i<argv.length;i++)
        {
            if(argv[i].charAt(0) != '-') break;
            if(++i>=argv.length)
            {
                System.err.print("unknown option\n");
                break;
            }
            switch(argv[i-1].charAt(1))
            {
                case 's':
                    param.svm_type = atoi(argv[i]);
                    break;
                case 't':
                    param.kernel_type = atoi(argv[i]);
                    break;
                case 'd':
                    param.degree = atoi(argv[i]);
                    break;
                case 'g':
                    param.gamma = atof(argv[i]);
                    break;
                case 'r':
                    param.coef0 = atof(argv[i]);
                    break;
                case 'n':
                    param.nu = atof(argv[i]);
                    break;
                case 'm':
                    param.cache_size = atof(argv[i]);
                    break;
                case 'c':
                    param.C = atof(argv[i]);
                    break;
                case 'e':
                    param.eps = atof(argv[i]);
                    break;
                case 'p':
                    param.p = atof(argv[i]);
                    break;
                case 'h':
                    param.shrinking = atoi(argv[i]);
                    break;
                case 'b':
                    param.probability = atoi(argv[i]);
                    break;
                case 'w':
                    // "-wN weight": grow both weight arrays by one entry
                    ++param.nr_weight;
                    {
                        int[] old = param.weight_label;
                        param.weight_label = new int[param.nr_weight];
                        System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
                    }
                    {
                        double[] old = param.weight;
                        param.weight = new double[param.nr_weight];
                        System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
                    }
                    param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
                    param.weight[param.nr_weight-1] = atof(argv[i]);
                    break;
                default:
                    System.err.print("unknown option\n");
            }
        }

        // build problem
        svm_problem prob = new svm_problem();
        prob.l = point_list.size();
        prob.y = new double[prob.l];

        if(param.kernel_type == svm_parameter.PRECOMPUTED)
        {
            // precomputed kernels are not supported by the toy UI
        }
        else if(param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
        {
            // regression: x is the 1-D input, y the target
            if(param.gamma == 0) param.gamma = 1;
            prob.x = new svm_node[prob.l][1];
            for(int i=0;i<prob.l;i++)
            {
                point p = point_list.get(i);
                prob.x[i][0] = new svm_node();
                prob.x[i][0].index = 1;
                prob.x[i][0].value = p.x;
                prob.y[i] = p.y;
            }

            // build model & classify
            svm_model model = svm.svm_train(prob, param);
            svm_node[] x = new svm_node[1];
            x[0] = new svm_node();
            x[0].index = 1;
            int[] j = new int[XLEN];

            Graphics window_gc = getGraphics();
            // evaluate the regression curve at every pixel column
            for (int i = 0; i < XLEN; i++)
            {
                x[0].value = (double) i / XLEN;
                j[i] = (int)(YLEN*svm.svm_predict(model, x));
            }

            buffer_gc.setColor(colors[0]);
            buffer_gc.drawLine(0,0,0,YLEN-1);
            window_gc.setColor(colors[0]);
            window_gc.drawLine(0,0,0,YLEN-1);

            int p = (int)(param.p * YLEN);
            for(int i=1;i<XLEN;i++)
            {
                // clear the column, then draw the curve segment
                buffer_gc.setColor(colors[0]);
                buffer_gc.drawLine(i,0,i,YLEN-1);
                window_gc.setColor(colors[0]);
                window_gc.drawLine(i,0,i,YLEN-1);

                buffer_gc.setColor(colors[5]);
                window_gc.setColor(colors[5]);
                buffer_gc.drawLine(i-1,j[i-1],i,j[i]);
                window_gc.drawLine(i-1,j[i-1],i,j[i]);

                if(param.svm_type == svm_parameter.EPSILON_SVR)
                {
                    // epsilon tube around the regression curve
                    buffer_gc.setColor(colors[2]);
                    window_gc.setColor(colors[2]);
                    buffer_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
                    window_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);

                    buffer_gc.setColor(colors[2]);
                    window_gc.setColor(colors[2]);
                    buffer_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
                    window_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
                }
            }
        }
        else
        {
            // classification / one-class: 2-D input, label is the class
            if(param.gamma == 0) param.gamma = 0.5;
            prob.x = new svm_node [prob.l][2];
            for(int i=0;i<prob.l;i++)
            {
                point p = point_list.get(i);
                prob.x[i][0] = new svm_node();
                prob.x[i][0].index = 1;
                prob.x[i][0].value = p.x;
                prob.x[i][1] = new svm_node();
                prob.x[i][1].index = 2;
                prob.x[i][1].value = p.y;
                prob.y[i] = p.value;
            }

            // build model & classify
            svm_model model = svm.svm_train(prob, param);
            svm_node[] x = new svm_node[2];
            x[0] = new svm_node();
            x[1] = new svm_node();
            x[0].index = 1;
            x[1].index = 2;

            Graphics window_gc = getGraphics();
            // color every pixel by its predicted class
            for (int i = 0; i < XLEN; i++)
                for (int j = 0; j < YLEN ; j++) {
                    x[0].value = (double) i / XLEN;
                    x[1].value = (double) j / YLEN;
                    double d = svm.svm_predict(model, x);
                    // one-class SVM predicts -1 for outliers; remap to a region color
                    if (param.svm_type == svm_parameter.ONE_CLASS && d<0) d=2;
                    buffer_gc.setColor(colors[(int)d]);
                    window_gc.setColor(colors[(int)d]);
                    buffer_gc.drawLine(i,j,i,j);
                    window_gc.drawLine(i,j,i,j);
                }
        }

        draw_all_points();
    }

    void button_clear_clicked()
    {
        clear_all();
    }

    /** Saves the points in libsvm text format (regression or classification layout). */
    void button_save_clicked(String args)
    {
        FileDialog dialog = new FileDialog(new Frame(),"Save",FileDialog.SAVE);
        dialog.setVisible(true);
        // Fix: FileDialog.getFile() returns null when the user cancels. The old
        // code tested the CONCATENATED string (directory + file), which is
        // never null — a cancelled dialog then wrote to a bogus "...null" path.
        String file = dialog.getFile();
        if (file == null) return;
        String filename = dialog.getDirectory() + file;
        try {
            DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(filename)));

            // the -s option in the parameter string decides the output layout
            int svm_type = svm_parameter.C_SVC;
            int svm_type_idx = args.indexOf("-s ");
            if(svm_type_idx != -1)
            {
                StringTokenizer svm_str_st = new StringTokenizer(args.substring(svm_type_idx+2).trim());
                svm_type = atoi(svm_str_st.nextToken());
            }

            int n = point_list.size();
            if(svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
            {
                // regression: "<target> 1:<x>"
                for(int i=0;i<n;i++)
                {
                    point p = point_list.get(i);
                    fp.writeBytes(p.y+" 1:"+p.x+"\n");
                }
            }
            else
            {
                // classification: "<label> 1:<x> 2:<y>"
                for(int i=0;i<n;i++)
                {
                    point p = point_list.get(i);
                    fp.writeBytes(p.value+" 1:"+p.x+" 2:"+p.y+"\n");
                }
            }
            fp.close();
        } catch (IOException e) { System.err.print(e); }
    }

    /** Loads points from a libsvm text file (either 1-D or 2-D layout). */
    void button_load_clicked()
    {
        FileDialog dialog = new FileDialog(new Frame(),"Load",FileDialog.LOAD);
        dialog.setVisible(true);
        // Fix: check getFile() for null BEFORE concatenating (see save).
        String file = dialog.getFile();
        if (file == null) return;
        String filename = dialog.getDirectory() + file;
        clear_all();
        try {
            BufferedReader fp = new BufferedReader(new FileReader(filename));
            String line;
            while((line = fp.readLine()) != null)
            {
                StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
                if(st.countTokens() == 5)
                {
                    // classification row: "<label> 1:<x> 2:<y>"
                    byte value = (byte)atoi(st.nextToken());
                    st.nextToken();
                    double x = atof(st.nextToken());
                    st.nextToken();
                    double y = atof(st.nextToken());
                    point_list.add(new point(x,y,value));
                }
                else if(st.countTokens() == 3)
                {
                    // regression row: "<target> 1:<x>"
                    double y = atof(st.nextToken());
                    st.nextToken();
                    double x = atof(st.nextToken());
                    point_list.add(new point(x,y,current_value));
                }else
                    break;
            }
            fp.close();
        } catch (IOException e) { System.err.print(e); }
        draw_all_points();
    }

    /** Adds a point where the mouse was pressed (inside the drawing area). */
    protected void processMouseEvent(MouseEvent e)
    {
        if(e.getID() == MouseEvent.MOUSE_PRESSED)
        {
            if(e.getX() >= XLEN || e.getY() >= YLEN) return;
            point p = new point((double)e.getX()/XLEN,
                                (double)e.getY()/YLEN,
                                current_value);
            point_list.add(p);
            draw_point(p);
        }
    }

    public void paint(Graphics g)
    {
        // create buffer first time
        if(buffer == null) {
            buffer = this.createImage(XLEN,YLEN);
            buffer_gc = buffer.getGraphics();
            buffer_gc.setColor(colors[0]);
            buffer_gc.fillRect(0,0,XLEN,YLEN);
        }
        g.drawImage(buffer,0,0,this);
    }

    public Dimension getPreferredSize() { return new Dimension(XLEN,YLEN+50); }

    public void setSize(Dimension d) { setSize(d.width,d.height); }

    public void setSize(int w,int h) {
        super.setSize(w,h);
        XLEN = w;
        // bottom 50 pixels are reserved for the control bar
        YLEN = h-50;
        clear_all();
    }

    public static void main(String[] argv)
    {
        new AppletFrame("svm_toy",new svm_toy(),500,500+50);
    }
}
/**
 * Minimal host frame that runs an {@link Applet} as a standalone application:
 * initializes and starts the applet, then shows it in a packed frame. Closing
 * the window exits the JVM.
 */
class AppletFrame extends Frame
{
    AppletFrame(String title, Applet applet, int width, int height)
    {
        super(title);
        WindowAdapter closeHandler = new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                System.exit(0);
            }
        };
        this.addWindowListener(closeHandler);
        // drive the applet lifecycle manually: init, size, start
        applet.init();
        applet.setSize(width, height);
        applet.start();
        this.add(applet);
        this.pack();
        this.setVisible(true);
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.utils.xml;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Common XML utils
*/
public class XMLUtils {
public static Document parseDocument(String fileName)
throws XMLException {
return parseDocument(new java.io.File(fileName));
}
public static Document parseDocument(java.io.File file) throws XMLException {
try (InputStream is = new FileInputStream(file)) {
return parseDocument(new InputSource(is));
} catch (IOException e) {
throw new XMLException("Error opening file '" + file + "'", e);
}
}
public static Document parseDocument(java.io.InputStream is) throws XMLException {
return parseDocument(new InputSource(is));
}
public static Document parseDocument(java.io.Reader is) throws XMLException {
return parseDocument(new InputSource(is));
}
public static Document parseDocument(InputSource source) throws XMLException {
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder xmlBuilder = dbf.newDocumentBuilder();
return xmlBuilder.parse(source);
} catch (Exception er) {
throw new XMLException("Error parsing XML document", er);
}
}
public static Document createDocument()
throws XMLException {
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder xmlBuilder = dbf.newDocumentBuilder();
return xmlBuilder.newDocument();
} catch (Exception er) {
throw new XMLException("Error creating XML document", er);
}
}
public static Element getChildElement(Element element,
String childName) {
for (org.w3c.dom.Node node = element.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE &&
((Element) node).getTagName().equals(childName)) {
return (Element) node;
}
}
return null;
}
@Nullable
public static String getChildElementBody(Element element,
String childName) {
for (org.w3c.dom.Node node = element.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE &&
((Element) node).getTagName().equals(childName)) {
return getElementBody((Element) node);
}
}
return null;
}
@Nullable
public static String getElementBody(Element element) {
org.w3c.dom.Node valueNode = element.getFirstChild();
if (valueNode == null) {
return null;
}
if (valueNode.getNodeType() == org.w3c.dom.Node.TEXT_NODE) {
return valueNode.getNodeValue();
} else {
return null;
}
}
// Get list of all child elements of specified node
@NotNull
public static List<Element> getChildElementList(
Element parent,
String nodeName) {
List<Element> list = new ArrayList<>();
for (org.w3c.dom.Node node = parent.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE &&
nodeName.equals(node.getNodeName())) {
list.add((Element) node);
}
}
return list;
}
// Get list of all child elements of specified node
@NotNull
public static Collection<Element> getChildElementListNS(
Element parent,
String nsURI) {
List<Element> list = new ArrayList<>();
for (org.w3c.dom.Node node = parent.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE &&
node.getNamespaceURI().equals(nsURI)) {
list.add((Element) node);
}
}
return list;
}
// Get list of all child elements of specified node
public static Collection<Element> getChildElementListNS(
Element parent,
String nodeName,
String nsURI) {
List<Element> list = new ArrayList<>();
for (org.w3c.dom.Node node = parent.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE &&
node.getLocalName().equals(nodeName) &&
node.getNamespaceURI().equals(nsURI)) {
list.add((Element) node);
}
}
return list;
}
// Get list of all child elements of specified node
@NotNull
public static Collection<Element> getChildElementList(
Element parent,
String[] nodeNameList) {
List<Element> list = new ArrayList<>();
for (org.w3c.dom.Node node = parent.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
for (int i = 0; i < nodeNameList.length; i++) {
if (node.getNodeName().equals(nodeNameList[i])) {
list.add((Element) node);
}
}
}
}
return list;
}
// Find one child element with specified name
@Nullable
public static Element findChildElement(
Element parent) {
for (org.w3c.dom.Node node = parent.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
return (Element) node;
}
}
return null;
}
public static Object escapeXml(Object obj) {
if (obj == null) {
return null;
} else if (obj instanceof CharSequence) {
return escapeXml((CharSequence) obj);
} else {
return obj;
}
}
public static String escapeXml(CharSequence str) {
if (str == null) {
return null;
}
StringBuilder res = null;
int strLength = str.length();
for (int i = 0; i < strLength; i++) {
char c = str.charAt(i);
String repl = encodeXMLChar(c);
if (repl == null) {
if (res != null) {
res.append(c);
}
} else {
if (res == null) {
res = new StringBuilder(str.length() + 5);
for (int k = 0; k < i; k++) {
res.append(str.charAt(k));
}
}
res.append(repl);
}
}
return res == null ? str.toString() : res.toString();
}
public static boolean isValidXMLChar(char c) {
return (c >= 32 || c == '\n' || c == '\r' || c == '\t');
}
/**
* Encodes a char to XML-valid form replacing &,',",<,> with special XML encoding.
*
* @param ch char to convert
* @return XML-encoded text
*/
public static String encodeXMLChar(char ch) {
switch (ch) {
case '&':
return "&";
case '\"':
return """;
case '\'':
return "'";
case '<':
return "<";
case '>':
return ">";
default:
return null;
}
}
public static XMLException adaptSAXException(Exception toCatch) {
if (toCatch instanceof XMLException) {
return (XMLException) toCatch;
} else if (toCatch instanceof org.xml.sax.SAXException) {
String message = toCatch.getMessage();
Exception embedded = ((org.xml.sax.SAXException) toCatch).getException();
if (embedded != null && embedded.getMessage() != null && embedded.getMessage().equals(message)) {
// Just SAX wrapper - skip it
return adaptSAXException(embedded);
} else {
return new XMLException(
message,
embedded != null ? adaptSAXException(embedded) : null);
}
} else {
return new XMLException(toCatch.getMessage(), toCatch);
}
}
public static Collection<Element> getChildElementList(Element element) {
List<Element> children = new ArrayList<>();
for (org.w3c.dom.Node node = element.getFirstChild(); node != null; node = node.getNextSibling()) {
if (node.getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
children.add((Element) node);
}
}
return children;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti5.engine.impl.persistence.entity;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti5.engine.ProcessEngineConfiguration;
import org.activiti5.engine.history.HistoricProcessInstance;
import org.activiti5.engine.impl.context.Context;
import org.activiti5.engine.impl.db.BulkDeleteable;
import org.activiti5.engine.impl.identity.Authentication;
/**
* @author Tom Baeyens
* @author Christian Stettler
* @author Joram Barrez
*/
public class HistoricProcessInstanceEntity extends HistoricScopeInstanceEntity implements HistoricProcessInstance, BulkDeleteable {
private static final long serialVersionUID = 1L;
protected String endActivityId;
protected String businessKey;
protected String startUserId;
protected String startActivityId;
protected String superProcessInstanceId;
protected String tenantId = ProcessEngineConfiguration.NO_TENANT_ID;
protected String name;
protected String localizedName;
protected String description;
protected String localizedDescription;
protected List<HistoricVariableInstanceEntity> queryVariables;
public HistoricProcessInstanceEntity() {
}
public HistoricProcessInstanceEntity(ExecutionEntity processInstance) {
id = processInstance.getId();
processInstanceId = processInstance.getId();
businessKey = processInstance.getBusinessKey();
processDefinitionId = processInstance.getProcessDefinitionId();
processDefinitionKey = processInstance.getProcessDefinitionKey();
processDefinitionName = processInstance.getProcessDefinitionName();
processDefinitionVersion = processInstance.getProcessDefinitionVersion();
deploymentId = processInstance.getDeploymentId();
startTime = Context.getProcessEngineConfiguration().getClock().getCurrentTime();
startUserId = Authentication.getAuthenticatedUserId();
startActivityId = processInstance.getActivityId();
superProcessInstanceId = processInstance.getSuperExecution() != null ? processInstance.getSuperExecution().getProcessInstanceId() : null;
// Inherit tenant id (if applicable)
if (processInstance.getTenantId() != null) {
tenantId = processInstance.getTenantId();
}
}
public Object getPersistentState() {
Map<String, Object> persistentState = (Map<String, Object>) new HashMap<String, Object>();
persistentState.put("endTime", endTime);
persistentState.put("businessKey", businessKey);
persistentState.put("name", name);
persistentState.put("durationInMillis", durationInMillis);
persistentState.put("deleteReason", deleteReason);
persistentState.put("endStateName", endActivityId);
persistentState.put("superProcessInstanceId", superProcessInstanceId);
persistentState.put("processDefinitionId", processDefinitionId);
persistentState.put("processDefinitionKey", processDefinitionKey);
persistentState.put("processDefinitionName", processDefinitionName);
persistentState.put("processDefinitionVersion", processDefinitionVersion);
persistentState.put("deploymentId", deploymentId);
return persistentState;
}
// getters and setters //////////////////////////////////////////////////////
public String getEndActivityId() {
return endActivityId;
}
public void setEndActivityId(String endActivityId) {
this.endActivityId = endActivityId;
}
public String getBusinessKey() {
return businessKey;
}
public void setBusinessKey(String businessKey) {
this.businessKey = businessKey;
}
public String getStartUserId() {
return startUserId;
}
public void setStartUserId(String startUserId) {
this.startUserId = startUserId;
}
public String getStartActivityId() {
return startActivityId;
}
public void setStartActivityId(String startUserId) {
this.startActivityId = startUserId;
}
/** Returns the process-instance id of the super (calling) execution, or null. */
public String getSuperProcessInstanceId() {
return superProcessInstanceId;
}
/** Sets the process-instance id of the super (calling) execution. */
public void setSuperProcessInstanceId(String superProcessInstanceId) {
this.superProcessInstanceId = superProcessInstanceId;
}
/** Returns the tenant id inherited from the runtime process instance. */
public String getTenantId() {
return tenantId;
}
/** Sets the tenant id of this historic process instance. */
public void setTenantId(String tenantId) {
this.tenantId = tenantId;
}
/**
 * Returns the name of this historic process instance, preferring the
 * localized name whenever one has been set to a non-empty value.
 */
public String getName() {
    boolean hasLocalizedName = localizedName != null && localizedName.length() > 0;
    return hasLocalizedName ? localizedName : name;
}

/** Sets the (non-localized) name of this historic process instance. */
public void setName(String name) {
    this.name = name;
}
/** Returns the localized name, or null when none has been set. */
public String getLocalizedName() {
return localizedName;
}
/** Sets the localized name used by {@link #getName()} when non-empty. */
public void setLocalizedName(String localizedName) {
this.localizedName = localizedName;
}
/**
 * Returns the description, preferring the localized description whenever
 * one has been set to a non-empty value.
 */
public String getDescription() {
    return (localizedDescription != null && localizedDescription.length() > 0)
            ? localizedDescription
            : description;
}

/** Sets the (non-localized) description. */
public void setDescription(String description) {
    this.description = description;
}

/** Returns the localized description, or null when none has been set. */
public String getLocalizedDescription() {
    return localizedDescription;
}

/** Sets the localized description used by {@link #getDescription()} when non-empty. */
public void setLocalizedDescription(String localizedDescription) {
    this.localizedDescription = localizedDescription;
}
/**
 * Returns the process-scoped variables fetched by the query, keyed by
 * variable name. Task-scoped variables (non-null task id) and variable rows
 * without an id are skipped. Returns an empty map when no variables were
 * fetched.
 */
public Map<String, Object> getProcessVariables() {
    Map<String, Object> variables = new HashMap<String, Object>();
    if (queryVariables == null) {
        return variables;
    }
    for (HistoricVariableInstanceEntity variable : queryVariables) {
        // Only process-level variables: must have an id and no task id.
        if (variable.getId() != null && variable.getTaskId() == null) {
            variables.put(variable.getName(), variable.getValue());
        }
    }
    return variables;
}
/**
 * Returns the variable instances fetched for this entity, lazily creating a
 * HistoricVariableInitializingList when running inside a command context.
 * NOTE(review): outside a command context this can return null rather than
 * an empty list — callers appear to rely on that; confirm before changing.
 */
public List<HistoricVariableInstanceEntity> getQueryVariables() {
if(queryVariables == null && Context.getCommandContext() != null) {
queryVariables = new HistoricVariableInitializingList();
}
return queryVariables;
}
/** Sets the variable instances associated with this historic instance. */
public void setQueryVariables(List<HistoricVariableInstanceEntity> queryVariables) {
this.queryVariables = queryVariables;
}
// common methods //////////////////////////////////////////////////////////

/** Brief identity string for logging; only includes the super process-instance id. */
@Override
public String toString() {
    StringBuilder text = new StringBuilder("HistoricProcessInstanceEntity[superProcessInstanceId=");
    text.append(superProcessInstanceId);
    text.append("]");
    return text.toString();
}
}
| |
package com.github.mikephil.charting.renderer;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Typeface;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.data.BarDataSet;
import com.github.mikephil.charting.data.ChartData;
import com.github.mikephil.charting.data.DataSet;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.PieDataSet;
import com.github.mikephil.charting.utils.Utils;
import com.github.mikephil.charting.utils.ViewPortHandler;
import java.util.ArrayList;
import java.util.List;
/**
 * Renders the chart's {@link Legend}: computes the entries (forms + labels)
 * from the chart data and draws them at the configured legend position.
 *
 * <p>Throughout this class the color value {@code -2} is used as a sentinel
 * meaning "label-only entry, draw no form".
 */
public class LegendRenderer extends Renderer {

    /** paint for the legend labels */
    protected Paint mLegendLabelPaint;

    /** paint used for the legend forms */
    protected Paint mLegendFormPaint;

    /** the legend object this renderer renders */
    protected Legend mLegend;

    public LegendRenderer(ViewPortHandler viewPortHandler, Legend legend) {
        super(viewPortHandler);

        this.mLegend = legend;

        // defaults: 9dp left-aligned antialiased text
        mLegendLabelPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mLegendLabelPaint.setTextSize(Utils.convertDpToPixel(9f));
        mLegendLabelPaint.setTextAlign(Align.LEFT);

        mLegendFormPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mLegendFormPaint.setStyle(Paint.Style.FILL);
        mLegendFormPaint.setStrokeWidth(3f);
    }

    /**
     * Returns the Paint object used for drawing the Legend labels.
     *
     * @return the label paint
     */
    public Paint getLabelPaint() {
        return mLegendLabelPaint;
    }

    /**
     * Returns the Paint object used for drawing the Legend forms.
     *
     * @return the form paint
     */
    public Paint getFormPaint() {
        return mLegendFormPaint;
    }

    /**
     * Prepares the legend and calculates all needed forms, labels and colors.
     *
     * @param data the chart data to build the legend entries from
     */
    public void computeLegend(ChartData<?> data) {

        List<String> labels = new ArrayList<String>();
        List<Integer> colors = new ArrayList<Integer>();

        // loop for building up the colors and labels used in the legend
        for (int i = 0; i < data.getDataSetCount(); i++) {

            DataSet<? extends Entry> dataSet = data.getDataSetByIndex(i);

            List<Integer> clrs = dataSet.getColors();
            int entryCount = dataSet.getEntryCount();

            // if we have a barchart with stacked bars
            if (dataSet instanceof BarDataSet && ((BarDataSet) dataSet).isStacked()) {

                BarDataSet bds = (BarDataSet) dataSet;
                String[] sLabels = bds.getStackLabels();

                // one entry per stack segment; stack labels repeat if fewer
                // labels than segments were supplied
                for (int j = 0; j < clrs.size() && j < bds.getStackSize(); j++) {
                    labels.add(sLabels[j % sLabels.length]);
                    colors.add(clrs.get(j));
                }

                // add the legend description label (form-less sentinel -2)
                colors.add(-2);
                labels.add(bds.getLabel());

            } else if (dataSet instanceof PieDataSet) {

                List<String> xVals = data.getXVals();
                PieDataSet pds = (PieDataSet) dataSet;

                // one entry per pie slice, labeled with its x-value
                for (int j = 0; j < clrs.size() && j < entryCount && j < xVals.size(); j++) {
                    labels.add(xVals.get(j));
                    colors.add(clrs.get(j));
                }

                // add the legend description label (form-less sentinel -2)
                colors.add(-2);
                labels.add(pds.getLabel());

            } else { // all others

                for (int j = 0; j < clrs.size() && j < entryCount; j++) {

                    // if multiple colors are set for a DataSet, group them
                    if (j < clrs.size() - 1 && j < entryCount - 1) {
                        labels.add(null);
                    } else { // add label to the last entry
                        // (was data.getDataSetByIndex(i).getLabel() — same
                        // data set, redundant re-lookup removed)
                        labels.add(dataSet.getLabel());
                    }

                    colors.add(clrs.get(j));
                }
            }
        }

        mLegend.setColors(colors);
        mLegend.setLabels(labels);

        Typeface tf = mLegend.getTypeface();

        if (tf != null)
            mLegendLabelPaint.setTypeface(tf);

        mLegendLabelPaint.setTextSize(mLegend.getTextSize());
        mLegendLabelPaint.setColor(mLegend.getTextColor());

        // calculate all dimensions of the mLegend
        mLegend.calculateDimensions(mLegendLabelPaint);
    }

    /**
     * Draws the legend onto the given canvas at the legend's configured
     * position, honoring the legend direction (LTR/RTL), offsets, and
     * stacked (form-only, null-label) entries.
     *
     * @param c canvas to draw on
     */
    public void renderLegend(Canvas c) {

        if (!mLegend.isEnabled())
            return;

        Typeface tf = mLegend.getTypeface();

        if (tf != null)
            mLegendLabelPaint.setTypeface(tf);

        mLegendLabelPaint.setTextSize(mLegend.getTextSize());
        mLegendLabelPaint.setColor(mLegend.getTextColor());

        String[] labels = mLegend.getLegendLabels();
        int[] colors = mLegend.getColors();

        float formToTextSpace = mLegend.getFormToTextSpace();
        float xEntrySpace = mLegend.getXEntrySpace();
        Legend.LegendDirection direction = mLegend.getDirection();

        float formSize = mLegend.getFormSize();

        // space between the entries
        float stackSpace = mLegend.getStackSpace();

        // the amount of pixels the text needs to be set down to be on the same
        // height as the form
        float textDrop = (Utils.calcTextHeight(mLegendLabelPaint, "AQJ") + formSize) / 2f;

        float posX, posY;

        // contains the stacked legend size in pixels
        float stack = 0f;

        boolean wasStacked = false;

        float yoffset = mLegend.getYOffset();
        float xoffset = mLegend.getXOffset();

        switch (mLegend.getPosition()) {
            case BELOW_CHART_LEFT:

                posX = mViewPortHandler.contentLeft() + xoffset;
                posY = mViewPortHandler.getChartHeight() - yoffset;

                // RTL rendering starts from the right edge of the legend
                if (direction == Legend.LegendDirection.RIGHT_TO_LEFT) {
                    posX += mLegend.mNeededWidth;
                }

                for (int i = 0, count = labels.length; i < count; i++) {

                    boolean drawingForm = colors[i] != -2;

                    if (drawingForm) {
                        if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                            posX -= formSize;

                        drawForm(c, posX, posY - mLegend.mTextHeightMax / 2f, i, mLegend);

                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            posX += formSize;
                    }

                    // grouped forms have null labels
                    if (labels[i] != null) {

                        // spacing between form and label
                        if (drawingForm)
                            posX += direction == Legend.LegendDirection.RIGHT_TO_LEFT ?
                                    -formToTextSpace :
                                    formToTextSpace;

                        if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                            posX -= Utils.calcTextWidth(mLegendLabelPaint, labels[i]);

                        drawLabel(c, posX, posY, labels[i]);

                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            posX += Utils.calcTextWidth(mLegendLabelPaint, labels[i]);

                        posX += direction == Legend.LegendDirection.RIGHT_TO_LEFT ?
                                -xEntrySpace :
                                xEntrySpace;
                    } else {
                        posX += direction == Legend.LegendDirection.RIGHT_TO_LEFT ? -stackSpace : stackSpace;
                    }
                }

                break;
            case BELOW_CHART_RIGHT:

                posX = mViewPortHandler.contentRight() - xoffset;
                posY = mViewPortHandler.getChartHeight() - yoffset;

                for (int i = 0, count = labels.length; i < count; i++) {

                    boolean drawingForm = colors[i] != -2;

                    if (direction == Legend.LegendDirection.RIGHT_TO_LEFT && drawingForm) {
                        posX -= formSize;
                        drawForm(c, posX, posY - mLegend.mTextHeightMax / 2f, i, mLegend);
                        posX -= formToTextSpace;
                    }

                    if (labels[i] != null) {
                        posX -= Utils.calcTextWidth(mLegendLabelPaint, labels[i]);
                        drawLabel(c, posX, posY, labels[i]);
                    }

                    if (direction == Legend.LegendDirection.LEFT_TO_RIGHT && drawingForm) {
                        posX -= formToTextSpace + formSize;
                        drawForm(c, posX, posY - mLegend.mTextHeightMax / 2f, i, mLegend);
                    }

                    posX -= labels[i] != null ? xEntrySpace : stackSpace;
                }

                break;
            case BELOW_CHART_CENTER:

                posX = mViewPortHandler.getChartWidth() / 2f + (direction == Legend.LegendDirection.LEFT_TO_RIGHT ? -mLegend.mNeededWidth / 2f : mLegend.mNeededWidth / 2f);
                posY = mViewPortHandler.getChartHeight() - yoffset;

                for (int i = 0; i < labels.length; i++) {

                    boolean drawingForm = colors[i] != -2;

                    if (drawingForm) {
                        if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                            posX -= formSize;

                        drawForm(c, posX, posY - mLegend.mTextHeightMax / 2f, i, mLegend);

                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            posX += formSize;
                    }

                    // grouped forms have null labels
                    if (labels[i] != null) {

                        // spacing between form and label
                        if (drawingForm)
                            posX += direction == Legend.LegendDirection.RIGHT_TO_LEFT ?
                                    -formToTextSpace :
                                    formToTextSpace;

                        if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                            posX -= Utils.calcTextWidth(mLegendLabelPaint, labels[i]);

                        drawLabel(c, posX, posY, labels[i]);

                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            posX += Utils.calcTextWidth(mLegendLabelPaint, labels[i]);

                        posX += direction == Legend.LegendDirection.RIGHT_TO_LEFT ?
                                -xEntrySpace :
                                xEntrySpace;
                    } else {
                        posX += direction == Legend.LegendDirection.RIGHT_TO_LEFT ? -stackSpace : stackSpace;
                    }
                }

                break;
            case PIECHART_CENTER:

                posX = mViewPortHandler.getChartWidth() / 2f + (direction == Legend.LegendDirection.LEFT_TO_RIGHT ? -mLegend.mTextWidthMax / 2f : mLegend.mTextWidthMax / 2f);
                posY = mViewPortHandler.getChartHeight() / 2f - mLegend.mNeededHeight / 2f + mLegend.getYOffset();

                for (int i = 0; i < labels.length; i++) {

                    boolean drawingForm = colors[i] != -2;

                    float x = posX;

                    if (drawingForm) {
                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            x += stack;
                        else
                            x -= formSize - stack;

                        drawForm(c, x, posY, i, mLegend);

                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            x += formSize;
                    }

                    if (labels[i] != null) {

                        if (drawingForm && !wasStacked)
                            x += direction == Legend.LegendDirection.LEFT_TO_RIGHT ? formToTextSpace : -formToTextSpace;
                        else if (wasStacked)
                            x = posX;

                        if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                            x -= Utils.calcTextWidth(mLegendLabelPaint, labels[i]);

                        if (!wasStacked) {
                            drawLabel(c, x, posY + mLegend.mTextHeightMax / 2f, mLegend.getLabel(i));

                            posY += textDrop;
                        } else {
                            posY += mLegend.mTextHeightMax * 3f;
                            drawLabel(c, x, posY - mLegend.mTextHeightMax, mLegend.getLabel(i));
                        }

                        // make a step down
                        posY += mLegend.getYEntrySpace();
                        stack = 0f;
                    } else {
                        stack += formSize + stackSpace;
                        wasStacked = true;
                    }
                }

                break;
            case RIGHT_OF_CHART:
            case RIGHT_OF_CHART_CENTER:
            case RIGHT_OF_CHART_INSIDE:
            case LEFT_OF_CHART:
            case LEFT_OF_CHART_CENTER:
            case LEFT_OF_CHART_INSIDE:

                boolean isRightAligned = mLegend.getPosition() == Legend.LegendPosition.RIGHT_OF_CHART ||
                        mLegend.getPosition() == Legend.LegendPosition.RIGHT_OF_CHART_CENTER ||
                        mLegend.getPosition() == Legend.LegendPosition.RIGHT_OF_CHART_INSIDE;

                if (isRightAligned) {
                    posX = mViewPortHandler.getChartWidth() - xoffset;
                    if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                        posX -= mLegend.mTextWidthMax;
                } else {
                    posX = xoffset;
                    if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                        posX += mLegend.mTextWidthMax;
                }

                if (mLegend.getPosition() == Legend.LegendPosition.RIGHT_OF_CHART ||
                        mLegend.getPosition() == Legend.LegendPosition.LEFT_OF_CHART) {
                    posY = mViewPortHandler.contentTop() + yoffset;
                } else if (mLegend.getPosition() == Legend.LegendPosition.RIGHT_OF_CHART_CENTER ||
                        mLegend.getPosition() == Legend.LegendPosition.LEFT_OF_CHART_CENTER) {
                    posY = mViewPortHandler.getChartHeight() / 2f - mLegend.mNeededHeight / 2f;
                } else /*if (mLegend.getPosition() == Legend.LegendPosition.RIGHT_OF_CHART_INSIDE ||
                        mLegend.getPosition() == Legend.LegendPosition.LEFT_OF_CHART_INSIDE)*/ {
                    posY = mViewPortHandler.contentTop() + yoffset;
                }

                for (int i = 0; i < labels.length; i++) {

                    // was boxed "Boolean" — needless autoboxing and
                    // inconsistent with the other position branches
                    boolean drawingForm = colors[i] != -2;

                    float x = posX;

                    if (drawingForm) {
                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            x += stack;
                        else
                            x -= formSize - stack;

                        drawForm(c, x, posY, i, mLegend);

                        if (direction == Legend.LegendDirection.LEFT_TO_RIGHT)
                            x += formSize;
                    }

                    if (labels[i] != null) {

                        if (drawingForm && !wasStacked)
                            x += direction == Legend.LegendDirection.LEFT_TO_RIGHT ? formToTextSpace : -formToTextSpace;
                        else if (wasStacked)
                            x = posX;

                        if (direction == Legend.LegendDirection.RIGHT_TO_LEFT)
                            x -= Utils.calcTextWidth(mLegendLabelPaint, labels[i]);

                        if (!wasStacked) {
                            drawLabel(c, x, posY + mLegend.mTextHeightMax / 2f, mLegend.getLabel(i));

                            posY += textDrop;
                        } else {
                            posY += mLegend.mTextHeightMax * 3f;
                            drawLabel(c, x, posY - mLegend.mTextHeightMax, mLegend.getLabel(i));
                        }

                        // make a step down
                        posY += mLegend.getYEntrySpace();
                        stack = 0f;
                    } else {
                        stack += formSize + stackSpace;
                        wasStacked = true;
                    }
                }

                break;
        }
    }

    /**
     * Draws the Legend-form at the given position with the color at the given
     * index.
     *
     * @param c canvas to draw with
     * @param x horizontal position of the form's left edge
     * @param y vertical center of the form
     * @param index the index of the color to use (in the colors array)
     * @param legend the legend supplying colors, form size and form shape
     */
    protected void drawForm(Canvas c, float x, float y, int index, Legend legend) {

        // -2 is the "label only" sentinel: nothing to draw
        if (legend.getColors()[index] == -2)
            return;

        mLegendFormPaint.setColor(legend.getColors()[index]);

        float formsize = legend.getFormSize();
        float half = formsize / 2f;

        switch (legend.getForm()) {
            case CIRCLE:
                c.drawCircle(x + half, y, half, mLegendFormPaint);
                break;
            case SQUARE:
                c.drawRect(x, y - half, x + formsize, y + half, mLegendFormPaint);
                break;
            case LINE:
                c.drawLine(x, y, x + formsize, y, mLegendFormPaint);
                break;
        }
    }

    /**
     * Draws the provided label at the given position.
     *
     * @param c canvas to draw with
     * @param x horizontal position of the label
     * @param y vertical baseline of the label
     * @param label the label to draw
     */
    protected void drawLabel(Canvas c, float x, float y, String label) {
        c.drawText(label, x, y, mLegendLabelPaint);
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement;
import com.amazonaws.services.simplesystemsmanagement.model.*;
/**
* Interface for accessing Amazon SSM asynchronously. Each asynchronous method
* will return a Java Future object representing the asynchronous operation;
* overloads which accept an {@code AsyncHandler} can be used to receive
* notification when an asynchronous operation completes.
* <p>
* <p>
* Amazon EC2 Simple Systems Manager (SSM) enables you to configure and manage
* your EC2 instances. You can create a configuration document and then
* associate it with one or more running instances.
* </p>
* <p>
* You can use a configuration document to automate the following tasks for your
* Windows instances:
* </p>
* <ul>
* <li>
* <p>
* Join an AWS Directory
* </p>
* </li>
* <li>
* <p>
* Install, repair, or uninstall software using an MSI package
* </p>
* </li>
* <li>
* <p>
* Run PowerShell scripts
* </p>
* </li>
* <li>
* <p>
* Configure CloudWatch Logs to monitor applications and systems
* </p>
* </li>
* </ul>
* <p>
* Note that configuration documents are not supported on Linux instances.
* </p>
*/
public class AWSSimpleSystemsManagementAsyncClient extends
AWSSimpleSystemsManagementClient implements
AWSSimpleSystemsManagementAsync {
/** Number of worker threads used when the caller supplies neither an executor nor a ClientConfiguration. */
private static final int DEFAULT_THREAD_POOL_SIZE = 50;
/** Executes all asynchronous requests submitted through this client. */
private final java.util.concurrent.ExecutorService executorService;
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM. A credentials provider chain will be used that searches for
 * credentials in this order:
 * <ul>
 * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
 * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
 * <li>Credential profiles file at the default location (~/.aws/credentials)
 * shared by all AWS SDKs and the AWS CLI</li>
 * <li>Instance profile credentials delivered through the Amazon EC2
 * metadata service</li>
 * </ul>
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * 50 threads (to match the default maximum number of concurrent connections
 * to the service).
 *
 * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AWSSimpleSystemsManagementAsyncClient() {
// Delegate to the credentials-provider-chain constructor.
this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain());
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM. A credentials provider chain will be used that searches for
 * credentials in this order:
 * <ul>
 * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
 * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
 * <li>Credential profiles file at the default location (~/.aws/credentials)
 * shared by all AWS SDKs and the AWS CLI</li>
 * <li>Instance profile credentials delivered through the Amazon EC2
 * metadata service</li>
 * </ul>
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * a number of threads equal to the maximum number of concurrent connections
 * configured via {@code ClientConfiguration.getMaxConnections()}.
 *
 * @param clientConfiguration
 *        The client configuration options controlling how this client
 *        connects to Amazon SSM (ex: proxy settings, retry counts, etc).
 *
 * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.ClientConfiguration clientConfiguration) {
// Pool size tracks the configured maximum number of connections.
this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain(),
clientConfiguration, java.util.concurrent.Executors
.newFixedThreadPool(clientConfiguration
.getMaxConnections()));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the specified AWS account credentials.
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * 50 threads (to match the default maximum number of concurrent connections
 * to the service).
 *
 * @param awsCredentials
 *        The AWS credentials (access key ID and secret key) to use when
 *        authenticating with AWS services.
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentials awsCredentials) {
this(awsCredentials, java.util.concurrent.Executors
.newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the specified AWS account credentials and executor service.
 * Default client settings will be used.
 *
 * @param awsCredentials
 *        The AWS credentials (access key ID and secret key) to use when
 *        authenticating with AWS services.
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentials awsCredentials,
java.util.concurrent.ExecutorService executorService) {
this(awsCredentials, new com.amazonaws.ClientConfiguration(),
executorService);
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the specified AWS account credentials, executor service, and
 * client configuration options.
 *
 * @param awsCredentials
 *        The AWS credentials (access key ID and secret key) to use when
 *        authenticating with AWS services.
 * @param clientConfiguration
 *        Client configuration options (ex: max retry limit, proxy settings,
 *        etc).
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentials awsCredentials,
com.amazonaws.ClientConfiguration clientConfiguration,
java.util.concurrent.ExecutorService executorService) {
// Terminal constructor for the credentials-based chain: initialize the
// synchronous superclass, then keep the executor for async dispatch.
super(awsCredentials, clientConfiguration);
this.executorService = executorService;
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the specified AWS account credentials provider. Default client
 * settings will be used.
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * 50 threads (to match the default maximum number of concurrent connections
 * to the service).
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider) {
this(awsCredentialsProvider, java.util.concurrent.Executors
.newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the provided AWS account credentials provider and client
 * configuration options.
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * a number of threads equal to the maximum number of concurrent connections
 * configured via {@code ClientConfiguration.getMaxConnections()}.
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @param clientConfiguration
 *        Client configuration options (ex: max retry limit, proxy settings,
 *        etc).
 *
 * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
com.amazonaws.ClientConfiguration clientConfiguration) {
this(awsCredentialsProvider, clientConfiguration,
java.util.concurrent.Executors
.newFixedThreadPool(clientConfiguration
.getMaxConnections()));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the specified AWS account credentials provider and executor
 * service. Default client settings will be used.
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
java.util.concurrent.ExecutorService executorService) {
this(awsCredentialsProvider, new com.amazonaws.ClientConfiguration(),
executorService);
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * SSM using the specified AWS account credentials provider, executor
 * service, and client configuration options.
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @param clientConfiguration
 *        Client configuration options (ex: max retry limit, proxy settings,
 *        etc).
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AWSSimpleSystemsManagementAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
com.amazonaws.ClientConfiguration clientConfiguration,
java.util.concurrent.ExecutorService executorService) {
// Terminal constructor for the provider-based chain.
super(awsCredentialsProvider, clientConfiguration);
this.executorService = executorService;
}
/**
 * Returns the executor service used by this client to execute async
 * requests.
 *
 * @return The executor service used by this client to execute async
 *         requests.
 */
public java.util.concurrent.ExecutorService getExecutorService() {
return executorService;
}
/** Fire-and-forget form of CreateAssociation: no completion callback. */
@Override
public java.util.concurrent.Future<CreateAssociationResult> createAssociationAsync(
        CreateAssociationRequest request) {
    return createAssociationAsync(request, null);
}

/**
 * Submits CreateAssociation to the client's executor; the optional handler
 * is notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<CreateAssociationResult> createAssociationAsync(
        final CreateAssociationRequest request,
        final com.amazonaws.handlers.AsyncHandler<CreateAssociationRequest, CreateAssociationResult> asyncHandler) {
    java.util.concurrent.Callable<CreateAssociationResult> task =
            new java.util.concurrent.Callable<CreateAssociationResult>() {
                @Override
                public CreateAssociationResult call() throws Exception {
                    final CreateAssociationResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = createAssociation(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of CreateAssociationBatch: no completion callback. */
@Override
public java.util.concurrent.Future<CreateAssociationBatchResult> createAssociationBatchAsync(
        CreateAssociationBatchRequest request) {
    return createAssociationBatchAsync(request, null);
}

/**
 * Submits CreateAssociationBatch to the client's executor; the optional
 * handler is notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<CreateAssociationBatchResult> createAssociationBatchAsync(
        final CreateAssociationBatchRequest request,
        final com.amazonaws.handlers.AsyncHandler<CreateAssociationBatchRequest, CreateAssociationBatchResult> asyncHandler) {
    java.util.concurrent.Callable<CreateAssociationBatchResult> task =
            new java.util.concurrent.Callable<CreateAssociationBatchResult>() {
                @Override
                public CreateAssociationBatchResult call() throws Exception {
                    final CreateAssociationBatchResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = createAssociationBatch(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of CreateDocument: no completion callback. */
@Override
public java.util.concurrent.Future<CreateDocumentResult> createDocumentAsync(
        CreateDocumentRequest request) {
    return createDocumentAsync(request, null);
}

/**
 * Submits CreateDocument to the client's executor; the optional handler is
 * notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<CreateDocumentResult> createDocumentAsync(
        final CreateDocumentRequest request,
        final com.amazonaws.handlers.AsyncHandler<CreateDocumentRequest, CreateDocumentResult> asyncHandler) {
    java.util.concurrent.Callable<CreateDocumentResult> task =
            new java.util.concurrent.Callable<CreateDocumentResult>() {
                @Override
                public CreateDocumentResult call() throws Exception {
                    final CreateDocumentResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = createDocument(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of DeleteAssociation: no completion callback. */
@Override
public java.util.concurrent.Future<DeleteAssociationResult> deleteAssociationAsync(
        DeleteAssociationRequest request) {
    return deleteAssociationAsync(request, null);
}

/**
 * Submits DeleteAssociation to the client's executor; the optional handler
 * is notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<DeleteAssociationResult> deleteAssociationAsync(
        final DeleteAssociationRequest request,
        final com.amazonaws.handlers.AsyncHandler<DeleteAssociationRequest, DeleteAssociationResult> asyncHandler) {
    java.util.concurrent.Callable<DeleteAssociationResult> task =
            new java.util.concurrent.Callable<DeleteAssociationResult>() {
                @Override
                public DeleteAssociationResult call() throws Exception {
                    final DeleteAssociationResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = deleteAssociation(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of DeleteDocument: no completion callback. */
@Override
public java.util.concurrent.Future<DeleteDocumentResult> deleteDocumentAsync(
        DeleteDocumentRequest request) {
    return deleteDocumentAsync(request, null);
}

/**
 * Submits DeleteDocument to the client's executor; the optional handler is
 * notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<DeleteDocumentResult> deleteDocumentAsync(
        final DeleteDocumentRequest request,
        final com.amazonaws.handlers.AsyncHandler<DeleteDocumentRequest, DeleteDocumentResult> asyncHandler) {
    java.util.concurrent.Callable<DeleteDocumentResult> task =
            new java.util.concurrent.Callable<DeleteDocumentResult>() {
                @Override
                public DeleteDocumentResult call() throws Exception {
                    final DeleteDocumentResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = deleteDocument(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of DescribeAssociation: no completion callback. */
@Override
public java.util.concurrent.Future<DescribeAssociationResult> describeAssociationAsync(
        DescribeAssociationRequest request) {
    return describeAssociationAsync(request, null);
}

/**
 * Submits DescribeAssociation to the client's executor; the optional
 * handler is notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<DescribeAssociationResult> describeAssociationAsync(
        final DescribeAssociationRequest request,
        final com.amazonaws.handlers.AsyncHandler<DescribeAssociationRequest, DescribeAssociationResult> asyncHandler) {
    java.util.concurrent.Callable<DescribeAssociationResult> task =
            new java.util.concurrent.Callable<DescribeAssociationResult>() {
                @Override
                public DescribeAssociationResult call() throws Exception {
                    final DescribeAssociationResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = describeAssociation(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of DescribeDocument: no completion callback. */
@Override
public java.util.concurrent.Future<DescribeDocumentResult> describeDocumentAsync(
        DescribeDocumentRequest request) {
    return describeDocumentAsync(request, null);
}

/**
 * Submits DescribeDocument to the client's executor; the optional handler
 * is notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<DescribeDocumentResult> describeDocumentAsync(
        final DescribeDocumentRequest request,
        final com.amazonaws.handlers.AsyncHandler<DescribeDocumentRequest, DescribeDocumentResult> asyncHandler) {
    java.util.concurrent.Callable<DescribeDocumentResult> task =
            new java.util.concurrent.Callable<DescribeDocumentResult>() {
                @Override
                public DescribeDocumentResult call() throws Exception {
                    final DescribeDocumentResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = describeDocument(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of GetDocument: no completion callback. */
@Override
public java.util.concurrent.Future<GetDocumentResult> getDocumentAsync(
        GetDocumentRequest request) {
    return getDocumentAsync(request, null);
}

/**
 * Submits GetDocument to the client's executor; the optional handler is
 * notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<GetDocumentResult> getDocumentAsync(
        final GetDocumentRequest request,
        final com.amazonaws.handlers.AsyncHandler<GetDocumentRequest, GetDocumentResult> asyncHandler) {
    java.util.concurrent.Callable<GetDocumentResult> task =
            new java.util.concurrent.Callable<GetDocumentResult>() {
                @Override
                public GetDocumentResult call() throws Exception {
                    final GetDocumentResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = getDocument(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
/** Fire-and-forget form of ListAssociations: no completion callback. */
@Override
public java.util.concurrent.Future<ListAssociationsResult> listAssociationsAsync(
        ListAssociationsRequest request) {
    return listAssociationsAsync(request, null);
}

/**
 * Submits ListAssociations to the client's executor; the optional handler
 * is notified of the outcome before the Future completes.
 */
@Override
public java.util.concurrent.Future<ListAssociationsResult> listAssociationsAsync(
        final ListAssociationsRequest request,
        final com.amazonaws.handlers.AsyncHandler<ListAssociationsRequest, ListAssociationsResult> asyncHandler) {
    java.util.concurrent.Callable<ListAssociationsResult> task =
            new java.util.concurrent.Callable<ListAssociationsResult>() {
                @Override
                public ListAssociationsResult call() throws Exception {
                    final ListAssociationsResult result;
                    try {
                        // delegate to the synchronous implementation
                        result = listAssociations(request);
                    } catch (Exception ex) {
                        if (asyncHandler != null) {
                            asyncHandler.onError(ex);
                        }
                        throw ex;
                    }
                    // onSuccess runs outside the try so its own failures are
                    // not routed through onError (matches original behavior)
                    if (asyncHandler != null) {
                        asyncHandler.onSuccess(request, result);
                    }
                    return result;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<ListDocumentsResult> listDocumentsAsync(
        ListDocumentsRequest request) {
    // Convenience overload: delegates to the two-argument form with no
    // AsyncHandler registered.
    return listDocumentsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListDocumentsResult> listDocumentsAsync(
        final ListDocumentsRequest request,
        final com.amazonaws.handlers.AsyncHandler<ListDocumentsRequest, ListDocumentsResult> asyncHandler) {
    // Run the synchronous ListDocuments call on the client's executor,
    // notifying the optional handler of the outcome before the Future settles.
    java.util.concurrent.Callable<ListDocumentsResult> task = () -> {
        final ListDocumentsResult result;
        try {
            result = listDocuments(request);
        } catch (Exception ex) {
            if (asyncHandler != null) {
                asyncHandler.onError(ex);
            }
            throw ex;
        }
        if (asyncHandler != null) {
            asyncHandler.onSuccess(request, result);
        }
        return result;
    };
    return executorService.submit(task);
}
/**
 * Simplified method form for invoking the ListDocuments operation.
 *
 * @see #listDocumentsAsync(ListDocumentsRequest)
 */
@Override
public java.util.concurrent.Future<ListDocumentsResult> listDocumentsAsync() {
    // Invokes ListDocuments with a default (empty) request and no handler.
    return listDocumentsAsync(new ListDocumentsRequest());
}
/**
 * Simplified method form for invoking the ListDocuments operation with an
 * AsyncHandler.
 *
 * @see #listDocumentsAsync(ListDocumentsRequest,
 *      com.amazonaws.handlers.AsyncHandler)
 */
// NOTE(review): unlike every other async overload in this class, this method
// carries no @Override annotation. Confirm whether the async interface
// declares it and, if so, add @Override for consistency.
public java.util.concurrent.Future<ListDocumentsResult> listDocumentsAsync(
        com.amazonaws.handlers.AsyncHandler<ListDocumentsRequest, ListDocumentsResult> asyncHandler) {
    // Invokes ListDocuments with a default (empty) request.
    return listDocumentsAsync(new ListDocumentsRequest(), asyncHandler);
}
@Override
public java.util.concurrent.Future<UpdateAssociationStatusResult> updateAssociationStatusAsync(
        UpdateAssociationStatusRequest request) {
    // Convenience overload: delegates to the two-argument form with no
    // AsyncHandler registered.
    return updateAssociationStatusAsync(request, null);
}
@Override
public java.util.concurrent.Future<UpdateAssociationStatusResult> updateAssociationStatusAsync(
        final UpdateAssociationStatusRequest request,
        final com.amazonaws.handlers.AsyncHandler<UpdateAssociationStatusRequest, UpdateAssociationStatusResult> asyncHandler) {
    // Run the synchronous UpdateAssociationStatus call on the client's
    // executor, notifying the optional handler of the outcome before the
    // Future settles.
    java.util.concurrent.Callable<UpdateAssociationStatusResult> task = () -> {
        final UpdateAssociationStatusResult result;
        try {
            result = updateAssociationStatus(request);
        } catch (Exception ex) {
            if (asyncHandler != null) {
                asyncHandler.onError(ex);
            }
            throw ex;
        }
        if (asyncHandler != null) {
            asyncHandler.onSuccess(request, result);
        }
        return result;
    };
    return executorService.submit(task);
}
/**
 * Shuts down the client, releasing all managed resources. This includes
 * forcibly terminating all pending asynchronous service calls. Clients who
 * wish to give pending asynchronous service calls time to complete should
 * call {@code getExecutorService().shutdown()} followed by
 * {@code getExecutorService().awaitTermination()} prior to calling this
 * method.
 */
@Override
public void shutdown() {
    super.shutdown();
    // Cancel any queued and interrupt any running async tasks immediately.
    executorService.shutdownNow();
}
}
| |
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package com.umeng.analytics.social;
import android.text.TextUtils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
// Referenced classes of package com.umeng.analytics.social:
// b
public abstract class c
{
public c()
{
}
private static String a(InputStream inputstream)
{
BufferedReader bufferedreader;
StringBuilder stringbuilder;
bufferedreader = new BufferedReader(new InputStreamReader(inputstream), 8192);
stringbuilder = new StringBuilder();
_L1:
String s = bufferedreader.readLine();
if (s == null)
{
Exception exception;
IOException ioexception;
IOException ioexception1;
IOException ioexception2;
try
{
inputstream.close();
}
catch (IOException ioexception3)
{
b.b("MobclickAgent", "Caught IOException in convertStreamToString()", ioexception3);
return null;
}
return stringbuilder.toString();
}
stringbuilder.append((new StringBuilder(String.valueOf(s))).append("\n").toString());
goto _L1
ioexception1;
b.b("MobclickAgent", "Caught IOException in convertStreamToString()", ioexception1);
try
{
inputstream.close();
}
// Misplaced declaration of an exception variable
catch (IOException ioexception2)
{
b.b("MobclickAgent", "Caught IOException in convertStreamToString()", ioexception2);
return null;
}
return null;
exception;
try
{
inputstream.close();
}
// Misplaced declaration of an exception variable
catch (IOException ioexception)
{
b.b("MobclickAgent", "Caught IOException in convertStreamToString()", ioexception);
return null;
}
throw exception;
}
protected static String a(String s)
{
int i = (new Random()).nextInt(1000);
String s1;
s1 = System.getProperty("line.separator");
if (s.length() > 1)
{
break MISSING_BLOCK_LABEL_55;
}
b.b("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tInvalid baseUrl.").toString());
return null;
HttpResponse httpresponse;
HttpGet httpget = new HttpGet(s);
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(": GET_URL: ").append(s).toString());
BasicHttpParams basichttpparams = new BasicHttpParams();
HttpConnectionParams.setConnectionTimeout(basichttpparams, 10000);
HttpConnectionParams.setSoTimeout(basichttpparams, 20000);
httpresponse = (new DefaultHttpClient(basichttpparams)).execute(httpget);
if (httpresponse.getStatusLine().getStatusCode() != 200) goto _L2; else goto _L1
_L1:
HttpEntity httpentity = httpresponse.getEntity();
InputStream inputstream;
Object obj;
if (httpentity == null)
{
break MISSING_BLOCK_LABEL_451;
}
Header header;
String s2;
try
{
inputstream = httpentity.getContent();
header = httpresponse.getFirstHeader("Content-Encoding");
}
catch (ClientProtocolException clientprotocolexception)
{
b.c("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tClientProtocolException,Failed to send message.").append(s).toString(), clientprotocolexception);
return null;
}
catch (Exception exception)
{
b.c("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tIOException,Failed to send message.").append(s).toString(), exception);
return null;
}
if (header == null) goto _L4; else goto _L3
_L3:
if (!header.getValue().equalsIgnoreCase("gzip")) goto _L4; else goto _L5
_L5:
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(" Use GZIPInputStream get data....").toString());
obj = new GZIPInputStream(inputstream);
_L6:
s2 = a(((InputStream) (obj)));
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tresponse: ").append(s1).append(s2).toString());
if (s2 != null)
{
return s2;
}
break MISSING_BLOCK_LABEL_451;
_L4:
if (header == null)
{
break MISSING_BLOCK_LABEL_444;
}
if (!header.getValue().equalsIgnoreCase("deflate"))
{
break MISSING_BLOCK_LABEL_444;
}
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(" Use InflaterInputStream get data....").toString());
obj = new InflaterInputStream(inputstream);
goto _L6
_L2:
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tFailed to get message.").append(s).toString());
return null;
obj = inputstream;
goto _L6
return null;
}
protected static String a(String s, String s1)
{
int i;
String s2;
DefaultHttpClient defaulthttpclient;
i = (new Random()).nextInt(1000);
s2 = System.getProperty("line.separator");
BasicHttpParams basichttpparams = new BasicHttpParams();
HttpConnectionParams.setConnectionTimeout(basichttpparams, 10000);
HttpConnectionParams.setSoTimeout(basichttpparams, 20000);
defaulthttpclient = new DefaultHttpClient(basichttpparams);
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(": POST_URL: ").append(s).toString());
HttpResponse httpresponse;
HttpPost httppost = new HttpPost(s);
if (!TextUtils.isEmpty(s1))
{
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(": POST_BODY: ").append(s1).toString());
ArrayList arraylist = new ArrayList(1);
arraylist.add(new BasicNameValuePair("data", s1));
httppost.setEntity(new UrlEncodedFormEntity(arraylist, "UTF-8"));
}
httpresponse = defaulthttpclient.execute(httppost);
if (httpresponse.getStatusLine().getStatusCode() != 200) goto _L2; else goto _L1
_L1:
HttpEntity httpentity = httpresponse.getEntity();
InputStream inputstream;
Object obj;
String s3;
if (httpentity == null)
{
break MISSING_BLOCK_LABEL_419;
}
Header header;
try
{
inputstream = httpentity.getContent();
header = httpresponse.getFirstHeader("Content-Encoding");
}
catch (ClientProtocolException clientprotocolexception)
{
b.c("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tClientProtocolException,Failed to send message.").append(s).toString(), clientprotocolexception);
return null;
}
catch (IOException ioexception)
{
b.c("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tIOException,Failed to send message.").append(s).toString(), ioexception);
return null;
}
if (header == null)
{
break MISSING_BLOCK_LABEL_412;
}
if (!header.getValue().equalsIgnoreCase("deflate"))
{
break MISSING_BLOCK_LABEL_412;
}
obj = new InflaterInputStream(inputstream);
_L3:
s3 = a(((InputStream) (obj)));
b.a("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tresponse: ").append(s2).append(s3).toString());
if (s3 == null)
{
return null;
} else
{
return s3;
}
_L2:
b.c("MobclickAgent", (new StringBuilder(String.valueOf(i))).append(":\tFailed to send message.").append(s).toString());
return null;
obj = inputstream;
goto _L3
return null;
}
}
| |
/*
*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* /
*/
package net.bither.languages;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import net.bither.utils.LocaliserUtils;
import java.awt.*;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.Locale;
import java.util.ResourceBundle;
/**
* <p>Utility to provide the following to Views:</p>
* <ul>
* <li>Access to internationalised text strings</li>
* </ul>
*
* @since 0.0.1
*/
public class Languages {
    /** Base name of the resource bundle that holds the localised UI strings. */
    public static final String BASE_NAME = "viewer";

    /**
     * Utilities have private constructors
     */
    private Languages() {
    }

    /**
     * <p>Provide an array of the available amount separators. A hard space '\u00a0' is needed to
     * ensure that values do not wrap.</p>
     *
     * @param forGrouping true for group separators (comma, point and space),
     *                    false for decimal separators (comma and point only)
     * @return The array
     */
    public static String[] getCurrencySeparators(boolean forGrouping) {
        if (forGrouping) {
            // Groups can be separated by comma, point and space
            return new String[]{
                    Languages.safeText(MessageKey.DECIMAL_COMMA),
                    Languages.safeText(MessageKey.DECIMAL_POINT),
                    Languages.safeText(MessageKey.DECIMAL_SPACE)
            };
        } else {
            // Decimals can be separated by comma and point only
            return new String[]{
                    Languages.safeText(MessageKey.DECIMAL_COMMA),
                    Languages.safeText(MessageKey.DECIMAL_POINT)
            };
        }
    }

    /**
     * @return Current locale from configuration
     */
    public static Locale currentLocale() {
        return LocaliserUtils.getLocale();
    }

    /**
     * @param value The encoding of the locale (e.g. "ll", "ll_rr", "ll_rr_vv")
     * @return A new locale based on the given code
     * @throws IllegalArgumentException if the descriptor has more than three parts
     */
    public static Locale newLocaleFromCode(String value) {
        Preconditions.checkNotNull(value, "'value' must be present");
        String[] parameters = value.split("_");
        Preconditions.checkState(parameters.length > 0, "'value' must not be empty");
        final Locale newLocale;
        switch (parameters.length) {
            case 1:
                newLocale = new Locale(parameters[0]);
                break;
            case 2:
                newLocale = new Locale(parameters[0], parameters[1]);
                break;
            case 3:
                newLocale = new Locale(parameters[0], parameters[1], parameters[2]);
                break;
            default:
                throw new IllegalArgumentException("Unknown locale descriptor: " + value);
        }
        return newLocale;
    }

    /**
     * @param key    The key (treated as a direct format string if not present)
     * @param values An optional collection of value substitutions for {@link java.text.MessageFormat}
     * @return The localised text with any substitutions made, or "" for a null key
     */
    public static String safeText(MessageKey key, Object... values) {
        // Simplifies processing of empty text
        if (key == null) {
            return "";
        }
        ResourceBundle rb = currentResourceBundle();
        final String message;
        if (!rb.containsKey(key.getKey())) {
            // If no key is present then use it direct
            message = key.getKey();
        } else {
            // Must have the key to be here
            message = rb.getString(key.getKey());
        }
        return MessageFormat.format(message, values);
    }

    /**
     * @param key    The key (must be present in the bundle)
     * @param values An optional collection of value substitutions for {@link java.text.MessageFormat}
     * @return The localised text with any substitutions made; a placeholder
     *         "Key '...' is not localised!" message when the key is absent
     */
    public static String safeText(String key, Object... values) {
        ResourceBundle rb = currentResourceBundle();
        final String message;
        if (!rb.containsKey(key)) {
            // If no key is present then use it direct
            message = "Key '" + key + "' is not localised!";
        } else {
            // Must have the key to be here
            message = rb.getString(key);
        }
        return MessageFormat.format(message, values);
    }

    /**
     * @param contents  The contents to join into a localised comma-separated list
     * @param maxLength The maximum length of the result single string
     * @return The localised comma-separated list with ellipsis appended if truncated
     */
    public static String truncatedList(Collection<String> contents, int maxLength) {
        // Fix: checkNotNull was previously called on the literal string
        // "contents" rather than the argument, so a null collection was never
        // rejected by this precondition.
        Preconditions.checkNotNull(contents, "'contents' must be present");
        Preconditions.checkState(maxLength > 0 && maxLength < 4096, "'maxLength' must be [1,4096]");
        String joinedContents = Joiner
                .on(Languages.safeText(MessageKey.LIST_COMMA) + " ")
                .join(contents);
        String ellipsis = Languages.safeText(MessageKey.LIST_ELLIPSIS);
        // Determine the truncation point (if required)
        int maxIndex = Math.min(joinedContents.length() - 1, maxLength - ellipsis.length() - 1);
        if (maxIndex == joinedContents.length() - 1) {
            // No truncation
            return joinedContents;
        } else {
            // Apply truncation (with ellipsis)
            return joinedContents.substring(0, maxIndex) + ellipsis;
        }
    }

    /**
     * <p>Package access only - external consumers should use safeText()</p>
     *
     * @return The resource bundle based on the current locale
     */
    static ResourceBundle currentResourceBundle() {
        return ResourceBundle.getBundle(BASE_NAME, currentLocale());
    }

    /**
     * @return The component orientation based on the current locale
     */
    public static ComponentOrientation currentComponentOrientation() {
        return ComponentOrientation.getOrientation(Languages.currentLocale());
    }

    /**
     * @return True if text is to be placed left to right (standard Western language presentation)
     */
    public static boolean isLeftToRight() {
        return ComponentOrientation.getOrientation(currentLocale()).isLeftToRight();
    }
}
| |
package com.packt.sfjd.ch4;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaDoubleRDD;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.SparkSession;
import org.spark_project.guava.collect.Lists;
import scala.Tuple2;
/**
 * Demonstrates Spark RDD actions (isEmpty, collect, count, countByKey,
 * countByValue, max/min, first, take, takeOrdered, takeSample, top, reduce,
 * fold, foreach, saveAsObjectFile/objectFile) against a small numeric text
 * file. Several results are computed purely to illustrate the API and are
 * intentionally left unused.
 */
public class ActionsExamplesOld implements Serializable{
    // Serial version id; the class implements Serializable because Spark may
    // serialize instances referenced from closures.
    private static final long serialVersionUID = 1L;
    public static void main(String[] args) {
        System.setProperty("hadoop.home.dir", "C:\\Users\\sumit.kumar\\Downloads");
        String logFile = "src/main/resources/numSeries.txt"; // Should be some file on your system
        Logger rootLogger = LogManager.getRootLogger();
        rootLogger.setLevel(Level.WARN);
        /* SparkSession spark = SparkSession
        .builder().master("local")
        .appName("JavaPageRank")
        .config("spark.sql.warehouse.dir", "file:///C:/Users/sumit.kumar/Downloads/bin/warehouse")
        .getOrCreate();
        */
        SparkConf conf = new SparkConf().setMaster("local").setAppName("ApacheSparkForJavaDevelopers");
        // SparkContext context =new SparkContext(conf);
        // RDD<String> textFile = context.textFile("abc", 1);
        JavaSparkContext spark = new JavaSparkContext(conf);
        JavaRDD<String> lines = spark.textFile(logFile);
        //JavaRDD<String> lines = spark.textFile(logFile).toJavaRDD().cache();
        // Parse every line into a double-backed RDD; cached because it is
        // reused by many of the actions below.
        JavaDoubleRDD intMap= lines.mapToDouble(a-> Integer.parseInt(a)).cache();
        // Pair each value with its parity (value % 2) for the *ByKey actions.
        JavaPairRDD<Double,Double> intDivMap= intMap.mapToPair(new PairFunction<Double, Double, Double>() {
            // Required for Spark closure serialization.
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<Double, Double> call(Double t) throws Exception {
                return new Tuple2<Double,Double>(t, t%2);
            }
        });
        // isEmpty
        JavaRDD<Integer> intRDD = spark.parallelize(Arrays.asList(1,2,3));
        boolean isRDDEmpty= intRDD.filter(a-> a.equals(5)).isEmpty();
        System.out.println("The RDD is empty ::"+isRDDEmpty);
        //Collect
        List<String> collectedList= lines.collect();
        //count()
        long countVal=lines.count();
        //CountByKey:
        Map<Double, Long> countByKeyMap= intDivMap.countByKey();
        countByKeyMap.forEach(new BiConsumer<Double, Long>() {
            @Override
            public void accept( Double L, Long U ) {
                System.out.println("The key val is 1 ::"+L);
                System.out.println("The Long is 1 ::"+U);
            }
        });
        Map<Tuple2<Double, Double>, Long> countByValMap= intDivMap.countByValue();
        countByValMap.forEach(new BiConsumer<Tuple2<Double, Double>, Long>() {
            @Override
            public void accept( Tuple2<Double, Double> L, Long U ) {
                System.out.println("The touple val is 1 ::"+L._1());
                System.out.println("The touple val is 2 ::"+L._2());
                System.out.println("The Long is 1 ::"+U);
            }
        });
        //countByValue()
        Map<String, Long> countByVal=lines.countByValue();
        // max
        intMap.max();
        /* Comparator<Double> comp =new Comparator<Double>() {
            @Override
            public int compare(Double a, Double b) {
                // TODO Auto-generated method stub
                return a.compareTo(b);
            }
        };*/
        // max with an explicit comparator; doubleComparator is Serializable
        // so Spark can ship it to executors.
        intMap.max(new doubleComparator());
        /* intMap.max(new Comparator<Double>() {
            @Override
            public int compare(Double a, Double b) {
                // TODO Auto-generated method stub
                return a.compareTo(b);
            }
        });
        */
        intMap.max(Comparator.naturalOrder());
        intMap.max(Comparator.reverseOrder());
        //////check this
        // intMap.max(Comparator.comparing(a->a));
        //min
        intMap.min();
        intMap.min(Comparator.naturalOrder());
        // First:
        System.out.println("The first element of RDD is"+ intMap.first());
        //take()
        List<String> takeTwo=lines.take(2);
        takeTwo.forEach(x->System.out.println("The take elements are :: "+x));
        // TakeOrdered:
        List<String> takeOrderedTwo= lines.takeOrdered(2);
        takeOrderedTwo.forEach(x->System.out.println("The takeOrdered elements are :: "+x));
        // takeOrdered(int num, java.util.Comparator<T> comp)
        List<String> takeCustomOrderedTwo= lines.takeOrdered(2, Comparator.reverseOrder());
        takeCustomOrderedTwo.forEach(x->System.out.println("The takeOrdered elements with custom Comparator are :: "+x));
        //TakeSample:
        intRDD.takeSample(true, 3).forEach(x-> System.out.println("The take sample vals for true are :"+x));
        intRDD.takeSample(false, 3).forEach(x-> System.out.println("The take sample vals for false are :"+x));
        intRDD.takeSample(true, 3,9).forEach(x-> System.out.println("The take sample vals with seed are :"+x));
        //top()
        List<String> topFive=lines.top(5);
        topFive.forEach(x->System.out.println("The value of top are ::"+x));
        // top(int num, java.util.Comparator<T> comp)
        // lines.top(3, Comparator.comparing(x->Integer.parseInt(x)));
        //reduce
        // NOTE(review): reduceSumFunc is declared but never used; kept only
        // to show the Function2 form of a reducer.
        Function2<String, String, Integer> reduceSumFunc = (a, b) -> (Integer.parseInt(a) + Integer.parseInt(b));
        Double sumInt=intMap.reduce((a,b)->a+b);
        /* Integer sumInt=lines.reduce(new Function2<String,String,Integer>(
        ) {
            @Override
            public Integer call(String a, String b) throws Exception {
                // TODO Auto-generated method stub
                return Integer.parseInt(a) + Integer.parseInt(b);
            }
        });*/
        //fold()
        Double foldInt=intMap.fold((double) 0, (a,b)-> a+b);
        //
        //Aggeregate:
        // ForEach:
        lines.foreach(s->System.out.println(s));
        // saveAsTextFile
        // saveAsObjectFile(String path)
        JavaRDD<String> rdd = spark.parallelize(Lists.newArrayList("1", "2"));
        rdd.mapToPair(p -> new Tuple2<>(p, p)).saveAsObjectFile("objFileDir");
        JavaPairRDD<String, String> pairRDD
            = JavaPairRDD.fromJavaRDD(spark.objectFile("objFileDir"));
        pairRDD.collect().forEach(System.out::println);
    }
    // Serializable Double comparator: comparators shipped to Spark executors
    // must be Serializable, which java.util.Comparator alone is not.
    static class doubleComparator implements Comparator<Double>,Serializable{
        // Required for Spark closure serialization.
        private static final long serialVersionUID = 1L;
        @Override
        public int compare(Double a, Double b) {
            // TODO Auto-generated method stub
            return a.compareTo(b);
        }
    }
}
| |
/*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.internal.util.annotationfactory;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.hibernate.validator.internal.util.logging.Log;
import org.hibernate.validator.internal.util.logging.LoggerFactory;
import org.hibernate.validator.internal.util.privilegedactions.GetDeclaredMethod;
import org.hibernate.validator.internal.util.privilegedactions.GetDeclaredMethods;
import static org.hibernate.validator.internal.util.CollectionHelper.newHashMap;
/**
* A concrete implementation of {@code Annotation} that pretends it is a
* "real" source code annotation. It's also an {@code InvocationHandler}.
* <p>
* When you create an {@code AnnotationProxy}, you must initialize it
* with an {@code AnnotationDescriptor}.
* The adapter checks that the provided elements are the same elements defined
* in the annotation interface. However, it does <i>not</i> check that their
* values are the right type. If you omit an element, the adapter will use the
* default value for that element from the annotation interface, if it exists.
* If no default exists, it will throw an exception.
* </p>
*
* @author Paolo Perrotta
* @author Davide Marchignoli
* @author Gunnar Morling
* @see java.lang.annotation.Annotation
*/
class AnnotationProxy implements Annotation, InvocationHandler, Serializable {

	private static final long serialVersionUID = 6907601010599429454L;
	private static final Log log = LoggerFactory.make();

	private final Class<? extends Annotation> annotationType;
	// Immutable element-name -> value map; drives invoke(), equals(), hashCode().
	private final Map<String, Object> values;
	// equals/hashCode contract of Annotation is fixed at construction time.
	private final int hashCode;

	AnnotationProxy(AnnotationDescriptor<?> descriptor) {
		this.annotationType = descriptor.type();
		values = Collections.unmodifiableMap( getAnnotationValues( descriptor ) );
		this.hashCode = calculateHashCode();
	}

	/**
	 * Answers annotation element accessors from the values map; any other
	 * method (equals, hashCode, toString, annotationType) is reflected back
	 * onto this handler instance.
	 */
	@Override
	public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
		if ( values.containsKey( method.getName() ) ) {
			return values.get( method.getName() );
		}
		return method.invoke( this, args );
	}

	@Override
	public Class<? extends Annotation> annotationType() {
		return annotationType;
	}

	/**
	 * Performs an equality check as described in {@link Annotation#equals(Object)}.
	 *
	 * @param obj The object to compare
	 *
	 * @return Whether the given object is equal to this annotation proxy or not
	 *
	 * @see Annotation#equals(Object)
	 */
	@Override
	public boolean equals(Object obj) {
		if ( this == obj ) {
			return true;
		}
		if ( obj == null ) {
			return false;
		}
		if ( !annotationType.isInstance( obj ) ) {
			return false;
		}
		Annotation other = annotationType.cast( obj );

		//compare annotation member values
		for ( Entry<String, Object> member : values.entrySet() ) {
			Object value = member.getValue();
			Object otherValue = getAnnotationMemberValue( other, member.getKey() );

			if ( !areEqual( value, otherValue ) ) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Calculates the hash code of this annotation proxy as described in
	 * {@link Annotation#hashCode()}.
	 *
	 * @return The hash code of this proxy.
	 *
	 * @see Annotation#hashCode()
	 */
	@Override
	public int hashCode() {
		return hashCode;
	}

	@Override
	public String toString() {
		StringBuilder result = new StringBuilder();
		result.append( '@' ).append( annotationType.getName() ).append( '(' );
		for ( String s : getRegisteredMethodsInAlphabeticalOrder() ) {
			result.append( s ).append( '=' ).append( values.get( s ) ).append( ", " );
		}
		// remove last separator:
		if ( values.size() > 0 ) {
			result.delete( result.length() - 2, result.length() );
			result.append( ")" );
		}
		else {
			result.delete( result.length() - 1, result.length() );
		}

		return result.toString();
	}

	/**
	 * Builds the element map: values from the descriptor win, annotation
	 * defaults fill the gaps, and a missing value with no default is an error.
	 * Also verifies the descriptor contains no elements unknown to the
	 * annotation type.
	 */
	private Map<String, Object> getAnnotationValues(AnnotationDescriptor<?> descriptor) {
		Map<String, Object> result = newHashMap();
		int processedValuesFromDescriptor = 0;
		final Method[] declaredMethods = run( GetDeclaredMethods.action( annotationType ) );
		for ( Method m : declaredMethods ) {
			if ( descriptor.containsElement( m.getName() ) ) {
				result.put( m.getName(), descriptor.valueOf( m.getName() ) );
				processedValuesFromDescriptor++;
			}
			else if ( m.getDefaultValue() != null ) {
				result.put( m.getName(), m.getDefaultValue() );
			}
			else {
				throw log.getNoValueProvidedForAnnotationParameterException(
						m.getName(),
						annotationType
				);
			}
		}
		if ( processedValuesFromDescriptor != descriptor.numberOfElements() ) {
			// Fix: work on a copy of the key set. Calling removeAll() on the
			// raw keySet() view would mutate the descriptor's underlying
			// element map as a side effect.
			Set<String> unknownParameters = new TreeSet<String>( descriptor.getElements().keySet() );
			unknownParameters.removeAll( result.keySet() );
			throw log.getTryingToInstantiateAnnotationWithUnknownParametersException(
					annotationType,
					unknownParameters
			);
		}
		return result;
	}

	/**
	 * Computes the Annotation#hashCode() sum: for each member,
	 * {@code 127 * name.hashCode() ^ valueHashCode}, with array values hashed
	 * via the matching Arrays.hashCode overload.
	 */
	private int calculateHashCode() {
		int hashCode = 0;
		for ( Entry<String, Object> member : values.entrySet() ) {
			Object value = member.getValue();

			int nameHashCode = member.getKey().hashCode();

			int valueHashCode =
					!value.getClass().isArray() ? value.hashCode() :
							value.getClass() == boolean[].class ? Arrays.hashCode( (boolean[]) value ) :
									value.getClass() == byte[].class ? Arrays.hashCode( (byte[]) value ) :
											value.getClass() == char[].class ? Arrays.hashCode( (char[]) value ) :
													value.getClass() == double[].class ? Arrays.hashCode( (double[]) value ) :
															value.getClass() == float[].class ? Arrays.hashCode( (float[]) value ) :
																	value.getClass() == int[].class ? Arrays.hashCode( (int[]) value ) :
																			value.getClass() == long[].class ? Arrays.hashCode(
																					(long[]) value
																			) :
																					value.getClass() == short[].class ? Arrays
																							.hashCode( (short[]) value ) :
																							Arrays.hashCode( (Object[]) value );

			hashCode += 127 * nameHashCode ^ valueHashCode;
		}
		return hashCode;
	}

	// Alphabetical ordering gives toString() a deterministic element order.
	private SortedSet<String> getRegisteredMethodsInAlphabeticalOrder() {
		SortedSet<String> result = new TreeSet<String>();
		result.addAll( values.keySet() );
		return result;
	}

	// Value comparison per the Annotation#equals contract: arrays are compared
	// element-wise with the matching Arrays.equals overload.
	private boolean areEqual(Object o1, Object o2) {
		return
				!o1.getClass().isArray() ? o1.equals( o2 ) :
						o1.getClass() == boolean[].class ? Arrays.equals( (boolean[]) o1, (boolean[]) o2 ) :
								o1.getClass() == byte[].class ? Arrays.equals( (byte[]) o1, (byte[]) o2 ) :
										o1.getClass() == char[].class ? Arrays.equals( (char[]) o1, (char[]) o2 ) :
												o1.getClass() == double[].class ? Arrays.equals(
														(double[]) o1,
														(double[]) o2
												) :
														o1.getClass() == float[].class ? Arrays.equals(
																(float[]) o1,
																(float[]) o2
														) :
																o1.getClass() == int[].class ? Arrays.equals(
																		(int[]) o1,
																		(int[]) o2
																) :
																		o1.getClass() == long[].class ? Arrays.equals(
																				(long[]) o1,
																				(long[]) o2
																		) :
																				o1.getClass() == short[].class ? Arrays.equals(
																						(short[]) o1,
																						(short[]) o2
																				) :
																						Arrays.equals(
																								(Object[]) o1,
																								(Object[]) o2
																						);
	}

	// Reads one element of another annotation instance reflectively, so equals()
	// works against both real annotations and other proxies.
	private Object getAnnotationMemberValue(Annotation annotation, String name) {
		try {
			return run( GetDeclaredMethod.action( annotation.annotationType(), name ) ).invoke( annotation );
		}
		catch (IllegalAccessException e) {
			throw log.getUnableToRetrieveAnnotationParameterValueException( e );
		}
		catch (IllegalArgumentException e) {
			throw log.getUnableToRetrieveAnnotationParameterValueException( e );
		}
		catch (InvocationTargetException e) {
			throw log.getUnableToRetrieveAnnotationParameterValueException( e );
		}
	}

	/**
	 * Runs the given privileged action, using a privileged block if required.
	 * <p>
	 * <b>NOTE:</b> This must never be changed into a publicly available method to avoid execution of arbitrary
	 * privileged actions within HV's protection domain.
	 */
	private <T> T run(PrivilegedAction<T> action) {
		return System.getSecurityManager() != null ? AccessController.doPrivileged( action ) : action.run();
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.jdbc.test;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.calcite.linq4j.Ord;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.logical.LogicalPlan;
import org.apache.drill.common.logical.data.LogicalOperator;
import org.apache.drill.common.util.Hook;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.planner.PhysicalPlanReaderTestFactory;
import org.apache.drill.jdbc.ConnectionFactory;
import org.apache.drill.jdbc.ConnectionInfo;
import org.junit.Assert;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import com.google.common.collect.Iterables;
/**
* Fluent interface for writing JDBC and query-planning tests.
*/
public class JdbcAssert {
private static ConnectionFactory factory = null;
// Installs the ConnectionFactory used by all subsequently created
// ModelAndSchema fixtures.
public static void setFactory(ConnectionFactory factory) {
    JdbcAssert.factory = factory;
}
/**
 * Returns the default bag of properties passed to the JDBC connection:
 * marks the connection as belonging to the JDBC unit tests (which lets
 * DrillConnectionImpl point dfs_test.tmp at a per-JVM directory) and turns
 * off the embedded web server.
 */
public static Properties getDefaultProperties() {
    final Properties defaults = new Properties();
    defaults.setProperty(ExecConstants.HTTP_ENABLE, "false");
    defaults.setProperty("drillJDBCUnitTests", "true");
    return defaults;
}
public static ModelAndSchema withModel(final String model, final String schema) {
final Properties info = getDefaultProperties();
info.setProperty("schema", schema);
info.setProperty("model", "inline:" + model);
return new ModelAndSchema(info, factory);
}
public static ModelAndSchema withFull(final String schema) {
final Properties info = getDefaultProperties();
info.setProperty("schema", schema);
return new ModelAndSchema(info, factory);
}
public static ModelAndSchema withNoDefaultSchema() {
return new ModelAndSchema(getDefaultProperties(), factory);
}
static String toString(ResultSet resultSet, int expectedRecordCount) throws SQLException {
final StringBuilder buf = new StringBuilder();
while (resultSet.next()) {
final ResultSetMetaData metaData = resultSet.getMetaData();
final int n = metaData.getColumnCount();
String sep = "";
for (int i = 1; i <= n; i++) {
buf.append(sep)
.append(metaData.getColumnLabel(i))
.append("=")
.append(resultSet.getObject(i));
sep = "; ";
}
buf.append("\n");
}
return buf.toString();
}
static String toString(ResultSet resultSet) throws SQLException {
StringBuilder buf = new StringBuilder();
final List<Ord<String>> columns = columnLabels(resultSet);
while (resultSet.next()) {
for (Ord<String> column : columns) {
buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
}
buf.append("\n");
}
return buf.toString();
}
static Set<String> toStringSet(ResultSet resultSet) throws SQLException {
Builder<String> builder = ImmutableSet.builder();
final List<Ord<String>> columns = columnLabels(resultSet);
while (resultSet.next()) {
StringBuilder buf = new StringBuilder();
for (Ord<String> column : columns) {
buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
}
builder.add(buf.toString());
buf.setLength(0);
}
return builder.build();
}
static List<String> toStrings(ResultSet resultSet) throws SQLException {
final List<String> list = new ArrayList<>();
StringBuilder buf = new StringBuilder();
final List<Ord<String>> columns = columnLabels(resultSet);
while (resultSet.next()) {
buf.setLength(0);
for (Ord<String> column : columns) {
buf.append(column.i == 1 ? "" : "; ").append(column.e).append("=").append(resultSet.getObject(column.i));
}
list.add(buf.toString());
}
return list;
}
private static List<Ord<String>> columnLabels(ResultSet resultSet) throws SQLException {
int n = resultSet.getMetaData().getColumnCount();
List<Ord<String>> columns = new ArrayList<>();
for (int i = 1; i <= n; i++) {
columns.add(Ord.of(i, resultSet.getMetaData().getColumnLabel(i)));
}
return columns;
}
public static class ModelAndSchema {
private final Properties info;
private final ConnectionFactoryAdapter adapter;
public ModelAndSchema(final Properties info, final ConnectionFactory factory) {
this.info = info;
this.adapter = new ConnectionFactoryAdapter() {
@Override
public Connection createConnection() throws Exception {
return factory.getConnection(new ConnectionInfo("jdbc:drill:zk=local", ModelAndSchema.this.info));
}
};
}
public TestDataConnection sql(String sql) {
return new TestDataConnection(adapter, sql);
}
public <T> T withConnection(Function<Connection, T> function) throws Exception {
Connection connection = null;
try {
connection = adapter.createConnection();
return function.apply(connection);
} finally {
if (connection != null) {
connection.close();
}
}
}
}
public static class TestDataConnection {
private final ConnectionFactoryAdapter adapter;
private final String sql;
TestDataConnection(ConnectionFactoryAdapter adapter, String sql) {
this.adapter = adapter;
this.sql = sql;
}
/**
* Checks that the current SQL statement returns the expected result.
*/
public TestDataConnection returns(String expected) throws Exception {
Connection connection = null;
Statement statement = null;
try {
connection = adapter.createConnection();
statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql);
expected = expected.trim();
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
if (!expected.equals(result)) {
Assert.fail(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected));
}
return this;
} finally {
if (statement != null) {
statement.close();
}
if (connection != null) {
connection.close();
}
}
}
public TestDataConnection returnsSet(Set<String> expected) throws Exception {
Connection connection = null;
Statement statement = null;
try {
connection = adapter.createConnection();
statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql);
Set<String> result = JdbcAssert.toStringSet(resultSet);
resultSet.close();
if (!expected.equals(result)) {
Assert.fail(String.format("Generated set:\n%s\ndoes not match:\n%s", result, expected));
}
return this;
} finally {
if (statement != null) {
statement.close();
}
if (connection != null) {
connection.close();
}
}
}
/**
* Checks that the current SQL statement returns the expected result lines. Lines are compared unordered; the test
* succeeds if the query returns these lines in any order.
*/
public TestDataConnection returnsUnordered(String... expecteds) throws Exception {
Connection connection = null;
Statement statement = null;
try {
connection = adapter.createConnection();
statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql);
Assert.assertEquals(unsortedList(Arrays.asList(expecteds)), unsortedList(JdbcAssert.toStrings(resultSet)));
resultSet.close();
return this;
} finally {
if (statement != null) {
statement.close();
}
if (connection != null) {
connection.close();
}
}
}
public TestDataConnection displayResults(int recordCount) throws Exception {
// record count check is done in toString method
Connection connection = null;
Statement statement = null;
try {
connection = adapter.createConnection();
statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql);
System.out.println(JdbcAssert.toString(resultSet, recordCount));
resultSet.close();
return this;
} finally {
if (statement != null) {
statement.close();
}
if (connection != null) {
connection.close();
}
}
}
private SortedSet<String> unsortedList(List<String> strings) {
final SortedSet<String> set = new TreeSet<>();
for (String string : strings) {
set.add(string + "\n");
}
return set;
}
public LogicalPlan logicalPlan() {
final String[] plan0 = {null};
Connection connection = null;
Statement statement = null;
final Hook.Closeable x = Hook.LOGICAL_PLAN.add(new Function<String, Void>() {
@Override
public Void apply(String o) {
plan0[0] = o;
return null;
}
});
try {
connection = adapter.createConnection();
statement = connection.prepareStatement(sql);
statement.close();
final String plan = plan0[0].trim();
return LogicalPlan.parse(PhysicalPlanReaderTestFactory.defaultLogicalPlanPersistence(DrillConfig.create()), plan);
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
if (statement != null) {
try {
statement.close();
} catch (SQLException e) {
// ignore
}
}
if (connection != null) {
try {
connection.close();
} catch (SQLException e) {
// ignore
}
}
x.close();
}
}
public <T extends LogicalOperator> T planContains(final Class<T> operatorClazz) {
return (T) Iterables.find(logicalPlan().getSortedOperators(), new Predicate<LogicalOperator>() {
@Override
public boolean apply(LogicalOperator input) {
return input.getClass().equals(operatorClazz);
}
});
}
}
private static interface ConnectionFactoryAdapter {
Connection createConnection() throws Exception;
}
}
// End JdbcAssert.java
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.MockStreamsMetrics;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.ValueAndTimestamp;
import org.apache.kafka.test.KeyValueIteratorStub;
import org.easymock.EasyMockRunner;
import org.easymock.Mock;
import org.easymock.MockType;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.aryEq;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests for {@code MeteredTimestampedKeyValueStore}: every store operation must be delegated
 * to the wrapped inner store, and the matching "-rate" metric (group "stream-scope-metrics",
 * tagged with client/task/scope ids) must record a positive rate.
 */
@RunWith(EasyMockRunner.class)
public class MeteredTimestampedKeyValueStoreTest {
  private final TaskId taskId = new TaskId(0, 0);
  // Tags the metered wrapper attaches to each per-store metric it registers.
  private final Map<String, String> tags = mkMap(
      mkEntry("client-id", "test"),
      mkEntry("task-id", taskId.toString()),
      mkEntry("scope-id", "metered")
  );
  @Mock(type = MockType.NICE)
  private KeyValueStore<Bytes, byte[]> inner;
  @Mock(type = MockType.NICE)
  private ProcessorContext context;
  private MeteredTimestampedKeyValueStore<String, String> metered;
  private final String key = "key";
  private final Bytes keyBytes = Bytes.wrap(key.getBytes());
  private final ValueAndTimestamp<String> valueAndTimestamp = ValueAndTimestamp.make("value", 97L);
  // Serialized form of valueAndTimestamp: the first 8 bytes encode the timestamp
  // (97 is the ASCII code of 'a'), followed by the raw value bytes.
  private final byte[] valueAndTimestampBytes = "\0\0\0\0\0\0\0avalue".getBytes();
  private final KeyValue<Bytes, byte[]> byteKeyValueTimestampPair = KeyValue.pair(keyBytes, valueAndTimestampBytes);
  private final Metrics metrics = new Metrics();

  @Before
  public void before() {
    metered = new MeteredTimestampedKeyValueStore<>(
        inner,
        "scope",
        new MockTime(),
        Serdes.String(),
        new ValueAndTimestampSerde<>(Serdes.String())
    );
    // DEBUG recording level is required for the per-operation sensors to record.
    metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
    expect(context.metrics()).andReturn(new MockStreamsMetrics(metrics));
    expect(context.taskId()).andReturn(taskId);
    expect(inner.name()).andReturn("metered").anyTimes();
  }

  /** Switches the mocks to replay mode and initializes the store under test. */
  private void init() {
    replay(inner, context);
    metered.init(context, metered);
  }

  @Test
  public void testMetrics() {
    init();
    final JmxReporter reporter = new JmxReporter("kafka.streams");
    metrics.addReporter(reporter);
    // Both the per-store and the rolled-up "all" MBeans must be registered.
    assertTrue(reporter.containsMbean(String.format("kafka.streams:type=stream-%s-metrics,client-id=%s,task-id=%s,%s-id=%s",
        "scope", "test", taskId.toString(), "scope", "metered")));
    assertTrue(reporter.containsMbean(String.format("kafka.streams:type=stream-%s-metrics,client-id=%s,task-id=%s,%s-id=%s",
        "scope", "test", taskId.toString(), "scope", "all")));
  }

  @Test
  public void shouldWriteBytesToInnerStoreAndRecordPutMetric() {
    inner.put(eq(keyBytes), aryEq(valueAndTimestampBytes));
    expectLastCall();
    init();
    metered.put(key, valueAndTimestamp);
    final KafkaMetric metric = metric("put-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  @Test
  public void shouldGetBytesFromInnerStoreAndReturnGetMetric() {
    expect(inner.get(keyBytes)).andReturn(valueAndTimestampBytes);
    init();
    assertThat(metered.get(key), equalTo(valueAndTimestamp));
    final KafkaMetric metric = metric("get-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  @Test
  public void shouldPutIfAbsentAndRecordPutIfAbsentMetric() {
    expect(inner.putIfAbsent(eq(keyBytes), aryEq(valueAndTimestampBytes))).andReturn(null);
    init();
    metered.putIfAbsent(key, valueAndTimestamp);
    final KafkaMetric metric = metric("put-if-absent-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  /** Looks up a metric by name in the shared group/tag namespace used by all tests here. */
  private KafkaMetric metric(final String name) {
    return this.metrics.metric(new MetricName(name, "stream-scope-metrics", "", this.tags));
  }

  @SuppressWarnings("unchecked")
  @Test
  public void shouldPutAllToInnerStoreAndRecordPutAllMetric() {
    inner.putAll(anyObject(List.class));
    expectLastCall();
    init();
    metered.putAll(Collections.singletonList(KeyValue.pair(key, valueAndTimestamp)));
    final KafkaMetric metric = metric("put-all-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  @Test
  public void shouldDeleteFromInnerStoreAndRecordDeleteMetric() {
    expect(inner.delete(keyBytes)).andReturn(valueAndTimestampBytes);
    init();
    metered.delete(key);
    final KafkaMetric metric = metric("delete-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  @Test
  public void shouldGetRangeFromInnerStoreAndRecordRangeMetric() {
    expect(inner.range(keyBytes, keyBytes)).andReturn(
        new KeyValueIteratorStub<>(Collections.singletonList(byteKeyValueTimestampPair).iterator()));
    init();
    final KeyValueIterator<String, ValueAndTimestamp<String>> iterator = metered.range(key, key);
    assertThat(iterator.next().value, equalTo(valueAndTimestamp));
    assertFalse(iterator.hasNext());
    iterator.close();
    final KafkaMetric metric = metric("range-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  @Test
  public void shouldGetAllFromInnerStoreAndRecordAllMetric() {
    expect(inner.all())
        .andReturn(new KeyValueIteratorStub<>(Collections.singletonList(byteKeyValueTimestampPair).iterator()));
    init();
    final KeyValueIterator<String, ValueAndTimestamp<String>> iterator = metered.all();
    assertThat(iterator.next().value, equalTo(valueAndTimestamp));
    assertFalse(iterator.hasNext());
    iterator.close();
    // Consistency: use the shared metric(String) helper — it builds the exact same
    // MetricName ("stream-scope-metrics" group + this.tags) the inline construction did.
    final KafkaMetric metric = metric("all-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  @Test
  public void shouldFlushInnerWhenFlushTimeRecords() {
    inner.flush();
    expectLastCall().once();
    init();
    metered.flush();
    final KafkaMetric metric = metric("flush-rate");
    assertTrue((Double) metric.metricValue() > 0);
    verify(inner);
  }

  // Combines the store and caching interfaces so one mock can serve as a caching store.
  private interface CachedKeyValueStore extends KeyValueStore<Bytes, byte[]>, CachedStateStore<byte[], byte[]> { }

  @SuppressWarnings("unchecked")
  @Test
  public void shouldSetFlushListenerOnWrappedCachingStore() {
    final CachedKeyValueStore cachedKeyValueStore = mock(CachedKeyValueStore.class);
    expect(cachedKeyValueStore.setFlushListener(anyObject(CacheFlushListener.class), eq(false))).andReturn(true);
    replay(cachedKeyValueStore);
    metered = new MeteredTimestampedKeyValueStore<>(
        cachedKeyValueStore,
        "scope",
        new MockTime(),
        Serdes.String(),
        new ValueAndTimestampSerde<>(Serdes.String()));
    assertTrue(metered.setFlushListener(null, false));
    verify(cachedKeyValueStore);
  }

  @Test
  public void shouldNotSetFlushListenerOnWrappedNoneCachingStore() {
    // The plain (non-caching) inner store cannot accept a flush listener.
    assertFalse(metered.setFlushListener(null, false));
  }

  @Test
  @SuppressWarnings("unchecked")
  public void shouldNotThrowExceptionIfSerdesCorrectlySetFromProcessorContext() {
    expect(context.keySerde()).andStubReturn((Serde) Serdes.String());
    expect(context.valueSerde()).andStubReturn((Serde) Serdes.Long());
    // Null serdes in the constructor force the store to fall back to the context's serdes.
    final MeteredTimestampedKeyValueStore<String, Long> store = new MeteredTimestampedKeyValueStore<>(
        inner,
        "scope",
        new MockTime(),
        null,
        null
    );
    replay(inner, context);
    store.init(context, inner);
    try {
      store.put("key", ValueAndTimestamp.make(42L, 60000));
    } catch (final StreamsException exception) {
      if (exception.getCause() instanceof ClassCastException) {
        fail("Serdes are not correctly set from processor context.");
      }
      throw exception;
    }
  }

  @Test
  @SuppressWarnings("unchecked")
  public void shouldNotThrowExceptionIfSerdesCorrectlySetFromConstructorParameters() {
    expect(context.keySerde()).andStubReturn((Serde) Serdes.String());
    expect(context.valueSerde()).andStubReturn((Serde) Serdes.Long());
    // Explicit constructor serdes must take precedence over the (String/Long) context serdes.
    final MeteredTimestampedKeyValueStore<String, Long> store = new MeteredTimestampedKeyValueStore<>(
        inner,
        "scope",
        new MockTime(),
        Serdes.String(),
        new ValueAndTimestampSerde<>(Serdes.Long())
    );
    replay(inner, context);
    store.init(context, inner);
    try {
      store.put("key", ValueAndTimestamp.make(42L, 60000));
    } catch (final StreamsException exception) {
      if (exception.getCause() instanceof ClassCastException) {
        fail("Serdes are not correctly set from constructor parameters.");
      }
      throw exception;
    }
  }
}
| |
package com.company.professor;
import java.util.List;
import java.util.ArrayList;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteStatement;
import de.greenrobot.dao.AbstractDao;
import de.greenrobot.dao.Property;
import de.greenrobot.dao.internal.SqlUtils;
import de.greenrobot.dao.internal.DaoConfig;
import com.company.professor.BerryFlavors;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
 * DAO for table "berry_flavors".
 *
 * Composite-looking table (berry_id + contest_type_id unique index) keyed by berry_id,
 * with to-one relations to the "berries" and "contest_types" tables.
 */
public class BerryFlavorsDao extends AbstractDao<BerryFlavors, Long> {
  public static final String TABLENAME = "berry_flavors";
  /**
   * Properties of entity BerryFlavors.<br/>
   * Can be used for QueryBuilder and for referencing column names.
   */
  public static class Properties {
    public final static Property BerryId = new Property(0, long.class, "BerryId", true, "berry_id");
    public final static Property ContestTypeId = new Property(1, long.class, "ContestTypeId", false, "contest_type_id");
    public final static Property Flavor = new Property(2, long.class, "Flavor", false, "flavor");
  };
  // Session used to resolve the to-one relations in deep loads; null when constructed
  // without a session (deep queries then cannot be used).
  private DaoSession daoSession;
  public BerryFlavorsDao(DaoConfig config) {
    super(config);
  }
  public BerryFlavorsDao(DaoConfig config, DaoSession daoSession) {
    super(config, daoSession);
    this.daoSession = daoSession;
  }
  /** Creates the underlying database table. */
  public static void createTable(SQLiteDatabase db, boolean ifNotExists) {
    String constraint = ifNotExists? "IF NOT EXISTS ": "";
    db.execSQL("CREATE TABLE " + constraint + "\"berry_flavors\" (" + //
      "\"berry_id\" INTEGER PRIMARY KEY NOT NULL ," + // 0: BerryId
      "\"contest_type_id\" INTEGER NOT NULL ," + // 1: ContestTypeId
      "\"flavor\" INTEGER NOT NULL );"); // 2: Flavor
    // Add Indexes
    db.execSQL("CREATE UNIQUE INDEX " + constraint + "IDX_berry_flavors_berry_id_contest_type_id ON berry_flavors" +
      " (\"berry_id\",\"contest_type_id\");");
  }
  /** Drops the underlying database table. */
  public static void dropTable(SQLiteDatabase db, boolean ifExists) {
    String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"berry_flavors\"";
    db.execSQL(sql);
  }
  /** @inheritdoc */
  @Override
  protected void bindValues(SQLiteStatement stmt, BerryFlavors entity) {
    stmt.clearBindings();
    // Bind indexes are 1-based and follow the column order declared in createTable().
    stmt.bindLong(1, entity.getBerryId());
    stmt.bindLong(2, entity.getContestTypeId());
    stmt.bindLong(3, entity.getFlavor());
  }
  @Override
  protected void attachEntity(BerryFlavors entity) {
    super.attachEntity(entity);
    // Give the entity access to the session so its relation getters can lazily load.
    entity.__setDaoSession(daoSession);
  }
  /** @inheritdoc */
  @Override
  public Long readKey(Cursor cursor, int offset) {
    return cursor.getLong(offset + 0);
  }
  /** @inheritdoc */
  @Override
  public BerryFlavors readEntity(Cursor cursor, int offset) {
    BerryFlavors entity = new BerryFlavors( //
      cursor.getLong(offset + 0), // BerryId
      cursor.getLong(offset + 1), // ContestTypeId
      cursor.getLong(offset + 2) // Flavor
    );
    return entity;
  }
  /** @inheritdoc */
  @Override
  public void readEntity(Cursor cursor, BerryFlavors entity, int offset) {
    entity.setBerryId(cursor.getLong(offset + 0));
    entity.setContestTypeId(cursor.getLong(offset + 1));
    entity.setFlavor(cursor.getLong(offset + 2));
  }
  /** @inheritdoc */
  @Override
  protected Long updateKeyAfterInsert(BerryFlavors entity, long rowId) {
    // berry_id is the INTEGER PRIMARY KEY, so SQLite's rowId is the entity key.
    entity.setBerryId(rowId);
    return rowId;
  }
  /** @inheritdoc */
  @Override
  public Long getKey(BerryFlavors entity) {
    if(entity != null) {
      return entity.getBerryId();
    } else {
      return null;
    }
  }
  /** @inheritdoc */
  @Override
  protected boolean isEntityUpdateable() {
    return true;
  }
  // Lazily built (and then cached) SELECT with LEFT JOINs for deep loads.
  private String selectDeep;
  /**
   * Builds the deep SELECT: this table aliased T, joined to berries (T0) and
   * contest_types (T1) on their foreign keys. Ends with a trailing space so a
   * WHERE clause can be appended directly.
   */
  protected String getSelectDeep() {
    if (selectDeep == null) {
      StringBuilder builder = new StringBuilder("SELECT ");
      SqlUtils.appendColumns(builder, "T", getAllColumns());
      builder.append(',');
      SqlUtils.appendColumns(builder, "T0", daoSession.getBerriesDao().getAllColumns());
      builder.append(',');
      SqlUtils.appendColumns(builder, "T1", daoSession.getContestTypesDao().getAllColumns());
      builder.append(" FROM berry_flavors T");
      builder.append(" LEFT JOIN berries T0 ON T.\"berry_id\"=T0.\"id\"");
      builder.append(" LEFT JOIN contest_types T1 ON T.\"contest_type_id\"=T1.\"id\"");
      builder.append(' ');
      selectDeep = builder.toString();
    }
    return selectDeep;
  }
  /**
   * Reads the entity at the cursor's current position together with its joined
   * Berries and ContestTypes relations (which may be null due to the LEFT JOINs).
   */
  protected BerryFlavors loadCurrentDeep(Cursor cursor, boolean lock) {
    BerryFlavors entity = loadCurrent(cursor, 0, lock);
    // Joined columns start right after this table's own columns.
    int offset = getAllColumns().length;
    Berries Berries = loadCurrentOther(daoSession.getBerriesDao(), cursor, offset);
    if(Berries != null) {
      entity.setBerries(Berries);
    }
    offset += daoSession.getBerriesDao().getAllColumns().length;
    ContestTypes ContestTypes = loadCurrentOther(daoSession.getContestTypesDao(), cursor, offset);
    if(ContestTypes != null) {
      entity.setContestTypes(ContestTypes);
    }
    return entity;
  }
  /**
   * Loads the entity with the given primary key, including its relations.
   * Returns null when the key is null or no row matches; throws if the key
   * unexpectedly matches more than one row.
   */
  public BerryFlavors loadDeep(Long key) {
    assertSinglePk();
    if (key == null) {
      return null;
    }
    StringBuilder builder = new StringBuilder(getSelectDeep());
    builder.append("WHERE ");
    SqlUtils.appendColumnsEqValue(builder, "T", getPkColumns());
    String sql = builder.toString();
    String[] keyArray = new String[] { key.toString() };
    Cursor cursor = db.rawQuery(sql, keyArray);
    try {
      boolean available = cursor.moveToFirst();
      if (!available) {
        return null;
      } else if (!cursor.isLast()) {
        throw new IllegalStateException("Expected unique result, but count was " + cursor.getCount());
      }
      return loadCurrentDeep(cursor, true);
    } finally {
      cursor.close();
    }
  }
  /** Reads all available rows from the given cursor and returns a list of new BerryFlavors objects. */
  public List<BerryFlavors> loadAllDeepFromCursor(Cursor cursor) {
    int count = cursor.getCount();
    List<BerryFlavors> list = new ArrayList<BerryFlavors>(count);
    if (cursor.moveToFirst()) {
      if (identityScope != null) {
        // Lock once for the whole batch instead of per row.
        identityScope.lock();
        identityScope.reserveRoom(count);
      }
      try {
        do {
          list.add(loadCurrentDeep(cursor, false));
        } while (cursor.moveToNext());
      } finally {
        if (identityScope != null) {
          identityScope.unlock();
        }
      }
    }
    return list;
  }
  /** Like {@link #loadAllDeepFromCursor(Cursor)}, but always closes the cursor. */
  protected List<BerryFlavors> loadDeepAllAndCloseCursor(Cursor cursor) {
    try {
      return loadAllDeepFromCursor(cursor);
    } finally {
      cursor.close();
    }
  }
  /** A raw-style query where you can pass any WHERE clause and arguments. */
  public List<BerryFlavors> queryDeep(String where, String... selectionArg) {
    Cursor cursor = db.rawQuery(getSelectDeep() + where, selectionArg);
    return loadDeepAllAndCloseCursor(cursor);
  }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.cxx.platform.Archiver;
import com.facebook.buck.cxx.platform.BsdArchiver;
import com.facebook.buck.cxx.platform.CompilerProvider;
import com.facebook.buck.cxx.platform.CxxPlatform;
import com.facebook.buck.cxx.platform.CxxToolProvider;
import com.facebook.buck.cxx.platform.DebugPathSanitizer;
import com.facebook.buck.cxx.platform.GnuArchiver;
import com.facebook.buck.cxx.platform.LinkerProvider;
import com.facebook.buck.cxx.platform.PosixNmSymbolNameTool;
import com.facebook.buck.cxx.platform.PreprocessorProvider;
import com.facebook.buck.cxx.platform.WindowsArchiver;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.ConstantToolProvider;
import com.facebook.buck.rules.HashedFileTool;
import com.facebook.buck.util.environment.Platform;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
/**
* Utility class to create a C/C++ platform described in the "cxx" section of .buckconfig, with
* reasonable system defaults.
*/
public class DefaultCxxPlatforms {
// Utility class, do not instantiate.
private DefaultCxxPlatforms() {}
public static final Flavor FLAVOR = InternalFlavor.of("default");
private static final Path DEFAULT_C_FRONTEND = Paths.get("/usr/bin/gcc");
private static final Path DEFAULT_CXX_FRONTEND = Paths.get("/usr/bin/g++");
private static final Path DEFAULT_AR = Paths.get("/usr/bin/ar");
private static final Path DEFAULT_STRIP = Paths.get("/usr/bin/strip");
private static final Path DEFAULT_RANLIB = Paths.get("/usr/bin/ranlib");
private static final Path DEFAULT_NM = Paths.get("/usr/bin/nm");
private static final Path DEFAULT_OSX_C_FRONTEND = Paths.get("/usr/bin/clang");
private static final Path DEFAULT_OSX_CXX_FRONTEND = Paths.get("/usr/bin/clang++");
private static final String DEFAULT_WINDOWS_CXX_FRONTEND = "cl";
private static final String DEFAULT_WINDOWS_LINK = "link";
private static final String DEFAULT_WINDOWS_LIB = "lib";
private static final String DEFAULT_WINDOWS_RANLIB = "lib";
private static final String DEFAULT_UNIX_RANLIB = "ranlib";
public static CxxPlatform build(Platform platform, CxxBuckConfig config) {
String sharedLibraryExtension;
String sharedLibraryVersionedExtensionFormat;
String staticLibraryExtension;
String objectFileExtension;
Path defaultCFrontend;
Path defaultCxxFrontend;
Path defaultLinker;
LinkerProvider.Type linkerType;
Archiver archiver;
DebugPathSanitizer compilerSanitizer;
Optional<String> binaryExtension;
ImmutableMap<String, String> env = config.getEnvironment();
Optional<CxxToolProvider.Type> defaultToolType = Optional.empty();
String ranlibCommand;
switch (platform) {
case LINUX:
sharedLibraryExtension = "so";
sharedLibraryVersionedExtensionFormat = "so.%s";
staticLibraryExtension = "a";
objectFileExtension = "o";
defaultCFrontend = getExecutablePath("gcc", DEFAULT_C_FRONTEND, env);
defaultCxxFrontend = getExecutablePath("g++", DEFAULT_CXX_FRONTEND, env);
defaultLinker = defaultCxxFrontend;
linkerType = LinkerProvider.Type.GNU;
archiver = new GnuArchiver(new HashedFileTool(getExecutablePath("ar", DEFAULT_AR, env)));
compilerSanitizer = new PrefixMapDebugPathSanitizer(".", ImmutableBiMap.of());
binaryExtension = Optional.empty();
ranlibCommand = DEFAULT_UNIX_RANLIB;
break;
case MACOS:
sharedLibraryExtension = "dylib";
sharedLibraryVersionedExtensionFormat = ".%s.dylib";
staticLibraryExtension = "a";
objectFileExtension = "o";
defaultCFrontend = getExecutablePath("clang", DEFAULT_OSX_C_FRONTEND, env);
defaultCxxFrontend = getExecutablePath("clang++", DEFAULT_OSX_CXX_FRONTEND, env);
defaultLinker = defaultCxxFrontend;
linkerType = LinkerProvider.Type.DARWIN;
archiver = new BsdArchiver(new HashedFileTool(getExecutablePath("ar", DEFAULT_AR, env)));
compilerSanitizer = new PrefixMapDebugPathSanitizer(".", ImmutableBiMap.of());
binaryExtension = Optional.empty();
ranlibCommand = DEFAULT_UNIX_RANLIB;
break;
case WINDOWS:
sharedLibraryExtension = "dll";
sharedLibraryVersionedExtensionFormat = "dll";
staticLibraryExtension = "lib";
objectFileExtension = "obj";
defaultCFrontend =
getExecutablePath(
DEFAULT_WINDOWS_CXX_FRONTEND, Paths.get(DEFAULT_WINDOWS_CXX_FRONTEND), env);
defaultCxxFrontend =
getExecutablePath(
DEFAULT_WINDOWS_CXX_FRONTEND, Paths.get(DEFAULT_WINDOWS_CXX_FRONTEND), env);
defaultLinker =
getExecutablePath(DEFAULT_WINDOWS_LINK, Paths.get(DEFAULT_WINDOWS_LINK), env);
linkerType = LinkerProvider.Type.WINDOWS;
archiver =
new WindowsArchiver(
new HashedFileTool(
getExecutablePath(DEFAULT_WINDOWS_LIB, Paths.get(DEFAULT_WINDOWS_LIB), env)));
compilerSanitizer = new PrefixMapDebugPathSanitizer(".", ImmutableBiMap.of());
binaryExtension = Optional.of("exe");
defaultToolType = Optional.of(CxxToolProvider.Type.WINDOWS);
ranlibCommand = DEFAULT_WINDOWS_RANLIB;
break;
case FREEBSD:
sharedLibraryExtension = "so";
sharedLibraryVersionedExtensionFormat = "so.%s";
staticLibraryExtension = "a";
objectFileExtension = "o";
defaultCFrontend = getExecutablePath("gcc", DEFAULT_C_FRONTEND, env);
defaultCxxFrontend = getExecutablePath("g++", DEFAULT_CXX_FRONTEND, env);
defaultLinker = defaultCxxFrontend;
linkerType = LinkerProvider.Type.GNU;
archiver = new BsdArchiver(new HashedFileTool(getExecutablePath("ar", DEFAULT_AR, env)));
compilerSanitizer = new PrefixMapDebugPathSanitizer(".", ImmutableBiMap.of());
binaryExtension = Optional.empty();
ranlibCommand = DEFAULT_UNIX_RANLIB;
break;
//$CASES-OMITTED$
default:
throw new RuntimeException(String.format("Unsupported platform: %s", platform));
}
PreprocessorProvider aspp = new PreprocessorProvider(defaultCFrontend, defaultToolType);
CompilerProvider as = new CompilerProvider(defaultCFrontend, defaultToolType);
PreprocessorProvider cpp = new PreprocessorProvider(defaultCFrontend, defaultToolType);
CompilerProvider cc = new CompilerProvider(defaultCFrontend, defaultToolType);
PreprocessorProvider cxxpp = new PreprocessorProvider(defaultCxxFrontend, defaultToolType);
CompilerProvider cxx = new CompilerProvider(defaultCxxFrontend, defaultToolType);
return CxxPlatforms.build(
FLAVOR,
platform,
config,
as,
aspp,
cc,
cxx,
cpp,
cxxpp,
new DefaultLinkerProvider(
linkerType, new ConstantToolProvider(new HashedFileTool(defaultLinker))),
ImmutableList.of(),
new HashedFileTool(getExecutablePath("strip", DEFAULT_STRIP, env)),
archiver,
new HashedFileTool(getExecutablePath(ranlibCommand, DEFAULT_RANLIB, env)),
new PosixNmSymbolNameTool(new HashedFileTool(getExecutablePath("nm", DEFAULT_NM, env))),
ImmutableList.of(),
ImmutableList.of(),
ImmutableList.of(),
ImmutableList.of(),
sharedLibraryExtension,
sharedLibraryVersionedExtensionFormat,
staticLibraryExtension,
objectFileExtension,
compilerSanitizer,
new MungingDebugPathSanitizer(
config.getDebugPathSanitizerLimit(),
File.separatorChar,
Paths.get("."),
ImmutableBiMap.of()),
ImmutableMap.of(),
binaryExtension,
config.getHeaderVerification());
}
/**
 * Resolves {@code executableName} against the search path described by {@code env},
 * falling back to {@code unresolvedLocation} when no executable is found.
 *
 * @param executableName     bare name of the tool to look up (e.g. "gcc")
 * @param unresolvedLocation path returned when the lookup fails
 * @param env                environment whose PATH-like entries are searched
 * @return the resolved executable path, or {@code unresolvedLocation}
 */
private static Path getExecutablePath(
    String executableName, Path unresolvedLocation, ImmutableMap<String, String> env) {
  Optional<Path> resolved =
      new ExecutableFinder().getOptionalExecutable(Paths.get(executableName), env);
  return resolved.orElse(unresolvedLocation);
}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.systemui.statusbar.policy;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.CanvasProperty;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.drawable.Drawable;
import android.view.DisplayListCanvas;
import android.view.RenderNodeAnimator;
import android.view.View;
import android.view.animation.Interpolator;
import com.android.systemui.R;
import com.android.systemui.statusbar.phone.PhoneStatusBar;
import java.util.ArrayList;
import java.util.HashSet;
/**
 * Glow/ripple {@link Drawable} for navigation-bar key buttons. Press feedback is
 * animated on one of two paths, chosen per {@link #draw(Canvas)} call: a
 * render-thread path using {@link RenderNodeAnimator} over {@link CanvasProperty}
 * handles when the canvas is hardware accelerated, or a software path driving the
 * {@code glowAlpha}/{@code glowScale} properties with {@link ObjectAnimator}.
 */
public class KeyButtonRipple extends Drawable {
    // Peak glow scale relative to the base ripple size.
    private static final float GLOW_MAX_SCALE_FACTOR = 1.35f;
    // Peak glow alpha in the 0..1 range.
    private static final float GLOW_MAX_ALPHA = 0.2f;
    // Animation durations in milliseconds: expand on press, fade on release.
    private static final int ANIMATION_DURATION_SCALE = 350;
    private static final int ANIMATION_DURATION_FADE = 450;
    // Shared paint; its alpha is mutated by both draw paths (see drawSoftware
    // and enterHardware).
    private Paint mRipplePaint;
    // Render-thread animatable round-rect parameters; only used on the
    // hardware-accelerated path.
    private CanvasProperty<Float> mLeftProp;
    private CanvasProperty<Float> mTopProp;
    private CanvasProperty<Float> mRightProp;
    private CanvasProperty<Float> mBottomProp;
    private CanvasProperty<Float> mRxProp;
    private CanvasProperty<Float> mRyProp;
    private CanvasProperty<Paint> mPaintProp;
    // Software-path animation state, driven via the glowAlpha/glowScale
    // property accessors below.
    private float mGlowAlpha = 0f;
    private float mGlowScale = 1f;
    private boolean mPressed;
    // True while the hardware glow should still be drawn; cleared once all
    // animations have ended and the button is no longer pressed.
    private boolean mDrawingHardwareGlow;
    private int mMaxWidth;
    private final Interpolator mInterpolator = new LogInterpolator();
    private final Interpolator mAlphaExitInterpolator = PhoneStatusBar.ALPHA_OUT;
    // Updated on every draw() from Canvas.isHardwareAccelerated(); selects
    // which setPressed() path later state changes take.
    private boolean mSupportHardware;
    private final View mTargetView;
    // Animations currently in flight; mTmpArray is a scratch copy used while
    // cancelling, because cancellation callbacks mutate mRunningAnimations.
    private final HashSet<Animator> mRunningAnimations = new HashSet<>();
    private final ArrayList<Animator> mTmpArray = new ArrayList<>();
    private int mRippleColor;
    /**
     * @param ctx        context used to resolve the ripple color and maximum width
     * @param targetView view the render-thread animators are attached to
     */
    public KeyButtonRipple(Context ctx, View targetView) {
        mMaxWidth = ctx.getResources().getDimensionPixelSize(R.dimen.key_button_ripple_max_width);
        mTargetView = targetView;
        mRippleColor = ctx.getResources().getColor(R.color.navbutton_ripple_color);
    }
    // Lazily creates the shared ripple paint.
    private Paint getRipplePaint() {
        if (mRipplePaint == null) {
            mRipplePaint = new Paint();
            mRipplePaint.setAntiAlias(true);
            mRipplePaint.setColor(mRippleColor);
        }
        return mRipplePaint;
    }
    // Software path: draws a centered round rect whose extent along the long
    // axis scales with mGlowScale and whose opacity follows mGlowAlpha.
    private void drawSoftware(Canvas canvas) {
        if (mGlowAlpha > 0f) {
            final Paint p = getRipplePaint();
            p.setAlpha((int)(mGlowAlpha * 255f));
            final float w = getBounds().width();
            final float h = getBounds().height();
            final boolean horizontal = w > h;
            final float diameter = getRippleSize() * mGlowScale;
            final float radius = diameter * .5f;
            final float cx = w * .5f;
            final float cy = h * .5f;
            final float rx = horizontal ? radius : cx;
            final float ry = horizontal ? cy : radius;
            final float corner = horizontal ? cy : cx;
            canvas.drawRoundRect(cx - rx, cy - ry,
                    cx + rx, cy + ry,
                    corner, corner, p);
        }
    }
    @Override
    public void draw(Canvas canvas) {
        mSupportHardware = canvas.isHardwareAccelerated();
        if (mSupportHardware) {
            drawHardware((DisplayListCanvas) canvas);
        } else {
            drawSoftware(canvas);
        }
    }
    @Override
    public void setAlpha(int alpha) {
        // Not supported.
    }
    @Override
    public void setColorFilter(ColorFilter colorFilter) {
        // Not supported.
    }
    @Override
    public int getOpacity() {
        return PixelFormat.TRANSLUCENT;
    }
    private boolean isHorizontal() {
        return getBounds().width() > getBounds().height();
    }
    // Hardware path: the round-rect parameters and paint are CanvasProperty
    // handles animated on the render thread (see enterHardware/exitHardware).
    private void drawHardware(DisplayListCanvas c) {
        if (mDrawingHardwareGlow) {
            c.drawRoundRect(mLeftProp, mTopProp, mRightProp, mBottomProp, mRxProp, mRyProp,
                    mPaintProp);
        }
    }
    /** Property accessor for the software ObjectAnimator ("glowAlpha"). */
    public float getGlowAlpha() {
        return mGlowAlpha;
    }
    public void setGlowAlpha(float x) {
        mGlowAlpha = x;
        invalidateSelf();
    }
    /** Property accessor for the software ObjectAnimator ("glowScale"). */
    public float getGlowScale() {
        return mGlowScale;
    }
    public void setGlowScale(float x) {
        mGlowScale = x;
        invalidateSelf();
    }
    // Translates the state_pressed bit into enter/exit animations; returns
    // true only when the pressed state actually changed.
    @Override
    protected boolean onStateChange(int[] state) {
        boolean pressed = false;
        for (int i = 0; i < state.length; i++) {
            if (state[i] == android.R.attr.state_pressed) {
                pressed = true;
                break;
            }
        }
        if (pressed != mPressed) {
            setPressed(pressed);
            mPressed = pressed;
            return true;
        } else {
            return false;
        }
    }
    @Override
    public void jumpToCurrentState() {
        // Skip the transition animations entirely.
        cancelAnimations();
    }
    @Override
    public boolean isStateful() {
        return true;
    }
    public void setPressed(boolean pressed) {
        if (mSupportHardware) {
            setPressedHardware(pressed);
        } else {
            setPressedSoftware(pressed);
        }
    }
    // Cancels all in-flight animations. Iterates over a temporary copy because
    // cancel() invokes onAnimationEnd, which removes from mRunningAnimations.
    private void cancelAnimations() {
        mTmpArray.addAll(mRunningAnimations);
        int size = mTmpArray.size();
        for (int i = 0; i < size; i++) {
            Animator a = mTmpArray.get(i);
            a.cancel();
        }
        mTmpArray.clear();
        mRunningAnimations.clear();
    }
    private void setPressedSoftware(boolean pressed) {
        if (pressed) {
            enterSoftware();
        } else {
            exitSoftware();
        }
    }
    // Press feedback (software): jump to full alpha and animate the scale up.
    private void enterSoftware() {
        cancelAnimations();
        mGlowAlpha = GLOW_MAX_ALPHA;
        ObjectAnimator scaleAnimator = ObjectAnimator.ofFloat(this, "glowScale",
                0f, GLOW_MAX_SCALE_FACTOR);
        scaleAnimator.setInterpolator(mInterpolator);
        scaleAnimator.setDuration(ANIMATION_DURATION_SCALE);
        scaleAnimator.addListener(mAnimatorListener);
        scaleAnimator.start();
        mRunningAnimations.add(scaleAnimator);
    }
    // Release feedback (software): fade the current alpha down to zero. A
    // still-running scale animation is deliberately left in place.
    private void exitSoftware() {
        ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(this, "glowAlpha", mGlowAlpha, 0f);
        alphaAnimator.setInterpolator(mAlphaExitInterpolator);
        alphaAnimator.setDuration(ANIMATION_DURATION_FADE);
        alphaAnimator.addListener(mAnimatorListener);
        alphaAnimator.start();
        mRunningAnimations.add(alphaAnimator);
    }
    private void setPressedHardware(boolean pressed) {
        if (pressed) {
            enterHardware();
        } else {
            exitHardware();
        }
    }
    /**
     * Sets the left/top property for the round rect to {@code prop} depending on whether we are
     * horizontal or vertical mode.
     */
    private void setExtendStart(CanvasProperty<Float> prop) {
        if (isHorizontal()) {
            mLeftProp = prop;
        } else {
            mTopProp = prop;
        }
    }
    private CanvasProperty<Float> getExtendStart() {
        return isHorizontal() ? mLeftProp : mTopProp;
    }
    /**
     * Sets the right/bottom property for the round rect to {@code prop} depending on whether we are
     * horizontal or vertical mode.
     */
    private void setExtendEnd(CanvasProperty<Float> prop) {
        if (isHorizontal()) {
            mRightProp = prop;
        } else {
            mBottomProp = prop;
        }
    }
    private CanvasProperty<Float> getExtendEnd() {
        return isHorizontal() ? mRightProp : mBottomProp;
    }
    // Length of the bounds along the ripple's long axis.
    private int getExtendSize() {
        return isHorizontal() ? getBounds().width() : getBounds().height();
    }
    // Ripple base size: long-axis length clamped to the configured maximum.
    private int getRippleSize() {
        int size = isHorizontal() ? getBounds().width() : getBounds().height();
        return Math.min(size, mMaxWidth);
    }
    // Press feedback (hardware): animate the round rect's start/end edges
    // outward from the center; the cross axis spans the full bounds and the
    // corner radii make the ends fully rounded. Note getExtendSize()/2 and
    // getBounds().height()/2 below use integer division before the float
    // conversion.
    private void enterHardware() {
        cancelAnimations();
        mDrawingHardwareGlow = true;
        setExtendStart(CanvasProperty.createFloat(getExtendSize() / 2));
        final RenderNodeAnimator startAnim = new RenderNodeAnimator(getExtendStart(),
                getExtendSize()/2 - GLOW_MAX_SCALE_FACTOR * getRippleSize()/2);
        startAnim.setDuration(ANIMATION_DURATION_SCALE);
        startAnim.setInterpolator(mInterpolator);
        startAnim.addListener(mAnimatorListener);
        startAnim.setTarget(mTargetView);
        setExtendEnd(CanvasProperty.createFloat(getExtendSize() / 2));
        final RenderNodeAnimator endAnim = new RenderNodeAnimator(getExtendEnd(),
                getExtendSize()/2 + GLOW_MAX_SCALE_FACTOR * getRippleSize()/2);
        endAnim.setDuration(ANIMATION_DURATION_SCALE);
        endAnim.setInterpolator(mInterpolator);
        endAnim.addListener(mAnimatorListener);
        endAnim.setTarget(mTargetView);
        if (isHorizontal()) {
            mTopProp = CanvasProperty.createFloat(0f);
            mBottomProp = CanvasProperty.createFloat(getBounds().height());
            mRxProp = CanvasProperty.createFloat(getBounds().height()/2);
            mRyProp = CanvasProperty.createFloat(getBounds().height()/2);
        } else {
            mLeftProp = CanvasProperty.createFloat(0f);
            mRightProp = CanvasProperty.createFloat(getBounds().width());
            mRxProp = CanvasProperty.createFloat(getBounds().width()/2);
            mRyProp = CanvasProperty.createFloat(getBounds().width()/2);
        }
        mGlowScale = GLOW_MAX_SCALE_FACTOR;
        mGlowAlpha = GLOW_MAX_ALPHA;
        mRipplePaint = getRipplePaint();
        mRipplePaint.setAlpha((int) (mGlowAlpha * 255));
        mPaintProp = CanvasProperty.createPaint(mRipplePaint);
        startAnim.start();
        endAnim.start();
        mRunningAnimations.add(startAnim);
        mRunningAnimations.add(endAnim);
        invalidateSelf();
    }
    // Release feedback (hardware): fade the paint's alpha to zero.
    private void exitHardware() {
        mPaintProp = CanvasProperty.createPaint(getRipplePaint());
        final RenderNodeAnimator opacityAnim = new RenderNodeAnimator(mPaintProp,
                RenderNodeAnimator.PAINT_ALPHA, 0);
        opacityAnim.setDuration(ANIMATION_DURATION_FADE);
        opacityAnim.setInterpolator(mAlphaExitInterpolator);
        opacityAnim.addListener(mAnimatorListener);
        opacityAnim.setTarget(mTargetView);
        opacityAnim.start();
        mRunningAnimations.add(opacityAnim);
        invalidateSelf();
    }
    // Bookkeeping: drop finished animations and stop drawing the hardware glow
    // once everything has ended while the button is released.
    private final AnimatorListenerAdapter mAnimatorListener =
            new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mRunningAnimations.remove(animation);
                    if (mRunningAnimations.isEmpty() && !mPressed) {
                        mDrawingHardwareGlow = false;
                        invalidateSelf();
                    }
                }
            };
    /**
     * Interpolator with a smooth log deceleration
     */
    private static final class LogInterpolator implements Interpolator {
        @Override
        public float getInterpolation(float input) {
            return 1 - (float) Math.pow(400, -input * 1.4);
        }
    }
}
| |
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.accesscontrol;
import java.util.Optional;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import joynr.infrastructure.DacTypes.MasterAccessControlEntry;
import joynr.infrastructure.DacTypes.OwnerAccessControlEntry;
import joynr.infrastructure.DacTypes.Permission;
import joynr.infrastructure.DacTypes.TrustLevel;
/**
 * Tests for {@link AccessControlAlgorithm}: verifies how master, mediator and owner
 * access control entries (ACEs) combine into an effective consumer permission, and
 * that lower-level entries may only override within the possible-value ranges granted
 * by the higher-level ones.
 */
public class AccessControlAlgorithmTest {
    // Fixture constants; declared final so individual tests cannot reassign them.
    private static final String UID = "testuid";
    private static final String DOMAIN = "testdomain";
    private static final String INTERFACE = "testinterface";
    private static final Permission[] allPermissions = { Permission.NO, Permission.ASK, Permission.YES };
    private static final TrustLevel[] allTrustLevels = { TrustLevel.LOW, TrustLevel.MID, TrustLevel.HIGH };
    private AccessControlAlgorithm accessControlAlgorithm;
    private MasterAccessControlEntry masterAce;
    private MasterAccessControlEntry mediatorAce;
    private OwnerAccessControlEntry ownerAce;

    /**
     * Creates fresh entries before each test: default permission NO, required trust
     * LOW, with the full range of permissions/trust levels allowed as overrides.
     */
    @Before
    public void setup() {
        this.accessControlAlgorithm = new AccessControlAlgorithm();
        masterAce = new MasterAccessControlEntry(UID,
                                                 DOMAIN,
                                                 INTERFACE,
                                                 TrustLevel.LOW,
                                                 allTrustLevels,
                                                 TrustLevel.LOW,
                                                 allTrustLevels,
                                                 null,
                                                 Permission.NO,
                                                 allPermissions);
        mediatorAce = new MasterAccessControlEntry(UID,
                                                   DOMAIN,
                                                   INTERFACE,
                                                   TrustLevel.LOW,
                                                   allTrustLevels,
                                                   TrustLevel.LOW,
                                                   allTrustLevels,
                                                   null,
                                                   Permission.NO,
                                                   allPermissions);
        ownerAce = new OwnerAccessControlEntry(UID,
                                               DOMAIN,
                                               INTERFACE,
                                               TrustLevel.LOW,
                                               TrustLevel.LOW,
                                               null,
                                               Permission.NO);
    }

    /** Master ACE alone grants its default permission when the trust level suffices. */
    @Test
    public void testPermissionWithMasterAceOnly() {
        masterAce.setDefaultConsumerPermission(Permission.YES);
        masterAce.setDefaultRequiredTrustLevel(TrustLevel.HIGH);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                    Optional.empty(),
                                                                                    Optional.empty(),
                                                                                    TrustLevel.HIGH);
        Assert.assertEquals(Permission.YES, consumerPermission);
    }

    /** Permission is denied when the message trust level is below the ACE requirement. */
    @Test
    public void testPermissionMessageTrustLevelDoesntMatchAce() {
        masterAce.setDefaultConsumerPermission(Permission.YES);
        masterAce.setDefaultRequiredTrustLevel(TrustLevel.MID);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                    Optional.empty(),
                                                                                    Optional.empty(),
                                                                                    TrustLevel.LOW);
        Assert.assertEquals(Permission.NO, consumerPermission);
    }

    /** No ACE at all must yield Permission.NO (deny by default). */
    @Test
    public void testPermissionWithAllAceNull() {
        Permission providerPermission = accessControlAlgorithm.getConsumerPermission(Optional.empty(),
                                                                                     Optional.empty(),
                                                                                     Optional.empty(),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.NO, providerPermission);
    }

    //------ Mediator overrides master with -----------------------------
    /** Mediator overrides the master default when within the master's allowed range. */
    @Test
    public void testPermissionWithMasterAndMediatorAce() {
        masterAce.setDefaultConsumerPermission(Permission.YES);
        masterAce.setDefaultRequiredTrustLevel(TrustLevel.HIGH);
        masterAce.setPossibleConsumerPermissions(allPermissions);
        masterAce.setPossibleRequiredTrustLevels(allTrustLevels);
        mediatorAce.setDefaultConsumerPermission(Permission.ASK);
        mediatorAce.setDefaultRequiredTrustLevel(TrustLevel.LOW);
        Permission providerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                     Optional.of(mediatorAce),
                                                                                     Optional.empty(),
                                                                                     TrustLevel.LOW);
        Assert.assertEquals(Permission.ASK, providerPermission);
    }

    /** Mediator ACE alone behaves like a master ACE. */
    @Test
    public void testPermissionWithMediatorOnly() {
        mediatorAce.setDefaultConsumerPermission(Permission.YES);
        mediatorAce.setDefaultRequiredTrustLevel(TrustLevel.MID);
        Permission providerPermission = accessControlAlgorithm.getConsumerPermission(Optional.empty(),
                                                                                     Optional.of(mediatorAce),
                                                                                     Optional.empty(),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.YES, providerPermission);
    }

    /** A mediator override outside the master's allowed range is rejected -> NO. */
    @Test
    public void testPermissionWithMasterAndInvalidMediatorAce() {
        masterAce.setPossibleConsumerPermissions(new Permission[]{ Permission.NO });
        mediatorAce.setPossibleConsumerPermissions(new Permission[]{ Permission.ASK, Permission.YES });
        mediatorAce.setDefaultConsumerPermission(Permission.YES);
        mediatorAce.setDefaultRequiredTrustLevel(TrustLevel.MID);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                     Optional.of(mediatorAce),
                                                                                     Optional.empty(),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.NO, consumerPermission);
    }

    //------ Owner overrides master and mediator ---------------------------------
    /** Owner ACE wins over both master and mediator when its trust requirement is met. */
    @Test
    public void testPermissionWithMasterMediatorAndOwnerAce() {
        masterAce.setDefaultConsumerPermission(Permission.YES);
        masterAce.setDefaultRequiredTrustLevel(TrustLevel.LOW);
        mediatorAce.setDefaultConsumerPermission(Permission.ASK);
        mediatorAce.setDefaultRequiredTrustLevel(TrustLevel.HIGH);
        ownerAce.setConsumerPermission(Permission.YES);
        ownerAce.setRequiredTrustLevel(TrustLevel.MID);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                     Optional.of(mediatorAce),
                                                                                     Optional.of(ownerAce),
                                                                                     TrustLevel.MID);
        Assert.assertEquals(Permission.YES, consumerPermission);
    }

    /** Owner ACE wins over a master ACE alone. */
    @Test
    public void testPermissionWithMasterAndOwnerAce() {
        masterAce.setDefaultConsumerPermission(Permission.ASK);
        masterAce.setDefaultRequiredTrustLevel(TrustLevel.LOW);
        ownerAce.setConsumerPermission(Permission.YES);
        ownerAce.setRequiredTrustLevel(TrustLevel.HIGH);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                     Optional.empty(),
                                                                                     Optional.of(ownerAce),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.YES, consumerPermission);
    }

    /** Owner ACE alone grants its configured permission. */
    @Test
    public void testPermissionWithOwnerAceOnly() {
        ownerAce.setConsumerPermission(Permission.YES);
        ownerAce.setRequiredTrustLevel(TrustLevel.HIGH);
        Permission providerPermission = accessControlAlgorithm.getConsumerPermission(Optional.empty(),
                                                                                     Optional.empty(),
                                                                                     Optional.of(ownerAce),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.YES, providerPermission);
    }

    /** An owner override outside the mediator's allowed range is rejected -> NO. */
    @Test
    public void testPermissionWithMediatorAndInvalidOwnerAce() {
        mediatorAce.setPossibleConsumerPermissions(new Permission[]{ Permission.NO });
        ownerAce.setConsumerPermission(Permission.ASK);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.empty(),
                                                                                     Optional.of(mediatorAce),
                                                                                     Optional.of(ownerAce),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.NO, consumerPermission);
    }

    /** An owner override outside the master's allowed range is rejected -> NO. */
    @Test
    public void testPermissionWithMasterAndInvalidOwnerAce() {
        masterAce.setPossibleConsumerPermissions(new Permission[]{ Permission.NO });
        ownerAce.setConsumerPermission(Permission.ASK);
        Permission consumerPermission = accessControlAlgorithm.getConsumerPermission(Optional.of(masterAce),
                                                                                     Optional.empty(),
                                                                                     Optional.of(ownerAce),
                                                                                     TrustLevel.HIGH);
        Assert.assertEquals(Permission.NO, consumerPermission);
    }
}
| |
/*
* Copyright 2006-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.dsl.design;
import com.consol.citrus.TestCase;
import com.consol.citrus.actions.SendMessageAction;
import com.consol.citrus.container.SequenceAfterTest;
import com.consol.citrus.container.SequenceBeforeTest;
import com.consol.citrus.dsl.actions.DelegatingTestAction;
import com.consol.citrus.message.DefaultMessage;
import com.consol.citrus.report.TestActionListeners;
import com.consol.citrus.testng.AbstractTestNGUnitTest;
import com.consol.citrus.validation.builder.StaticMessageContentBuilder;
import com.consol.citrus.ws.actions.SendSoapMessageAction;
import com.consol.citrus.ws.client.WebServiceClient;
import com.consol.citrus.ws.message.SoapAttachment;
import com.consol.citrus.ws.message.SoapMessageHeaders;
import org.mockito.Mockito;
import org.springframework.context.ApplicationContext;
import org.springframework.core.io.Resource;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.HashMap;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;
/**
* @author Christoph Deppisch
*/
public class SendSoapMessageTestDesignerTest extends AbstractTestNGUnitTest {
    // Mocked SOAP client endpoint; the target of every send operation under test.
    private WebServiceClient soapClient = Mockito.mock(WebServiceClient.class);
    // Mocked Spring context used by tests that resolve endpoints by bean name.
    private ApplicationContext applicationContextMock = Mockito.mock(ApplicationContext.class);
    // Mocked resource backing the attachment-from-resource test.
    private Resource resource = Mockito.mock(Resource.class);
    // Reusable attachment fixture, populated once in setup().
    private SoapAttachment testAttachment = new SoapAttachment();
    /**
     * Setup test attachment.
     */
    @BeforeClass
    public void setup() {
        testAttachment.setContentId("attachment01");
        testAttachment.setContent("This is an attachment");
        testAttachment.setContentType("text/plain");
        testAttachment.setCharsetName("UTF-8");
    }
@Test
public void testFork() {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
send(soapClient)
.message(new DefaultMessage("Foo").setHeader("operation", "foo"))
.header("additional", "additionalValue");
send(soapClient)
.message(new DefaultMessage("Foo").setHeader("operation", "foo"))
.fork(true);
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 2);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendMessageAction.class);
Assert.assertEquals(test.getActions().get(1).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(1)).getDelegate().getClass(), SendMessageAction.class);
SendMessageAction action = (SendMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(String.class), "Foo");
Assert.assertEquals(messageBuilder.getMessage().getHeader("operation"), "foo");
Assert.assertEquals(messageBuilder.getMessageHeaders().size(), 1L);
Assert.assertEquals(messageBuilder.getMessageHeaders().get("additional"), "additionalValue");
Assert.assertFalse(action.isForkMode());
action = (SendMessageAction) ((DelegatingTestAction)test.getActions().get(1)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
Assert.assertTrue(action.isForkMode());
}
@Test
public void testSoapAction() {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
soap().client(soapClient)
.send()
.soapAction("TestService/sayHello")
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>");
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
SendSoapMessageAction action = (SendSoapMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(messageBuilder.getMessage().getHeaders().size(), 3L);
Assert.assertEquals(messageBuilder.getMessage().getHeaders().get(SoapMessageHeaders.SOAP_ACTION), "TestService/sayHello");
}
@Test
public void testSoapAttachment() {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
soap().client(soapClient)
.send()
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.attachment(testAttachment);
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
SendSoapMessageAction action = (SendSoapMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(messageBuilder.getMessageHeaders().size(), 0L);
Assert.assertEquals(action.getAttachments().size(), 1L);
Assert.assertNull(action.getAttachments().get(0).getContentResourcePath());
Assert.assertEquals(action.getAttachments().get(0).getContent(), testAttachment.getContent());
Assert.assertEquals(action.getAttachments().get(0).getContentId(), testAttachment.getContentId());
Assert.assertEquals(action.getAttachments().get(0).getContentType(), testAttachment.getContentType());
Assert.assertEquals(action.getAttachments().get(0).getCharsetName(), testAttachment.getCharsetName());
}
@Test
public void testMtomSoapAttachment() {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
soap().client(soapClient)
.send()
.mtomEnabled(true)
.payload("<TestRequest><data>cid:attachment01</data></TestRequest>")
.attachment(testAttachment);
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
SendSoapMessageAction action = (SendSoapMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(), "<TestRequest><data>cid:attachment01</data></TestRequest>");
Assert.assertEquals(messageBuilder.getMessageHeaders().size(), 0L);
Assert.assertTrue(action.getMtomEnabled());
Assert.assertEquals(action.getAttachments().size(), 1L);
Assert.assertNull(action.getAttachments().get(0).getContentResourcePath());
Assert.assertEquals(action.getAttachments().get(0).getContent(), testAttachment.getContent());
Assert.assertEquals(action.getAttachments().get(0).getContentId(), testAttachment.getContentId());
Assert.assertEquals(action.getAttachments().get(0).getContentType(), testAttachment.getContentType());
Assert.assertEquals(action.getAttachments().get(0).getCharsetName(), testAttachment.getCharsetName());
}
@Test
public void testSoapAttachmentData() {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
soap().client(soapClient)
.send()
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.attachment(testAttachment.getContentId(), testAttachment.getContentType(), testAttachment.getContent());
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
SendSoapMessageAction action = (SendSoapMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(messageBuilder.getMessageHeaders().size(), 0L);
Assert.assertEquals(action.getAttachments().size(), 1L);
Assert.assertNull(action.getAttachments().get(0).getContentResourcePath());
Assert.assertEquals(action.getAttachments().get(0).getContent(), testAttachment.getContent());
Assert.assertEquals(action.getAttachments().get(0).getContentId(), testAttachment.getContentId());
Assert.assertEquals(action.getAttachments().get(0).getContentType(), testAttachment.getContentType());
Assert.assertEquals(action.getAttachments().get(0).getCharsetName(), testAttachment.getCharsetName());
}
@Test
public void testMultipleSoapAttachmentData() {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
soap().client(soapClient)
.send()
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.attachment(testAttachment.getContentId() + 1, testAttachment.getContentType(), testAttachment.getContent() + 1)
.attachment(testAttachment.getContentId() + 2, testAttachment.getContentType(), testAttachment.getContent() + 2);
}
};
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
SendSoapMessageAction action = (SendSoapMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(messageBuilder.getMessageHeaders().size(), 0L);
Assert.assertEquals(action.getAttachments().size(), 2L);
Assert.assertNull(action.getAttachments().get(0).getContentResourcePath());
Assert.assertEquals(action.getAttachments().get(0).getContent(), testAttachment.getContent() + 1);
Assert.assertEquals(action.getAttachments().get(0).getContentId(), testAttachment.getContentId() + 1);
Assert.assertEquals(action.getAttachments().get(0).getContentType(), testAttachment.getContentType());
Assert.assertEquals(action.getAttachments().get(0).getCharsetName(), testAttachment.getCharsetName());
Assert.assertNull(action.getAttachments().get(1).getContentResourcePath());
Assert.assertEquals(action.getAttachments().get(1).getContent(), testAttachment.getContent() + 2);
Assert.assertEquals(action.getAttachments().get(1).getContentId(), testAttachment.getContentId() + 2);
Assert.assertEquals(action.getAttachments().get(1).getContentType(), testAttachment.getContentType());
Assert.assertEquals(action.getAttachments().get(1).getCharsetName(), testAttachment.getCharsetName());
}
@Test
public void testSoapAttachmentResource() throws IOException {
MockTestDesigner builder = new MockTestDesigner(applicationContext, context) {
@Override
public void configure() {
soap().client(soapClient)
.send()
.payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
.attachment(testAttachment.getContentId(), testAttachment.getContentType(), resource);
}
};
reset(resource);
when(resource.getInputStream()).thenReturn(new ByteArrayInputStream("someAttachmentData".getBytes()));
builder.configure();
TestCase test = builder.getTestCase();
Assert.assertEquals(test.getActionCount(), 1);
Assert.assertEquals(test.getActions().get(0).getClass(), DelegatingTestAction.class);
Assert.assertEquals(((DelegatingTestAction)test.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
SendSoapMessageAction action = (SendSoapMessageAction) ((DelegatingTestAction)test.getActions().get(0)).getDelegate();
Assert.assertEquals(action.getName(), "send");
Assert.assertEquals(action.getEndpoint(), soapClient);
Assert.assertEquals(action.getMessageBuilder().getClass(), StaticMessageContentBuilder.class);
StaticMessageContentBuilder messageBuilder = (StaticMessageContentBuilder) action.getMessageBuilder();
Assert.assertEquals(messageBuilder.getMessage().getPayload(), "<TestRequest><Message>Hello World!</Message></TestRequest>");
Assert.assertEquals(messageBuilder.getMessageHeaders().size(), 0L);
Assert.assertEquals(action.getAttachments().get(0).getContent(), "someAttachmentData");
Assert.assertEquals(action.getAttachments().get(0).getContentId(), testAttachment.getContentId());
Assert.assertEquals(action.getAttachments().get(0).getContentType(), testAttachment.getContentType());
Assert.assertEquals(action.getAttachments().get(0).getCharsetName(), testAttachment.getCharsetName());
}
@Test
public void testSendBuilderWithEndpointName() {
    // Fresh application context stubs so endpoints are resolved by name only.
    reset(applicationContextMock);
    when(applicationContextMock.getBean(TestActionListeners.class)).thenReturn(new TestActionListeners());
    when(applicationContextMock.getBeansOfType(SequenceBeforeTest.class)).thenReturn(new HashMap<String, SequenceBeforeTest>());
    when(applicationContextMock.getBeansOfType(SequenceAfterTest.class)).thenReturn(new HashMap<String, SequenceAfterTest>());

    MockTestDesigner designer = new MockTestDesigner(applicationContextMock, context) {
        @Override
        public void configure() {
            soap().client("soapClient")
                    .send()
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>")
                    .header("operation", "soapOperation")
                    .attachment(testAttachment);

            send("otherClient")
                    .payload("<TestRequest><Message>Hello World!</Message></TestRequest>");
        }
    };

    designer.configure();

    TestCase testCase = designer.getTestCase();
    Assert.assertEquals(testCase.getActionCount(), 2);
    Assert.assertEquals(testCase.getActions().get(0).getClass(), DelegatingTestAction.class);
    Assert.assertEquals(((DelegatingTestAction) testCase.getActions().get(0)).getDelegate().getClass(), SendSoapMessageAction.class);
    Assert.assertEquals(testCase.getActions().get(1).getClass(), DelegatingTestAction.class);
    Assert.assertEquals(((DelegatingTestAction) testCase.getActions().get(1)).getDelegate().getClass(), SendMessageAction.class);

    // First action: the SOAP send resolved by endpoint name.
    SendSoapMessageAction soapSend = (SendSoapMessageAction) ((DelegatingTestAction) testCase.getActions().get(0)).getDelegate();
    Assert.assertEquals(soapSend.getName(), "send");
    Assert.assertEquals(soapSend.getEndpointUri(), "soapClient");

    StaticMessageContentBuilder payloadBuilder = (StaticMessageContentBuilder) soapSend.getMessageBuilder();
    Assert.assertEquals(payloadBuilder.getMessage().getPayload(), "<TestRequest><Message>Hello World!</Message></TestRequest>");
    Assert.assertEquals(payloadBuilder.getMessageHeaders().size(), 1L);
    Assert.assertTrue(payloadBuilder.getMessageHeaders().containsKey("operation"));

    // Second action: the plain (non-SOAP) send resolved by endpoint name.
    SendMessageAction plainSend = (SendMessageAction) ((DelegatingTestAction) testCase.getActions().get(1)).getDelegate();
    Assert.assertEquals(plainSend.getName(), "send");
    Assert.assertEquals(plainSend.getEndpointUri(), "otherClient");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*/
/*
* Created on 23.11.2004
*
*/
package org.apache.harmony.test.func.api.java.io.share.BufferedWriter;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Writer;
import org.apache.harmony.test.func.api.java.io.share.Writer.WriterTestShared;
import org.apache.harmony.test.func.api.java.io.share.MockWriter;
import org.apache.harmony.test.func.api.java.io.share.MultiThreadRunner;
import org.apache.harmony.share.Result;
/**
 * Shared test logic for {@code java.io.BufferedWriter}, built on the generic
 * writer tests in {@link WriterTestShared}. The writer under test is a
 * BufferedWriter wrapping the writer produced by the superclass; the wrapped
 * writer is stored per-thread so its content can be inspected after writes.
 */
public class BufferedWriterTestShared extends WriterTestShared {
// Per-thread holder for the writer wrapped by the BufferedWriter under test.
// Raw ThreadLocal (pre-generics style); values are Writer instances.
private ThreadLocal underlyingWriter = new ThreadLocal();
// Returns a fresh underlying writer, discarding any previous one for this thread.
private Writer getUnderlyingWriter() throws IOException {
return getUnderlyingWriter(true);
}
// Variant for lock-based construction; always replaces the stored writer.
private Writer getUnderlyingWriter(Object lock) {
underlyingWriter.set(super.getTestedWriter(lock));
return (Writer) underlyingWriter.get();
}
// Returns the per-thread underlying writer, creating one lazily.
// When reset is true the cached writer is dropped first, forcing a new one.
private Writer getUnderlyingWriter(boolean reset) throws IOException {
if (reset) {
underlyingWriter.set(null);
}
if (underlyingWriter.get() == null) {
underlyingWriter.set(super.getTestedWriter());
}
return (Writer) underlyingWriter.get();
}
// The writer exercised by the inherited tests: a BufferedWriter over a fresh
// underlying writer.
protected Writer getTestedWriter() throws IOException {
return new BufferedWriter(getUnderlyingWriter());
}
protected Writer getTestedWriter(Object lock) {
return new BufferedWriter(getUnderlyingWriter(lock));
}
// NOTE(review): the argument w is intentionally ignored — the content is read
// from the per-thread underlying writer populated by getTestedWriter().
protected String getWriterAsString(Writer w) throws IOException {
return super.getWriterAsString(getUnderlyingWriter(false));
}
// close() must flush buffered data and close the underlying writer exactly once.
public Result testClose() throws IOException {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw);
MultiThreadRunner.waitAtBarrier();
bw.write('a');
bw.close();
if (mw.getLog().size() == 1 && mw.getLog().get(0).equals("close")
&& mw.toString().equals("a")) {
return passed();
}
return failed("underlying stream was not closed correctly");
}
// flush() must push buffered data down and flush the underlying writer.
public Result testFlush() throws IOException {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw);
MultiThreadRunner.waitAtBarrier();
bw.write('a');
bw.flush();
if (mw.getLog().size() == 1 && mw.getLog().get(0).equals("flush")
&& mw.toString().equals("a")) {
return passed();
}
return failed("underlying stream was not flushed");
}
// The following tests delegate to the generic Writer test implementations.
public Result testWriteArray() throws IOException {
return super.testWriteArray();
}
public Result testWriteArrayNegativeLength() throws IOException {
return super.testWriteArrayNegativeLength();
}
public Result testWriteArrayNegativeStart() throws IOException {
return super.testWriteArrayNegativeStart();
}
public Result testWriteArraySlice() throws IOException {
return super.testWriteArraySlice();
}
public Result testWriteInt() throws IOException {
return super.testWriteInt();
}
public Result testWriteNullArray() throws IOException {
return super.testWriteNullArray();
}
public Result testWriteNullString() throws IOException {
return super.testWriteNullString();
}
public Result testWriteString() throws IOException {
return super.testWriteString();
}
public Result testWriteStringBigLength() throws IOException {
return super.testWriteStringBigLength();
}
// Overridden (not delegated): BufferedWriter.write(String, int, int) is
// specified to write nothing for a negative length rather than throw.
public Result testWriteStringNegativeLength() throws IOException {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw);
MultiThreadRunner.waitAtBarrier();
bw.write("qwerty", 0, -5);
bw.close();
if (mw.toString().length() != 0) {
return failed("data is written with negative string length");
}
return passed();
}
public Result testWriteStringNegativeStart() throws IOException {
return super.testWriteStringNegativeStart();
}
public Result testWriteStringSlice() throws IOException {
return super.testWriteStringSlice();
}
// newLine() must emit the platform line separator between writes.
public Result testNewLine() throws IOException {
BufferedWriter bw = (BufferedWriter) getTestedWriter();
MultiThreadRunner.waitAtBarrier();
bw.write("ab");
bw.newLine();
bw.write("cd");
bw.close();
String s = getWriterAsString(bw);
if (s.equals("ab" + System.getProperty("line.separator") + "cd")) {
return passed();
}
return failed("expected another value, got:" + getWriterAsString(bw)
+ ":");
}
// A single char must stay in the buffer until an explicit flush().
// NOTE(review): any Throwable here is printed and the test still passes —
// presumably deliberate leniency; confirm against the test-suite conventions.
public Result testBufferedOutput0001() throws IOException {
try {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw, 10);
MultiThreadRunner.waitAtBarrier();
bw.write('a');
if (mw.toString().length() != 0) {
return failed("data is not buffered");
}
bw.flush();
if (mw.toString().length() != 1) {
return failed("data is not flushed");
}
} catch (Throwable e) {
e.printStackTrace();
}
return passed();
}
// A small char[] (below buffer size 10) must be buffered until flush().
public Result testBufferedOutput0002() throws IOException {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw, 10);
MultiThreadRunner.waitAtBarrier();
bw.write(new char[] { 'a', 'b' });
if (mw.toString().length() != 0) {
return failed("data is not buffered");
}
bw.flush();
if (mw.toString().length() != 2) {
return failed("data is not flushed");
}
return passed();
}
// A char[] larger than the buffer (15 > 10) must bypass the buffer and be
// written straight through to the underlying writer.
public Result testBufferedOutput0003() throws IOException {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw, 10);
MultiThreadRunner.waitAtBarrier();
bw.write(new char[] { 'a', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b',
'b', 'b', 'b', 'b', 'b' });
if (mw.toString().length() != 15) {
return failed("data is not written directly");
}
return passed();
}
// A short String must be buffered until flush(), like the char[] case.
public Result testBufferedOutput0004() throws IOException {
MockWriter mw = new MockWriter();
BufferedWriter bw = new BufferedWriter(mw, 10);
MultiThreadRunner.waitAtBarrier();
bw.write("ba");
if (mw.toString().length() != 0) {
return failed("data is not buffered");
}
bw.flush();
if (mw.toString().length() != 2) {
return failed("data is not flushed");
}
return passed();
}
// Every mutating operation after close() must throw IOException.
public Result testExceptionAfterClose() throws IOException {
BufferedWriter bw = (BufferedWriter) getTestedWriter();
bw.close();
try {
bw.flush();
return failed("expected flush() to throw exception after close()");
} catch (IOException e) {
}
try {
bw.newLine();
return failed("expected newLine() to throw exception after close()");
} catch (IOException e) {
}
try {
bw.write(new char[] { 'a', 'b' });
return failed("expected write(char[]) to throw exception after close()");
} catch (IOException e) {
}
try {
bw.write('a');
return failed("expected write(int) to throw exception after close()");
} catch (IOException e) {
}
try {
bw.write("a");
return failed("expected write(String) to throw exception after close()");
} catch (IOException e) {
}
return passed();
}
}
| |
/*
* This file is part of the DITA Open Toolkit project.
* See the accompanying license.txt file for applicable licenses.
*/
/*
* (c) Copyright IBM Corp. 2005, 2006 All Rights Reserved.
*/
package org.dita.dost.index;
import static org.dita.dost.util.Constants.*;
import java.util.*;
import org.dita.dost.util.DITAOTCollator;
/**
* This class represents an indexterm.
*
* @version 1.0 2005-04-30
*
* @author Wu, Zhi Qiang
*/
public final class IndexTerm implements Comparable<IndexTerm> {
    /** The locale of the indexterm, used for sorting. */
    private static Locale termLocale = null;

    /** The name of the indexterm. */
    private String termName = null;

    /** The target list of the indexterm. */
    private List<IndexTermTarget> targetList = null;

    /** The sorting termKey of the indexterm, default will be the term name. */
    private String termKey = null;

    /** The start attribute. */
    private String start = null;

    /** The end attribute. */
    private String end = null;

    /** The sub indexterms contained by this indexterm. */
    private List<IndexTerm> subTerms = null;

    /** The prefix added to the term name (such as IndexTerm_Prefix_See or IndexTerm_Prefix_See_Also). */
    private String termPrefix = null;

    /** Locales written right-to-left; for these the localized prefix is appended after the term name. */
    private static final ArrayList<String> rtlLocaleList;

    /**
     * Whether the current term is a leaf term: the indexterm element contains
     * no subterms, or only "index-see" / "index-see-also" subterms.
     */
    private boolean leaf = true;

    // Initialization for rtlLocaleList.
    static {
        rtlLocaleList = new ArrayList<String>(2);
        rtlLocaleList.add("ar_EG");
        rtlLocaleList.add("he_IL");
    }

    /**
     * Constructor.
     */
    public IndexTerm() {
        subTerms = new ArrayList<IndexTerm>(1);
        targetList = new ArrayList<IndexTermTarget>(1);
    }

    /**
     * Get the global locale of indexterm.
     *
     * @return Locale language
     */
    public static Locale getTermLocale() {
        return termLocale;
    }

    /**
     * Set the global locale of indexterm.
     *
     * @param locale locale
     */
    public static void setTermLocale(final Locale locale) {
        termLocale = locale;
    }

    /**
     * Get the index term name.
     *
     * @return term name
     */
    public String getTermName() {
        return termName;
    }

    /**
     * Set the index term name.
     *
     * @param name name to set
     */
    public void setTermName(final String name) {
        termName = name;
    }

    /**
     * Get the key used for sorting this term.
     * @return Returns the termKey.
     */
    public String getTermKey() {
        return termKey;
    }

    /**
     * Set the key used for sorting this term.
     * @param key The termKey to set.
     */
    public void setTermKey(final String key) {
        termKey = key;
    }

    /**
     * Get the sub term list.
     *
     * @return sub term list
     */
    public List<IndexTerm> getSubTerms() {
        return subTerms;
    }

    /**
     * Get the start attribute.
     * @return start attribute
     */
    public String getStartAttribute() {
        return start;
    }

    /**
     * Get the end attribute.
     * @return end attribute
     */
    public String getEndAttribute() {
        return end;
    }

    /**
     * Set the start attribute.
     * @param start attribute
     */
    public void setStartAttribute(final String start) {
        this.start = start;
    }

    /**
     * Set the end attribute.
     * @param end attribute
     */
    public void setEndAttribute(final String end) {
        this.end = end;
    }

    /**
     * Add a sub term into the sub term list. Duplicates (by equals, or by
     * matching full name and key) are merged into the existing entry instead
     * of being appended.
     *
     * @param term index term to be added
     */
    public void addSubTerm(final IndexTerm term) {
        if (!IndexTerm_Prefix_See.equals(term.getTermPrefix())
                && !IndexTerm_Prefix_See_Also.equals(term.getTermPrefix())) {
            // The new subterm is not "index-see"/"index-see-also", so this
            // term is no longer a leaf.
            leaf = false;
        }
        for (final IndexTerm subTerm : subTerms) {
            if (subTerm.equals(term)) {
                return;
            }
            // Merge targets and subterms when full name and key both match.
            // Objects.equals guards against null names/keys (the plain
            // .equals() calls here could previously throw NPE).
            if (Objects.equals(subTerm.getTermFullName(), term.getTermFullName())
                    && Objects.equals(subTerm.getTermKey(), term.getTermKey())) {
                subTerm.addTargets(term.getTargetList());
                subTerm.addSubTerms(term.getSubTerms());
                return;
            }
        }
        subTerms.add(term);
    }

    /**
     * Add all the sub terms in the list.
     *
     * @param terms terms list (null is ignored)
     */
    public void addSubTerms(final List<IndexTerm> terms) {
        if (terms == null) {
            return;
        }
        for (final IndexTerm term : terms) {
            addSubTerm(term);
        }
    }

    /**
     * IndexTerm will be equal if they have same name, key, prefix, target and subterms.
     *
     * @param o object to compare with.
     * @return boolean
     */
    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof IndexTerm)) {
            return false;
        }
        final IndexTerm it = (IndexTerm) o;
        // Objects.equals is already null-safe; the previous extra
        // "|| field != null && field.equals(...)" clauses were redundant.
        return Objects.equals(termName, it.getTermName())
                && Objects.equals(termKey, it.getTermKey())
                && Objects.equals(targetList, it.getTargetList())
                && Objects.equals(subTerms, it.getSubTerms())
                && Objects.equals(termPrefix, it.getTermPrefix());
    }

    /**
     * Generate hash code for IndexTerm, consistent with {@link #equals(Object)}.
     * @return hashcode
     */
    @Override
    public int hashCode() {
        // Objects.hash is null-safe; equals() tolerates null fields, so
        // hashCode() must not throw NPE on them (the previous
        // field.hashCode() calls did).
        return Objects.hash(termName, termKey, targetList, subTerms);
    }

    /**
     * Sort all the subterms recursively.
     */
    public void sortSubTerms() {
        // Null check must come before any dereference (the previous code
        // called subTerms.size() before testing subTerms != null).
        if (subTerms != null && !subTerms.isEmpty()) {
            Collections.sort(subTerms);
            for (final IndexTerm subTerm : subTerms) {
                subTerm.sortSubTerms();
            }
        }
    }

    /**
     * Compare the given indexterm with current term by sorting key.
     *
     * @param obj object to compare with
     * @return int
     */
    @Override
    public int compareTo(final IndexTerm obj) {
        // NOTE(review): assumes termKey handling of null is delegated to
        // DITAOTCollator — confirm that keys are always set before sorting.
        return DITAOTCollator.getInstance(termLocale).compare(termKey, obj.getTermKey());
    }

    /**
     * Get the target list of current indexterm.
     *
     * @return Returns the targetList.
     */
    public List<IndexTermTarget> getTargetList() {
        return targetList;
    }

    /**
     * Add a new indexterm target, skipping duplicates.
     *
     * @param target indexterm target
     */
    public void addTarget(final IndexTermTarget target) {
        if (!targetList.contains(target)) {
            targetList.add(target);
        }
    }

    /**
     * Add all the indexterm targets in the list.
     *
     * @param targets list of targets (null is ignored)
     */
    public void addTargets(final List<IndexTermTarget> targets) {
        if (targets == null) {
            return;
        }
        for (final IndexTermTarget target : targets) {
            addTarget(target);
        }
    }

    /**
     * See if this indexterm has sub terms.
     *
     * @return true if has subterms, false or else.
     */
    public boolean hasSubTerms() {
        return subTerms != null && !subTerms.isEmpty();
    }

    /**
     * @see java.lang.Object#toString()
     * @return string
     */
    @Override
    public String toString() {
        return "{Term name: " + termName + ", Term key: " + termKey + ", Target list: " + targetList.toString() + ", Sub-terms: " + subTerms.toString() + "}";
    }

    /**
     * Get the term prefix (such as IndexTerm_Prefix_See_Also).
     * @return term prefix
     */
    public String getTermPrefix() {
        return termPrefix;
    }

    /**
     * Set the term prefix (such as IndexTerm_Prefix_See_Also).
     * @param termPrefix term prefix to set
     */
    public void setTermPrefix(final String termPrefix) {
        this.termPrefix = termPrefix;
    }

    /**
     * Get the full term, with any prefix localized and positioned for the
     * current locale: the prefix follows the term name for RTL locales and
     * precedes it otherwise.
     * @return full term with prefix
     */
    public String getTermFullName() {
        if (termPrefix == null) {
            return termName;
        }
        if (termLocale == null) {
            return termPrefix + STRING_BLANK + termName;
        }
        // Resource key, e.g. "IndexTerm.index-see-also".
        final String prefixKey = "IndexTerm."
                + termPrefix.toLowerCase().trim().replace(' ', '-');
        if (rtlLocaleList.contains(termLocale.toString())) {
            return termName + STRING_BLANK + Messages.getString(prefixKey, termLocale);
        }
        return Messages.getString(prefixKey, termLocale) + STRING_BLANK + termName;
    }

    /**
     * Promote the prefix of the only sub-term from "index-see" to
     * "index-see-also" when this term has exactly one sub-term.
     * (The previous javadoc described the opposite direction; the code has
     * always promoted See to See-also.)
     */
    public void updateSubTerm() {
        if (subTerms.size() == 1) {
            // Only a single subterm needs updating.
            final IndexTerm term = subTerms.get(0);
            if (term.getTermPrefix() != null
                    && IndexTerm_Prefix_See.equalsIgnoreCase(term.getTermPrefix().trim())) {
                term.setTermPrefix(IndexTerm_Prefix_See_Also);
            }
        }
    }

    /**
     * Check whether this term is a leaf term: the indexterm element contains
     * no subterms, or only "index-see" / "index-see-also" subterms.
     * @return boolean
     */
    public boolean isLeaf() {
        return leaf;
    }
}
| |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.autofill;
import android.test.suitebuilder.annotation.MediumTest;
import android.test.suitebuilder.annotation.SmallTest;
import android.text.TextUtils;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.UrlUtils;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.test.ChromeActivityTestCaseBase;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.content.browser.test.util.DOMUtils;
import org.chromium.content_public.browser.WebContents;
import java.util.concurrent.TimeoutException;
/**
* Integration tests for the AutofillPopup.
*/
@CommandLineFlags.Add("reduce-security-for-testing")
public class AutofillDialogControllerTest extends ChromeActivityTestCaseBase<ChromeActivity> {
// Delay used when simulating dialog interaction callbacks.
private static final long DIALOG_CALLBACK_DELAY_MILLISECONDS = 50;
// Billing profile test data (credit-card holder).
private static final String TEST_NAME = "Joe Doe";
private static final String TEST_PHONE = "(415)413-0703";
private static final String TEST_PHONE_UNFORMATTED = "4154130703";
private static final String TEST_EMAIL = "email@server.com";
// Card data: the Visa test number, CSC, and expiration.
private static final String TEST_CC_NUMBER = "4111111111111111";
private static final String TEST_CC_CSC = "123";
private static final int TEST_CC_EXP_MONTH = 11;
private static final int TEST_CC_EXP_YEAR = 2015;
// Billing address (US).
private static final String TEST_BILLING1 = "123 Main street";
private static final String TEST_BILLING2 = "apt 456";
private static final String TEST_BILLING3 = "leave at the office";
private static final String TEST_BILLING_STREET =
TEST_BILLING1 + "\n" + TEST_BILLING2 + "\n" + TEST_BILLING3;
private static final String TEST_BILLING_CITY = "Schenectady";
private static final String TEST_BILLING_DL = ""; // dependent locality
private static final String TEST_BILLING_STATE = "NY";
private static final String TEST_BILLING_ZIP = "12345";
private static final String TEST_BILLING_SORTING_CODE = ""; // sorting code
private static final String TEST_BILLING_COUNTRY = "US";
private static final String TEST_BILLING_LANGUAGE = ""; // language
// Shipping profile test data (SE) — deliberately different from billing so
// tests can tell which profile filled a field.
private static final String TEST_SHIPPING_NAME = "Mister Receiver";
private static final String TEST_SHIPPING_PHONE = "+46 8 713 99 99";
private static final String TEST_SHIPPING_PHONE_UNFORMATTED = "+4687139999";
private static final String TEST_SHIPPING1 = "19 Farstaplan";
private static final String TEST_SHIPPING2 = "Third floor";
private static final String TEST_SHIPPING3 = "please call first";
private static final String TEST_SHIPPING_STREET =
TEST_SHIPPING1 + "\n" + TEST_SHIPPING2 + "\n" + TEST_SHIPPING3;
private static final String TEST_SHIPPING_CITY = "Farsta";
private static final String TEST_SHIPPING_DL = ""; // dependent locality
private static final String TEST_SHIPPING_STATE = "Stockholm";
private static final String TEST_SHIPPING_ZIP = "12346";
private static final String TEST_SHIPPING_SORTING_CODE = ""; // sorting code
private static final String TEST_SHIPPING_COUNTRY = "SE";
private static final String TEST_SHIPPING_LANGUAGE = ""; // language
// HTML scaffolding shared by all generated test pages: a form with a button
// that triggers requestAutocomplete() on submit.
private static final String HTML_PRELUDE = "<html>"
+ "<head>"
+ " <meta name=\"viewport\""
+ " content=\"width=device-width, initial-scale=1.0, maximum-scale=1.0\" />"
+ "</head>"
+ "<body>"
+ "<form id=\"id-form\">"
+ " <button id=\"id-button\">DO INTERACTIVE AUTOCOMPLETE</button>";
// Postlude reports autocomplete success/failure into DOM nodes so tests can
// poll the result via DOMUtils.
private static final String HTML_POSTLUDE = "</form>"
+ "<div id=\"was-autocompleted\">no</div>"
+ ":<div id=\"autocomplete-failure-reason\"></div>"
+ "<script>"
+ "var form = document.forms[0];"
+ "form.onsubmit = function(e) {"
+ " e.preventDefault();"
+ " form.requestAutocomplete();"
+ "};"
+ "form.onautocomplete = function() {"
+ " document.getElementById('was-autocompleted').textContent = 'succeeded';"
+ "};"
+ "form.onautocompleteerror = function(e) {"
+ " document.getElementById('was-autocompleted').textContent = 'failed';"
+ " document.getElementById('autocomplete-failure-reason').textContent = e.reason;"
+ "};"
+ "</script></body></html>";
/**
 * Builds a data: URI for a requestAutocomplete() test page. The page always
 * contains the basic credit-card/billing fields; full billing address,
 * shipping address and phone-number fields are added on demand. Every input
 * is pre-filled with a placeholder value ("W") so tests can detect whether
 * autofill actually replaced it.
 *
 * @param requestFullBilling include full billing-address fields
 * @param requestShipping include shipping name/address fields
 * @param requestPhoneNumbers include billing (and, with shipping, shipping) phone fields
 * @return data: URI encoding the generated HTML page
 */
private static String generatePage(
boolean requestFullBilling, boolean requestShipping, boolean requestPhoneNumbers) {
StringBuilder sb = new StringBuilder();
sb.append(HTML_PRELUDE);
// Basic fieldset: card holder, email, card number/expiration/CSC, billing zip.
sb.append("<fieldset>"
+ "<input id=\"id-billing-name\" autocomplete=\"billing name\" value=\"W\">"
+ "<input id=\"id-cc-name\" autocomplete=\"cc-name\" value=\"W\">"
+ "<input id=\"id-email\" autocomplete=\"email\" type=\"email\" value=\"W@W.W\">"
+ "<input id=\"id-cc-number\" autocomplete=\"cc-number\" value=\"W\">"
+ "<input id=\"id-cc-exp\" "
+ " autocomplete=\"cc-exp\" type=\"month\" value=\"1111-11\">"
+ "<select id=\"id-cc-exp-month\" autocomplete=\"cc-exp-month\">"
+ " <option value=\"1\" selected>1</option>"
+ " <option value=\"2\">2</option>"
+ " <option value=\"3\">3</option>"
+ " <option value=\"4\">4</option>"
+ " <option value=\"5\">5</option>"
+ " <option value=\"6\">6</option>"
+ " <option value=\"7\">7</option>"
+ " <option value=\"8\">8</option>"
+ " <option value=\"9\">9</option>"
+ " <option value=\"10\">10</option>"
+ " <option value=\"11\">11</option>"
+ " <option value=\"12\">12</option>"
+ "</select>"
+ "<select id=\"id-cc-exp-year\" autocomplete=\"cc-exp-year\">"
+ " <option value=\"2011\" selected>2011</option>"
+ " <option value=\"2012\">2012</option>"
+ " <option value=\"2013\">2013</option>"
+ " <option value=\"2014\">2014</option>"
+ " <option value=\"2015\">2015</option>"
+ "</select>"
+ "<input id=\"id-cc-csc\" autocomplete=\"cc-csc\" value=\"W\">"
+ "<input id=\"id-cc-zip\" autocomplete=\"billing postal-code\" value=\"W\">");
if (requestFullBilling) {
// Full billing address: street lines, city, state, country.
sb.append("<input id=\"id-cc-1\" autocomplete=\"billing address-line1\" value=\"W\">"
+ "<input id=\"id-cc-2\" autocomplete=\"billing address-line2\" value=\"W\">"
+ "<textarea id=\"id-cc-str\""
+ " autocomplete=\"billing street-address\">W</textarea>"
+ "<input id=\"id-cc-city\" autocomplete=\"billing locality\" value=\"W\">"
+ "<input id=\"id-cc-state\" autocomplete=\"billing region\" value=\"W\">"
+ "<select id=\"id-cc-country\" autocomplete=\"billing country\">"
+ " <option value=\"NL\" selected>Netherlands</option>"
+ " <option value=\"US\">United States</option>"
+ " <option value=\"SE\">Sweden</option>"
+ " <option value=\"RU\">Russia</option>"
+ "</select>");
}
sb.append("</fieldset>");
if (requestShipping) {
// Shipping name and full shipping address.
sb.append("<fieldset>"
+ "<input id=\"id-name\" autocomplete=\"name\" value=\"W\">"
+ "<input id=\"id-h-name\" autocomplete=\"shipping name\" value=\"W\">"
+ "<input id=\"id-h-1\" autocomplete=\"shipping address-line1\" value=\"W\">"
+ "<input id=\"id-h-2\" autocomplete=\"shipping address-line2\" value=\"W\">"
+ "<textarea id=\"id-h-str\""
+ " autocomplete=\"shipping street-address\">W</textarea>"
+ "<input id=\"id-h-city\" autocomplete=\"shipping locality\" value=\"W\">"
+ "<input id=\"id-h-state\" autocomplete=\"shipping region\" value=\"W\">"
+ "<input id=\"id-h-zip\" autocomplete=\"shipping postal-code\" value=\"W\">"
+ "<select id=\"id-h-country\" autocomplete=\"shipping country\">"
+ " <option value=\"NL\" selected>Netherlands</option>"
+ " <option value=\"US\">United States</option>"
+ " <option value=\"SE\">Sweden</option>"
+ " <option value=\"RU\">Russia</option>"
+ "</select>"
+ "</fieldset>");
}
if (requestPhoneNumbers) {
// Billing phone always; shipping/plain phone only if shipping requested.
sb.append("<fieldset>"
+ "<input id=\"id-cc-tel\" autocomplete=\"billing tel\" value=\"W\">");
if (requestShipping) {
sb.append("<input id=\"id-h-tel\" autocomplete=\"shipping tel\" value=\"W\">"
+ "<input id=\"id-tel\" autocomplete=\"tel\" value=\"W\">");
}
sb.append("</fieldset>");
}
sb.append(HTML_POSTLUDE);
return UrlUtils.encodeHtmlDataUri(sb.toString());
}
/** Constructs the test case bound to {@link ChromeActivity}. */
public AutofillDialogControllerTest() {
super(ChromeActivity.class);
}
/** Overridden to suppress automatic launch; each test loads its own page. */
@Override
public void startMainActivity() throws InterruptedException {
// Don't launch activity automatically.
}
@MediumTest
@Feature({"autofill"})
public void testFieldsAreFilledMinimal() throws InterruptedException, TimeoutException {
    // Minimal request: no full billing address, no shipping, no phone numbers.
    verifyFieldsAreFilled(/* requestFullBilling= */ false,
            /* requestShipping= */ false, /* requestPhoneNumbers= */ false);
}
@MediumTest
@Feature({"autofill"})
public void testFieldsAreFilledFullBilling() throws InterruptedException, TimeoutException {
    // Full billing address only; no shipping, no phone numbers.
    verifyFieldsAreFilled(/* requestFullBilling= */ true,
            /* requestShipping= */ false, /* requestPhoneNumbers= */ false);
}
@MediumTest
@Feature({"autofill"})
public void testFieldsAreFilledShipping() throws InterruptedException, TimeoutException {
    // Full billing plus shipping; no phone numbers.
    verifyFieldsAreFilled(/* requestFullBilling= */ true,
            /* requestShipping= */ true, /* requestPhoneNumbers= */ false);
}
@MediumTest
@Feature({"autofill"})
public void testFieldsAreFilledBillingPhone() throws InterruptedException, TimeoutException {
    // Full billing with billing phone; no shipping.
    verifyFieldsAreFilled(/* requestFullBilling= */ true,
            /* requestShipping= */ false, /* requestPhoneNumbers= */ true);
}
@MediumTest
@Feature({"autofill"})
public void testFieldsAreFilledEverything() throws InterruptedException, TimeoutException {
    // Everything on: full billing, shipping, and phone numbers.
    verifyFieldsAreFilled(/* requestFullBilling= */ true,
            /* requestShipping= */ true, /* requestPhoneNumbers= */ true);
}
// It is currently unspecified whether autocomplete="name" gives a SHIPPING or a BILLING name.
// I'm assuming here that this is a shipping name.
@SmallTest
@Feature({"autofill"})
public void testRacTypeName() throws InterruptedException, TimeoutException {
    // Bare "name" is assumed to resolve to the shipping name (see note above).
    final String field = "<input id=\"id\" autocomplete=\"name\">";
    verifyOneFieldWithCc(field, TEST_SHIPPING_NAME, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingName() throws InterruptedException, TimeoutException {
    // "billing name" must fill the billing profile's name.
    final String field = "<input id=\"id\" autocomplete=\"billing name\">";
    verifyOneFieldWithCc(field, TEST_NAME, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingName() throws InterruptedException, TimeoutException {
    // "shipping name" must fill the shipping profile's name.
    final String field = "<input id=\"id\" autocomplete=\"shipping name\">";
    verifyOneFieldWithCc(field, TEST_SHIPPING_NAME, "id", false, true, false);
}
// It is currently unspecified whether autocomplete="name" gives a SHIPPING or a BILLING phone.
// I'm assuming here that this is a shipping phone.
@SmallTest
@Feature({"autofill"})
public void testRacTypeTel() throws InterruptedException, TimeoutException {
    // Bare "tel" is assumed to resolve to the shipping phone (see note above).
    final String field = "<input id=\"id\" autocomplete=\"tel\">";
    verifyOneFieldWithCc(field, TEST_SHIPPING_PHONE_UNFORMATTED, "id", false, true, true);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingTel() throws InterruptedException, TimeoutException {
    // "billing tel" must fill the billing phone (digits only).
    final String field = "<input id=\"id\" autocomplete=\"billing tel\">";
    verifyOneFieldWithCc(field, TEST_PHONE_UNFORMATTED, "id", true, false, true);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingTel() throws InterruptedException, TimeoutException {
    // "shipping tel" must fill the shipping phone (digits only).
    final String field = "<input id=\"id\" autocomplete=\"shipping tel\">";
    verifyOneFieldWithCc(field, TEST_SHIPPING_PHONE_UNFORMATTED, "id", false, true, true);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeEmail() throws InterruptedException, TimeoutException {
    // "email" must fill the account email address.
    final String field = "<input id=\"id\" autocomplete=\"email\" type=\"email\">";
    verifyOneFieldWithCc(field, TEST_EMAIL, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeCCName() throws InterruptedException, TimeoutException {
    // "cc-name" must fill the card holder's name.
    final String field = "<input id=\"id\" autocomplete=\"cc-name\">";
    verifyOneField(field, TEST_NAME, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeCCNumber() throws InterruptedException, TimeoutException {
    // "cc-number" must fill the card number.
    final String field = "<input id=\"id\" autocomplete=\"cc-number\">";
    verifyOneField(field, TEST_CC_NUMBER, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeCCCsc() throws InterruptedException, TimeoutException {
    // "cc-csc" must fill the card security code.
    final String field = "<input id=\"id\" autocomplete=\"cc-csc\">";
    verifyOneField(field, TEST_CC_CSC, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeCCExp() throws InterruptedException, TimeoutException {
    // A month-typed input must receive the expiration as "YYYY-MM".
    final String field =
            "<input id=\"id\" autocomplete=\"cc-exp\" type=\"month\" value=\"1111-11\">";
    final String expected = "" + TEST_CC_EXP_YEAR + "-" + TEST_CC_EXP_MONTH;
    verifyOneField(field, expected, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeCCExpMonth() throws InterruptedException, TimeoutException {
    // A month select must end up with the expiration month option chosen.
    final String field = "<select id=\"id\" autocomplete=\"cc-exp-month\">"
            + " <option value=\"1\" selected>1</option>"
            + " <option value=\"2\">2</option>"
            + " <option value=\"3\">3</option>"
            + " <option value=\"4\">4</option>"
            + " <option value=\"5\">5</option>"
            + " <option value=\"6\">6</option>"
            + " <option value=\"7\">7</option>"
            + " <option value=\"8\">8</option>"
            + " <option value=\"9\">9</option>"
            + " <option value=\"10\">10</option>"
            + " <option value=\"11\">11</option>"
            + " <option value=\"12\">12</option>"
            + "</select>";
    verifyOneField(field, "" + TEST_CC_EXP_MONTH, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeCCExpYear() throws InterruptedException, TimeoutException {
    // A year select must end up with the expiration year option chosen.
    final String field = "<select id=\"id\" autocomplete=\"cc-exp-year\">"
            + " <option value=\"2011\" selected>2011</option>"
            + " <option value=\"2012\">2012</option>"
            + " <option value=\"2013\">2013</option>"
            + " <option value=\"2014\">2014</option>"
            + " <option value=\"2015\">2015</option>"
            + "</select>";
    verifyOneField(field, "" + TEST_CC_EXP_YEAR, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingCountry() throws InterruptedException, TimeoutException {
    // "billing country" select must pick the billing country code.
    final String field = "<select id=\"id\" autocomplete=\"billing country\">"
            + " <option value=\"NL\" selected>Netherlands</option>"
            + " <option value=\"US\">United States</option>"
            + " <option value=\"SE\">Sweden</option>"
            + " <option value=\"RU\">Russia</option>"
            + "</select>";
    verifyOneFieldWithCc(field, TEST_BILLING_COUNTRY, "id", true, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingPostalCode() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"billing postal-code\">",
TEST_BILLING_ZIP, "id", false, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingAddressLine1() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"billing address-line1\">",
TEST_BILLING1, "id", true, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingAddressLine2() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"billing address-line2\">",
TEST_BILLING2, "id", true, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingStreetAddress() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<textarea id=\"id\" autocomplete=\"billing street-address\"></textarea>",
TEST_BILLING_STREET, "id", true, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingLocality() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"billing locality\">",
TEST_BILLING_CITY, "id", true, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeBillingRegion() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"billing region\">",
TEST_BILLING_STATE, "id", true, false, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingCountry() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<select id=\"id\" autocomplete=\"shipping country\">"
+ " <option value=\"NL\" selected>Netherlands</option>"
+ " <option value=\"US\">United States</option>"
+ " <option value=\"SE\">Sweden</option>"
+ " <option value=\"RU\">Russia</option>"
+ "</select>",
TEST_SHIPPING_COUNTRY, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingPostalCode() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"shipping postal-code\">",
TEST_SHIPPING_ZIP, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingAddressLine1() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"shipping address-line1\">",
TEST_SHIPPING1, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingAddressLine2() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"shipping address-line2\">",
TEST_SHIPPING2, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingStreetAddress() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<textarea id=\"id\" autocomplete=\"shipping street-address\"></textarea>",
TEST_SHIPPING_STREET, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingLocality() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"shipping locality\">",
TEST_SHIPPING_CITY, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRacTypeShippingRegion() throws InterruptedException, TimeoutException {
verifyOneFieldWithCc(
"<input id=\"id\" autocomplete=\"shipping region\">",
TEST_SHIPPING_STATE, "id", false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRefuseToShowWithNoCcField() throws InterruptedException, TimeoutException {
String requested = "<input id=\"id\" autocomplete=\"shipping locality\">";
setUpAndExpectFailedRequestAutocomplete(
UrlUtils.encodeHtmlDataUri(HTML_PRELUDE + requested + HTML_POSTLUDE),
false, true, false);
}
@SmallTest
@Feature({"autofill"})
public void testRefuseToShowWithNoAutocompleteAttributes()
throws InterruptedException, TimeoutException {
String requested = "<input id=\"id-cc-csc\">"
+ "<input id=\"id-email\" type=\"email\">"
+ "<input id=\"id-cc-name\">"
+ "<input id=\"id-shipping-locality\">";
setUpAndExpectFailedRequestAutocomplete(
UrlUtils.encodeHtmlDataUri(HTML_PRELUDE + requested + HTML_POSTLUDE),
false, true, false);
}
private void verifyOneField(
final String htmlFragment,
final String expected, final String actualId,
final boolean requestFullBilling,
final boolean requestShipping, final boolean requestPhoneNumbers)
throws InterruptedException, TimeoutException {
verifyOneFieldWithOptionalCc(htmlFragment, expected, actualId,
requestFullBilling, requestShipping, requestPhoneNumbers, false);
}
private void verifyOneFieldWithCc(
final String htmlFragment,
final String expected, final String actualId,
final boolean requestFullBilling,
final boolean requestShipping, final boolean requestPhoneNumbers)
throws InterruptedException, TimeoutException {
verifyOneFieldWithOptionalCc(htmlFragment, expected, actualId,
requestFullBilling, requestShipping, requestPhoneNumbers, true);
}
private void verifyOneFieldWithOptionalCc(
final String htmlFragment,
final String expected, final String actualId,
final boolean requestFullBilling,
final boolean requestShipping, final boolean requestPhoneNumbers,
final boolean requestCcInfo)
throws InterruptedException, TimeoutException {
final String optionalCcFragment = requestCcInfo
? "<input id=\"id-opt-cc-csc\" autocomplete=\"cc-csc\">"
: "";
final String url = UrlUtils.encodeHtmlDataUri(
HTML_PRELUDE
+ htmlFragment
+ optionalCcFragment
+ HTML_POSTLUDE);
setUpAndRequestAutocomplete(url, requestFullBilling, requestShipping, requestPhoneNumbers);
final WebContents webContents = getActivity().getCurrentContentViewCore().getWebContents();
assertEquals(actualId + " did not match",
expected, DOMUtils.getNodeValue(webContents, actualId));
if (requestCcInfo) {
assertEquals("cc-csc did not match",
TEST_CC_CSC, DOMUtils.getNodeValue(webContents, "id-opt-cc-csc"));
}
}
    /**
     * Loads the full generated test page, runs requestAutocomplete() and asserts
     * that every requested field was filled with its TEST_* constant.
     *
     * Billing address details are only checked when {@code requestFullBilling} is
     * set, shipping details only when {@code requestShipping} is set, and phone
     * numbers only when {@code requestPhoneNumbers} is set in addition.
     */
    private void verifyFieldsAreFilled(final boolean requestFullBilling,
            final boolean requestShipping, final boolean requestPhoneNumbers)
            throws InterruptedException, TimeoutException {
        setUpAndRequestAutocomplete(
                generatePage(requestFullBilling, requestShipping, requestPhoneNumbers),
                requestFullBilling, requestShipping, requestPhoneNumbers);
        final WebContents webContents = getActivity().getCurrentContentViewCore().getWebContents();
        // Fields that are always requested: identity, email and card data.
        assertEquals("billing name did not match",
                TEST_NAME, DOMUtils.getNodeValue(webContents, "id-billing-name"));
        assertEquals("email did not match",
                TEST_EMAIL, DOMUtils.getNodeValue(webContents, "id-email"));
        assertEquals("cc-name did not match",
                TEST_NAME, DOMUtils.getNodeValue(webContents, "id-cc-name"));
        assertEquals("cc-number did not match",
                TEST_CC_NUMBER, DOMUtils.getNodeValue(webContents, "id-cc-number"));
        assertEquals("cc-csc did not match",
                TEST_CC_CSC, DOMUtils.getNodeValue(webContents, "id-cc-csc"));
        assertEquals("cc-exp did not match",
                "" + TEST_CC_EXP_YEAR + "-" + TEST_CC_EXP_MONTH,
                DOMUtils.getNodeValue(webContents, "id-cc-exp"));
        assertEquals("cc-exp-month did not match",
                "" + TEST_CC_EXP_MONTH,
                DOMUtils.getNodeValue(webContents, "id-cc-exp-month"));
        assertEquals("cc-exp-year did not match",
                "" + TEST_CC_EXP_YEAR,
                DOMUtils.getNodeValue(webContents, "id-cc-exp-year"));
        assertEquals("billing postal-code did not match",
                TEST_BILLING_ZIP, DOMUtils.getNodeValue(webContents, "id-cc-zip"));
        // Full billing address fields are only present when requested.
        if (requestFullBilling) {
            assertEquals("billing address-line1 did not match",
                    TEST_BILLING1, DOMUtils.getNodeValue(webContents, "id-cc-1"));
            assertEquals("billing address-line2 did not match",
                    TEST_BILLING2, DOMUtils.getNodeValue(webContents, "id-cc-2"));
            assertEquals("billing street-address did not match",
                    TEST_BILLING_STREET, DOMUtils.getNodeValue(webContents, "id-cc-str"));
            assertEquals("billing locality did not match",
                    TEST_BILLING_CITY, DOMUtils.getNodeValue(webContents, "id-cc-city"));
            assertEquals("billing region did not match",
                    TEST_BILLING_STATE, DOMUtils.getNodeValue(webContents, "id-cc-state"));
            assertEquals("billing country did not match",
                    TEST_BILLING_COUNTRY, DOMUtils.getNodeValue(webContents, "id-cc-country"));
            if (requestPhoneNumbers) {
                assertEquals("billing tel did not match",
                        TEST_PHONE_UNFORMATTED,
                        DOMUtils.getNodeValue(webContents, "id-cc-tel"));
            }
        }
        // Shipping fields are only present when requested.
        if (requestShipping) {
            assertEquals("shipping name did not match",
                    TEST_SHIPPING_NAME, DOMUtils.getNodeValue(webContents, "id-h-name"));
            assertEquals("shipping postal-code did not match",
                    TEST_SHIPPING_ZIP, DOMUtils.getNodeValue(webContents, "id-h-zip"));
            assertEquals("shipping address-line1 did not match",
                    TEST_SHIPPING1, DOMUtils.getNodeValue(webContents, "id-h-1"));
            assertEquals("shipping address-line2 did not match",
                    TEST_SHIPPING2, DOMUtils.getNodeValue(webContents, "id-h-2"));
            assertEquals("shipping street-address did not match",
                    TEST_SHIPPING_STREET, DOMUtils.getNodeValue(webContents, "id-h-str"));
            assertEquals("shipping locality did not match",
                    TEST_SHIPPING_CITY, DOMUtils.getNodeValue(webContents, "id-h-city"));
            assertEquals("shipping region did not match",
                    TEST_SHIPPING_STATE, DOMUtils.getNodeValue(webContents, "id-h-state"));
            assertEquals("shipping country did not match",
                    TEST_SHIPPING_COUNTRY,
                    DOMUtils.getNodeValue(webContents, "id-h-country"));
            // It is currently unspecified whether autocomplete="name" gives a SHIPPING or
            // a BILLING name. I'm assuming here that this is a shipping name.
            assertEquals("name did not match",
                    TEST_SHIPPING_NAME, DOMUtils.getNodeValue(webContents, "id-name"));
            if (requestPhoneNumbers) {
                assertEquals("shipping tel did not match",
                        TEST_SHIPPING_PHONE_UNFORMATTED,
                        DOMUtils.getNodeValue(webContents, "id-h-tel"));
                // It is currently unspecified whether autocomplete="name" gives a SHIPPING or
                // a BILLING phone. I'm assuming here that this is a shipping phone.
                assertEquals("tel did not match",
                        TEST_SHIPPING_PHONE_UNFORMATTED,
                        DOMUtils.getNodeValue(webContents, "id-tel"));
            }
        }
    }
// Wait and assert helper methods -------------------------------------------------------------
private void setUpAndRequestAutocomplete(final String url,
final boolean requestFullBilling,
final boolean requestShipping,
final boolean requestPhoneNumbers) throws InterruptedException, TimeoutException {
setUpAndRequestAutocompleteImpl(url,
requestFullBilling, requestShipping, requestPhoneNumbers,
false);
}
private void setUpAndExpectFailedRequestAutocomplete(final String url,
final boolean requestFullBilling,
final boolean requestShipping,
final boolean requestPhoneNumbers) throws InterruptedException, TimeoutException {
setUpAndRequestAutocompleteImpl(url,
requestFullBilling, requestShipping, requestPhoneNumbers,
true);
}
    /**
     * Starts the activity on {@code url}, installs a mock autofill dialog
     * controller that returns canned wallet data after a short delay, clicks the
     * page's button that triggers requestAutocomplete() and waits for the page to
     * report the outcome.
     *
     * @param expectFailure when true the page's "was-autocompleted" node is
     *     expected to read "failed" instead of "succeeded".
     */
    private void setUpAndRequestAutocompleteImpl(final String url,
            final boolean requestFullBilling,
            final boolean requestShipping,
            final boolean requestPhoneNumbers,
            final boolean expectFailure)
            throws InterruptedException, TimeoutException {
        startMainActivityWithURL(url);
        final ContentViewCore viewCore = getActivity().getCurrentContentViewCore();
        final WebContents webContents = getActivity().getCurrentContentViewCore().getWebContents();
        // Canned dialog result: card data plus a billing and a shipping address
        // built from the TEST_* constants.
        AutofillDialogResult.ResultWallet result = new AutofillDialogResult.ResultWallet(
                TEST_EMAIL, "Google Transaction ID",
                new AutofillDialogResult.ResultCard(
                        TEST_CC_EXP_MONTH, TEST_CC_EXP_YEAR,
                        TEST_CC_NUMBER, TEST_CC_CSC),
                new AutofillDialogResult.ResultAddress(
                        TEST_NAME, TEST_PHONE,
                        TEST_BILLING_STREET,
                        TEST_BILLING_CITY, TEST_BILLING_DL, TEST_BILLING_STATE,
                        TEST_BILLING_ZIP, TEST_BILLING_SORTING_CODE, TEST_BILLING_COUNTRY,
                        TEST_BILLING_LANGUAGE),
                new AutofillDialogResult.ResultAddress(
                        TEST_SHIPPING_NAME, TEST_SHIPPING_PHONE,
                        TEST_SHIPPING_STREET,
                        TEST_SHIPPING_CITY, TEST_SHIPPING_DL, TEST_SHIPPING_STATE,
                        TEST_SHIPPING_ZIP, TEST_SHIPPING_SORTING_CODE, TEST_SHIPPING_COUNTRY,
                        TEST_SHIPPING_LANGUAGE));
        // The mock must be installed before the button click triggers the dialog.
        MockAutofillDialogController.installMockFactory(
                DIALOG_CALLBACK_DELAY_MILLISECONDS,
                result,
                true, "", "", "", "",
                requestFullBilling, requestShipping, requestPhoneNumbers);
        DOMUtils.clickNode(this, viewCore, "id-button");
        waitForInputFieldFill(webContents);
        // The page records the requestAutocomplete() outcome in a DOM node.
        if (!expectFailure) {
            assertEquals("requestAutocomplete failed",
                    "succeeded",
                    DOMUtils.getNodeContents(webContents, "was-autocompleted"));
        } else {
            assertEquals("requestAutocomplete succeeded when it should be failing",
                    "failed",
                    DOMUtils.getNodeContents(webContents, "was-autocompleted"));
        }
    }
private void waitForInputFieldFill(final WebContents webContents) throws InterruptedException {
CriteriaHelper.pollForCriteria(new Criteria("requestAutocomplete() never completed.") {
@Override
public boolean isSatisfied() {
String wasAutocompleted;
try {
wasAutocompleted = DOMUtils.getNodeContents(
webContents, "was-autocompleted");
} catch (InterruptedException e) {
return false;
} catch (TimeoutException e) {
return false;
}
return TextUtils.equals("succeeded", wasAutocompleted)
|| TextUtils.equals("failed", wasAutocompleted);
}
});
}
}
| |
package io.jooby.openapi;
import com.fasterxml.jackson.databind.JavaType;
import examples.Letter;
import examples.MvcApp;
import examples.MvcAppWithRoutes;
import examples.MvcInstanceApp;
import examples.NoAppClass;
import examples.RouteBodyArgs;
import examples.RouteFormArgs;
import examples.RouteImport;
import examples.RouteImportReferences;
import examples.RoutePathArgs;
import examples.RouteQueryArgs;
import examples.RouteIdioms;
import examples.RouteInline;
import examples.RoutePatternIdioms;
import examples.RouteReturnTypeApp;
import examples.ABean;
import examples.RouterProduceConsume;
import io.jooby.internal.openapi.RequestBodyExt;
import io.swagger.v3.oas.models.media.ArraySchema;
import io.swagger.v3.oas.models.media.BooleanSchema;
import io.swagger.v3.oas.models.media.IntegerSchema;
import io.swagger.v3.oas.models.media.NumberSchema;
import io.swagger.v3.oas.models.media.Schema;
import io.swagger.v3.oas.models.media.StringSchema;
import kt.KtAppWithMainKt;
import kt.KtCoroutineRouteIdioms;
import kt.KtMvcApp;
import kt.KtMvcAppWithRoutes;
import kt.KtMvcInstanceApp;
import kt.KtMvcObjectApp;
import kt.KtNoAppClassKt;
import kt.KtRouteIdioms;
import kt.KtRouteImport;
import kt.KtRouteRef;
import kt.KtRouteReturnType;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.Callable;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class OpenAPIGeneratorTest {
@OpenAPITest(value = RoutePatternIdioms.class)
public void routePatternIdioms(RouteIterator iterator) {
iterator
.next(route -> {
assertEquals("GET /variable", route.toString());
})
.next(route -> {
assertEquals("DELETE /variable/{id}", route.toString());
})
.next(route -> {
assertEquals("POST /variable/foo", route.toString());
})
.next(route -> {
assertEquals("PUT /variable/variable/foo", route.toString());
})
.verify();
}
@OpenAPITest(value = RouteInline.class)
public void routeInline(RouteIterator iterator) {
iterator
.next(route -> {
assertEquals("GET /inline", route.toString());
})
.verify();
}
@OpenAPITest(value = RouteIdioms.class)
public void routeIdioms(RouteIterator iterator) {
iterator
.next(route -> {
assertEquals("GET /aaa/bbb", route.toString());
})
.next(route -> {
assertEquals("GET /aaa/ccc/ddd", route.toString());
})
.next(route -> {
assertEquals("GET /aaa/eee", route.toString());
})
.next(route -> {
assertEquals("GET /inline", route.toString());
})
.next(route -> {
assertEquals("GET /routeReference", route.toString());
})
.next(route -> {
assertEquals("GET /staticRouteReference", route.toString());
})
.next(route -> {
assertEquals("GET /externalReference", route.toString());
})
.next(route -> {
assertEquals("GET /externalStaticReference", route.toString());
})
.next(route -> {
assertEquals("GET /alonevar", route.toString());
})
.next(route -> {
assertEquals("GET /aloneinline", route.toString());
})
.next(route -> {
assertEquals("GET /lambdaRef", route.toString());
})
.verify();
}
@OpenAPITest(value = KtRouteIdioms.class)
public void ktRoute(RouteIterator iterator) {
iterator
.next(route -> {
assertEquals("GET /implicitContext", route.toString());
})
.next(route -> {
assertEquals("GET /explicitContext", route.toString());
})
.next(route -> {
assertEquals("GET /api/people", route.toString());
})
.next(route -> {
assertEquals("GET /api/version", route.toString());
})
.verify();
}
@OpenAPITest(value = KtCoroutineRouteIdioms.class)
public void ktCoroutineRoute(RouteIterator iterator) {
iterator
.next(route -> {
assertEquals("GET /version", route.toString());
})
.next(route -> {
assertEquals("PATCH /api/version", route.toString());
})
.next(route -> {
assertEquals("GET /api/people", route.toString());
})
.verify();
}
@OpenAPITest(value = RouteQueryArgs.class)
public void routeQueryArguments(RouteIterator iterator) {
iterator
.next((route, args) -> {
assertEquals("GET /", route.toString());
args
.next(it -> {
assertEquals(String.class.getName(), it.getJavaType());
assertEquals("str", it.getName());
assertNull(it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals(int.class.getName(), it.getJavaType());
assertEquals("i", it.getName());
assertNull(it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.List<java.lang.String>", it.getJavaType());
assertEquals("listStr", it.getName());
assertNull(it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.List<java.lang.Double>", it.getJavaType());
assertEquals("listType", it.getName());
assertNull(it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals(String.class.getName(), it.getJavaType());
assertEquals("defstr", it.getName());
assertEquals("200", it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals(int.class.getName(), it.getJavaType());
assertEquals("defint", it.getName());
assertEquals(87, it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals(int.class.getName(), it.getJavaType());
assertEquals("defint0", it.getName());
assertEquals(0, it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals(boolean.class.getName(), it.getJavaType());
assertEquals("defbool", it.getName());
assertEquals(true, it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.Optional<java.lang.String>", it.getJavaType());
assertEquals("optstr", it.getName());
assertEquals(null, it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.Optional<java.lang.Integer>", it.getJavaType());
assertEquals("optint", it.getName());
assertEquals(null, it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.Optional<java.lang.String>", it.getJavaType());
assertEquals("optstr2", it.getName());
assertEquals("optional", it.getDefaultValue());
assertFalse(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.lang.Integer", it.getJavaType());
assertEquals("toI", it.getName());
assertEquals(null, it.getDefaultValue());
assertNull(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals(Letter.class.getName(), it.getJavaType());
assertEquals("letter", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.Map<java.lang.String,java.lang.String>", it.getJavaType());
assertEquals("query", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertFalse(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("java.util.Map<java.lang.String,java.util.List<java.lang.String>>",
it.getJavaType());
assertEquals("queryList", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertFalse(it.isSingle());
assertEquals("query", it.getIn());
})
.next(it -> {
assertEquals("foo", it.getName());
assertEquals(null, it.getDefaultValue());
assertNull(it.getRequired());
assertTrue(it.isSingle());
assertEquals("query", it.getIn());
})
.verify();
})
.verify();
}
@OpenAPITest(value = RoutePathArgs.class)
public void routePathArguments(RouteIterator iterator) {
iterator
.next((route, args) -> {
assertEquals("GET /", route.toString());
args
.next(it -> {
assertEquals(String.class.getName(), it.getJavaType());
assertEquals("str", it.getName());
assertNull(it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(int.class.getName(), it.getJavaType());
assertEquals("i", it.getName());
assertNull(it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.List<java.lang.String>", it.getJavaType());
assertEquals("listStr", it.getName());
assertNull(it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.List<java.lang.Double>", it.getJavaType());
assertEquals("listType", it.getName());
assertNull(it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(String.class.getName(), it.getJavaType());
assertEquals("defstr", it.getName());
assertEquals("200", it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(int.class.getName(), it.getJavaType());
assertEquals("defint", it.getName());
assertEquals(87, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(int.class.getName(), it.getJavaType());
assertEquals("defint0", it.getName());
assertEquals(0, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(boolean.class.getName(), it.getJavaType());
assertEquals("defbool", it.getName());
assertEquals(true, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.Optional<java.lang.String>", it.getJavaType());
assertEquals("optstr", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.Optional<java.lang.Integer>", it.getJavaType());
assertEquals("optint", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.Optional<java.lang.String>", it.getJavaType());
assertEquals("optstr2", it.getName());
assertEquals("optional", it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.lang.Integer", it.getJavaType());
assertEquals("toI", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(Letter.class.getName(), it.getJavaType());
assertEquals("letter", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertTrue(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.Map<java.lang.String,java.lang.String>", it.getJavaType());
assertEquals("path", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertFalse(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals("java.util.Map<java.lang.String,java.util.List<java.lang.String>>",
it.getJavaType());
assertEquals("pathList", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertFalse(it.isSingle());
assertEquals("path", it.getIn());
})
.next(it -> {
assertEquals(ABean.class.getName(), it.getJavaType());
assertEquals("path", it.getName());
assertEquals(null, it.getDefaultValue());
assertTrue(it.getRequired());
assertFalse(it.isSingle());
assertEquals("path", it.getIn());
})
.verify();
})
.verify();
}
@OpenAPITest(value = RouteFormArgs.class)
public void routeFormArguments(RouteIterator iterator) {
iterator
.next((route, args) -> {
assertEquals("GET /", route.toString());
RequestBodyExt requestBody = route.getRequestBody();
assertNotNull(requestBody);
assertNotNull(requestBody.getContentType());
assertNotNull(requestBody.getContent());
assertNotNull(requestBody.getContent().get(requestBody.getContentType()));
Schema schema = requestBody.getContent().get(requestBody.getContentType()).getSchema();
assertNotNull(schema);
new AssertIterator<Map.Entry<String, Schema>>(schema.getProperties().entrySet())
.next(e -> {
assertEquals("str", e.getKey());
assertTrue(e.getValue() instanceof StringSchema);
})
.next(e -> {
assertEquals("i", e.getKey());
assertTrue(e.getValue() instanceof IntegerSchema);
})
.next(e -> {
assertEquals("listStr", e.getKey());
assertTrue(e.getValue() instanceof ArraySchema);
ArraySchema array = (ArraySchema) e.getValue();
assertTrue(array.getItems() instanceof StringSchema);
})
.next(e -> {
assertEquals("listType", e.getKey());
assertTrue(e.getValue() instanceof ArraySchema);
ArraySchema array = (ArraySchema) e.getValue();
assertTrue(array.getItems() instanceof NumberSchema);
})
.next(e -> {
assertEquals("defstr", e.getKey());
assertTrue(e.getValue() instanceof StringSchema);
assertEquals("200", e.getValue().getDefault());
})
.next(e -> {
assertEquals("defint", e.getKey());
assertTrue(e.getValue() instanceof IntegerSchema);
assertEquals(87, e.getValue().getDefault());
})
.next(e -> {
assertEquals("defint0", e.getKey());
assertTrue(e.getValue() instanceof IntegerSchema);
assertEquals(0, e.getValue().getDefault());
})
.next(e -> {
assertEquals("defbool", e.getKey());
assertTrue(e.getValue() instanceof BooleanSchema);
})
.next(e -> {
assertEquals("optstr", e.getKey());
assertTrue(e.getValue() instanceof StringSchema);
})
.next(e -> {
assertEquals("optint", e.getKey());
assertTrue(e.getValue() instanceof IntegerSchema);
})
.next(e -> {
assertEquals("optstr2", e.getKey());
assertTrue(e.getValue() instanceof StringSchema);
assertEquals("optional", e.getValue().getDefault());
})
.next(e -> {
assertEquals("toI", e.getKey());
assertTrue(e.getValue() instanceof IntegerSchema);
})
.next(e -> {
assertEquals("letter", e.getKey());
assertTrue(e.getValue() instanceof StringSchema, e.getValue().getClass().getName());
StringSchema ss = (StringSchema) e.getValue();
assertEquals(Arrays.asList("A", "B"), ss.getEnum());
})
.verify();
})
.verify();
}
@OpenAPITest(value = RouteBodyArgs.class)
public void routeBodyArg(RouteIterator iterator) {
iterator
.next(route -> {
assertEquals("GET /str", route.toString());
assertEquals(String.class.getName(), route.getRequestBody().getJavaType());
assertEquals(Boolean.TRUE, route.getRequestBody().getRequired());
})
.next(route -> {
assertEquals("GET /int", route.toString());
assertEquals(int.class.getName(), route.getRequestBody().getJavaType());
})
.next(route -> {
assertEquals("GET /listOfStr", route.toString());
assertEquals("java.util.List<java.lang.String>", route.getRequestBody().getJavaType());
})
.next(route -> {
assertEquals("GET /listOfDouble", route.toString());
assertEquals("java.util.List<java.lang.Double>", route.getRequestBody().getJavaType());
})
.next(route -> {
assertEquals("GET /defstr", route.toString());
assertEquals(String.class.getName(), route.getRequestBody().getJavaType());
assertEquals(Boolean.FALSE, route.getRequestBody().getRequired());
})
.next(route -> {
assertEquals("GET /opt-int", route.toString());
assertEquals("java.util.Optional<java.lang.Integer>",
route.getRequestBody().getJavaType());
assertEquals(Boolean.FALSE, route.getRequestBody().getRequired());
})
.next(route -> {
assertEquals("GET /body-bean", route.toString());
assertEquals(ABean.class.getName(), route.getRequestBody().getJavaType());
})
.next(route -> {
assertEquals("GET /body-bean2", route.toString());
assertEquals(ABean.class.getName(), route.getRequestBody().getJavaType());
})
.verify();
}
public static class Java {
private JavaType type;
public JavaType getType() {
return type;
}
public void setType(JavaType type) {
this.type = type;
}
@Override public String toString() {
return type.toString();
}
}
/**
 * Verifies the Java type the OpenAPI analyser resolves for each route's default
 * response: literal returns, method-call returns, generics, local variables,
 * nested generic types, routes with several possible return types, and arrays
 * (JVM descriptors such as {@code [Z}, {@code [I}, {@code [Ljava.lang.String;}).
 *
 * <p>Commented-out ObjectMapper scratch code was removed; see the {@link Java}
 * helper bean it exercised.
 */
@OpenAPITest(value = RouteReturnTypeApp.class, ignoreArguments = true)
public void routeReturnType(RouteIterator iterator) {
  iterator
      .next(route -> {
        assertEquals("GET /literal/1", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /literal/2", route.toString());
        assertEquals(Integer.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /literal/3", route.toString());
        assertEquals(Object.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /literal/4", route.toString());
        assertEquals(Boolean.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/1", route.toString());
        assertEquals(RouteReturnTypeApp.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/2", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/3", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/4", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/5", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/6", route.toString());
        assertEquals("java.util.List<java.lang.String>", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/1", route.toString());
        assertEquals(Object.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/2", route.toString());
        assertEquals(Integer.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/3", route.toString());
        assertEquals(Object.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/4", route.toString());
        assertEquals(Callable.class.getName() + "<" + Byte.class.getName() + ">",
            route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/5", route.toString());
        assertEquals(Callable.class.getName() + "<" + Character.class.getName() + ">",
            route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/1", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/2", route.toString());
        assertEquals(Integer.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/3", route.toString());
        assertEquals("[Ljava.lang.String;", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/4", route.toString());
        assertEquals("[F", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /complexType/1", route.toString());
        assertEquals("java.util.List<java.lang.String>", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /complexType/2", route.toString());
        assertEquals("java.util.List<java.lang.String>", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /complexType/3", route.toString());
        assertEquals("java.util.List<java.util.List<java.lang.String>>",
            route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /multipleTypes/1", route.toString());
        assertEquals("java.util.ArrayList", route.getDefaultResponse().getJavaTypes().get(0));
        assertEquals("java.util.LinkedList", route.getDefaultResponse().getJavaTypes().get(1));
      })
      .next(route -> {
        assertEquals("GET /multipleTypes/2", route.toString());
        assertEquals("examples.ABean", route.getDefaultResponse().getJavaTypes().get(0));
        assertEquals("examples.BBean", route.getDefaultResponse().getJavaTypes().get(1));
      })
      .next(route -> {
        assertEquals("GET /multipleTypes/3", route.toString());
        assertEquals("examples.Bean", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/1", route.toString());
        assertEquals("[Z", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/2", route.toString());
        assertEquals("[Lexamples.RouteReturnTypeApp;", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/3", route.toString());
        assertEquals("[I", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/4", route.toString());
        assertEquals("[Ljava.lang.String;", route.getDefaultResponse().getJavaType());
      })
      .verify();
}
/**
 * Every route pulled in through application imports must resolve to the
 * expected path and a String default response.
 */
@OpenAPITest(value = RouteImport.class)
public void routeImport(RouteIterator iterator) {
  String[] expectedPaths = {
      "GET /a/1",
      "GET /main/a/1",
      "GET /main/submain/a/1",
      "GET /a/1",
      "GET /require/a/1",
      "GET /subroute/a/1"
  };
  for (String expectedPath : expectedPaths) {
    iterator = iterator.next(route -> {
      assertEquals(expectedPath, route.toString());
      assertEquals(String.class.getName(), route.getDefaultResponse().toString());
    });
  }
  iterator.verify();
}
/**
 * Routes imported by reference must keep their paths (with any configured
 * prefix) and a String default response.
 */
@OpenAPITest(value = RouteImportReferences.class)
public void routeImportReferences(RouteIterator iterator) {
  String[] expectedPaths = {
      "GET /a/1",
      "GET /require/a/1",
      "GET /prefix/a/1"
  };
  for (String expectedPath : expectedPaths) {
    iterator = iterator.next(route -> {
      assertEquals(expectedPath, route.toString());
      assertEquals(String.class.getName(), route.getDefaultResponse().toString());
    });
  }
  iterator.verify();
}
/**
 * Kotlin variant of {@link #routeImport(RouteIterator)}: imported Kotlin routes
 * resolve to the same paths and String default responses.
 */
@OpenAPITest(value = KtRouteImport.class)
public void ktRouteImport(RouteIterator iterator) {
  String[] expectedPaths = {
      "GET /a/1",
      "GET /main/a/1",
      "GET /main/submain/a/1",
      "GET /a/1",
      "GET /require/a/1",
      "GET /subroute/a/1"
  };
  for (String expectedPath : expectedPaths) {
    iterator = iterator.next(route -> {
      assertEquals(expectedPath, route.toString());
      assertEquals(String.class.getName(), route.getDefaultResponse().toString());
    });
  }
  iterator.verify();
}
/**
 * Kotlin variant of {@link #routeReturnType}: return-type resolution from
 * Kotlin sources. Where the Kotlin analysis only recovers the raw/erased type,
 * the expected value differs from the Java variant (see generic/2, generic/4,
 * complexType/3 and array/2 below).
 */
@OpenAPITest(value = KtRouteReturnType.class, ignoreArguments = true)
public void ktRouteReturnType(RouteIterator iterator) {
  iterator
      .next(route -> {
        assertEquals("GET /literal/1", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /literal/2", route.toString());
        assertEquals(Integer.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        // Kotlin Unit maps to void.
        assertEquals("GET /literal/3", route.toString());
        assertEquals(void.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /literal/4", route.toString());
        assertEquals(Boolean.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/1", route.toString());
        assertEquals(KtRouteReturnType.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/2", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/3", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/4", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/5", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /call/6", route.toString());
        assertEquals("java.util.List<java.lang.String>", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/1", route.toString());
        assertEquals(Object.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        // Java variant resolves Integer here; Kotlin analysis yields Object.
        assertEquals("GET /generic/2", route.toString());
        assertEquals(Object.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/3", route.toString());
        assertEquals(Object.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /generic/4", route.toString());
        // Kotlin analysis resolves only the raw Callable type here; the Java
        // variant resolves Callable<Byte>.
        assertEquals(Callable.class.getName(),
            route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/1", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/2", route.toString());
        assertEquals(Integer.class.getName(), route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/3", route.toString());
        assertEquals("[Ljava.lang.String;", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /localvar/4", route.toString());
        assertEquals("[F", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /complexType/1", route.toString());
        assertEquals("java.util.List<java.lang.String>", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /complexType/2", route.toString());
        assertEquals("java.util.List<java.lang.String>", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        // Nested generics collapse to the raw List type (Java variant keeps
        // List<List<String>>).
        assertEquals("GET /complexType/3", route.toString());
        assertEquals("java.util.List",
            route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /multipleTypes/1", route.toString());
        assertEquals("java.util.ArrayList", route.getDefaultResponse().getJavaTypes().get(0));
        assertEquals("java.util.LinkedList", route.getDefaultResponse().getJavaTypes().get(1));
      })
      .next(route -> {
        assertEquals("GET /multipleTypes/2", route.toString());
        assertEquals("examples.ABean", route.getDefaultResponse().getJavaTypes().get(0));
        assertEquals("examples.BBean", route.getDefaultResponse().getJavaTypes().get(1));
      })
      .next(route -> {
        assertEquals("GET /multipleTypes/3", route.toString());
        assertEquals("examples.Bean", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/1", route.toString());
        assertEquals("[Z", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        // Java variant resolves an array type here; Kotlin yields raw List.
        assertEquals("GET /array/2", route.toString());
        assertEquals("java.util.List", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/3", route.toString());
        assertEquals("[I", route.getDefaultResponse().getJavaType());
      })
      .next(route -> {
        assertEquals("GET /array/4", route.toString());
        assertEquals("[Ljava.lang.String;", route.getDefaultResponse().getJavaType());
      })
      .verify();
}
/**
 * Checks that produces/consumes media types are picked up per route, including
 * router-level defaults and per-route overrides.
 */
@OpenAPITest(value = RouterProduceConsume.class)
public void routeProduceConsume(RouteIterator iterator) {
  iterator
      .next(r -> {
        assertEquals("GET /", r.toString());
        assertEquals(Arrays.asList("text/html", "text/plain", "some/type"), r.getProduces());
        assertEquals(Arrays.asList("application/json", "application/javascript"), r.getConsumes());
      })
      .next(r -> {
        assertEquals("GET /json", r.toString());
        assertEquals(Arrays.asList("application/json"), r.getProduces());
        assertEquals(Arrays.asList("application/json"), r.getConsumes());
      })
      .next(r -> {
        assertEquals("GET /api/people", r.toString());
        assertEquals(Arrays.asList("text/yaml"), r.getProduces());
        assertEquals(Arrays.asList("text/yaml"), r.getConsumes());
      })
      .verify();
}
/**
 * Shared assertions for the MVC controller routes exposed by the Mvc* test
 * apps: optional query parameters, all seven primitive query-parameter types
 * (on both GET and POST), path/session routes, a deprecated List-returning
 * route and a bean request body.
 */
private void assertController(RouteIterator iterator) {
  iterator
      .next((route, args) -> {
        assertEquals("GET /api/foo", route.toString());
        args
            .next(arg -> {
              assertEquals("q", arg.getName());
              assertEquals("java.util.Optional<java.lang.String>", arg.getJavaType());
              assertEquals("query", arg.getIn());
            })
            .verify();
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        assertEquals("GET /api/bar", route.toString());
        args
            .next(arg -> {
              assertEquals("q", arg.getName());
              assertEquals("java.util.Optional<java.lang.String>", arg.getJavaType());
              assertEquals("query", arg.getIn());
            })
            .verify();
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        // All primitive parameter types, each required, bound from the query.
        assertEquals("GET /api", route.toString());
        args
            .next(arg -> {
              assertEquals("bool", arg.getName());
              assertEquals("boolean", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("s", arg.getName());
              assertEquals("short", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("i", arg.getName());
              assertEquals("int", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("c", arg.getName());
              assertEquals("char", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("l", arg.getName());
              assertEquals("long", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("f", arg.getName());
              assertEquals("float", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("d", arg.getName());
              assertEquals("double", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .verify();
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        // Same primitive set on a POST endpoint.
        assertEquals("POST /api/post", route.toString());
        args
            .next(arg -> {
              assertEquals("bool", arg.getName());
              assertEquals("boolean", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("s", arg.getName());
              assertEquals("short", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("i", arg.getName());
              assertEquals("int", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("c", arg.getName());
              assertEquals("char", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("l", arg.getName());
              assertEquals("long", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("f", arg.getName());
              assertEquals("float", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("d", arg.getName());
              assertEquals("double", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .verify();
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        assertEquals("GET /api/path", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next(route -> {
        assertEquals("GET /api/path-only", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next(route -> {
        assertEquals("GET /api/session", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next(route -> {
        assertEquals("GET /api/returnList", route.toString());
        assertEquals("java.util.List<" + String.class.getName() + ">",
            route.getDefaultResponse().toString());
        assertTrue(route.getDeprecated());
      })
      .next(route -> {
        assertEquals("POST /api/bean", route.toString());
        assertEquals(ABean.class.getName(), route.getDefaultResponse().toString());
        assertEquals(ABean.class.getName(), route.getRequestBody().getJavaType());
      })
      .verify();
}
/** MVC app registered by class reference exposes the shared controller routes. */
@OpenAPITest(value = MvcApp.class)
public void routeMvc(RouteIterator iterator) {
  assertController(iterator);
}

/** The MvcAppWithRoutes variant must expose exactly the same controller routes. */
@OpenAPITest(value = MvcAppWithRoutes.class)
public void routeMvcWithRoutes(RouteIterator iterator) {
  assertController(iterator);
}
/**
 * Instance-based MVC registration must expose exactly the same routes as the
 * class-based variants. The previous body duplicated every assertion of
 * {@link #assertController(RouteIterator)} line for line; delegate instead.
 */
@OpenAPITest(value = MvcInstanceApp.class)
public void routeMvcInstance(RouteIterator iterator) {
  assertController(iterator);
}
/**
 * Shared assertions for the Kotlin MVC controller test apps: Unit-returning
 * routes, Map/coroutine/future return types, nullable vs non-null parameters
 * and custom HTTP header/query names.
 */
private void assertKtController(RouteIterator iterator) {
  iterator
      .next(route -> {
        assertEquals("GET /", route.toString());
        assertEquals(String.class.getName(), route.getDefaultResponse().toString());
      })
      .next(route -> {
        // Kotlin Unit return maps to void.
        assertEquals("DELETE /unit", route.toString());
        assertEquals(void.class.getName(), route.getDefaultResponse().toString());
      })
      .next(route -> {
        assertEquals("GET /doMap", route.toString());
        assertEquals("java.util.Map<java.lang.String,java.lang.Object>",
            route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        // Non-null Kotlin parameters are required; nullable ones are not.
        assertEquals("GET /doParams", route.toString());
        assertEquals(ABean.class.getName(), route.getDefaultResponse().toString());
        args
            .next(arg -> {
              assertEquals("I", arg.getName());
              assertEquals("int", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("oI", arg.getName());
              assertEquals("java.lang.Integer", arg.getJavaType());
              assertEquals("query", arg.getIn());
            })
            .next(arg -> {
              assertEquals("q", arg.getName());
              assertEquals("java.lang.String", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("nullq", arg.getName());
              assertEquals("java.lang.String", arg.getJavaType());
              assertEquals("query", arg.getIn());
            })
            .verify();
      })
      .next((route, args) -> {
        assertEquals("GET /coroutine", route.toString());
        assertEquals("java.util.List<" + String.class.getName() + ">",
            route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        assertEquals("GET /future", route.toString());
        assertEquals("java.lang.String", route.getDefaultResponse().toString());
      })
      .next((route, args) -> {
        assertEquals("GET /httpNames", route.toString());
        assertEquals("java.lang.String", route.getDefaultResponse().toString());
        args
            .next(arg -> {
              assertEquals("Last-Modified-Since", arg.getName());
              assertEquals("java.lang.String", arg.getJavaType());
              assertEquals("header", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .next(arg -> {
              assertEquals("x-search", arg.getName());
              assertEquals("java.lang.String", arg.getJavaType());
              assertEquals("query", arg.getIn());
              assertTrue(arg.getRequired());
            })
            .verify();
      })
      .verify();
}
/** Kotlin MVC app (class reference): shares the Kotlin controller assertions. */
@OpenAPITest(value = KtMvcApp.class)
public void ktMvc(RouteIterator iterator) {
  assertKtController(iterator);
}

/** Kotlin MVC app registered from an instance: same routes expected. */
@OpenAPITest(value = KtMvcInstanceApp.class)
public void ktMvcInstance(RouteIterator iterator) {
  assertKtController(iterator);
}

/** Kotlin MVC app declared as an object: same routes expected. */
@OpenAPITest(value = KtMvcObjectApp.class)
public void ktMvcObject(RouteIterator iterator) {
  assertKtController(iterator);
}

/** Kotlin MVC app mixed with additional routes: same controller routes expected. */
@OpenAPITest(value = KtMvcAppWithRoutes.class)
public void ktMvcWithRoutes(RouteIterator iterator) {
  assertKtController(iterator);
}
/** A Kotlin route registered by function reference keeps its summary and operationId. */
@OpenAPITest(value = KtRouteRef.class)
public void ktRouteRef(RouteIterator iterator) {
  // NOTE(review): unlike every other test in this class, this chain never calls
  // .verify() — confirm whether trailing routes are expected here or whether
  // verify() was simply forgotten.
  iterator
      .next(route -> {
        assertEquals("Create a Pet", route.getSummary());
        assertEquals("createPetRef", route.getOperationId());
      });
}
/**
 * Kotlin sources without an application class still produce documented routes,
 * including a route registered via function reference.
 */
@OpenAPITest(value = KtNoAppClassKt.class)
public void ktNoApp(RouteIterator iterator) {
  iterator
      .next(r -> {
        assertEquals("GET /path", r.toString());
        assertEquals(String.class.getName(), r.getDefaultResponse().getJavaType());
        assertEquals("getPath", r.getOperationId());
      })
      .next(r -> {
        assertEquals("GET /fn", r.toString());
        assertEquals(int.class.getName(), r.getDefaultResponse().getJavaType());
        assertEquals("fnRef", r.getOperationId());
        assertEquals("function reference", r.getSummary());
      })
      .verify();
}
/**
 * Java sources without an application class still produce documented routes;
 * mirrors {@link #ktNoApp(RouteIterator)}.
 */
@OpenAPITest(value = NoAppClass.class)
public void noApp(RouteIterator iterator) {
  iterator
      .next(r -> {
        assertEquals("GET /path", r.toString());
        assertEquals(String.class.getName(), r.getDefaultResponse().getJavaType());
        assertEquals("getPath", r.getOperationId());
      })
      .next(r -> {
        assertEquals("GET /fn", r.toString());
        assertEquals(int.class.getName(), r.getDefaultResponse().getJavaType());
        assertEquals("fnRef", r.getOperationId());
        assertEquals("function reference", r.getSummary());
      })
      .verify();
}
/** Routes declared in KtAppWithMain are detected with path, type and operationId. */
@OpenAPITest(value = KtAppWithMainKt.class)
public void ktAppWithMain(RouteIterator iterator) {
  iterator
      .next(r -> {
        assertEquals("GET /welcome", r.toString());
        assertEquals(String.class.getName(), r.getDefaultResponse().getJavaType());
        assertEquals("getWelcome", r.getOperationId());
      })
      .verify();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.geo;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.MapperBuilders.geoShapeField;
/**
* FieldMapper for indexing {@link com.spatial4j.core.shape.Shape}s.
* <p/>
* Currently Shapes can only be indexed and can only be queried using
* {@link org.elasticsearch.index.query.GeoShapeQueryParser}, consequently
* a lot of behavior in this Mapper is disabled.
* <p/>
* Format supported:
* <p/>
* "field" : {
* "type" : "polygon",
* "coordinates" : [
* [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
* ]
* }
*/
public class GeoShapeFieldMapper extends FieldMapper {
// Mapping type name under which this mapper is registered ("geo_shape").
public static final String CONTENT_TYPE = "geo_shape";
/**
 * Mapping parameter names accepted by the {@code geo_shape} type parser.
 */
public static class Names {
    public static final String TREE = "tree";
    public static final String TREE_GEOHASH = "geohash";
    public static final String TREE_QUADTREE = "quadtree";
    public static final String TREE_LEVELS = "tree_levels";
    // Constant name is historically misspelled; the actual mapping key is "precision".
    // Renaming the public constant would break external callers, so it is kept as-is.
    public static final String TREE_PRESISION = "precision";
    public static final String DISTANCE_ERROR_PCT = "distance_error_pct";
    public static final String ORIENTATION = "orientation";
    public static final String STRATEGY = "strategy";
}
/**
 * Defaults applied when the mapping does not specify a value explicitly.
 */
public static class Defaults {
    public static final String TREE = Names.TREE_GEOHASH;
    public static final String STRATEGY = SpatialStrategy.RECURSIVE.getStrategyName();
    // Tree depths derived from a "50m" precision for each tree implementation.
    public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m");
    public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m");
    // Fallback distance_error_pct for pre-2.0 indices (see Builder#build).
    public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d;
    public static final Orientation ORIENTATION = Orientation.RIGHT;
    public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType();

    static {
        // setting name here is a hack so freeze can be called...instead all these options should be
        // moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers...
        FIELD_TYPE.setNames(new MappedFieldType.Names("DoesNotExist"));
        FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
        FIELD_TYPE.setTokenized(false);
        FIELD_TYPE.setStored(false);
        FIELD_TYPE.setStoreTermVectors(false);
        FIELD_TYPE.setOmitNorms(true);
        FIELD_TYPE.freeze();
    }
}
/**
 * Builder for {@link GeoShapeFieldMapper}; applies backwards-compatibility
 * adjustments based on the version the index was created with.
 */
public static class Builder extends FieldMapper.Builder<Builder, GeoShapeFieldMapper> {

    public Builder(String name) {
        super(name, Defaults.FIELD_TYPE);
    }

    public GeoShapeFieldType fieldType() {
        return (GeoShapeFieldType)fieldType;
    }

    @Override
    public GeoShapeFieldMapper build(BuilderContext context) {
        GeoShapeFieldType geoShapeFieldType = fieldType();
        // Indices created before 2.0.0-beta1 used a different quadtree
        // implementation; keep them on the legacy tree.
        if (geoShapeFieldType.tree.equals(Names.TREE_QUADTREE)
                && context.indexCreatedVersion().before(Version.V_2_0_0_beta1)) {
            geoShapeFieldType.setTree("legacyquadtree");
        }
        // Old indices, or mappings specifying neither tree_levels nor precision,
        // fall back to the legacy default distance error percentage.
        if (context.indexCreatedVersion().before(Version.V_2_0_0_beta1) ||
            (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0)) {
            geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT);
        }
        setupFieldType(context);
        return new GeoShapeFieldMapper(name, fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
    }
}
/**
 * Parses the {@code geo_shape} mapping definition into a {@link Builder}.
 * Recognized entries are consumed (removed from the node); unrecognized
 * entries are left in the node.
 */
public static class TypeParser implements Mapper.TypeParser {

    @Override
    public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
        Builder builder = geoShapeField(name);
        // Explicit Iterator so handled entries can be removed in place.
        for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
            Map.Entry<String, Object> entry = iterator.next();
            String fieldName = Strings.toUnderscoreCase(entry.getKey());
            Object fieldNode = entry.getValue();
            if (Names.TREE.equals(fieldName)) {
                builder.fieldType().setTree(fieldNode.toString());
                iterator.remove();
            } else if (Names.TREE_LEVELS.equals(fieldName)) {
                builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString()));
                iterator.remove();
            } else if (Names.TREE_PRESISION.equals(fieldName)) {
                // "precision" is a distance string (e.g. "50m"), converted to meters.
                builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(), DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
                iterator.remove();
            } else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) {
                builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString()));
                iterator.remove();
            } else if (Names.ORIENTATION.equals(fieldName)) {
                builder.fieldType().setOrientation(ShapeBuilder.orientationFromString(fieldNode.toString()));
                iterator.remove();
            } else if (Names.STRATEGY.equals(fieldName)) {
                builder.fieldType().setStrategyName(fieldNode.toString());
                iterator.remove();
            }
        }
        return builder;
    }
}
/**
 * {@link MappedFieldType} for {@code geo_shape} fields. Holds the spatial
 * prefix-tree configuration (tree type, levels/precision, distance error,
 * orientation) and, once {@link #freeze()} has run, the prebuilt Lucene
 * prefix-tree strategies used to index and query shapes.
 */
public static final class GeoShapeFieldType extends MappedFieldType {

    private String tree = Defaults.TREE;
    private String strategyName = Defaults.STRATEGY;
    // 0 / negative mean "not configured"; getLevels() then falls back to the
    // per-tree default level count.
    private int treeLevels = 0;
    private double precisionInMeters = -1;
    // null means "not explicitly set" — distanceErrorPct() then returns
    // defaultDistanceErrorPct instead.
    private Double distanceErrorPct;
    private double defaultDistanceErrorPct = 0.0;
    private Orientation orientation = Defaults.ORIENTATION;

    // these are built when the field type is frozen
    private PrefixTreeStrategy defaultStrategy;
    private RecursivePrefixTreeStrategy recursiveStrategy;
    private TermQueryPrefixTreeStrategy termStrategy;

    public GeoShapeFieldType() {}

    // Copy constructor: only the configuration is copied; the strategy
    // objects are rebuilt when the copy is frozen.
    protected GeoShapeFieldType(GeoShapeFieldType ref) {
        super(ref);
        this.tree = ref.tree;
        this.strategyName = ref.strategyName;
        this.treeLevels = ref.treeLevels;
        this.precisionInMeters = ref.precisionInMeters;
        this.distanceErrorPct = ref.distanceErrorPct;
        this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct;
        this.orientation = ref.orientation;
    }

    @Override
    public GeoShapeFieldType clone() {
        return new GeoShapeFieldType(this);
    }

    @Override
    public boolean equals(Object o) {
        if (!super.equals(o)) return false;
        GeoShapeFieldType that = (GeoShapeFieldType) o;
        // NOTE(review): the primitive == comparison on doubles here, combined
        // with Objects.hash below (which boxes via Double.hashCode), makes
        // 0.0 and -0.0 compare equal but hash differently — confirm whether
        // that edge case can occur for these settings.
        return treeLevels == that.treeLevels &&
            precisionInMeters == that.precisionInMeters &&
            defaultDistanceErrorPct == that.defaultDistanceErrorPct &&
            Objects.equals(tree, that.tree) &&
            Objects.equals(strategyName, that.strategyName) &&
            Objects.equals(distanceErrorPct, that.distanceErrorPct) &&
            orientation == that.orientation;
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), tree, strategyName, treeLevels, precisionInMeters, distanceErrorPct, defaultDistanceErrorPct, orientation);
    }

    @Override
    public String typeName() {
        return CONTENT_TYPE;
    }

    @Override
    public void freeze() {
        super.freeze();
        // This is a bit hackish: we need to setup the spatial tree and strategies once the field name is set, which
        // must be by the time freeze is called.
        SpatialPrefixTree prefixTree;
        if ("geohash".equals(tree)) {
            prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
        } else if ("legacyquadtree".equals(tree)) {
            // pre-2.0 indices keep the original (unpacked) quadtree encoding
            prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
        } else if ("quadtree".equals(tree)) {
            prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
        } else {
            throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
        }
        // Both strategies are always built; resolveStrategy then picks the
        // configured one as the default used at index time.
        recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, names().indexName());
        recursiveStrategy.setDistErrPct(distanceErrorPct());
        recursiveStrategy.setPruneLeafyBranches(false);
        termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, names().indexName());
        termStrategy.setDistErrPct(distanceErrorPct());
        defaultStrategy = resolveStrategy(strategyName);
    }

    @Override
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
        super.checkCompatibility(fieldType, conflicts, strict);
        GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
        // prevent user from changing strategies
        if (strategyName().equals(other.strategyName()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different strategy");
        }
        // prevent user from changing trees (changes encoding)
        if (tree().equals(other.tree()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different tree");
        }
        // TODO we should allow this, but at the moment levels is used to build bookkeeping variables
        // in lucene's SpatialPrefixTree implementations, need a patch to correct that first
        if (treeLevels() != other.treeLevels()) {
            conflicts.add("mapper [" + names().fullName() + "] has different tree_levels");
        }
        if (precisionInMeters() != other.precisionInMeters()) {
            conflicts.add("mapper [" + names().fullName() + "] has different precision");
        }
    }

    // Resolves the effective tree depth: if either levels or precision was
    // configured, use the deeper of the two; otherwise fall back to the
    // supplied default for this tree type.
    private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) {
        if (treeLevels > 0 || precisionInMeters >= 0) {
            return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? GeoUtils.geoHashLevelsForPrecision(precisionInMeters)
                : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0);
        }
        return defaultLevels;
    }

    public String tree() {
        return tree;
    }

    public void setTree(String tree) {
        checkIfFrozen();
        this.tree = tree;
    }

    public String strategyName() {
        return strategyName;
    }

    public void setStrategyName(String strategyName) {
        checkIfFrozen();
        this.strategyName = strategyName;
    }

    public int treeLevels() {
        return treeLevels;
    }

    public void setTreeLevels(int treeLevels) {
        checkIfFrozen();
        this.treeLevels = treeLevels;
    }

    public double precisionInMeters() {
        return precisionInMeters;
    }

    public void setPrecisionInMeters(double precisionInMeters) {
        checkIfFrozen();
        this.precisionInMeters = precisionInMeters;
    }

    // Explicitly configured value, or the (possibly version-dependent)
    // default when the user did not set one.
    public double distanceErrorPct() {
        return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct;
    }

    public void setDistanceErrorPct(double distanceErrorPct) {
        checkIfFrozen();
        this.distanceErrorPct = distanceErrorPct;
    }

    public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) {
        checkIfFrozen();
        this.defaultDistanceErrorPct = defaultDistanceErrorPct;
    }

    public Orientation orientation() { return this.orientation; }

    public void setOrientation(Orientation orientation) {
        checkIfFrozen();
        this.orientation = orientation;
    }

    public PrefixTreeStrategy defaultStrategy() {
        return this.defaultStrategy;
    }

    // Maps a strategy name to one of the prebuilt strategy instances; only
    // valid after freeze() has constructed them.
    public PrefixTreeStrategy resolveStrategy(String strategyName) {
        if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) {
            return recursiveStrategy;
        }
        if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) {
            return termStrategy;
        }
        throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]");
    }

    @Override
    public String value(Object value) {
        throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values");
    }
}
/**
 * Creates the mapper, delegating to the parent with the static default
 * field-type template ({@code Defaults.FIELD_TYPE}) as reference.
 */
public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
    super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
}
/** Covariant override narrowing the field type to {@link GeoShapeFieldType}. */
@Override
public GeoShapeFieldType fieldType() {
    return (GeoShapeFieldType) super.fieldType();
}
/**
 * Parses a shape from the document (or takes an externally supplied
 * {@link Shape}), turns it into indexable fields via the default prefix-tree
 * strategy, and adds them to the document. Always returns {@code null}
 * because no sub-mappers are produced.
 */
@Override
public Mapper parse(ParseContext context) throws IOException {
    try {
        // An externally provided shape takes precedence over parsing the source.
        Shape shape = context.parseExternalValue(Shape.class);
        if (shape == null) {
            ShapeBuilder shapeBuilder = ShapeBuilder.parse(context.parser(), this);
            if (shapeBuilder == null) {
                return null;
            }
            shape = shapeBuilder.build();
        }
        Field[] indexableFields = fieldType().defaultStrategy().createIndexableFields(shape);
        if (indexableFields == null || indexableFields.length == 0) {
            return null;
        }
        boolean applyBoost = customBoost() == false;
        for (Field indexableField : indexableFields) {
            if (applyBoost) {
                indexableField.setBoost(fieldType().boost());
            }
            context.doc().add(indexableField);
        }
    } catch (Exception e) {
        throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
    }
    return null;
}
// Intentionally empty: this mapper overrides parse(ParseContext) directly
// and adds its fields to the document there, so this hook is never used.
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
}
/**
 * Serializes the geo_shape mapping parameters. Each parameter is written
 * only when it differs from its default, unless {@code includeDefaults}
 * was requested.
 */
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
    builder.field("type", contentType());
    if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) {
        builder.field(Names.TREE, fieldType().tree());
    }
    if (includeDefaults || fieldType().treeLevels() != 0) {
        builder.field(Names.TREE_LEVELS, fieldType().treeLevels());
    }
    if (includeDefaults || fieldType().precisionInMeters() != -1) {
        builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters()));
    }
    // Fixed: was `strategyName() != Defaults.STRATEGY`, a reference-identity
    // comparison on Strings that could serialize the strategy even when it
    // equals the default; use equals(), consistent with the tree check above.
    if (includeDefaults || fieldType().strategyName().equals(Defaults.STRATEGY) == false) {
        builder.field(Names.STRATEGY, fieldType().strategyName());
    }
    if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) {
        builder.field(Names.DISTANCE_ERROR_PCT, fieldType().distanceErrorPct());
    }
    if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) {
        builder.field(Names.ORIENTATION, fieldType().orientation());
    }
}
/** Returns the mapping type name this mapper handles ({@code geo_shape}). */
@Override
protected String contentType() {
    return CONTENT_TYPE;
}
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.oplog;
import java.util.List;
import org.camunda.bpm.engine.impl.persistence.entity.PropertyChange;
/**
* One op log context entry represents an operation on a set of entities of the same type (see entityType field).
* It consists of multiple {@link PropertyChange}s that end up as multiple history events.
*
* @author Thorben Lindhauer
*/
public class UserOperationLogContextEntry {

  protected String deploymentId;
  protected String processDefinitionId;
  protected String processDefinitionKey;
  protected String processInstanceId;
  protected String executionId;
  protected String caseDefinitionId;
  protected String caseInstanceId;
  protected String caseExecutionId;
  protected String taskId;
  protected String operationType;
  protected String entityType;
  protected List<PropertyChange> propertyChanges;
  protected String jobDefinitionId;
  protected String jobId;
  protected String batchId;
  protected String category;
  protected String rootProcessInstanceId;
  protected String externalTaskId;
  protected String annotation;

  /** Creates an entry for the given operation performed on the given entity type. */
  public UserOperationLogContextEntry(String operationType, String entityType) {
    this.operationType = operationType;
    this.entityType = entityType;
  }

  // Plain accessors kept compact; no logic beyond field access.

  public String getDeploymentId() { return deploymentId; }
  public void setDeploymentId(String deploymentId) { this.deploymentId = deploymentId; }

  public String getProcessDefinitionId() { return processDefinitionId; }
  public void setProcessDefinitionId(String processDefinitionId) { this.processDefinitionId = processDefinitionId; }

  public String getProcessInstanceId() { return processInstanceId; }
  public void setProcessInstanceId(String processInstanceId) { this.processInstanceId = processInstanceId; }

  public String getExecutionId() { return executionId; }
  public void setExecutionId(String executionId) { this.executionId = executionId; }

  public String getCaseDefinitionId() { return caseDefinitionId; }
  public void setCaseDefinitionId(String caseDefinitionId) { this.caseDefinitionId = caseDefinitionId; }

  public String getCaseInstanceId() { return caseInstanceId; }
  public void setCaseInstanceId(String caseInstanceId) { this.caseInstanceId = caseInstanceId; }

  public String getCaseExecutionId() { return caseExecutionId; }
  public void setCaseExecutionId(String caseExecutionId) { this.caseExecutionId = caseExecutionId; }

  public String getTaskId() { return taskId; }
  public void setTaskId(String taskId) { this.taskId = taskId; }

  public String getOperationType() { return operationType; }
  public void setOperationType(String operationType) { this.operationType = operationType; }

  public String getEntityType() { return entityType; }
  public void setEntityType(String entityType) { this.entityType = entityType; }

  public List<PropertyChange> getPropertyChanges() { return propertyChanges; }
  public void setPropertyChanges(List<PropertyChange> propertyChanges) { this.propertyChanges = propertyChanges; }

  public String getProcessDefinitionKey() { return processDefinitionKey; }
  public void setProcessDefinitionKey(String processDefinitionKey) { this.processDefinitionKey = processDefinitionKey; }

  public String getJobDefinitionId() { return jobDefinitionId; }
  public void setJobDefinitionId(String jobDefinitionId) { this.jobDefinitionId = jobDefinitionId; }

  public String getJobId() { return jobId; }
  public void setJobId(String jobId) { this.jobId = jobId; }

  public String getBatchId() { return batchId; }
  public void setBatchId(String batchId) { this.batchId = batchId; }

  public String getCategory() { return category; }
  public void setCategory(String category) { this.category = category; }

  public String getRootProcessInstanceId() { return rootProcessInstanceId; }
  public void setRootProcessInstanceId(String rootProcessInstanceId) { this.rootProcessInstanceId = rootProcessInstanceId; }

  public String getExternalTaskId() { return externalTaskId; }
  public void setExternalTaskId(String externalTaskId) { this.externalTaskId = externalTaskId; }

  public String getAnnotation() { return annotation; }
  public void setAnnotation(String annotation) { this.annotation = annotation; }
}
| |
package org.hisp.dhis.dataelement;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.common.GenericDimensionalObjectStore;
import org.hisp.dhis.common.GenericNameableObjectStore;
import org.hisp.dhis.common.ListMap;
import org.hisp.dhis.dataelement.comparator.DataElementCategoryComboSizeComparator;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.i18n.I18nService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.system.util.Filter;
import org.hisp.dhis.system.util.FilterUtils;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.hisp.dhis.i18n.I18nUtils.*;
/**
* @author Kristian Nordal
*/
@Transactional
public class DefaultDataElementService
    implements DataElementService
{
    // This service is a thin delegation layer: persistence goes to the
    // injected stores, and most reads are wrapped with i18n translations
    // through the injected I18nService.

    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private DataElementStore dataElementStore;

    public void setDataElementStore( DataElementStore dataElementStore )
    {
        this.dataElementStore = dataElementStore;
    }

    private GenericNameableObjectStore<DataElementGroup> dataElementGroupStore;

    public void setDataElementGroupStore( GenericNameableObjectStore<DataElementGroup> dataElementGroupStore )
    {
        this.dataElementGroupStore = dataElementGroupStore;
    }

    private GenericDimensionalObjectStore<DataElementGroupSet> dataElementGroupSetStore;

    public void setDataElementGroupSetStore( GenericDimensionalObjectStore<DataElementGroupSet> dataElementGroupSetStore )
    {
        this.dataElementGroupSetStore = dataElementGroupSetStore;
    }

    private I18nService i18nService;

    public void setI18nService( I18nService service )
    {
        i18nService = service;
    }

    // -------------------------------------------------------------------------
    // DataElement
    // -------------------------------------------------------------------------

    @Override
    public int addDataElement( DataElement dataElement )
    {
        return dataElementStore.save( dataElement );
    }

    @Override
    public void updateDataElement( DataElement dataElement )
    {
        dataElementStore.update( dataElement );
    }

    @Override
    public void deleteDataElement( DataElement dataElement )
    {
        dataElementStore.delete( dataElement );
    }

    @Override
    public DataElement getDataElement( int id )
    {
        return i18n( i18nService, dataElementStore.get( id ) );
    }

    @Override
    public DataElement getDataElement( String uid )
    {
        return i18n( i18nService, dataElementStore.getByUid( uid ) );
    }

    @Override
    public DataElement getDataElementByCode( String code )
    {
        return i18n( i18nService, dataElementStore.getByCode( code ) );
    }

    @Override
    public Collection<DataElement> getAllDataElements()
    {
        return i18n( i18nService, dataElementStore.getAll() );
    }

    /**
     * Returns all data elements when identifiers is null, otherwise only
     * those whose internal id is contained in the given collection.
     */
    @Override
    public Collection<DataElement> getDataElements( final Collection<Integer> identifiers )
    {
        Collection<DataElement> dataElements = getAllDataElements();

        return identifiers == null ? dataElements : FilterUtils.filter( dataElements, new Filter<DataElement>()
        {
            @Override
            public boolean retain( DataElement dataElement )
            {
                return identifiers.contains( dataElement.getId() );
            }
        } );
    }

    @Override
    public List<DataElement> getDataElementsByUid( Collection<String> uids )
    {
        return i18n( i18nService, dataElementStore.getByUid( uids ) );
    }

    @Override
    public void setZeroIsSignificantForDataElements( Collection<Integer> dataElementIds )
    {
        if ( dataElementIds != null )
        {
            dataElementStore.setZeroIsSignificantForDataElements( dataElementIds );
        }
    }

    @Override
    public Collection<DataElement> getDataElementsByZeroIsSignificant( boolean zeroIsSignificant )
    {
        return dataElementStore.getDataElementsByZeroIsSignificant( zeroIsSignificant );
    }

    /**
     * Returns the members of the given group whose zero-is-significant flag
     * matches the given value.
     */
    @Override
    public Collection<DataElement> getDataElementsByZeroIsSignificantAndGroup( boolean zeroIsSignificant,
        DataElementGroup dataElementGroup )
    {
        Collection<DataElement> dataElements = new HashSet<>();

        for ( DataElement element : dataElementGroup.getMembers() )
        {
            // Fixed: the zeroIsSignificant argument was previously ignored, so
            // callers passing false still got the zero-is-significant members.
            if ( element.isZeroIsSignificant() == zeroIsSignificant )
            {
                dataElements.add( element );
            }
        }

        return dataElements;
    }

    @Override
    public Collection<DataElement> getAggregateableDataElements()
    {
        return i18n( i18nService, dataElementStore.getAggregateableDataElements() );
    }

    /** Returns the first data element with the given name, or null if none exists. */
    @Override
    public DataElement getDataElementByName( String name )
    {
        List<DataElement> dataElements = new ArrayList<>( dataElementStore.getAllEqName( name ) );

        if ( dataElements.isEmpty() )
        {
            return null;
        }

        return i18n( i18nService, dataElements.get( 0 ) );
    }

    @Override
    public Collection<DataElement> searchDataElementsByName( String key )
    {
        return i18n( i18nService, dataElementStore.searchDataElementsByName( key ) );
    }

    /** Returns the first data element with the given short name, or null if none exists. */
    @Override
    public DataElement getDataElementByShortName( String shortName )
    {
        List<DataElement> dataElements = new ArrayList<>( dataElementStore.getAllEqShortName( shortName ) );

        if ( dataElements.isEmpty() )
        {
            return null;
        }

        return i18n( i18nService, dataElements.get( 0 ) );
    }

    @Override
    public Collection<DataElement> getDataElementsByAggregationOperator( String aggregationOperator )
    {
        return i18n( i18nService, dataElementStore.getDataElementsByAggregationOperator( aggregationOperator ) );
    }

    @Override
    public Collection<DataElement> getDataElementsByType( String type )
    {
        return i18n( i18nService, dataElementStore.getDataElementsByType( type ) );
    }

    /** Returns all data elements whose period type equals the given one (in-memory filter). */
    @Override
    public Collection<DataElement> getDataElementsByPeriodType( final PeriodType periodType )
    {
        Collection<DataElement> dataElements = getAllDataElements();

        return FilterUtils.filter( dataElements, new Filter<DataElement>()
        {
            @Override
            public boolean retain( DataElement dataElement )
            {
                return dataElement.getPeriodType() != null && dataElement.getPeriodType().equals( periodType );
            }
        } );
    }

    @Override
    public Collection<DataElement> getDataElementsByDomainType( DataElementDomain domainType )
    {
        return i18n( i18nService, dataElementStore.getDataElementsByDomainType( domainType ) );
    }

    @Override
    public Collection<DataElement> getDataElementsByDomainType( DataElementDomain domainType, int first, int max )
    {
        return i18n( i18nService, dataElementStore.getDataElementsByDomainType( domainType, first, max ) );
    }

    @Override
    public Collection<DataElement> getDataElementByCategoryCombo( DataElementCategoryCombo categoryCombo )
    {
        return i18n( i18nService, dataElementStore.getDataElementByCategoryCombo( categoryCombo ) );
    }

    /** Groups the given data elements by their category combo. */
    @Override
    public Map<DataElementCategoryCombo, List<DataElement>> getGroupedDataElementsByCategoryCombo(
        List<DataElement> dataElements )
    {
        Map<DataElementCategoryCombo, List<DataElement>> mappedDataElements = new HashMap<>();

        for ( DataElement dataElement : dataElements )
        {
            if ( mappedDataElements.containsKey( dataElement.getCategoryCombo() ) )
            {
                mappedDataElements.get( dataElement.getCategoryCombo() ).add( dataElement );
            }
            else
            {
                List<DataElement> des = new ArrayList<>();
                des.add( dataElement );
                mappedDataElements.put( dataElement.getCategoryCombo(), des );
            }
        }

        return mappedDataElements;
    }

    /** Returns the distinct category combos of the given data elements, sorted by size. */
    @Override
    public List<DataElementCategoryCombo> getDataElementCategoryCombos( List<DataElement> dataElements )
    {
        Set<DataElementCategoryCombo> categoryCombos = new HashSet<>();

        for ( DataElement dataElement : dataElements )
        {
            categoryCombos.add( dataElement.getCategoryCombo() );
        }

        List<DataElementCategoryCombo> listCategoryCombos = new ArrayList<>( categoryCombos );

        Collections.sort( listCategoryCombos, new DataElementCategoryComboSizeComparator() );

        return listCategoryCombos;
    }

    @Override
    public Collection<DataElement> getDataElementsWithGroupSets()
    {
        return i18n( i18nService, dataElementStore.getDataElementsWithGroupSets() );
    }

    @Override
    public Collection<DataElement> getDataElementsWithoutGroups()
    {
        return i18n( i18nService, dataElementStore.getDataElementsWithoutGroups() );
    }

    @Override
    public Collection<DataElement> getDataElementsWithoutDataSets()
    {
        return i18n( i18nService, dataElementStore.getDataElementsWithoutDataSets() );
    }

    @Override
    public Collection<DataElement> getDataElementsWithDataSets()
    {
        return i18n( i18nService, dataElementStore.getDataElementsWithDataSets() );
    }

    @Override
    public Collection<DataElement> getDataElementsLikeName( String name )
    {
        return getObjectsByName( i18nService, dataElementStore, name );
    }

    @Override
    public int getDataElementCount()
    {
        return dataElementStore.getCount();
    }

    @Override
    public int getDataElementCountByName( String name )
    {
        return getCountByName( i18nService, dataElementStore, name );
    }

    @Override
    public int getDataElementCountByDomainType( DataElementDomain domainType )
    {
        return dataElementStore.getCountByDomainType( domainType );
    }

    @Override
    public Collection<DataElement> getDataElementsBetween( int first, int max )
    {
        return getObjectsBetween( i18nService, dataElementStore, first, max );
    }

    @Override
    public Collection<DataElement> getDataElementsBetweenByName( String name, int first, int max )
    {
        return getObjectsBetweenByName( i18nService, dataElementStore, name, first, max );
    }

    @Override
    public Collection<DataElement> getDataElementsByDataSets( Collection<DataSet> dataSets )
    {
        return i18n( i18nService, dataElementStore.getDataElementsByDataSets( dataSets ) );
    }

    @Override
    public Collection<DataElement> getDataElementsByAggregationLevel( int aggregationLevel )
    {
        return i18n( i18nService, dataElementStore.getDataElementsByAggregationLevel( aggregationLevel ) );
    }

    @Override
    public ListMap<String, String> getDataElementCategoryOptionComboMap( Set<String> dataElementUids )
    {
        return dataElementStore.getDataElementCategoryOptionComboMap( dataElementUids );
    }

    /** Builds a lookup map from data element UID to internal id over all data elements. */
    @Override
    public Map<String, Integer> getDataElementUidIdMap()
    {
        Map<String, Integer> map = new HashMap<>();

        for ( DataElement dataElement : getAllDataElements() )
        {
            map.put( dataElement.getUid(), dataElement.getId() );
        }

        return map;
    }

    @Override
    public Collection<DataElement> getDataElements( DataSet dataSet, String key, Integer max )
    {
        return i18n( i18nService, dataElementStore.get( dataSet, key, max ) );
    }

    // -------------------------------------------------------------------------
    // DataElementGroup
    // -------------------------------------------------------------------------

    @Override
    public int addDataElementGroup( DataElementGroup dataElementGroup )
    {
        // Simplified: removed a redundant local that only forwarded the id.
        return dataElementGroupStore.save( dataElementGroup );
    }

    @Override
    public void updateDataElementGroup( DataElementGroup dataElementGroup )
    {
        dataElementGroupStore.update( dataElementGroup );
    }

    @Override
    public void deleteDataElementGroup( DataElementGroup dataElementGroup )
    {
        dataElementGroupStore.delete( dataElementGroup );
    }

    @Override
    public DataElementGroup getDataElementGroup( int id )
    {
        return i18n( i18nService, dataElementGroupStore.get( id ) );
    }

    @Override
    public DataElementGroup getDataElementGroup( int id, boolean i18nDataElements )
    {
        DataElementGroup group = getDataElementGroup( id );

        if ( i18nDataElements )
        {
            i18n( i18nService, group.getMembers() );
        }

        return group;
    }

    @Override
    public Collection<DataElementGroup> getDataElementGroups( final Collection<Integer> identifiers )
    {
        Collection<DataElementGroup> groups = getAllDataElementGroups();

        return identifiers == null ? groups : FilterUtils.filter( groups, new Filter<DataElementGroup>()
        {
            @Override
            public boolean retain( DataElementGroup object )
            {
                return identifiers.contains( object.getId() );
            }
        } );
    }

    @Override
    public List<DataElementGroup> getDataElementGroupsByUid( Collection<String> uids )
    {
        return i18n( i18nService, dataElementGroupStore.getByUid( uids ) );
    }

    @Override
    public DataElementGroup getDataElementGroup( String uid )
    {
        return i18n( i18nService, dataElementGroupStore.getByUid( uid ) );
    }

    @Override
    public Collection<DataElementGroup> getAllDataElementGroups()
    {
        return i18n( i18nService, dataElementGroupStore.getAll() );
    }

    /** Returns the first group with the given name, or null if none exists. */
    @Override
    public DataElementGroup getDataElementGroupByName( String name )
    {
        List<DataElementGroup> dataElementGroups = new ArrayList<>(
            dataElementGroupStore.getAllEqName( name ) );

        if ( dataElementGroups.isEmpty() )
        {
            return null;
        }

        return i18n( i18nService, dataElementGroups.get( 0 ) );
    }

    /** Returns the first group with the given short name, or null if none exists. */
    @Override
    public DataElementGroup getDataElementGroupByShortName( String shortName )
    {
        List<DataElementGroup> dataElementGroups = new ArrayList<>( dataElementGroupStore.getAllEqShortName( shortName ) );

        if ( dataElementGroups.isEmpty() )
        {
            return null;
        }

        return i18n( i18nService, dataElementGroups.get( 0 ) );
    }

    @Override
    public DataElementGroup getDataElementGroupByCode( String code )
    {
        return i18n( i18nService, dataElementGroupStore.getByCode( code ) );
    }

    /**
     * Returns all groups containing the given data element.
     * NOTE(review): this removes non-matching groups from the collection
     * returned by getAllDataElementGroups() in place — confirm that the
     * returned collection is a fresh copy and not shared/cached state.
     */
    @Override
    public Collection<DataElementGroup> getGroupsContainingDataElement( DataElement dataElement )
    {
        Collection<DataElementGroup> groups = getAllDataElementGroups();

        Iterator<DataElementGroup> iterator = groups.iterator();

        while ( iterator.hasNext() )
        {
            if ( !iterator.next().getMembers().contains( dataElement ) )
            {
                iterator.remove();
            }
        }

        return groups;
    }

    @Override
    public Collection<DataElement> getDataElementsByGroupId( int groupId )
    {
        return i18n( i18nService, dataElementGroupStore.get( groupId ).getMembers() );
    }

    @Override
    public int getDataElementGroupCount()
    {
        return dataElementGroupStore.getCount();
    }

    @Override
    public int getDataElementGroupCountByName( String name )
    {
        return getCountByName( i18nService, dataElementGroupStore, name );
    }

    @Override
    public Collection<DataElementGroup> getDataElementGroupsBetween( int first, int max )
    {
        return getObjectsBetween( i18nService, dataElementGroupStore, first, max );
    }

    @Override
    public Collection<DataElementGroup> getDataElementGroupsBetweenByName( String name, int first, int max )
    {
        return getObjectsBetweenByName( i18nService, dataElementGroupStore, name, first, max );
    }

    // -------------------------------------------------------------------------
    // DataElementGroupSet
    // -------------------------------------------------------------------------

    @Override
    public int addDataElementGroupSet( DataElementGroupSet groupSet )
    {
        return dataElementGroupSetStore.save( groupSet );
    }

    @Override
    public void updateDataElementGroupSet( DataElementGroupSet groupSet )
    {
        dataElementGroupSetStore.update( groupSet );
    }

    @Override
    public void deleteDataElementGroupSet( DataElementGroupSet groupSet )
    {
        dataElementGroupSetStore.delete( groupSet );
    }

    @Override
    public DataElementGroupSet getDataElementGroupSet( int id )
    {
        return i18n( i18nService, dataElementGroupSetStore.get( id ) );
    }

    @Override
    public DataElementGroupSet getDataElementGroupSet( int id, boolean i18nGroups )
    {
        DataElementGroupSet groupSet = getDataElementGroupSet( id );

        if ( i18nGroups )
        {
            i18n( i18nService, groupSet.getDataElements() );
        }

        return groupSet;
    }

    @Override
    public DataElementGroupSet getDataElementGroupSet( String uid )
    {
        return i18n( i18nService, dataElementGroupSetStore.getByUid( uid ) );
    }

    /** Returns the first group set with the given name, or null if none exists. */
    @Override
    public DataElementGroupSet getDataElementGroupSetByName( String name )
    {
        List<DataElementGroupSet> dataElementGroupSets = new ArrayList<>(
            dataElementGroupSetStore.getAllEqName( name ) );

        if ( dataElementGroupSets.isEmpty() )
        {
            return null;
        }

        return i18n( i18nService, dataElementGroupSets.get( 0 ) );
    }

    @Override
    public Collection<DataElementGroupSet> getCompulsoryDataElementGroupSets()
    {
        Collection<DataElementGroupSet> groupSets = new ArrayList<>();

        for ( DataElementGroupSet groupSet : getAllDataElementGroupSets() )
        {
            if ( groupSet.isCompulsory() )
            {
                groupSets.add( groupSet );
            }
        }

        return groupSets;
    }

    @Override
    public Collection<DataElementGroupSet> getCompulsoryDataElementGroupSetsWithMembers()
    {
        return FilterUtils.filter( getAllDataElementGroupSets(), new Filter<DataElementGroupSet>()
        {
            @Override
            public boolean retain( DataElementGroupSet object )
            {
                return object.isCompulsory() && object.hasDataElementGroups();
            }
        } );
    }

    /**
     * Returns the compulsory group sets with members which the given data
     * element is not yet a member of.
     */
    @Override
    public Collection<DataElementGroupSet> getCompulsoryDataElementGroupSetsNotAssignedTo( DataElement dataElement )
    {
        Collection<DataElementGroupSet> groupSets = new ArrayList<>();

        for ( DataElementGroupSet groupSet : getCompulsoryDataElementGroupSets() )
        {
            if ( !groupSet.isMemberOfDataElementGroups( dataElement ) && groupSet.hasDataElementGroups() )
            {
                groupSets.add( groupSet );
            }
        }

        return groupSets;
    }

    @Override
    public Collection<DataElementGroupSet> getAllDataElementGroupSets()
    {
        return i18n( i18nService, dataElementGroupSetStore.getAll() );
    }

    @Override
    public Collection<DataElementGroupSet> getDataElementGroupSets( final Collection<Integer> identifiers )
    {
        Collection<DataElementGroupSet> groupSets = getAllDataElementGroupSets();

        return identifiers == null ? groupSets : FilterUtils.filter( groupSets, new Filter<DataElementGroupSet>()
        {
            @Override
            public boolean retain( DataElementGroupSet object )
            {
                return identifiers.contains( object.getId() );
            }
        } );
    }

    @Override
    public List<DataElementGroupSet> getDataElementGroupSetsByUid( Collection<String> uids )
    {
        return i18n( i18nService, dataElementGroupSetStore.getByUid( uids ) );
    }

    @Override
    public int getDataElementGroupSetCount()
    {
        return dataElementGroupSetStore.getCount();
    }

    @Override
    public int getDataElementGroupSetCountByName( String name )
    {
        return getCountByName( i18nService, dataElementGroupSetStore, name );
    }

    @Override
    public Collection<DataElementGroupSet> getDataElementGroupSetsBetween( int first, int max )
    {
        return getObjectsBetween( i18nService, dataElementGroupSetStore, first, max );
    }

    @Override
    public Collection<DataElementGroupSet> getDataElementGroupSetsBetweenByName( String name, int first, int max )
    {
        return getObjectsBetweenByName( i18nService, dataElementGroupSetStore, name, first, max );
    }
}
| |
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jrcs.diff;
import java.util.*;
/**
 * Implements a simple differencing algorithm.<p>
*
* @date $Date: 2004/02/28 03:35:36 $
* @version $Revision: 1.8 $
* @author <a href="mailto:juanco@suigeneris.org">Juanco Anez</a>
*
* <p><b>Overview of Algorithm</b></p>
*
* <p><i>by <a
* href='http://www.topmeadow.net/bwm'> bwm</a>
* </p>
*
* <p>The algorithm is optimised for situations where the input sequences
* have few repeated objects. If it is given input with many repeated
* objects it will report sub-optimal changes. However, given appropriate
* input, it is fast, and linear in memory usage.</p>
*
* <p>The algorithm consists of the following steps:</p>
* <ul>
* <li>compute an equivalence set for the input data</li>
 * <li>translate each element of the original
* and revised input sequences to a member of the equivalence set
* </li>
 * <li>match the input sequences to determine the deltas, i.e.
* the differences between the original and revised sequences.</li>
* </ul>
*
 * <p>The first step is to compute an equivalence set for the input data.
* The equivalence set is computed from objects that are in the original
* input sequence</p>
* <pre>
 * eq(x) = the index of the first occurrence of x in the original sequence.
* </pre>
*
* <p>With this equivalence function, the algorithm can compare integers rather
* than strings, which is considerably more efficient.</p>
*
* <p>The second step is to compute the datastructure on which the
* algorithm will operate. Having computed the equivalence function
* in the previous step, we can compute two arrays where
* indx[i] = eqs(orig[i]) and jndx[i] = eqs(rev[i]). The algorithm can
* now operate on indx and jndx instead of orig and rev. Thus, comparisons
* are then on O(int == int) instead of O(Object.equals(Object)).
* </p>
*
* <p>The algorithm now matches indx and jndx. Whilst indx[i] == jndx[i]
* it skips matching objects in the sequence. In seeking to match objects
* in the input sequence it assumes that each object is likely to be unique.
* It uses the known characteristics of the unique equivalence function. It can
* tell from the eq value if this object appeared in the other sequence
* at all. If it did not, there is no point in searching for a match.</p>
*
 * <p>Recall that the eq function value is the index of the earliest occurrence in
* the orig sequence. This information is used to search efficiently for
* the next match. The algorithm is perfect when all input objects are
* unique, but degrades when input objects are not unique. When input
* objects are not unique an optimal match may not be found, but a
* correct match will be.</p>
*
* <p>Having identified common matching objects in the orig and revised
* sequences, the differences between them are easily computed.
* </p>
*
* @see Delta
* @see Revision
* Modifications:
*
* 27/Apr/2003 bwm
* Added some comments whilst trying to figure out the algorithm
*
* 03 May 2003 bwm
* Created this implementation class by refactoring it out of the Diff
* class to enable plug in difference algorithms
*
*/
public class SimpleDiff
    implements DiffAlgorithm
{
    /** Equivalence-index marker: orig[i] does not occur anywhere in rev. */
    static final int NOT_FOUND_i = -2;
    /** Equivalence-index marker: rev[j] does not occur anywhere in orig.
        Distinct from NOT_FOUND_i so the two markers can never compare equal. */
    static final int NOT_FOUND_j = -1;
    /** Sentinel stored past the end of each index array; terminates scans. */
    static final int EOS = Integer.MAX_VALUE;

    public SimpleDiff()
    {
    }

    /**
     * Advances i until ndx[i] >= target and returns the new position.
     * Termination relies on the EOS sentinel at the end of ndx.
     * NOTE(review): not referenced by diff() in this file — presumably a
     * hook for subclasses; confirm before removing.
     */
    protected int scan(int[] ndx, int i, int target)
    {
        while (ndx[i] < target)
        {
            i++;
        }
        return i;
    }

    /**
     * Compute the difference between original and revised sequences.
     *
     * @param orig The original sequence.
     * @param rev The revised sequence to be compared with the original.
     * @return A Revision object describing the differences.
     * @throws DifferentiationFailedException if the diff could not be computed.
     */
    public Revision diff(Object[] orig, Object[] rev)
        throws DifferentiationFailedException
    {
        // create map eqs, such that for each item in both orig and rev
        //   eqs(item) = firstOccurrence(item, orig)
        Map eqs = buildEqSet(orig, rev);

        // create an array such that
        //   indx[i] = NOT_FOUND_i if orig[i] is not in rev
        //   indx[i] = firstOccurrence(orig[i], orig) otherwise
        int[] indx = buildIndex(eqs, orig, NOT_FOUND_i);

        // create an array such that
        //   jndx[j] = NOT_FOUND_j if rev[j] is not in orig
        //   jndx[j] = firstOccurrence(rev[j], orig) otherwise
        int[] jndx = buildIndex(eqs, rev, NOT_FOUND_j);

        // What in effect has been done is to build a unique hash for each
        // item that is in both orig and rev, and to label each item in orig
        // and rev with that hash value, or with a marker saying the item is
        // not common to both.
        eqs = null; // let gc know we're done with this

        Revision deltas = new Revision();
        int i = 0;
        int j = 0;

        // skip leading items that are equal
        for (; indx[i] != EOS && indx[i] == jndx[j]; i++, j++)
        {
            /* void */
        }

        while (indx[i] != jndx[j])
        { // only equal if both == EOS
            // indx[i] and jndx[j] differ, so a delta starts here
            int ia = i;
            int ja = j;

            // find the end of this delta: advance both cursors until they
            // point at a common item again
            do
            {
                // look down rev for a match:
                // stop at a match, or if FO(rev[j]) > FO(orig[i]),
                // or at the end (jndx[j] == EOS)
                while (jndx[j] < 0 || jndx[j] < indx[i])
                {
                    j++;
                }
                // look down orig for a match:
                // stop at a match, or if FO(orig[i]) > FO(rev[j]),
                // or at the end (indx[i] == EOS)
                while (indx[i] < 0 || indx[i] < jndx[j])
                {
                    i++;
                }
                // this doesn't compare each line with each other line, so it
                // won't find all matching lines (sub-optimal but correct —
                // see the class comment on repeated objects)
            }
            while (indx[i] != jndx[j]);

            // on exit we have a match; it is possible to have overshot,
            // so back up over any trailing items that actually match
            while (i > ia && j > ja && indx[i - 1] == jndx[j - 1])
            {
                --i;
                --j;
            }

            deltas.addDelta(Delta.newDelta(new Chunk(orig, ia, i - ia),
                                           new Chunk(rev, ja, j - ja)));

            // skip matching items before the next delta
            for (; indx[i] != EOS && indx[i] == jndx[j]; i++, j++)
            {
                /* void */
            }
        }
        return deltas;
    }

    /**
     * Create a <code>Map</code> from each common item in orig and rev
     * to the index of its first occurrence in orig.
     *
     * @param orig the original sequence of items
     * @param rev the revised sequence of items
     * @return map from each item common to both sequences to its first
     *         index in orig
     */
    protected Map buildEqSet(Object[] orig, Object[] rev)
    {
        // construct the set of objects that orig and rev have in common:
        // first a set of everything in orig ...
        Set items = new HashSet(Arrays.asList(orig));
        // ... then drop everything not also present in rev
        items.retainAll(Arrays.asList(rev));

        Map eqs = new HashMap();
        for (int i = 0; i < orig.length; i++)
        {
            // if it's a common item and hasn't been seen before
            if (items.contains(orig[i]))
            {
                // record its first position in orig
                eqs.put(orig[i], Integer.valueOf(i));
                // and make sure it's not considered again
                items.remove(orig[i]);
            }
        }
        return eqs;
    }

    /**
     * Build an array such that a[i] = eqs(seq[i]), or NF where eqs(seq[i])
     * is undefined. An EOS sentinel is appended as the last element.
     *
     * @param eqs a mapping from Object to Integer
     * @param seq a sequence of objects
     * @param NF the not-found marker for items absent from eqs
     */
    protected int[] buildIndex(Map eqs, Object[] seq, int NF)
    {
        // one extra slot for the EOS sentinel
        int[] result = new int[seq.length + 1];
        for (int i = 0; i < seq.length; i++)
        {
            Integer value = (Integer) eqs.get(seq[i]);
            if (value == null || value.intValue() < 0)
            {
                result[i] = NF;
            }
            else
            {
                result[i] = value.intValue();
            }
        }
        result[seq.length] = EOS;
        return result;
    }
}
| |
/**
* Copyright 2015 Nicolas Ferry <${email}>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vsepml.storm.mcsuite;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;

import javax.management.RuntimeErrorException;

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.esotericsoftware.minlog.Log;

import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;
/**
 * Storm bolt that persists tuples into a CouchDB database via the
 * {@code _bulk_docs} API, optionally batching writes by size and time.
 * Fixes vs. previous version: the IOException cause is preserved when
 * connecting fails, credentials are encoded with an explicit UTF-8 charset,
 * request/response streams are closed, numeric config values supplied as
 * Long no longer cause a ClassCastException, and a log-message typo is fixed.
 */
public class CouchBolt implements IRichBolt {

    private static final Logger LOG = LoggerFactory.getLogger(CouchBolt.class);

    /** Config key: base URL of the CouchDB server. */
    public static String COUCHDB_URL = "stormtest.couchdb.url";
    /** Config key: CouchDB user name (optional, used with COUCHDB_PASSWORD). */
    public static String COUCHDB_USER = "stormtest.couchdb.user";
    /** Config key: CouchDB password. */
    public static String COUCHDB_PASSWORD = "stormtest.couchdb.passwd";
    /** Config key: number of failed flushes tolerated before the bolt crashes. */
    public static String COUCHDB_RETRIES = "stormtest.couchdb.retries";

    private OutputCollector collector;
    private TupleToJSON serializer;
    private URL url;                   // <db>/_bulk_docs endpoint, set in prepare()
    private String encodedAuth = null; // Base64 "user:password", or null when no auth configured
    private int flushIntervalSecs = 0;
    private int batchSize = 0;
    private LinkedList<Tuple> buffer;  // tuples accepted but not yet confirmed by CouchDB
    private long lastRequestTime;
    private int retries;
    private int currentTry;
    private Map config;

    public CouchBolt(TupleToJSON serializer) {
        this.serializer = serializer;
    }

    /**
     * Enable batching: flush whenever batchSize tuples have accumulated, and
     * at most once per flushIntervalSecs when retrying a failed flush.
     *
     * @return this bolt, for call chaining
     */
    public CouchBolt withBatching(int flushIntervalSecs, int batchSize) {
        this.flushIntervalSecs = flushIntervalSecs;
        this.batchSize = batchSize;
        return this;
    }

    @Override
    public void prepare(Map config, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
        this.buffer = new LinkedList<>();
        this.config = config;
        // Storm commonly deserializes numeric config values as Long; accept any
        // Number instead of blindly casting to int.
        this.retries = ((Number) config.getOrDefault(COUCHDB_RETRIES, 3)).intValue();
        this.currentTry = 0;
        this.lastRequestTime = System.currentTimeMillis();
        String surl = (String) config.get(COUCHDB_URL) + "/" + serializer.getDatabase(null);
        try {
            URL dbUrl = new URL(surl);
            if (config.containsKey(COUCHDB_USER) && config.containsKey(COUCHDB_PASSWORD)) {
                String userpass = config.get(COUCHDB_USER) + ":" + config.get(COUCHDB_PASSWORD);
                // Encode with an explicit charset; bare getBytes() uses the platform
                // default and can corrupt non-ASCII credentials.
                this.encodedAuth = Base64.getEncoder().encodeToString(userpass.getBytes(StandardCharsets.UTF_8));
            }
            // Connect to CouchDB and make sure that the database exists, or create it
            HttpURLConnection con = (HttpURLConnection) dbUrl.openConnection();
            con.setRequestMethod("PUT");
            con.setRequestProperty("Accept", "application/json");
            if (this.encodedAuth != null) con.setRequestProperty("Authorization", "Basic " + this.encodedAuth);
            switch (con.getResponseCode()) {
            case HttpURLConnection.HTTP_CREATED:
            case HttpURLConnection.HTTP_PRECON_FAILED:
                // Created, or the database already exists
                this.url = new URL(surl + "/_bulk_docs");
                break;
            case HttpURLConnection.HTTP_BAD_REQUEST:
                throw new RuntimeException("Error creating database: " + serializer.getDatabase(null));
            case HttpURLConnection.HTTP_UNAUTHORIZED:
                throw new RuntimeException("Bad username/password for CouchDB: " + surl);
            default:
                throw new RuntimeException("Error connecting to CouchDB url: " + surl);
            }
        } catch (IOException e) {
            // Preserve the cause so the underlying network error is not lost.
            throw new RuntimeException("Error connecting to CouchDB url: " + surl, e);
        }
    }

    @Override
    public void execute(Tuple input) {
        boolean flush = false;
        // Add new incoming tuple and decide if it is time to flush
        if (TupleUtils.isTick(input)) {
            flush = true;
        } else {
            this.buffer.add(input);
            if (this.buffer.size() >= this.batchSize) flush = true;
        }
        if (flush) {
            // If this is a flush retry, wait for the flush interval before we try
            // again (use a long multiplication to avoid int overflow).
            if (this.currentTry > 0 && (System.currentTimeMillis() - this.lastRequestTime) < this.flushIntervalSecs * 1000L) return;
            if (this.buffer.size() < 1) return;
            this.lastRequestTime = System.currentTimeMillis();
            boolean success = false;
            try {
                // Time to write some documents
                HttpURLConnection con = (HttpURLConnection) this.url.openConnection();
                con.setDoOutput(true);
                con.setRequestMethod("POST");
                con.setRequestProperty("Accept", "application/json");
                con.setRequestProperty("Content-Type", "application/json");
                if (this.encodedAuth != null) con.setRequestProperty("Authorization", "Basic " + this.encodedAuth);
                // Write all documents in buffer (up to batch size) to the request;
                // counter records how many were actually written.
                int counter = 0;
                try (Writer request = new OutputStreamWriter(con.getOutputStream(), StandardCharsets.UTF_8)) {
                    request.write("{ \"docs\": [");
                    for (Iterator it = buffer.iterator(); it.hasNext();) {
                        request.write(this.serializer.getJSONString((Tuple) it.next()));
                        if (counter++ >= this.batchSize || !it.hasNext()) break;
                        request.write(",");
                    }
                    request.write("] }");
                }
                // Read and parse the server's response; close the stream when done
                Object jsonReturn;
                try (Reader response = new InputStreamReader(con.getInputStream(), StandardCharsets.UTF_8)) {
                    jsonReturn = JSONValue.parseWithException(response);
                }
                if (con.getResponseCode() == HttpURLConnection.HTTP_CREATED) {
                    // Note all successful inserts
                    Set<String> insertedIds = new HashSet<>();
                    JSONArray docReturns = (JSONArray) jsonReturn;
                    for (Object obj : docReturns) {
                        JSONObject doc = (JSONObject) obj;
                        if (doc.containsKey("ok")) insertedIds.add((String) doc.get("id"));
                    }
                    // Drop acknowledged tuples from the buffer; anything the server
                    // did not confirm stays queued for the next flush.
                    success = true;
                    for (Iterator it = buffer.iterator(); it.hasNext();) {
                        Tuple buffered = (Tuple) it.next();
                        if (insertedIds.contains(serializer.getId(buffered))) it.remove();
                        else success = false;
                        if (--counter < 1) break;
                    }
                    if (!success) LOG.error("CouchDB response did not match request");
                } else {
                    LOG.error("CouchDB HTTP code: " + con.getResponseCode());
                }
            } catch (IOException | ParseException e) {
                // Best-effort: log and retry on the next flush window.
                LOG.error("CouchDB: " + e.toString());
            }
            // If something failed, we need to try again
            if (success) this.currentTry = 0;
            else this.currentTry++;
        }
        // Crash if we tried too many times
        if (this.currentTry > this.retries)
            throw new RuntimeException("Could not write documents to CouchDB: " + this.url.toString());
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Sink bolt: emits no streams.
    }

    @Override
    public void cleanup() {}

    @Override
    public Map<String, Object> getComponentConfiguration() {
        // When batching is enabled, ask Storm for tick tuples so time-based
        // flushes happen even if no new data arrives.
        if (flushIntervalSecs > 0) {
            Config conf = new Config();
            conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, flushIntervalSecs);
            return conf;
        } else {
            return null;
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotsitewise.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotsitewise-2019-12-02/DescribeProject" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeProjectResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The ID of the project. */
    private String projectId;

    /**
     * The <a href="https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html">ARN</a> of the
     * project, in the format
     * {@code arn:${Partition}:iotsitewise:${Region}:${Account}:project/${ProjectId}}.
     */
    private String projectArn;

    /** The name of the project. */
    private String projectName;

    /** The ID of the portal that the project is in. */
    private String portalId;

    /** The project's description. */
    private String projectDescription;

    /** The date the project was created, in Unix epoch time. */
    private java.util.Date projectCreationDate;

    /** The date the project was last updated, in Unix epoch time. */
    private java.util.Date projectLastUpdateDate;

    /**
     * Sets the ID of the project.
     *
     * @param projectId
     *        The ID of the project.
     */
    public void setProjectId(String projectId) {
        this.projectId = projectId;
    }

    /**
     * Returns the ID of the project.
     *
     * @return The ID of the project.
     */
    public String getProjectId() {
        return this.projectId;
    }

    /**
     * Fluent variant of {@link #setProjectId(String)}.
     *
     * @param projectId
     *        The ID of the project.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withProjectId(String projectId) {
        setProjectId(projectId);
        return this;
    }

    /**
     * Sets the ARN of the project:
     * {@code arn:${Partition}:iotsitewise:${Region}:${Account}:project/${ProjectId}}.
     *
     * @param projectArn
     *        The ARN of the project.
     */
    public void setProjectArn(String projectArn) {
        this.projectArn = projectArn;
    }

    /**
     * Returns the ARN of the project:
     * {@code arn:${Partition}:iotsitewise:${Region}:${Account}:project/${ProjectId}}.
     *
     * @return The ARN of the project.
     */
    public String getProjectArn() {
        return this.projectArn;
    }

    /**
     * Fluent variant of {@link #setProjectArn(String)}.
     *
     * @param projectArn
     *        The ARN of the project.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withProjectArn(String projectArn) {
        setProjectArn(projectArn);
        return this;
    }

    /**
     * Sets the name of the project.
     *
     * @param projectName
     *        The name of the project.
     */
    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    /**
     * Returns the name of the project.
     *
     * @return The name of the project.
     */
    public String getProjectName() {
        return this.projectName;
    }

    /**
     * Fluent variant of {@link #setProjectName(String)}.
     *
     * @param projectName
     *        The name of the project.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withProjectName(String projectName) {
        setProjectName(projectName);
        return this;
    }

    /**
     * Sets the ID of the portal that the project is in.
     *
     * @param portalId
     *        The ID of the portal that the project is in.
     */
    public void setPortalId(String portalId) {
        this.portalId = portalId;
    }

    /**
     * Returns the ID of the portal that the project is in.
     *
     * @return The ID of the portal that the project is in.
     */
    public String getPortalId() {
        return this.portalId;
    }

    /**
     * Fluent variant of {@link #setPortalId(String)}.
     *
     * @param portalId
     *        The ID of the portal that the project is in.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withPortalId(String portalId) {
        setPortalId(portalId);
        return this;
    }

    /**
     * Sets the project's description.
     *
     * @param projectDescription
     *        The project's description.
     */
    public void setProjectDescription(String projectDescription) {
        this.projectDescription = projectDescription;
    }

    /**
     * Returns the project's description.
     *
     * @return The project's description.
     */
    public String getProjectDescription() {
        return this.projectDescription;
    }

    /**
     * Fluent variant of {@link #setProjectDescription(String)}.
     *
     * @param projectDescription
     *        The project's description.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withProjectDescription(String projectDescription) {
        setProjectDescription(projectDescription);
        return this;
    }

    /**
     * Sets the date the project was created, in Unix epoch time.
     *
     * @param projectCreationDate
     *        The date the project was created, in Unix epoch time.
     */
    public void setProjectCreationDate(java.util.Date projectCreationDate) {
        this.projectCreationDate = projectCreationDate;
    }

    /**
     * Returns the date the project was created, in Unix epoch time.
     *
     * @return The date the project was created, in Unix epoch time.
     */
    public java.util.Date getProjectCreationDate() {
        return this.projectCreationDate;
    }

    /**
     * Fluent variant of {@link #setProjectCreationDate(java.util.Date)}.
     *
     * @param projectCreationDate
     *        The date the project was created, in Unix epoch time.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withProjectCreationDate(java.util.Date projectCreationDate) {
        setProjectCreationDate(projectCreationDate);
        return this;
    }

    /**
     * Sets the date the project was last updated, in Unix epoch time.
     *
     * @param projectLastUpdateDate
     *        The date the project was last updated, in Unix epoch time.
     */
    public void setProjectLastUpdateDate(java.util.Date projectLastUpdateDate) {
        this.projectLastUpdateDate = projectLastUpdateDate;
    }

    /**
     * Returns the date the project was last updated, in Unix epoch time.
     *
     * @return The date the project was last updated, in Unix epoch time.
     */
    public java.util.Date getProjectLastUpdateDate() {
        return this.projectLastUpdateDate;
    }

    /**
     * Fluent variant of {@link #setProjectLastUpdateDate(java.util.Date)}.
     *
     * @param projectLastUpdateDate
     *        The date the project was last updated, in Unix epoch time.
     * @return This object, so that method calls can be chained together.
     */
    public DescribeProjectResult withProjectLastUpdateDate(java.util.Date projectLastUpdateDate) {
        setProjectLastUpdateDate(projectLastUpdateDate);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getProjectId() != null)
            buf.append("ProjectId: ").append(getProjectId()).append(",");
        if (getProjectArn() != null)
            buf.append("ProjectArn: ").append(getProjectArn()).append(",");
        if (getProjectName() != null)
            buf.append("ProjectName: ").append(getProjectName()).append(",");
        if (getPortalId() != null)
            buf.append("PortalId: ").append(getPortalId()).append(",");
        if (getProjectDescription() != null)
            buf.append("ProjectDescription: ").append(getProjectDescription()).append(",");
        if (getProjectCreationDate() != null)
            buf.append("ProjectCreationDate: ").append(getProjectCreationDate()).append(",");
        if (getProjectLastUpdateDate() != null)
            buf.append("ProjectLastUpdateDate: ").append(getProjectLastUpdateDate());
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof DescribeProjectResult))
            return false;
        DescribeProjectResult other = (DescribeProjectResult) obj;
        // Objects.equals reproduces the null-xor-null / equals pair of checks
        // for each field, so the result is unchanged.
        return java.util.Objects.equals(getProjectId(), other.getProjectId())
                && java.util.Objects.equals(getProjectArn(), other.getProjectArn())
                && java.util.Objects.equals(getProjectName(), other.getProjectName())
                && java.util.Objects.equals(getPortalId(), other.getPortalId())
                && java.util.Objects.equals(getProjectDescription(), other.getProjectDescription())
                && java.util.Objects.equals(getProjectCreationDate(), other.getProjectCreationDate())
                && java.util.Objects.equals(getProjectLastUpdateDate(), other.getProjectLastUpdateDate());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation with seed 1 as the
        // classic manual prime loop, so hash values are unchanged.
        return java.util.Objects.hash(getProjectId(), getProjectArn(), getProjectName(), getPortalId(),
                getProjectDescription(), getProjectCreationDate(), getProjectLastUpdateDate());
    }

    @Override
    public DescribeProjectResult clone() {
        try {
            return (DescribeProjectResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.SecureIOUtils;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.server.tasktracker.Localizer;
import org.apache.hadoop.util.ProcessTree;
import org.apache.hadoop.util.Shell;
import org.apache.log4j.Appender;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
/**
* A simple logger to handle the task-specific user logs.
* This class uses the system property <code>hadoop.log.dir</code>.
*
* This class is for Map/Reduce internal use only.
*
*/
public class TaskLog {
private static final Log LOG =
    LogFactory.getLog(TaskLog.class);

// Name of the per-job user-log subdirectory, used both under
// hadoop.log.dir and under each mapred local directory.
static final String USERLOGS_DIR_NAME = "userlogs";

// Root of the user logs: {hadoop.log.dir}/userlogs.
private static final File LOG_DIR =
    new File(getBaseLogDir(), USERLOGS_DIR_NAME).getAbsoluteFile();

// localFS is set in (and used by) writeToIndexFile()
static LocalFileSystem localFS = null;

static {
    // Make sure the shared user-log root exists before any task logs to it.
    if (!LOG_DIR.exists()) {
        LOG_DIR.mkdirs();
    }
}

// Round-robin counter used by getNextLocalDir() to spread attempt log
// directories across the configured mapred local directories.
static AtomicInteger rotor = new AtomicInteger(0);
/**
 * Path filter that filters out the userlogs directory, so log directories
 * are skipped when scanning the mapred local directories.
 */
public static final PathFilter USERLOGS_PATH_FILTER = new PathFilter() {
    @Override
    public boolean accept(Path path) {
        // Reject any path whose string form contains the userlogs dir name.
        return !path.toString().contains(USERLOGS_DIR_NAME);
    }
};
/**
 * Create log directory for the given attempt. This involves creating the
 * following and setting proper permissions for the new directories
 * <br>{hadoop.log.dir}/userlogs/&lt;jobid&gt;
 * <br>{hadoop.log.dir}/userlogs/&lt;jobid&gt;/&lt;attempt-id-as-symlink&gt;
 * <br>{one of the mapred-local-dirs}/userlogs/&lt;jobid&gt;
 * <br>{one of the mapred-local-dirs}/userlogs/&lt;jobid&gt;/&lt;attempt-id&gt;
 *
 * @param taskID attempt-id for which log dir is to be created
 * @param isCleanup Is this attempt a cleanup attempt ?
 * @param localDirs mapred local directories
 * @throws IOException if deleting, creating, symlinking or setting
 *         permissions on any of the directories fails
 */
public static void createTaskAttemptLogDir(TaskAttemptID taskID,
    boolean isCleanup, String[] localDirs) throws IOException {
  String cleanupSuffix = isCleanup ? ".cleanup" : "";
  String strAttemptLogDir = getTaskAttemptLogDir(taskID,
      cleanupSuffix, localDirs);
  File attemptLogDir = new File(strAttemptLogDir);
  if (attemptLogDir.exists()) {
    // Delete attempt log dir if it already exists on this TT and we are doing
    // job recovery
    if (!FileUtil.fullyDelete(attemptLogDir)) {
      throw new IOException("Deletion of existing " + attemptLogDir + " failed.");
    }
  }
  if (!attemptLogDir.mkdirs()) {
    throw new IOException("Creation of " + attemptLogDir + " failed.");
  }
  // Symlink {hadoop.log.dir}/userlogs/<jobid>/<attempt-id> to the real
  // attempt directory on one of the mapred local disks.
  String strLinkAttemptLogDir = getJobDir(
      taskID.getJobID()).getAbsolutePath() + File.separatorChar +
      taskID.toString() + cleanupSuffix;
  if (FileUtil.symLink(strAttemptLogDir, strLinkAttemptLogDir) != 0) {
    throw new IOException("Creation of symlink from " +
        strLinkAttemptLogDir + " to " + strAttemptLogDir +
        " failed.");
  }
  // Restrict the attempt log directory to its owner only (0700).
  FileSystem localFs = FileSystem.getLocal(new Configuration());
  localFs.setPermission(new Path(attemptLogDir.getPath()),
      new FsPermission((short)0700));
}
/**
 * Get one of the mapred local directories in a round-robin way.
 *
 * @param localDirs mapred local directories
 * @return the next chosen mapred local directory
 * @throws IOException if no local directories are configured
 */
private static String getNextLocalDir(String[] localDirs) throws IOException {
  if (localDirs.length == 0) {
    throw new IOException ("Not enough mapred.local.dirs ("
        + localDirs.length + ")");
  }
  // Mask the sign bit instead of using Math.abs(): when the counter wraps to
  // Integer.MIN_VALUE, Math.abs() still returns a negative number, which
  // would produce a negative array index here.
  int next = rotor.getAndIncrement() & Integer.MAX_VALUE;
  return localDirs[next % localDirs.length];
}
/**
 * Build the attempt log directory path for the given attempt-id under a
 * round-robin-selected mapred local directory.
 *
 * @param taskID attempt-id for which the log dir path is needed
 * @param cleanupSuffix ".cleanup" if this attempt is a cleanup attempt
 * @param localDirs mapred local directories
 * @return target task attempt log directory
 * @throws IOException if no local directories are configured
 */
public static String getTaskAttemptLogDir(TaskAttemptID taskID,
    String cleanupSuffix, String[] localDirs) throws IOException {
  // Layout: <local-dir>/userlogs/<job-id>/<attempt-id>[.cleanup]
  StringBuilder path = new StringBuilder()
      .append(getNextLocalDir(localDirs))
      .append(File.separatorChar)
      .append(USERLOGS_DIR_NAME)
      .append(File.separatorChar)
      .append(taskID.getJobID().toString())
      .append(File.separatorChar)
      .append(taskID.toString())
      .append(cleanupSuffix);
  return path.toString();
}
/** Return the file that holds the given kind of log for the attempt. */
public static File getTaskLogFile(TaskAttemptID taskid, boolean isCleanup,
    LogName filter) {
  File attemptDir = getAttemptDir(taskid, isCleanup);
  return new File(attemptDir, filter.toString());
}
/**
 * Get the real task-log file path as a shell-usable path string.
 *
 * @param location location of the log file; this should point to an
 *        attempt directory
 * @param filter the kind of log file
 * @return shell path of the log file
 * @throws IOException if the shell path cannot be computed
 */
static String getRealTaskLogFilePath(String location, LogName filter)
    throws IOException {
  File logFile = new File(location, filter.toString());
  return FileUtil.makeShellPath(logFile);
}
// Describes where one log lives inside a (possibly shared) physical log
// file: the directory it is stored in plus the [start, start+length)
// byte range that belongs to this attempt.
static class LogFileDetail {
// Prefix of the first line of a log index file.
final static String LOCATION = "LOG_DIR:";
// Directory in which the log files are actually stored.
String location;
// Byte offset at which this attempt's segment begins.
long start;
// Number of bytes in this attempt's segment.
long length;
}
/**
 * Read the attempt's log index file and return, for every {@link LogName},
 * where that log lives and which byte range belongs to the attempt.
 *
 * @param taskid the attempt whose index file is read
 * @param isCleanup whether the attempt is a cleanup attempt
 * @return map from log kind to its location and byte range
 * @throws IOException if the index file is missing or empty
 */
static Map<LogName, LogFileDetail> getAllLogsFileDetails(
    TaskAttemptID taskid, boolean isCleanup) throws IOException {
  Map<LogName, LogFileDetail> allLogsFileDetails =
      new HashMap<LogName, LogFileDetail>();
  File indexFile = getIndexFile(taskid, isCleanup);
  BufferedReader fis = new BufferedReader(new FileReader(indexFile));
  try {
    //the format of the index file is
    //LOG_DIR: <the dir where the task logs are really stored>
    //stdout:<start-offset in the stdout file> <length>
    //stderr:<start-offset in the stderr file> <length>
    //syslog:<start-offset in the syslog file> <length>
    String str = fis.readLine();
    if (str == null) { //the file doesn't have anything
      throw new IOException ("Index file for the log of " + taskid+" doesn't exist.");
    }
    String loc = str.substring(str.indexOf(LogFileDetail.LOCATION)+
        LogFileDetail.LOCATION.length());
    //special cases are the debugout and profile.out files. They are guaranteed
    //to be associated with each task attempt since jvm reuse is disabled
    //when profiling/debugging is enabled
    for (LogName filter : new LogName[] { LogName.DEBUGOUT, LogName.PROFILE }) {
      LogFileDetail l = new LogFileDetail();
      l.location = loc;
      l.length = new File(l.location, filter.toString()).length();
      l.start = 0;
      allLogsFileDetails.put(filter, l);
    }
    str = fis.readLine();
    while (str != null) {
      LogFileDetail l = new LogFileDetail();
      l.location = loc;
      int idx = str.indexOf(':');
      LogName filter = LogName.valueOf(str.substring(0, idx).toUpperCase());
      str = str.substring(idx + 1);
      String[] startAndLen = str.split(" ");
      l.start = Long.parseLong(startAndLen[0]);
      l.length = Long.parseLong(startAndLen[1]);
      // A recorded length of -1 means "not segmented": the attempt owns
      // the whole physical file, so use its on-disk size.
      if (l.length == -1L) {
        l.length = new File(l.location, filter.toString()).length();
      }
      allLogsFileDetails.put(filter, l);
      str = fis.readLine();
    }
  } finally {
    // Close the reader even when parsing fails; the original leaked the
    // file descriptor on any exception thrown above.
    fis.close();
  }
  return allLogsFileDetails;
}
/** Temporary index file used so that index updates can be renamed atomically. */
private static File getTmpIndexFile(TaskAttemptID taskid, boolean isCleanup) {
  File attemptDir = getAttemptDir(taskid, isCleanup);
  return new File(attemptDir, "log.tmp");
}
/** Index file describing where the attempt's log segments are stored. */
static File getIndexFile(TaskAttemptID taskid, boolean isCleanup) {
  File attemptDir = getAttemptDir(taskid, isCleanup);
  return new File(attemptDir, "log.index");
}
/**
 * Obtain the owner of the log dir. This is determined by checking the
 * owner of the job's log directory on the raw local file system.
 */
static String obtainLogDirOwner(TaskAttemptID taskid) throws IOException {
  FileSystem rawFs = FileSystem.getLocal(new Configuration()).getRaw();
  Path jobLogDir = new Path(getJobDir(taskid.getJobID()).getAbsolutePath());
  return rawFs.getFileStatus(jobLogDir).getOwner();
}
// Base hadoop log directory, taken from the hadoop.log.dir system
// property (may be null if the property is unset).
static String getBaseLogDir() {
return System.getProperty("hadoop.log.dir");
}
/** Resolve the log directory of the attempt, honoring cleanup attempts. */
static File getAttemptDir(TaskAttemptID taskid, boolean isCleanup) {
  String attemptName = taskid.toString();
  if (isCleanup) {
    attemptName += ".cleanup";
  }
  return getAttemptDir(taskid.getJobID().toString(), attemptName);
}
// Resolve <job-log-dir>/<taskid>. The taskid argument must already be
// fully formed, i.e. carry the optional ".cleanup" suffix when needed.
static File getAttemptDir(String jobid, String taskid) {
// taskid should be fully formed and it should have the optional
// .cleanup suffix
// TODO(todd) should this have cleanup suffix?
return new File(getJobDir(jobid), taskid);
}
// Log kinds whose byte ranges are tracked via the per-attempt index file;
// DEBUGOUT and PROFILE are handled separately (never segmented).
static final List<LogName> LOGS_TRACKED_BY_INDEX_FILES =
Arrays.asList(LogName.STDOUT, LogName.STDERR, LogName.SYSLOG);
// Attempt currently writing logs in this JVM; used by syncLogs() to
// detect JVM reuse across task attempts.
private static TaskAttemptID currentTaskid;
/**
 * Map to store previous and current lengths.
 */
// For each tracked log kind, element [0] is the start offset of the
// current attempt's segment and element [1] is the current end offset.
private static Map<LogName, Long[]> logLengths =
new HashMap<LogName, Long[]>();
static {
// Initialize every tracked log kind with a zero-length [start, end) range.
for (LogName logName : LOGS_TRACKED_BY_INDEX_FILES) {
logLengths.put(logName, new Long[] { Long.valueOf(0L),
Long.valueOf(0L) });
}
}
/**
 * Write the log index file for the given attempt. To keep updates atomic
 * the index is first written to a temporary file which is then renamed
 * over the real index file.
 *
 * @param logLocation directory where the log files are really stored
 * @param currentTaskid attempt whose index file is written
 * @param isCleanup whether the attempt is a cleanup attempt
 * @param lengths per-log-kind {start, end} offsets to record
 * @throws IOException if the index file cannot be written
 */
static void writeToIndexFile(String logLocation,
    TaskAttemptID currentTaskid, boolean isCleanup,
    Map<LogName, Long[]> lengths) throws IOException {
  // To ensure atomicity of updates to index file, write to temporary index
  // file first and then rename.
  File tmpIndexFile = getTmpIndexFile(currentTaskid, isCleanup);
  BufferedOutputStream bos =
      new BufferedOutputStream(
          SecureIOUtils.createForWrite(tmpIndexFile, 0644));
  DataOutputStream dos = new DataOutputStream(bos);
  try {
    //the format of the index file is
    //LOG_DIR: <the dir where the task logs are really stored>
    //STDOUT: <start-offset in the stdout file> <length>
    //STDERR: <start-offset in the stderr file> <length>
    //SYSLOG: <start-offset in the syslog file> <length>
    dos.writeBytes(LogFileDetail.LOCATION
        + logLocation
        + "\n");
    for (LogName logName : LOGS_TRACKED_BY_INDEX_FILES) {
      Long[] lens = lengths.get(logName);
      dos.writeBytes(logName.toString() + ":"
          + lens[0].toString() + " "
          + Long.toString(lens[1].longValue() - lens[0].longValue())
          + "\n");
    }
  } finally {
    // Close even when a write fails; the original leaked the stream (and
    // kept the temp file open) on any exception thrown above.
    dos.close();
  }
  File indexFile = getIndexFile(currentTaskid, isCleanup);
  Path indexFilePath = new Path(indexFile.getAbsolutePath());
  Path tmpIndexFilePath = new Path(tmpIndexFile.getAbsolutePath());
  if (localFS == null) {// set localFS once
    localFS = FileSystem.getLocal(new Configuration());
  }
  // NOTE(review): the rename() result is ignored; a failed rename silently
  // leaves a stale index file — consider checking the return value.
  localFS.rename (tmpIndexFilePath, indexFilePath);
}
// Flushes all task-log appenders and records the current [start, end)
// offsets of the tracked log files in the attempt's index file.
// NOTE(review): the taskid comparisons below use reference equality (!=),
// apparently relying on the same TaskAttemptID instance being passed for
// the lifetime of an attempt in a reused JVM — confirm before changing.
@SuppressWarnings("unchecked")
public synchronized static void syncLogs(String logLocation,
TaskAttemptID taskid,
boolean isCleanup,
boolean segmented)
throws IOException {
// Push any buffered stdout/stderr before capturing file lengths.
System.out.flush();
System.err.flush();
// Flush every TaskLogAppender attached to any live log4j logger.
Enumeration<Logger> allLoggers = LogManager.getCurrentLoggers();
while (allLoggers.hasMoreElements()) {
Logger l = allLoggers.nextElement();
Enumeration<Appender> allAppenders = l.getAllAppenders();
while (allAppenders.hasMoreElements()) {
Appender a = allAppenders.nextElement();
if (a instanceof TaskLogAppender) {
((TaskLogAppender)a).flush();
}
}
}
// First call in this JVM: adopt this attempt as the current one.
if (currentTaskid == null) {
currentTaskid = taskid;
}
// set start and end
for (LogName logName : LOGS_TRACKED_BY_INDEX_FILES) {
// A new attempt in a reused JVM starts its segment at the current
// end of the log file.
if (currentTaskid != taskid) {
// Set start = current-end
logLengths.get(logName)[0] = Long.valueOf(new File(
logLocation, logName.toString()).length());
}
// Set current end; -1 means "unsegmented: attempt owns whole file".
logLengths.get(logName)[1]
= (segmented
? (Long.valueOf
(new File(logLocation, logName.toString()).length()))
: -1);
}
if (currentTaskid != taskid) {
if (currentTaskid != null) {
LOG.info("Starting logging for a new task " + taskid
+ " in the same JVM as that of the first task " + logLocation);
}
currentTaskid = taskid;
}
writeToIndexFile(logLocation, taskid, isCleanup, logLengths);
}
/**
 * The filter for userlogs: the kinds of log files a task attempt produces,
 * each mapped to the on-disk file name used for it.
 */
public static enum LogName {
  /** Log on the stdout of the task. */
  STDOUT("stdout"),
  /** Log on the stderr of the task. */
  STDERR("stderr"),
  /** Log on the map-reduce system logs of the task. */
  SYSLOG("syslog"),
  /** The java profiler information. */
  PROFILE("profile.out"),
  /** Log the debug script's stdout */
  DEBUGOUT("debugout");

  /** On-disk file name for this log kind. */
  private final String fileName;

  private LogName(String fileName) {
    this.fileName = fileName;
  }

  @Override
  public String toString() {
    return fileName;
  }
}
// InputStream over one attempt's byte range [start, end) of a task log
// file; resolves the physical file and offsets via the log index.
static class Reader extends InputStream {
// Bytes of this attempt's segment not yet returned to the caller.
private long bytesRemaining;
// Underlying log file, positioned at the segment start by the constructor.
private FileInputStream file;
/**
 * Read a log file from start to end positions. The offsets may be negative,
 * in which case they are relative to the end of the file. For example,
 * Reader(taskid, kind, 0, -1) is the entire file and
 * Reader(taskid, kind, -4197, -1) is the last 4196 bytes.
 * @param taskid the id of the task to read the log file for
 * @param kind the kind of log to read
 * @param start the offset to read from (negative is relative to tail)
 * @param end the offset to read upto (negative is relative to tail)
 * @param isCleanup whether the attempt is cleanup attempt or not
 * @throws IOException
 */
public Reader(TaskAttemptID taskid, LogName kind,
long start, long end, boolean isCleanup) throws IOException {
// find the right log file
Map<LogName, LogFileDetail> allFilesDetails =
getAllLogsFileDetails(taskid, isCleanup);
LogFileDetail fileDetail = allFilesDetails.get(kind);
// calculate the start and stop
long size = fileDetail.length;
// Negative offsets are relative to the end of the segment (see javadoc).
if (start < 0) {
start += size + 1;
}
if (end < 0) {
end += size + 1;
}
// Clamp both offsets into [0, size] before translating them into
// physical offsets within the (possibly shared) log file.
start = Math.max(0, Math.min(start, size));
end = Math.max(0, Math.min(end, size));
start += fileDetail.start;
end += fileDetail.start;
bytesRemaining = end - start;
// Open the file as the owner of the log dir to avoid symlink attacks.
String owner = obtainLogDirOwner(taskid);
file = SecureIOUtils.openForRead(new File(fileDetail.location, kind.toString()),
owner, null);
// skip upto start
long pos = 0;
while (pos < start) {
long result = file.skip(start - pos);
if (result < 0) {
bytesRemaining = 0;
break;
}
pos += result;
}
}
// Single-byte read; returns -1 once the segment is exhausted.
@Override
public int read() throws IOException {
int result = -1;
if (bytesRemaining > 0) {
bytesRemaining -= 1;
result = file.read();
}
return result;
}
// Bulk read, capped at the remaining bytes of the segment.
@Override
public int read(byte[] buffer, int offset, int length) throws IOException {
length = (int) Math.min(length, bytesRemaining);
int bytes = file.read(buffer, offset, length);
if (bytes > 0) {
bytesRemaining -= bytes;
}
return bytes;
}
@Override
public int available() throws IOException {
return (int) Math.min(bytesRemaining, file.available());
}
@Override
public void close() throws IOException {
file.close();
}
}
// Shell used to run wrapped task commands.
private static final String bashCommand = "bash";
// Command used to cap captured output at a tail of the stream.
private static final String tailCommand = "tail";
/**
 * Get the desired maximum length of a task's logs.
 *
 * @param conf the job to look in
 * @return the number of bytes to cap the log files at
 */
public static long getTaskLogLength(JobConf conf) {
  long limitKb = conf.getLong("mapred.userlog.limit.kb", 100);
  return limitKb * 1024;
}
/**
 * Wrap a command in a shell to capture stdout and stderr to files.
 * If the tailLength is 0, the entire output will be saved.
 *
 * @param cmd the command and the arguments that should be run
 * @param stdoutFilename the filename that stdout should be saved to
 * @param stderrFilename the filename that stderr should be saved to
 * @param tailLength the length of the tail to be saved
 * @return the modified command that should be run
 */
public static List<String> captureOutAndError(List<String> cmd,
    File stdoutFilename,
    File stderrFilename,
    long tailLength
    ) throws IOException {
  // Delegate with no setup commands and setsid disabled.
  return captureOutAndError(null, cmd, stdoutFilename, stderrFilename,
      tailLength, false);
}
/**
 * Wrap a command in a shell to capture stdout and stderr to files.
 * Setup commands such as setting memory limit can be passed which
 * will be executed before exec.
 * If the tailLength is 0, the entire output will be saved.
 *
 * @param setup the setup commands for the execed process
 * @param cmd the command and the arguments that should be run
 * @param stdoutFilename the filename that stdout should be saved to
 * @param stderrFilename the filename that stderr should be saved to
 * @param tailLength the length of the tail to be saved
 * @return the modified command that should be run
 */
public static List<String> captureOutAndError(List<String> setup,
    List<String> cmd,
    File stdoutFilename,
    File stderrFilename,
    long tailLength
    ) throws IOException {
  // Delegate with setsid disabled.
  return captureOutAndError(setup, cmd, stdoutFilename, stderrFilename,
      tailLength, false);
}
/**
 * Wrap a command in a shell to capture stdout and stderr to files.
 * Setup commands such as setting memory limit can be passed which
 * will be executed before exec.
 * If the tailLength is 0, the entire output will be saved.
 * @param setup The setup commands for the execed process.
 * @param cmd The command and the arguments that should be run
 * @param stdoutFilename The filename that stdout should be saved to
 * @param stderrFilename The filename that stderr should be saved to
 * @param tailLength The length of the tail to be saved.
 * @param pidFileName unused; kept only for source compatibility
 * @deprecated pidFiles are no more used. Instead pid is exported to
 *             env variable JVM_PID.
 * @return the modified command that should be run
 */
@Deprecated
public static List<String> captureOutAndError(List<String> setup,
List<String> cmd,
File stdoutFilename,
File stderrFilename,
long tailLength,
String pidFileName
) throws IOException {
// Delegates to the non-pid variant; pidFileName is intentionally dropped.
return captureOutAndError(setup, cmd, stdoutFilename, stderrFilename,
tailLength, false, pidFileName);
}
/**
 * Wrap a command in a shell to capture stdout and stderr to files.
 * Setup commands such as setting memory limit can be passed which
 * will be executed before exec.
 * If the tailLength is 0, the entire output will be saved.
 * @param setup The setup commands for the execed process.
 * @param cmd The command and the arguments that should be run
 * @param stdoutFilename The filename that stdout should be saved to
 * @param stderrFilename The filename that stderr should be saved to
 * @param tailLength The length of the tail to be saved.
 * @param useSetsid Should setsid be used in the command or not.
 * @param pidFileName unused; kept only for source compatibility
 * @deprecated pidFiles are no more used. Instead pid is exported to
 *             env variable JVM_PID.
 * @return the modified command that should be run
 *
 */
@Deprecated
public static List<String> captureOutAndError(List<String> setup,
List<String> cmd,
File stdoutFilename,
File stderrFilename,
long tailLength,
boolean useSetsid,
String pidFileName
) throws IOException {
// Delegates to the non-pid variant; pidFileName is intentionally dropped.
return captureOutAndError(setup,cmd, stdoutFilename, stderrFilename, tailLength,
useSetsid);
}
/**
 * Wrap a command in a shell to capture stdout and stderr to files.
 * Setup commands such as setting memory limit can be passed which
 * will be executed before exec.
 * If the tailLength is 0, the entire output will be saved.
 *
 * @param setup the setup commands for the execed process
 * @param cmd the command and the arguments that should be run
 * @param stdoutFilename the filename that stdout should be saved to
 * @param stderrFilename the filename that stderr should be saved to
 * @param tailLength the length of the tail to be saved
 * @param useSetsid should setsid be used in the command or not
 * @return the modified command that should be run
 */
public static List<String> captureOutAndError(List<String> setup,
    List<String> cmd,
    File stdoutFilename,
    File stderrFilename,
    long tailLength,
    boolean useSetsid
    ) throws IOException {
  // Result is always exactly: bash -c '<merged command>'
  List<String> result = new ArrayList<String>(3);
  result.add(bashCommand);
  result.add("-c");
  String mergedCmd = buildCommandLine(setup, cmd, stdoutFilename,
      stderrFilename, tailLength, useSetsid);
  // mergedCmd is already a String; the original called toString() on it
  // redundantly.
  result.add(mergedCmd);
  return result;
}
// Builds the single shell command string that runs `cmd` with stdout and
// stderr redirected to the given files, optionally capped to the last
// `tailLength` bytes of each stream via tail.
static String buildCommandLine(List<String> setup,
List<String> cmd,
File stdoutFilename,
File stderrFilename,
long tailLength,
boolean useSetSid) throws IOException {
String stdout = FileUtil.makeShellPath(stdoutFilename);
String stderr = FileUtil.makeShellPath(stderrFilename);
StringBuilder mergedCmd = new StringBuilder();
// Export the shell's pid so children can locate their JVM's process id.
if (!Shell.WINDOWS) {
mergedCmd.append("export JVM_PID=`echo $$`\n");
}
// Run any setup commands (e.g. resource limits) before exec'ing the task.
if (setup != null) {
for (String s : setup) {
mergedCmd.append(s);
mergedCmd.append("\n");
}
}
// With a tail cap, the task must run inside a subshell so its output can
// be piped through tail; otherwise exec it directly (optionally under
// setsid so the whole process group can be signalled).
if (tailLength > 0) {
mergedCmd.append("(");
} else if (ProcessTree.isSetsidAvailable && useSetSid
&& !Shell.WINDOWS) {
mergedCmd.append("exec setsid ");
} else {
mergedCmd.append("exec ");
}
mergedCmd.append(addCommand(cmd, true));
mergedCmd.append(" < /dev/null ");
if (tailLength > 0) {
// Subshell stdout -> tail -> stdout file, preserving the task's exit
// status via $PIPESTATUS; the combined remaining stream (stderr) is
// then piped through a second tail into the stderr file.
mergedCmd.append(" | ");
mergedCmd.append(tailCommand);
mergedCmd.append(" -c ");
mergedCmd.append(tailLength);
mergedCmd.append(" >> ");
mergedCmd.append(stdout);
mergedCmd.append(" ; exit $PIPESTATUS ) 2>&1 | ");
mergedCmd.append(tailCommand);
mergedCmd.append(" -c ");
mergedCmd.append(tailLength);
mergedCmd.append(" >> ");
mergedCmd.append(stderr);
mergedCmd.append(" ; exit $PIPESTATUS");
} else {
// No cap: append streams directly to their files.
mergedCmd.append(" 1>> ");
mergedCmd.append(stdout);
mergedCmd.append(" 2>> ");
mergedCmd.append(stderr);
}
return mergedCmd.toString();
}
/**
 * Add quotes to each of the command strings and
 * return as a single string.
 *
 * @param cmd the command to be quoted
 * @param isExecutable makes a shell path of the first argument if it is
 *        the executable
 * @return the quoted string: each argument single-quoted and followed by
 *         a space (including a trailing space)
 * @throws IOException if the executable's shell path cannot be computed
 */
public static String addCommand(List<String> cmd, boolean isExecutable)
    throws IOException {
  // StringBuilder: no synchronization is needed for a method-local buffer
  // (the original used StringBuffer).
  StringBuilder command = new StringBuilder();
  for (String s : cmd) {
    command.append('\'');
    if (isExecutable) {
      // the executable name needs to be expressed as a shell path for the
      // shell to find it.
      command.append(FileUtil.makeShellPath(new File(s)));
      isExecutable = false;
    } else {
      command.append(s);
    }
    command.append('\'');
    command.append(" ");
  }
  return command.toString();
}
/**
 * Wrap a command in a shell to capture debug script's
 * stdout and stderr to debugout.
 *
 * @param cmd the command and the arguments that should be run
 * @param debugoutFilename the filename that stdout and stderr
 *        should be saved to
 * @return the modified command that should be run
 * @throws IOException if the shell paths cannot be computed
 */
public static List<String> captureDebugOut(List<String> cmd,
    File debugoutFilename
    ) throws IOException {
  String debugout = FileUtil.makeShellPath(debugoutFilename);
  List<String> result = new ArrayList<String>(3);
  result.add(bashCommand);
  result.add("-c");
  // StringBuilder: no synchronization is needed for a method-local buffer
  // (the original used StringBuffer).
  StringBuilder mergedCmd = new StringBuilder();
  mergedCmd.append("exec ");
  boolean isExecutable = true;
  for (String s : cmd) {
    if (isExecutable) {
      // the executable name needs to be expressed as a shell path for the
      // shell to find it.
      mergedCmd.append(FileUtil.makeShellPath(new File(s)));
      isExecutable = false;
    } else {
      mergedCmd.append(s);
    }
    mergedCmd.append(" ");
  }
  // Redirect both streams of the debug script into the debugout file.
  mergedCmd.append(" < /dev/null ");
  mergedCmd.append(" >");
  mergedCmd.append(debugout);
  mergedCmd.append(" 2>&1 ");
  result.add(mergedCmd.toString());
  return result;
}
// Root directory under which all per-job user logs live.
public static File getUserLogDir() {
return LOG_DIR;
}
/**
 * Get the user log directory for the job jobid.
 *
 * @param jobid string representation of the jobid
 * @return user log directory for the job
 */
public static File getJobDir(String jobid) {
  File userLogDir = getUserLogDir();
  return new File(userLogDir, jobid);
}
/**
 * Get the user log directory for the job jobid.
 * Convenience overload that delegates to the String variant.
 *
 * @param jobid the jobid object
 * @return user log directory for the job
 */
public static File getJobDir(JobID jobid) {
return getJobDir(jobid.toString());
}
} // TaskLog
| |
// ************************************************************************
// (c) 2016 GOBii Project
// Initial Version: Phil Glaser
// Create Date: 2016-03-25
// ************************************************************************
package org.gobiiproject.gobiiclient.dtorequests.instructions;
import org.apache.commons.io.FileUtils;
import org.gobiiproject.gobiiapimodel.payload.PayloadEnvelope;
import org.gobiiproject.gobiiapimodel.restresources.RestUri;
import org.gobiiproject.gobiiapimodel.types.ServiceRequestId;
import org.gobiiproject.gobiiclient.core.common.ClientContext;
import org.gobiiproject.gobiiclient.core.gobii.GobiiEnvelopeRestResource;
import org.gobiiproject.gobiiclient.core.common.Authenticator;
import org.gobiiproject.gobiiclient.core.common.TestConfiguration;
import org.gobiiproject.gobiiclient.dtorequests.Helpers.TestDtoFactory;
import org.gobiiproject.gobiiclient.dtorequests.Helpers.TestUtils;
import org.gobiiproject.gobiimodel.headerlesscontainer.LoaderFilePreviewDTO;
import org.gobiiproject.gobiimodel.types.*;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.List;
import java.util.Scanner;
/**
 * Integration tests for the loader file-preview endpoint: creating a
 * working directory on the server and previewing files copied into it.
 */
public class DtoRequestLoaderFilePreviewTest {

    @BeforeClass
    public static void setUpClass() throws Exception {
        Assert.assertTrue(Authenticator.authenticate());
    }

    @AfterClass
    public static void tearDownUpClass() throws Exception {
        Assert.assertTrue(Authenticator.deAuthenticate());
    }

    /**
     * Verifies that PUTting a LoaderFilePreviewDTO creates a working
     * directory and that a second request for the same folder also succeeds.
     */
    @Test
    public void testCreateDirectory() throws Exception {
        LoaderFilePreviewDTO loaderFilePreviewDTO = new LoaderFilePreviewDTO();
        RestUri previewTestUri = ClientContext
                .getInstance(null, false)
                .getUriFactory()
                .resourceByUriIdParam(ServiceRequestId.URL_FILE_LOAD);
        String folderName = TestDtoFactory.getFolderNameWithTimestamp("Loader File Preview Test");
        previewTestUri.setParamValue("id", folderName);
        GobiiEnvelopeRestResource<LoaderFilePreviewDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(previewTestUri);
        PayloadEnvelope<LoaderFilePreviewDTO> resultEnvelope = gobiiEnvelopeRestResource.put(LoaderFilePreviewDTO.class,
                new PayloadEnvelope<>(loaderFilePreviewDTO, GobiiProcessType.CREATE));
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        Assert.assertTrue("No directory name DTO received", resultEnvelope.getPayload().getData().size() > 0);
        LoaderFilePreviewDTO resultLoaderFilePreviewDTO = resultEnvelope.getPayload().getData().get(0);
        Assert.assertNotNull(resultLoaderFilePreviewDTO.getDirectoryName());
        // Creating the same folder a second time must also succeed.
        PayloadEnvelope<LoaderFilePreviewDTO> resultEnvelopeSecondRequest = gobiiEnvelopeRestResource.put(LoaderFilePreviewDTO.class,
                new PayloadEnvelope<>(loaderFilePreviewDTO, GobiiProcessType.CREATE));
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeSecondRequest.getHeader()));
    }

    //Fails on SYS_INT due to the fact that it physically copies files; this test
    // mechanism does not work unless the unit tests run on the same system as the
    // as the web server.
    @Ignore
    public void testGetFilePreview() throws Exception {
        //Create newFolder
        LoaderFilePreviewDTO loaderFileCreateDTO = new LoaderFilePreviewDTO();
        RestUri previewTestUriCreate = ClientContext
                .getInstance(null, false)
                .getUriFactory()
                .resourceByUriIdParam(ServiceRequestId.URL_FILE_LOAD);
        previewTestUriCreate.setParamValue("id", TestDtoFactory.getFolderNameWithTimestamp("Loader File Preview Test"));
        GobiiEnvelopeRestResource<LoaderFilePreviewDTO> gobiiEnvelopeRestResourceCreate = new GobiiEnvelopeRestResource<>(previewTestUriCreate);
        PayloadEnvelope<LoaderFilePreviewDTO> resultEnvelopeCreate = gobiiEnvelopeRestResourceCreate.put(LoaderFilePreviewDTO.class,
                new PayloadEnvelope<>(loaderFileCreateDTO, GobiiProcessType.CREATE));
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeCreate.getHeader()));
        Assert.assertTrue("No directory name DTO received", resultEnvelopeCreate.getPayload().getData().size() > 0);
        LoaderFilePreviewDTO resultLoaderFilePreviewDTOCreated = resultEnvelopeCreate.getPayload().getData().get(0);
        Assert.assertNotNull(resultLoaderFilePreviewDTOCreated.getDirectoryName());
        //get intended path for the created directory
        TestConfiguration testConfiguration = new TestConfiguration();
        String testCrop = testConfiguration.getConfigSettings().getTestExecConfig().getTestCrop();
        String destinationDirectory = testConfiguration.getConfigSettings().getProcessingPath(testCrop, GobiiFileProcessDir.RAW_USER_FILES);
        String createdFileDirectory = destinationDirectory + new File(resultLoaderFilePreviewDTOCreated.getDirectoryName()).getName();
        //copyContentsFromCreatedFolder
        File resourcesDirectory = new File("src/test/resources/datasets");
        File dst = new File(resultLoaderFilePreviewDTOCreated.getDirectoryName());
        FileUtils.copyDirectory(resourcesDirectory, dst);
        //retrieve contents from created name
        RestUri previewTestUri = ClientContext
                .getInstance(null, false)
                .getUriFactory()
                .fileLoaderPreview();
        previewTestUri.setParamValue("directoryName", dst.getName());
        previewTestUri.setParamValue("fileFormat", "txt");
        GobiiEnvelopeRestResource<LoaderFilePreviewDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(previewTestUri);
        PayloadEnvelope<LoaderFilePreviewDTO> resultEnvelope = gobiiEnvelopeRestResource.get(LoaderFilePreviewDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        Assert.assertTrue("No file preview DTO received", resultEnvelopeCreate.getPayload().getData().size() > 0);
        LoaderFilePreviewDTO resultLoaderFilePreviewDTO = resultEnvelope.getPayload().getData().get(0);
        Assert.assertNotNull(resultLoaderFilePreviewDTO.getDirectoryName());
        Assert.assertTrue(resultLoaderFilePreviewDTO.getDirectoryName().replace("C:", "").equals(createdFileDirectory.replaceAll("/", "\\\\"))); // because the getAbsolutePath function in files returns a windows format path to the file
        Assert.assertNotNull(resultLoaderFilePreviewDTO.getFileList().get(0));
        //compare results read to file
        Assert.assertTrue(checkPreviewFileMatch(resultLoaderFilePreviewDTO.getFilePreview(), resourcesDirectory, resultLoaderFilePreviewDTO.getFileList().get(0)));
        /** TEST hmp.txt format **/
        //Create newFolder
        LoaderFilePreviewDTO loaderFileCreateDTOHmp = new LoaderFilePreviewDTO();
        RestUri previewTestUriCreateHmp = ClientContext
                .getInstance(null, false)
                .getUriFactory()
                .resourceByUriIdParam(ServiceRequestId.URL_FILE_LOAD);
        previewTestUriCreateHmp.setParamValue("id", TestDtoFactory.getFolderNameWithTimestamp("Loader File Preview Test"));
        GobiiEnvelopeRestResource<LoaderFilePreviewDTO> gobiiEnvelopeRestResourceCreateHmp = new GobiiEnvelopeRestResource<>(previewTestUriCreateHmp);
        PayloadEnvelope<LoaderFilePreviewDTO> resultEnvelopeCreateHmp = gobiiEnvelopeRestResourceCreateHmp.put(LoaderFilePreviewDTO.class,
                new PayloadEnvelope<>(loaderFileCreateDTOHmp, GobiiProcessType.CREATE));
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeCreateHmp.getHeader()));
        Assert.assertTrue("No directory name DTO received", resultEnvelopeCreateHmp.getPayload().getData().size() > 0);
        LoaderFilePreviewDTO resultLoaderFilePreviewDTOCreatedHmp = resultEnvelopeCreateHmp.getPayload().getData().get(0);
        Assert.assertNotNull(resultLoaderFilePreviewDTOCreatedHmp.getDirectoryName());
        //get intended path for the created directory
        TestConfiguration testConfigurationHmp = new TestConfiguration();
        String testCropHmp = testConfigurationHmp.getConfigSettings().getTestExecConfig().getTestCrop();
        String destinationDirectoryHmp = testConfigurationHmp.getConfigSettings().getProcessingPath(testCropHmp, GobiiFileProcessDir.RAW_USER_FILES);
        String createdFileDirectoryHmp = destinationDirectoryHmp + new File(resultLoaderFilePreviewDTOCreatedHmp.getDirectoryName()).getName();
        File resourceDirectoryHmp = new File("src/test/resources/hmp_dataset");
        File dstHmp = new File(resultLoaderFilePreviewDTOCreatedHmp.getDirectoryName());
        FileUtils.copyDirectory(resourceDirectoryHmp, dstHmp);
        RestUri previewTestUriHmp = ClientContext
                .getInstance(null, false)
                .getUriFactory()
                .fileLoaderPreview();
        previewTestUriHmp.setParamValue("directoryName", dstHmp.getName());
        previewTestUriHmp.setParamValue("fileFormat", "hmp.txt");
        GobiiEnvelopeRestResource<LoaderFilePreviewDTO> gobiiEnvelopeRestResourceHmp = new GobiiEnvelopeRestResource<>(previewTestUriHmp);
        PayloadEnvelope<LoaderFilePreviewDTO> resultEnvelopeHmp = gobiiEnvelopeRestResourceHmp.get(LoaderFilePreviewDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeHmp.getHeader()));
        Assert.assertTrue("No file preview DTO received", resultEnvelopeCreateHmp.getPayload().getData().size() > 0);
        LoaderFilePreviewDTO resultLoaderFilePreviewDTOhmp = resultEnvelopeHmp.getPayload().getData().get(0);
        Assert.assertNotNull(resultLoaderFilePreviewDTOhmp.getDirectoryName());
        Assert.assertTrue(resultLoaderFilePreviewDTOhmp.getDirectoryName().replace("C:", "").equals(createdFileDirectoryHmp.replaceAll("/", "\\\\"))); // because the getAbsolutePath function in files returns a windows format path to the file
        Assert.assertNotNull(resultLoaderFilePreviewDTOhmp.getFileList().get(0));
        Assert.assertTrue(checkPreviewFileMatch(resultLoaderFilePreviewDTOhmp.getFilePreview(), resourceDirectoryHmp, resultLoaderFilePreviewDTOhmp.getFileList().get(0)));
    }

    /**
     * Finds the file in resourcesDirectory whose simple name matches that of
     * filePath, or null if there is no such file.
     *
     * @param filePath path whose file name is matched
     * @param resourcesDirectory directory whose children are searched
     * @return the matching file, or null if none exists
     */
    public static File getFileOfType(String filePath, File resourcesDirectory) {
        File newFile = new File(filePath);
        File[] candidates = resourcesDirectory.listFiles();
        if (candidates == null) {
            // Not a directory (or an I/O error); the original NPE'd here.
            return null;
        }
        for (File f : candidates) {
            if (f.getName().equals(newFile.getName())) {
                return f;
            }
        }
        return null;
    }

    /**
     * Compares up to the first 50 lines (50 tab-separated columns each) of
     * the server-provided preview with the matching source file on disk.
     *
     * @param previewFileItems preview rows as returned by the server
     * @param resourcesDirectory directory holding the original source files
     * @param filePath path whose file name identifies the source file
     * @return true if every compared cell matches, false otherwise
     * @throws FileNotFoundException if no matching source file exists
     */
    public static boolean checkPreviewFileMatch(List<List<String>> previewFileItems, File resourcesDirectory, String filePath) throws FileNotFoundException {
        File sourceFile = getFileOfType(filePath, resourcesDirectory);
        if (sourceFile == null) {
            // The original passed null into new Scanner(File) and NPE'd.
            throw new FileNotFoundException("No file matching " + filePath
                    + " found in " + resourcesDirectory);
        }
        // The original also opened (and leaked) a Scanner on System.in here
        // that was never used; it has been removed.
        Scanner input = new Scanner(sourceFile);
        try {
            int lineCtr = 0; //count lines read
            // Read the first 50 lines only; stop early on shorter files (the
            // original called nextLine() unconditionally and threw
            // NoSuchElementException on files with fewer than 50 lines).
            while (lineCtr < 50 && input.hasNextLine()) {
                String line = input.nextLine();
                if (lineCtr >= previewFileItems.size()) {
                    return false; // preview has fewer lines than the file
                }
                int ctr = 0; //count words compared
                List<String> previewRow = previewFileItems.get(lineCtr);
                for (String s : line.split("\t")) {
                    if (ctr == 50) break;
                    // Bounds-check the preview row before comparing.
                    if (ctr >= previewRow.size() || !previewRow.get(ctr).equals(s)) {
                        return false;
                    }
                    ctr++;
                }
                lineCtr++;
            }
            return true;
        } finally {
            // Close the file scanner even on early return or exception.
            input.close();
        }
    }
}
| |
/*
This file is part of Subsonic.
Subsonic is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Subsonic is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Subsonic. If not, see <http://www.gnu.org/licenses/>.
Copyright 2009 (C) Sindre Mehus
*/
package net.sourceforge.kalimbaradio.androidapp.util;
import android.app.Notification;
import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.os.Build;
import android.os.Handler;
import android.support.v4.app.NotificationCompat;
import android.view.KeyEvent;
import android.widget.RemoteViews;
import net.sourceforge.kalimbaradio.androidapp.activity.DownloadActivity;
import net.sourceforge.kalimbaradio.androidapp.domain.MusicDirectory;
import net.sourceforge.kalimbaradio.androidapp.service.DownloadServiceImpl;
import net.sourceforge.kalimbaradio.androidapp.R;
import net.sourceforge.kalimbaradio.androidapp.provider.SubsonicAppWidgetProvider;
/**
* @author Sindre Mehus
* @version $Id: NotificationUtil.java 3568 2013-11-05 16:48:46Z sindre_mehus $
*/
public final class NotificationUtil {
private static final Logger LOG = new Logger(NotificationUtil.class);
// Utility class; private constructor prevents instantiation.
private NotificationUtil() {
}
/**
 * Updates (or hides) the "now playing" notification for the given song
 * and refreshes the app widget to match.
 */
public static void updateNotification(final Context context, final DownloadServiceImpl downloadService,
    Handler handler, MusicDirectory.Entry song, boolean playing) {
  // On older platforms, show a notification without buttons.
  boolean simple = useSimpleNotification();
  if (simple) {
    updateSimpleNotification(context, downloadService, handler, song, playing);
  } else {
    updateCustomNotification(context, downloadService, handler, song, playing);
  }
  // Update widget
  SubsonicAppWidgetProvider.getInstance().notifyChange(context, downloadService, playing);
}
/**
 * Simple (button-less) variant: shows the notification while a song is
 * playing and hides it otherwise.
 */
private static void updateSimpleNotification(Context context, final DownloadServiceImpl downloadService, Handler handler,
    MusicDirectory.Entry song, boolean playing) {
  // Guard clause: nothing to show when stopped or no song is set.
  if (song == null || !playing) {
    hideNotification(downloadService, handler);
    return;
  }
  final Notification notification = createSimpleNotification(context, song);
  // Send the notification and put the service in the foreground.
  handler.post(new Runnable() {
    @Override
    public void run() {
      downloadService.startForeground(Constants.NOTIFICATION_ID_PLAYING, notification);
    }
  });
}
private static void updateCustomNotification(Context context, final DownloadServiceImpl downloadService,
Handler handler, MusicDirectory.Entry song, boolean playing) {
if (song == null) {
hideNotification(downloadService, handler);
} else if (!isNotificationHiddenByUser(context)) {
final Notification notification = createCustomNotification(context, song, playing);
// Send the notification and put the service in the foreground.
handler.post(new Runnable() {
@Override
public void run() {
downloadService.startForeground(Constants.NOTIFICATION_ID_PLAYING, notification);
}
});
}
}
public static void hideNotification(final DownloadServiceImpl downloadService, Handler handler) {
// Remove notification and remove the service from the foreground
handler.post(new Runnable() {
@Override
public void run() {
downloadService.stopForeground(true);
}
});
}
public static void setNotificationHiddenByUser(Context context, boolean hiddenByUser) {
SharedPreferences preferences = Util.getPreferences(context);
SharedPreferences.Editor editor = preferences.edit();
editor.putBoolean(Constants.PREFERENCES_KEY_HIDE_NOTIFICATION_BY_USER, hiddenByUser);
editor.commit();
}
private static boolean isNotificationHiddenByUser(Context context) {
SharedPreferences preferences = Util.getPreferences(context);
return preferences.getBoolean(Constants.PREFERENCES_KEY_HIDE_NOTIFICATION_BY_USER, false);
}
private static Notification createSimpleNotification(Context context, MusicDirectory.Entry song) {
Bitmap albumArt;
try {
albumArt = FileUtil.getAlbumArtBitmap(context, song, (int) Util.convertDpToPixel(64.0F, context));
if (albumArt == null) {
albumArt = Util.decodeBitmap(context, R.drawable.unknown_album);
}
} catch (Exception x) {
LOG.warn("Failed to get notification cover art", x);
albumArt = Util.decodeBitmap(context, R.drawable.unknown_album);
}
Intent notificationIntent = new Intent(context, DownloadActivity.class);
return new NotificationCompat.Builder(context).setOngoing(true)
.setSmallIcon(R.drawable.stat_notify_playing)
.setContentTitle(song.getTitle())
.setContentText(song.getArtist())
.setContentIntent(PendingIntent.getActivity(context, 0, notificationIntent, 0))
.setLargeIcon(albumArt)
.build();
}
private static Notification createCustomNotification(Context context, MusicDirectory.Entry song, boolean playing) {
Bitmap albumArt;
try {
albumArt = FileUtil.getUnscaledAlbumArtBitmap(context, song);
if (albumArt == null) {
albumArt = Util.decodeBitmap(context, R.drawable.unknown_album_large);
}
} catch (Exception x) {
LOG.warn("Failed to get notification cover art", x);
albumArt = Util.decodeBitmap(context, R.drawable.unknown_album_large);
}
RemoteViews contentView = new RemoteViews(context.getPackageName(), R.layout.notification);
contentView.setTextViewText(R.id.notification_title, song.getTitle());
contentView.setTextViewText(R.id.notification_artist, song.getArtist());
contentView.setImageViewBitmap(R.id.notification_image, albumArt);
contentView.setImageViewResource(R.id.notification_playpause, playing ? R.drawable.media_pause : R.drawable.media_start);
Intent intent = new Intent("1");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE));
contentView.setOnClickPendingIntent(R.id.notification_playpause, PendingIntent.getService(context, 0, intent, 0));
intent = new Intent("2");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_NEXT));
contentView.setOnClickPendingIntent(R.id.notification_next, PendingIntent.getService(context, 0, intent, 0));
intent = new Intent("4");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Constants.INTENT_EXTRA_NAME_HIDE_NOTIFICATION, true);
contentView.setOnClickPendingIntent(R.id.notification_close, PendingIntent.getService(context, 0, intent, 0));
Intent notificationIntent = new Intent(context, DownloadActivity.class);
Notification notification = new NotificationCompat.Builder(context)
.setOngoing(true)
.setSmallIcon(R.drawable.stat_notify_playing)
.setContent(contentView)
.setContentIntent(PendingIntent.getActivity(context, 0, notificationIntent, 0))
.build();
if (Build.VERSION.SDK_INT >= 16) {
notification.bigContentView = createBigContentView(context, song, albumArt, playing);
}
return notification;
}
private static RemoteViews createBigContentView(Context context, MusicDirectory.Entry song, Bitmap albumArt, boolean playing) {
RemoteViews contentView = new RemoteViews(context.getPackageName(), R.layout.notification_expanded);
contentView.setTextViewText(R.id.notification_title, song.getTitle());
contentView.setTextViewText(R.id.notification_artist, song.getArtist());
contentView.setTextViewText(R.id.notification_album, song.getAlbum());
contentView.setImageViewBitmap(R.id.notification_image, albumArt);
contentView.setImageViewResource(R.id.notification_playpause, playing ? R.drawable.media_pause : R.drawable.media_start);
Intent intent = new Intent("1");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE));
contentView.setOnClickPendingIntent(R.id.notification_playpause, PendingIntent.getService(context, 0, intent, 0));
intent = new Intent("2");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_NEXT));
contentView.setOnClickPendingIntent(R.id.notification_next, PendingIntent.getService(context, 0, intent, 0));
intent = new Intent("3");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_MEDIA_PREVIOUS));
contentView.setOnClickPendingIntent(R.id.notification_prev, PendingIntent.getService(context, 0, intent, 0));
intent = new Intent("4");
intent.setComponent(new ComponentName(context, DownloadServiceImpl.class));
intent.putExtra(Constants.INTENT_EXTRA_NAME_HIDE_NOTIFICATION, true);
contentView.setOnClickPendingIntent(R.id.notification_close, PendingIntent.getService(context, 0, intent, 0));
return contentView;
}
private static boolean useSimpleNotification() {
return Build.VERSION.SDK_INT < 11;
}
}
| |
package it.gemmed.resource;
import java.sql.Date;
import java.sql.Time;
/**
* Rappresentazione di una partita singola con dati inerenti alla data
* di svolgimento, al luogo di svolgimento, al torneo a cui appartiene
* ed al tipo di campo su cui si gioca. Contiene anche le informazioni
* dei giocatori che la disputano, dei risultati dei vari set e del
* vincitore finale.
*
* @author GEMMED
*/
/**
 * A single match: the date, time and venue where it is played, the tournament
 * it belongs to and the type of court.  It also holds the two opponents, the
 * results of up to three sets and the final winner.  A match may also be
 * decided by forfeit.
 *
 * @author GEMMED
 */
public class PartitaSingola {

    private final int numeroPartita;    // match number within the tournament (immutable)
    private Date data;                  // date on which the match is played
    private Time ora;                   // starting time
    private int campo;                  // court number
    private String circolo;             // hosting club
    private String sfidanteA;           // name of the first opponent
    private String sfidanteB;           // name of the second opponent
    private boolean vittoriaSfidanteA;  // true if opponent A won the match
    private Risultato set1;             // result of the first set
    private Risultato set2;             // result of the second set
    private Risultato set3;             // result of the (optional) third set
    private final int torneo;           // id of the owning tournament (immutable)
    private boolean forfait;            // true if the match was decided by forfeit
    private String nomeTorneo;          // display name of the owning tournament

    /**
     * Creates a match with no data, within a tournament.
     *
     * @param numeroPartita match number within the tournament
     * @param torneo        id of the tournament the match belongs to
     */
    public PartitaSingola(int numeroPartita, int torneo) {
        this.numeroPartita = numeroPartita;
        this.torneo = torneo;
    }

    /**
     * Creates a match within a tournament with only the opponents set.
     *
     * @param numeroPartita match number within the tournament
     * @param torneo        id of the tournament the match belongs to
     * @param sfidanteA     name of the first opponent
     * @param sfidanteB     name of the second opponent
     */
    public PartitaSingola(int numeroPartita, int torneo, String sfidanteA, String sfidanteB) {
        this.numeroPartita = numeroPartita;
        this.torneo = torneo;
        this.sfidanteA = sfidanteA;
        this.sfidanteB = sfidanteB;
    }

    /** @return the date on which the match is played */
    public Date getData() {
        return data;
    }

    /** @param data the date to set */
    public void setData(Date data) {
        this.data = data;
    }

    /** @return the starting time */
    public Time getOra() {
        return ora;
    }

    /** @param ora the starting time to set */
    public void setOra(Time ora) {
        this.ora = ora;
    }

    /** @return the court number */
    public int getCampo() {
        return campo;
    }

    /** @param campo the court number to set */
    public void setCampo(int campo) {
        this.campo = campo;
    }

    /** @return the hosting club */
    public String getCircolo() {
        return circolo;
    }

    /** @param circolo the hosting club to set */
    public void setCircolo(String circolo) {
        this.circolo = circolo;
    }

    /** @return the name of the first opponent */
    public String getSfidanteA() {
        return sfidanteA;
    }

    /** @param sfidanteA the first opponent to set */
    public void setSfidanteA(String sfidanteA) {
        this.sfidanteA = sfidanteA;
    }

    /** @return the name of the second opponent */
    public String getSfidanteB() {
        return sfidanteB;
    }

    /** @param sfidanteB the second opponent to set */
    public void setSfidanteB(String sfidanteB) {
        this.sfidanteB = sfidanteB;
    }

    /**
     * Alias of {@link #getVittoriaSfidanteA()}, kept for backward
     * compatibility with callers that use the lower-case accessor name.
     *
     * @return true if opponent A won the match
     */
    public boolean isvittoriaSfidanteA() {
        return getVittoriaSfidanteA();
    }

    /** @return true if opponent A won the match */
    public boolean getVittoriaSfidanteA() {
        return vittoriaSfidanteA;
    }

    /**
     * Alias of {@link #setVittoriaSfidanteA(boolean)}, kept for backward
     * compatibility with callers that use the lower-case mutator name.
     *
     * @param vittoriaSfidanteA whether opponent A won the match
     */
    public void setvittoriaSfidanteA(boolean vittoriaSfidanteA) {
        setVittoriaSfidanteA(vittoriaSfidanteA);
    }

    /** @return the result of the first set */
    public Risultato getSet1() {
        return set1;
    }

    /** @param set1 the first-set result to set */
    public void setSet1(Risultato set1) {
        this.set1 = set1;
    }

    /** @return the result of the second set */
    public Risultato getSet2() {
        return set2;
    }

    /** @param set2 the second-set result to set */
    public void setSet2(Risultato set2) {
        this.set2 = set2;
    }

    /** @return the result of the third set */
    public Risultato getSet3() {
        return set3;
    }

    /** @param set3 the third-set result to set */
    public void setSet3(Risultato set3) {
        this.set3 = set3;
    }

    /** @return the match number within the tournament */
    public int getNumeroPartita() {
        return numeroPartita;
    }

    /** @return the id of the owning tournament */
    public int getTorneo() {
        return torneo;
    }

    /** @param vittoriaSfidanteA whether opponent A won the match */
    public void setVittoriaSfidanteA(boolean vittoriaSfidanteA) {
        this.vittoriaSfidanteA = vittoriaSfidanteA;
    }

    /** @return true if the match was decided by forfeit */
    public boolean getForfait() {
        return forfait;
    }

    // Fixed javadoc: it previously documented the wrong parameter name.
    /** @param forfait whether the match was decided by forfeit */
    public void setForfait(boolean forfait) {
        this.forfait = forfait;
    }

    /** @return the display name of the owning tournament */
    public String getNomeTorneo() {
        return nomeTorneo;
    }

    /** @param nomeTorneo the tournament name to set */
    public void setNomeTorneo(String nomeTorneo) {
        this.nomeTorneo = nomeTorneo;
    }
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.app.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.app.ApplicationEvent;
import org.onosproject.app.ApplicationListener;
import org.onosproject.app.ApplicationState;
import org.onosproject.app.ApplicationStoreAdapter;
import org.onosproject.common.app.ApplicationArchive;
import org.onosproject.common.event.impl.TestEventDispatcher;
import org.onosproject.core.Application;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.DefaultApplication;
import org.onosproject.core.DefaultApplicationId;
import java.io.InputStream;
import java.net.URI;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import static org.junit.Assert.*;
import static org.onosproject.app.ApplicationEvent.Type.*;
import static org.onosproject.app.ApplicationState.ACTIVE;
import static org.onosproject.app.ApplicationState.INSTALLED;
import static org.onosproject.app.DefaultApplicationDescriptionTest.*;
import static org.onosproject.net.NetTestTools.injectEventDispatcher;
/**
* Test of the application manager implementation.
*/
public class ApplicationManagerTest {

    public static final DefaultApplicationId APP_ID = new DefaultApplicationId(1, APP_NAME);

    // Manager under test; wired with stub store and features service in setUp().
    private ApplicationManager mgr = new ApplicationManager();
    private ApplicationListener listener = new TestListener();

    // Flipped by the deactivate hook registered in install(); verified by the
    // activate()/deactivate() tests.
    private boolean deactivated = false;

    @Before
    public void setUp() {
        injectEventDispatcher(mgr, new TestEventDispatcher());
        mgr.featuresService = new TestFeaturesService();
        mgr.store = new TestStore();
        mgr.activate();
        mgr.addListener(listener);
    }

    @After
    public void tearDown() {
        mgr.removeListener(listener);
        mgr.deactivate();
    }

    // Asserts that the given application carries the descriptor constants
    // shared with DefaultApplicationDescriptionTest (APP_NAME, VER, ...).
    private void validate(Application app) {
        assertEquals("incorrect name", APP_NAME, app.id().name());
        assertEquals("incorrect version", VER, app.version());
        assertEquals("incorrect origin", ORIGIN, app.origin());
        assertEquals("incorrect description", DESC, app.description());
        assertEquals("incorrect features URI", FURL, app.featuresRepo().get());
        assertEquals("incorrect features", FEATURES, app.features());
    }

    // Installs the bundled app.zip archive and verifies the resulting app,
    // state and registered deactivate hook.  Also reused as a fixture by the
    // uninstall/activate tests below.
    @Test
    public void install() {
        InputStream stream = ApplicationArchive.class.getResourceAsStream("app.zip");
        Application app = mgr.install(stream);
        validate(app);
        assertEquals("incorrect features URI used", app.featuresRepo().get(),
                     ((TestFeaturesService) mgr.featuresService).uri);
        assertEquals("incorrect app count", 1, mgr.getApplications().size());
        assertEquals("incorrect app", app, mgr.getApplication(APP_ID));
        assertEquals("incorrect app state", INSTALLED, mgr.getState(APP_ID));
        mgr.registerDeactivateHook(app.id(), this::deactivateHook);
    }

    // Hook passed to registerDeactivateHook(); records that it ran.
    private void deactivateHook() {
        deactivated = true;
    }

    @Test
    public void uninstall() {
        install();
        mgr.uninstall(APP_ID);
        assertEquals("incorrect app count", 0, mgr.getApplications().size());
    }

    @Test
    public void activate() {
        install();
        mgr.activate(APP_ID);
        assertEquals("incorrect app state", ACTIVE, mgr.getState(APP_ID));
        // Activation alone must not trigger the pre-deactivate hook.
        assertFalse("preDeactivate hook wrongly called", deactivated);
    }

    @Test
    public void deactivate() {
        activate();
        mgr.deactivate(APP_ID);
        assertEquals("incorrect app state", INSTALLED, mgr.getState(APP_ID));
        assertTrue("preDeactivate hook not called", deactivated);
    }

    // Listener that records the last event delivered by the manager.
    private class TestListener implements ApplicationListener {
        private ApplicationEvent event;

        @Override
        public void event(ApplicationEvent event) {
            this.event = event;
        }
    }

    // In-memory single-application store stub.  Ignores the archive stream and
    // always materializes the fixed descriptor constants, mirroring app.zip.
    private class TestStore extends ApplicationStoreAdapter {
        private Application app;
        private ApplicationState state;

        @Override
        public Application create(InputStream appDescStream) {
            app = new DefaultApplication(APP_ID, VER, TITLE, DESC, ORIGIN, CATEGORY,
                                         URL, README, ICON, ROLE, PERMS,
                                         Optional.of(FURL), FEATURES, ImmutableList.of());
            state = INSTALLED;
            delegate.notify(new ApplicationEvent(APP_INSTALLED, app));
            return app;
        }

        @Override
        public Set<Application> getApplications() {
            return app != null ? ImmutableSet.of(app) : ImmutableSet.of();
        }

        @Override
        public Application getApplication(ApplicationId appId) {
            return app;
        }

        @Override
        public void remove(ApplicationId appId) {
            delegate.notify(new ApplicationEvent(APP_UNINSTALLED, app));
            app = null;
            state = null;
        }

        @Override
        public ApplicationState getState(ApplicationId appId) {
            return state;
        }

        @Override
        public void activate(ApplicationId appId) {
            state = ApplicationState.ACTIVE;
            delegate.notify(new ApplicationEvent(APP_ACTIVATED, app));
        }

        @Override
        public void deactivate(ApplicationId appId) {
            state = INSTALLED;
            delegate.notify(new ApplicationEvent(APP_DEACTIVATED, app));
        }

        @Override
        public ApplicationId getId(String name) {
            return new DefaultApplicationId(0, name);
        }
    }

    // Features-service stub that records the repository URI and the set of
    // installed features so install() can assert against them.
    private class TestFeaturesService extends FeaturesServiceAdapter {
        private URI uri;
        private Set<String> features = new HashSet<>();

        @Override
        public void addRepository(URI uri) throws Exception {
            this.uri = uri;
        }

        @Override
        public void removeRepository(URI uri) throws Exception {
            this.uri = null;
        }

        @Override
        public void installFeature(String name) throws Exception {
            features.add(name);
        }

        @Override
        public void uninstallFeature(String name) throws Exception {
            features.remove(name);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.iterative.rule;
import com.facebook.presto.Session;
import com.facebook.presto.matching.Capture;
import com.facebook.presto.matching.Captures;
import com.facebook.presto.matching.Pattern;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.iterative.Lookup;
import com.facebook.presto.sql.planner.iterative.Rule;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.Assignments;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.tree.CoalesceExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.NullLiteral;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.facebook.presto.SystemSessionProperties.shouldPushAggregationThroughJoin;
import static com.facebook.presto.matching.Capture.newCapture;
import static com.facebook.presto.sql.planner.ExpressionSymbolInliner.inlineSymbols;
import static com.facebook.presto.sql.planner.optimizations.DistinctOutputQueryUtil.isDistinct;
import static com.facebook.presto.sql.planner.plan.Patterns.aggregation;
import static com.facebook.presto.sql.planner.plan.Patterns.join;
import static com.facebook.presto.sql.planner.plan.Patterns.source;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
/**
* This optimizer pushes aggregations below outer joins when: the aggregation
* is on top of the outer join, it groups by all columns in the outer table, and
* the outer rows are guaranteed to be distinct.
* <p>
* When the aggregation is pushed down, we still need to perform aggregations
* on the null values that come out of the absent values in an outer
* join. We add a cross join with a row of aggregations on null literals,
* and coalesce the aggregation that results from the left outer join with
* the result of the aggregation over nulls.
* <p>
* Example:
* <pre>
* - Filter ("nationkey" > "avg")
* - Aggregate(Group by: all columns from the left table, aggregation:
* avg("n2.nationkey"))
* - LeftJoin("regionkey" = "regionkey")
* - AssignUniqueId (nation)
* - Tablescan (nation)
* - Tablescan (nation)
* </pre>
* </p>
* Is rewritten to:
* <pre>
* - Filter ("nationkey" > "avg")
* - project(regionkey, coalesce("avg", "avg_over_null")
* - CrossJoin
* - LeftJoin("regionkey" = "regionkey")
* - AssignUniqueId (nation)
* - Tablescan (nation)
* - Aggregate(Group by: regionkey, aggregation:
* avg(nationkey))
* - Tablescan (nation)
* - Aggregate
* avg(null_literal)
* - Values (null_literal)
* </pre>
*/
public class PushAggregationThroughOuterJoin
        implements Rule<AggregationNode>
{
    private static final Capture<JoinNode> JOIN = newCapture();

    // Matches an aggregation whose immediate source is a join.
    private static final Pattern<AggregationNode> PATTERN = aggregation()
            .with(source().matching(join().capturedAs(JOIN)));

    @Override
    public Pattern<AggregationNode> getPattern()
    {
        return PATTERN;
    }

    @Override
    public boolean isEnabled(Session session)
    {
        return shouldPushAggregationThroughJoin(session);
    }

    @Override
    public Result apply(AggregationNode aggregation, Captures captures, Context context)
    {
        JoinNode join = captures.get(JOIN);

        // The rewrite only applies to filter-free LEFT/RIGHT outer joins where the
        // aggregation groups on exactly the outer table's columns and the outer
        // rows are known to be distinct, so pushing the aggregation below the
        // join cannot change cardinality.
        if (join.getFilter().isPresent()
                || !(join.getType() == JoinNode.Type.LEFT || join.getType() == JoinNode.Type.RIGHT)
                || !groupsOnAllOuterTableColumns(aggregation, context.getLookup().resolve(getOuterTable(join)))
                || !isDistinct(context.getLookup().resolve(getOuterTable(join)), context.getLookup()::resolve)) {
            return Result.empty();
        }

        // Group the pushed-down aggregation by the inner side's equi-join keys.
        List<Symbol> groupingKeys = join.getCriteria().stream()
                .map(join.getType() == JoinNode.Type.RIGHT ? JoinNode.EquiJoinClause::getLeft : JoinNode.EquiJoinClause::getRight)
                .collect(toImmutableList());
        AggregationNode rewrittenAggregation = new AggregationNode(
                aggregation.getId(),
                getInnerTable(join),
                aggregation.getAggregations(),
                ImmutableList.of(groupingKeys),
                aggregation.getStep(),
                aggregation.getHashSymbol(),
                aggregation.getGroupIdSymbol());

        // Rebuild the join with the pushed-down aggregation replacing the inner side.
        JoinNode rewrittenJoin;
        if (join.getType() == JoinNode.Type.LEFT) {
            rewrittenJoin = new JoinNode(
                    join.getId(),
                    join.getType(),
                    join.getLeft(),
                    rewrittenAggregation,
                    join.getCriteria(),
                    ImmutableList.<Symbol>builder()
                            .addAll(join.getLeft().getOutputSymbols())
                            .addAll(rewrittenAggregation.getAggregations().keySet())
                            .build(),
                    join.getFilter(),
                    join.getLeftHashSymbol(),
                    join.getRightHashSymbol(),
                    join.getDistributionType());
        }
        else {
            rewrittenJoin = new JoinNode(
                    join.getId(),
                    join.getType(),
                    rewrittenAggregation,
                    join.getRight(),
                    join.getCriteria(),
                    ImmutableList.<Symbol>builder()
                            .addAll(rewrittenAggregation.getAggregations().keySet())
                            .addAll(join.getRight().getOutputSymbols())
                            .build(),
                    join.getFilter(),
                    join.getLeftHashSymbol(),
                    join.getRightHashSymbol(),
                    join.getDistributionType());
        }

        return Result.ofPlanNode(coalesceWithNullAggregation(rewrittenAggregation, rewrittenJoin, context.getSymbolAllocator(), context.getIdAllocator(), context.getLookup()));
    }

    /**
     * Returns the nullable (inner) side of a LEFT or RIGHT outer join.
     */
    private static PlanNode getInnerTable(JoinNode join)
    {
        checkState(join.getType() == JoinNode.Type.LEFT || join.getType() == JoinNode.Type.RIGHT, "expected LEFT or RIGHT JOIN");
        // Enum constants are singletons; compare with == for consistency with apply().
        return join.getType() == JoinNode.Type.LEFT ? join.getRight() : join.getLeft();
    }

    /**
     * Returns the row-preserving (outer) side of a LEFT or RIGHT outer join.
     */
    private static PlanNode getOuterTable(JoinNode join)
    {
        checkState(join.getType() == JoinNode.Type.LEFT || join.getType() == JoinNode.Type.RIGHT, "expected LEFT or RIGHT JOIN");
        return join.getType() == JoinNode.Type.LEFT ? join.getLeft() : join.getRight();
    }

    private static boolean groupsOnAllOuterTableColumns(AggregationNode node, PlanNode outerTable)
    {
        // Set comparison: grouping keys must be exactly the outer table's outputs,
        // regardless of order.
        return new HashSet<>(node.getGroupingKeys()).equals(new HashSet<>(outerTable.getOutputSymbols()));
    }

    // When the aggregation is done after the join, there will be a null value that gets aggregated over
    // where rows did not exist in the inner table. For some aggregate functions, such as count, the result
    // of an aggregation over a single null row is one or zero rather than null. In order to ensure correct results,
    // we add a coalesce function with the output of the new outer join and the aggregation performed over a single
    // null row.
    private PlanNode coalesceWithNullAggregation(AggregationNode aggregationNode, PlanNode outerJoin, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, Lookup lookup)
    {
        // Create an aggregation node over a row of nulls.
        MappedAggregationInfo aggregationOverNullInfo = createAggregationOverNull(aggregationNode, symbolAllocator, idAllocator, lookup);

        AggregationNode aggregationOverNull = aggregationOverNullInfo.getAggregation();
        Map<Symbol, Symbol> sourceAggregationToOverNullMapping = aggregationOverNullInfo.getSymbolMapping();

        // Do a cross join with the aggregation over null
        JoinNode crossJoin = new JoinNode(
                idAllocator.getNextId(),
                JoinNode.Type.INNER,
                outerJoin,
                aggregationOverNull,
                ImmutableList.of(),
                ImmutableList.<Symbol>builder()
                        .addAll(outerJoin.getOutputSymbols())
                        .addAll(aggregationOverNull.getOutputSymbols())
                        .build(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty(),
                Optional.empty());

        // Add coalesce expressions for all aggregation functions
        Assignments.Builder assignmentsBuilder = Assignments.builder();
        for (Symbol symbol : outerJoin.getOutputSymbols()) {
            if (aggregationNode.getAggregations().containsKey(symbol)) {
                assignmentsBuilder.put(symbol, new CoalesceExpression(symbol.toSymbolReference(), sourceAggregationToOverNullMapping.get(symbol).toSymbolReference()));
            }
            else {
                // Non-aggregation outputs pass through unchanged.
                assignmentsBuilder.put(symbol, symbol.toSymbolReference());
            }
        }
        return new ProjectNode(idAllocator.getNextId(), crossJoin, assignmentsBuilder.build());
    }

    /**
     * Builds an aggregation over a single all-null row that mirrors the
     * aggregations in {@code referenceAggregation}, returning both the node
     * and the mapping from each original aggregation symbol to its over-null
     * counterpart.
     */
    private MappedAggregationInfo createAggregationOverNull(AggregationNode referenceAggregation, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, Lookup lookup)
    {
        // Create a values node that consists of a single row of nulls.
        // Map the output symbols from the referenceAggregation's source
        // to symbol references for the new values node.
        NullLiteral nullLiteral = new NullLiteral();
        ImmutableList.Builder<Symbol> nullSymbols = ImmutableList.builder();
        ImmutableList.Builder<Expression> nullLiterals = ImmutableList.builder();
        ImmutableMap.Builder<Symbol, SymbolReference> sourcesSymbolMappingBuilder = ImmutableMap.builder();
        for (Symbol sourceSymbol : lookup.resolve(referenceAggregation.getSource()).getOutputSymbols()) {
            nullLiterals.add(nullLiteral);
            Symbol nullSymbol = symbolAllocator.newSymbol(nullLiteral, symbolAllocator.getTypes().get(sourceSymbol));
            nullSymbols.add(nullSymbol);
            sourcesSymbolMappingBuilder.put(sourceSymbol, nullSymbol.toSymbolReference());
        }
        ValuesNode nullRow = new ValuesNode(
                idAllocator.getNextId(),
                nullSymbols.build(),
                ImmutableList.of(nullLiterals.build()));
        Map<Symbol, SymbolReference> sourcesSymbolMapping = sourcesSymbolMappingBuilder.build();

        // For each aggregation function in the reference node, create a corresponding aggregation function
        // that points to the nullRow. Map the symbols from the aggregations in referenceAggregation to the
        // symbols in these new aggregations.
        ImmutableMap.Builder<Symbol, Symbol> aggregationsSymbolMappingBuilder = ImmutableMap.builder();
        ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregationsOverNullBuilder = ImmutableMap.builder();
        for (Map.Entry<Symbol, AggregationNode.Aggregation> entry : referenceAggregation.getAggregations().entrySet()) {
            Symbol aggregationSymbol = entry.getKey();
            AggregationNode.Aggregation aggregation = entry.getValue();
            AggregationNode.Aggregation overNullAggregation = new AggregationNode.Aggregation(
                    (FunctionCall) inlineSymbols(sourcesSymbolMapping, aggregation.getCall()),
                    aggregation.getSignature(),
                    aggregation.getMask().map(x -> Symbol.from(sourcesSymbolMapping.get(x))));
            Symbol overNullSymbol = symbolAllocator.newSymbol(overNullAggregation.getCall(), symbolAllocator.getTypes().get(aggregationSymbol));
            aggregationsOverNullBuilder.put(overNullSymbol, overNullAggregation);
            aggregationsSymbolMappingBuilder.put(aggregationSymbol, overNullSymbol);
        }
        Map<Symbol, Symbol> aggregationsSymbolMapping = aggregationsSymbolMappingBuilder.build();

        // create an aggregation node whose source is the null row.
        AggregationNode aggregationOverNullRow = new AggregationNode(
                idAllocator.getNextId(),
                nullRow,
                aggregationsOverNullBuilder.build(),
                ImmutableList.of(ImmutableList.of()),
                AggregationNode.Step.SINGLE,
                Optional.empty(),
                Optional.empty());

        return new MappedAggregationInfo(aggregationOverNullRow, aggregationsSymbolMapping);
    }

    /**
     * Pairs an over-null aggregation node with the mapping from the original
     * aggregation symbols to the over-null symbols.
     */
    private static class MappedAggregationInfo
    {
        private final AggregationNode aggregationNode;
        private final Map<Symbol, Symbol> symbolMapping;

        public MappedAggregationInfo(AggregationNode aggregationNode, Map<Symbol, Symbol> symbolMapping)
        {
            this.aggregationNode = aggregationNode;
            this.symbolMapping = symbolMapping;
        }

        public Map<Symbol, Symbol> getSymbolMapping()
        {
            return symbolMapping;
        }

        public AggregationNode getAggregation()
        {
            return aggregationNode;
        }
    }
}
| |
package erica.beakon.Pages;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.ValueEventListener;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import erica.beakon.Adapters.FirebaseHandler;
import erica.beakon.Adapters.FollowerAdapter;
import erica.beakon.Adapters.HashtagAdapter;
import erica.beakon.MainActivity;
import erica.beakon.Objects.Hashtag;
import erica.beakon.Objects.Movement;
import erica.beakon.Objects.User;
import erica.beakon.R;
public class ExpandedMovementPage extends Fragment {
    // Data backing the page.  The *Shown lists guard against adding the same
    // hashtag/follower to the UI twice when the page is repopulated.
    ArrayList<String> hashtagNameList = new ArrayList<>(); //list of movement IDs (stored in hashtag)
    ArrayList<String> userIDList = new ArrayList<>(); //list of user IDs (stored in hashtag)
    ArrayList<Hashtag> hashtagList = new ArrayList<>(); //list of movements fetched using movement IDs
    ArrayList<User> followerList = new ArrayList<>(); //list of followers fetched using user IDs

    // The movement being displayed.
    // NOTE(review): never assigned in the visible code of this fragment;
    // presumably set by populatePage() - confirm.
    Movement movement;

    // Movement identity, overwritten from the fragment arguments in onCreateView().
    String ID = "no ID";
    String name = "no name";

    ArrayList<String> hashtagsShown = new ArrayList<>(); //prevents duplication
    ArrayList<String> followersShown = new ArrayList<>(); //prevents duplication

    FirebaseHandler firebaseHandler;

    //create TVs
    TextView numFollowersTV;
    LinearLayout hashtagLayout;
    TextView movementNameTV;
    TextView description;
    TextView steps;
    TextView resources;
    FollowerAdapter followerAdapter;
    ImageButton backButton;
    ImageButton followButton;

    // Required empty public constructor for fragment re-instantiation.
    public ExpandedMovementPage() {}
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        final View view = inflater.inflate(R.layout.fragment_expanded_movement_page, container, false);

        // Read the movement id/name handed over by the opening fragment; the
        // "no ID"/"no name" field defaults remain when no arguments were passed.
        Bundle bundle = this.getArguments();
        if (bundle != null){
            ID = bundle.getString("ID");
            name = bundle.getString("name");
        }
        firebaseHandler = ((MainActivity) getActivity()).getHandler();

        //create TVs
        numFollowersTV = (TextView) view.findViewById(R.id.num_followers);
        hashtagLayout = (LinearLayout) view.findViewById(R.id.hashtag_layout);
        movementNameTV = (TextView) view.findViewById(R.id.movement_name);
        description = (TextView) view.findViewById(R.id.movement_description);
        steps = (TextView) view.findViewById(R.id.movement_steps);
        resources = (TextView) view.findViewById(R.id.movement_resources);

        //create buttons
        backButton = (ImageButton) view.findViewById(R.id.backButtonMovement);
        followButton = (ImageButton) view.findViewById(R.id.followButtonMovement);

        //create follower list view and its adapter
        ListView followerLV = (ListView) view.findViewById(R.id.movement_followers_list);
        followerAdapter = new FollowerAdapter(getActivity(), followerList);
        followerLV.setAdapter(followerAdapter); //starts empty

        populatePage();

        backButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                getActivity().onBackPressed(); //might not work if multiple backs pressed in a row?
            }
        });

        // Follow button: re-fetch the current user from Firebase so the freshest
        // hashtag/movement lists are used, register the user on this movement,
        // then flip the button icon to a check mark.
        followButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                User currentUser = ((MainActivity) getActivity()).getCurrentUser();
                firebaseHandler.getUser(currentUser.getId(), new ValueEventListener() {
                    @Override
                    public void onDataChange(DataSnapshot dataSnapshot) {
                        ArrayList<String> hashtagList = ((MainActivity) getActivity()).getHashtagList(dataSnapshot);
                        HashMap<String, HashMap<String, Boolean>> movementList = ((MainActivity) getActivity()).getMovements(dataSnapshot);
                        User user = new User(dataSnapshot.child("id").getValue().toString(), dataSnapshot.child("name").getValue().toString(), hashtagList, movementList);
                        // NOTE(review): 'movement' is not assigned anywhere in this
                        // method; presumably populatePage() sets it before this
                        // callback can fire - confirm.
                        firebaseHandler.addUsertoMovement(user, movement);
                        followButton.setImageResource(R.drawable.check);
                    }
                    @Override
                    public void onCancelled(DatabaseError databaseError) {}
                });
            }
        });
        return view;
    }
@Override
public void onResume() {
    super.onResume();
    // Refresh page content whenever the fragment returns to the foreground;
    // the hashtagsShown/followersShown guards stop re-fetched rows from
    // being added twice.
    populatePage();
}
@Override
public void onPause() {
    super.onPause();
    // Reset the hashtag dedupe state AND the views it guards. populatePage()
    // runs again in onResume(); clearing hashtagsShown while leaving the old
    // TextViews in hashtagLayout would let the refresh re-add every hashtag,
    // duplicating the row after each pause/resume cycle.
    hashtagLayout.removeAllViews();
    hashtagsShown.clear();
    // followersShown is deliberately left intact: the follower adapter keeps
    // its entries across pause, so its guard must keep them marked as shown.
}
// Wires a hashtag TextView so tapping it opens the matching expanded-hashtag page.
private void setOnClickHashtag(final TextView hashtagView){
    hashtagView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View clicked) {
            // Hand the tapped hashtag's display text to the new fragment via
            // its arguments bundle.
            Bundle args = new Bundle();
            args.putString("name", (String) hashtagView.getText());
            ExpandedHashtagPage expandedPage = new ExpandedHashtagPage();
            expandedPage.setArguments(args);
            // Swap the visible fragment for the expanded hashtag page.
            ((MainActivity) getContext()).changeFragment(expandedPage, "expandedHashtagPage");
        }
    });
}
// Renders a hashtag name in display form: a leading '#' plus a trailing
// space so adjacent hashtags in one layout do not run together.
private String getFormattedHashtag(String hashtag) {
    StringBuilder formatted = new StringBuilder("#");
    formatted.append(hashtag).append(' ');
    return formatted.toString();
}
// Builds one tappable, theme-colored TextView for the given hashtag.
private TextView createHashtagTextView(String hashtagName) {
    TextView hashtagView = new TextView(getContext());
    setOnClickHashtag(hashtagView);
    hashtagView.setText(getFormattedHashtag(hashtagName));
    hashtagView.setTextColor(getContext().getResources().getColor(R.color.colorPrimaryDark));
    return hashtagView;
}
// Appends a freshly built hashtag view to the given container.
public void addHashtagtoView(String hashtag, LinearLayout hashtagLayout) {
    hashtagLayout.addView(createHashtagTextView(hashtag));
}
/**
 * Fetches the movement identified by {@code ID} from Firebase and fills in
 * every section of the page: description, steps, resources, the hashtag row
 * and the follower list. All population happens inside asynchronous
 * callbacks, so the page updates gradually as results arrive; the
 * hashtagsShown/followersShown guards make repeated calls idempotent.
 */
private void populatePage() {
    movementNameTV.setText(name);
    firebaseHandler.getMovement(ID, new ValueEventListener() {
        @Override
        public void onDataChange(DataSnapshot dataSnapshot) {
            if (dataSnapshot.getValue() != null) { // only if the movement exists in the database
                movement = dataSnapshot.getValue(Movement.class); // deserialize into the movement field
                description.setText(movement.getDescription());
                steps.setText(movement.getSteps());
                resources.setText(movement.getResources());
                // NOTE(review): hashtagNameList and userIDList are fields
                // declared outside this excerpt — presumably string lists of
                // hashtag names and follower user IDs; confirm declarations.
                if (movement.getHashtagList() != null){
                    hashtagNameList = movement.getHashtagList();}; // hashtag names attached to the movement
                if(movement.getFollowers() != null) {
                    // Grouped thousands, e.g. "12,345" followers.
                    DecimalFormat formatter = new DecimalFormat("#,###");
                    numFollowersTV.setText(formatter.format(movement.getFollowers().size()));
                    userIDList = movement.getFollowers();} // IDs of the users following the movement
                if (hashtagNameList != null){ // only when the movement has hashtags
                    firebaseHandler.getBatchHashtags(hashtagNameList, new ValueEventListener() { // one callback per hashtag
                        @Override
                        public void onDataChange(DataSnapshot hashtagSnapshot) {
                            Hashtag hashtag = hashtagSnapshot.getValue(Hashtag.class);
                            // Skip hashtags already on screen so repeated
                            // populatePage() calls don't duplicate the row.
                            if (!hashtagsShown.contains(hashtag.getName())) {
                                addHashtagtoView(hashtag.getName(), hashtagLayout);
                                hashtagsShown.add(hashtag.getName());
                            }
                        } // updates gradually so you don't end up with a blank screen for a while
                        @Override
                        public void onCancelled(DatabaseError databaseError) {
                        }
                    });}
                if (userIDList != null){ // only when the movement has followers
                    // Show the check mark when the current user already follows.
                    if (userIDList.contains(((MainActivity) getActivity()).getCurrentUser().getId())){
                        followButton.setImageResource(R.drawable.check);
                    }
                    firebaseHandler.getBatchUsers(userIDList, new ValueEventListener() { // one callback per follower
                        @Override
                        public void onDataChange(DataSnapshot userSnapshot) {
                            ArrayList<String> hashtagList = ((MainActivity) getActivity()).getHashtagList(userSnapshot);
                            HashMap<String, HashMap<String, Boolean>> movementList = ((MainActivity) getActivity()).getMovements(userSnapshot);
                            // Build the follower by hand from snapshot children
                            // rather than getValue(User.class).
                            User follower = new User(userSnapshot.child("id").getValue().toString(), userSnapshot.child("name").getValue().toString(), hashtagList, movementList);
                            // Skip followers already listed so repeated calls
                            // don't duplicate adapter rows.
                            if (!followersShown.contains(follower.getId())) {
                                followerAdapter.add(follower); // adding also refreshes the list view
                                followersShown.add(follower.getId());
                            }
                        } // updates gradually (each iteration) so you don't end up with a blank screen for a while
                        @Override
                        public void onCancelled(DatabaseError databaseError) {
                        }
                    });}
            }
        }
        @Override
        public void onCancelled(DatabaseError databaseError) {}
    });
}
}
| |
/**
*/
package org.tud.inf.st.mbt.actions.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.tud.inf.st.mbt.actions.ActionsPackage;
import org.tud.inf.st.mbt.actions.RemoveBagAction;
import org.tud.inf.st.mbt.data.DataBag;
import org.tud.inf.st.mbt.functions.IArithmetricFunction;
import org.tud.inf.st.mbt.terms.Term;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Remove Bag Action</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.tud.inf.st.mbt.actions.impl.RemoveBagActionImpl#getBag <em>Bag</em>}</li>
* <li>{@link org.tud.inf.st.mbt.actions.impl.RemoveBagActionImpl#getIndex <em>Index</em>}</li>
* </ul>
* </p>
*
* @generated
*/
// EMF-generated implementation; all members tagged @generated are maintained
// by the EMF code generator — only toString() below is hand-written.
public class RemoveBagActionImpl extends ReconfigurationActionImpl implements RemoveBagAction {
    /**
     * The cached value of the '{@link #getBag() <em>Bag</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getBag()
     * @generated
     * @ordered
     */
    protected DataBag bag;
    /**
     * The cached value of the '{@link #getIndex() <em>Index</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getIndex()
     * @generated
     * @ordered
     */
    protected IArithmetricFunction index;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected RemoveBagActionImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return ActionsPackage.Literals.REMOVE_BAG_ACTION;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public DataBag getBag() {
        // Non-containment reference: resolve lazily in case it is still a
        // cross-document proxy, notifying listeners when resolution occurs.
        if (bag != null && bag.eIsProxy()) {
            InternalEObject oldBag = (InternalEObject)bag;
            bag = (DataBag)eResolveProxy(oldBag);
            if (bag != oldBag) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, ActionsPackage.REMOVE_BAG_ACTION__BAG, oldBag, bag));
            }
        }
        return bag;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public DataBag basicGetBag() {
        // Proxy-preserving accessor used by the reflective eGet(resolve=false) path.
        return bag;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setBag(DataBag newBag) {
        DataBag oldBag = bag;
        bag = newBag;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ActionsPackage.REMOVE_BAG_ACTION__BAG, oldBag, bag));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public IArithmetricFunction getIndex() {
        return index;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetIndex(IArithmetricFunction newIndex, NotificationChain msgs) {
        IArithmetricFunction oldIndex = index;
        index = newIndex;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ActionsPackage.REMOVE_BAG_ACTION__INDEX, oldIndex, newIndex);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setIndex(IArithmetricFunction newIndex) {
        // Containment reference: detach the old value's inverse link and
        // attach the new one before chaining the SET notification.
        if (newIndex != index) {
            NotificationChain msgs = null;
            if (index != null)
                msgs = ((InternalEObject)index).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ActionsPackage.REMOVE_BAG_ACTION__INDEX, null, msgs);
            if (newIndex != null)
                msgs = ((InternalEObject)newIndex).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ActionsPackage.REMOVE_BAG_ACTION__INDEX, null, msgs);
            msgs = basicSetIndex(newIndex, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ActionsPackage.REMOVE_BAG_ACTION__INDEX, newIndex, newIndex));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case ActionsPackage.REMOVE_BAG_ACTION__INDEX:
                return basicSetIndex(null, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ActionsPackage.REMOVE_BAG_ACTION__BAG:
                if (resolve) return getBag();
                return basicGetBag();
            case ActionsPackage.REMOVE_BAG_ACTION__INDEX:
                return getIndex();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case ActionsPackage.REMOVE_BAG_ACTION__BAG:
                setBag((DataBag)newValue);
                return;
            case ActionsPackage.REMOVE_BAG_ACTION__INDEX:
                setIndex((IArithmetricFunction)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case ActionsPackage.REMOVE_BAG_ACTION__BAG:
                setBag((DataBag)null);
                return;
            case ActionsPackage.REMOVE_BAG_ACTION__INDEX:
                setIndex((IArithmetricFunction)null);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ActionsPackage.REMOVE_BAG_ACTION__BAG:
                return bag != null;
            case ActionsPackage.REMOVE_BAG_ACTION__INDEX:
                return index != null;
        }
        return super.eIsSet(featureID);
    }
    // Hand-written (no @generated tag): renders the action as "unset <bag>[<index>]".
    @Override
    public String toString() {
        return "unset "+bag+"["+index+"]";
    }
} //RemoveBagActionImpl
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest.handler.async;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.rest.HttpMethodWrapper;
import org.apache.flink.runtime.rest.handler.HandlerRequest;
import org.apache.flink.runtime.rest.handler.HandlerRequestException;
import org.apache.flink.runtime.rest.handler.RestHandlerException;
import org.apache.flink.runtime.rest.messages.EmptyMessageParameters;
import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
import org.apache.flink.runtime.rest.messages.MessageHeaders;
import org.apache.flink.runtime.rest.messages.MessageParameters;
import org.apache.flink.runtime.rest.messages.MessagePathParameter;
import org.apache.flink.runtime.rest.messages.MessageQueryParameter;
import org.apache.flink.runtime.rest.messages.TriggerId;
import org.apache.flink.runtime.rest.messages.TriggerIdPathParameter;
import org.apache.flink.runtime.rest.messages.queue.QueueStatus;
import org.apache.flink.runtime.webmonitor.RestfulGateway;
import org.apache.flink.runtime.webmonitor.TestingRestfulGateway;
import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FlinkException;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
* Tests for the {@link AbstractAsynchronousOperationHandlers}.
*/
public class AbstractAsynchronousOperationHandlersTest extends TestLogger {
    private static final CompletableFuture<String> localRestAddress = CompletableFuture.completedFuture("localhost");
    private static final Time TIMEOUT = Time.seconds(10L);
    // Handlers under test, recreated fresh for every test case in setup().
    private TestingAsynchronousOperationHandlers testingAsynchronousOperationHandlers;
    private TestingAsynchronousOperationHandlers.TestingTriggerHandler testingTriggerHandler;
    private TestingAsynchronousOperationHandlers.TestingStatusHandler testingStatusHandler;
    @Before
    public void setup() {
        testingAsynchronousOperationHandlers = new TestingAsynchronousOperationHandlers();
        // The handlers are inner classes, so they are instantiated via the
        // enclosing instance; the gateway retriever is a no-op (() -> null)
        // because each test passes its gateway into handleRequest directly.
        testingTriggerHandler = testingAsynchronousOperationHandlers.new TestingTriggerHandler(
            localRestAddress,
            () -> null,
            TIMEOUT,
            Collections.emptyMap(),
            TestingTriggerMessageHeaders.INSTANCE);
        testingStatusHandler = testingAsynchronousOperationHandlers.new TestingStatusHandler(
            localRestAddress,
            () -> null,
            TIMEOUT,
            Collections.emptyMap(),
            TestingStatusMessageHeaders.INSTANCE);
    }
    /**
     * Tests the triggering and successful completion of an asynchronous operation.
     */
    @Test
    public void testOperationCompletion() throws Exception {
        final CompletableFuture<String> savepointFuture = new CompletableFuture<>();
        final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder()
            .setTriggerSavepointFunction((JobID jobId, String directory) -> savepointFuture)
            .build();
        // trigger the operation
        final TriggerId triggerId = testingTriggerHandler.handleRequest(
            triggerOperationRequest(),
            testingRestfulGateway).get().getTriggerId();
        // While the savepoint future is pending, the status must be in-progress.
        AsynchronousOperationResult<OperationResult> operationResult = testingStatusHandler.handleRequest(
            statusOperationRequest(triggerId),
            testingRestfulGateway).get();
        assertThat(operationResult.queueStatus().getId(), is(QueueStatus.inProgress().getId()));
        // complete the operation
        final String savepointPath = "foobar";
        savepointFuture.complete(savepointPath);
        operationResult = testingStatusHandler.handleRequest(
            statusOperationRequest(triggerId),
            testingRestfulGateway).get();
        assertThat(operationResult.queueStatus().getId(), is(QueueStatus.completed().getId()));
        assertThat(operationResult.resource().value, is(savepointPath));
    }
    /**
     * Tests the triggering and exceptional completion of an asynchronous operation.
     */
    @Test
    public void testOperationFailure() throws Exception {
        final FlinkException testException = new FlinkException("Test exception");
        final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder()
            .setTriggerSavepointFunction((JobID jobId, String directory) -> FutureUtils.completedExceptionally(testException))
            .build();
        // trigger the operation
        final TriggerId triggerId = testingTriggerHandler.handleRequest(
            triggerOperationRequest(),
            testingRestfulGateway).get().getTriggerId();
        AsynchronousOperationResult<OperationResult> operationResult = testingStatusHandler.handleRequest(
            statusOperationRequest(triggerId),
            testingRestfulGateway).get();
        // A failed operation still counts as completed; the failure travels in
        // the result's throwable rather than as a request error.
        assertThat(operationResult.queueStatus().getId(), is(QueueStatus.completed().getId()));
        final OperationResult resource = operationResult.resource();
        assertThat(resource.throwable, is(testException));
    }
    /**
     * Tests that querying an unknown trigger id returns an exceptionally
     * completed future.
     */
    @Test
    public void testUnknownTriggerId() throws Exception {
        final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder().build();
        try {
            testingStatusHandler.handleRequest(
                statusOperationRequest(new TriggerId()),
                testingRestfulGateway).get();
            fail("This should have failed with a RestHandlerException.");
        } catch (ExecutionException ee) {
            // Unwrap the expected RestHandlerException and verify it maps to 404.
            final Optional<RestHandlerException> optionalRestHandlerException = ExceptionUtils.findThrowable(ee, RestHandlerException.class);
            assertThat(optionalRestHandlerException.isPresent(), is(true));
            final RestHandlerException restHandlerException = optionalRestHandlerException.get();
            assertThat(restHandlerException.getMessage(), containsString("Operation not found"));
            assertThat(restHandlerException.getHttpResponseStatus(), is(HttpResponseStatus.NOT_FOUND));
        }
    }
    /**
     * Tests that the future returned by {@link AbstractAsynchronousOperationHandlers.StatusHandler#closeAsync()}
     * completes when the result of the asynchronous operation is served.
     */
    @Test
    public void testCloseShouldFinishOnFirstServedResult() throws Exception {
        final CompletableFuture<String> savepointFuture = new CompletableFuture<>();
        final TestingRestfulGateway testingRestfulGateway = new TestingRestfulGateway.Builder()
            .setTriggerSavepointFunction((JobID jobId, String directory) -> savepointFuture)
            .build();
        final TriggerId triggerId = testingTriggerHandler.handleRequest(
            triggerOperationRequest(),
            testingRestfulGateway).get().getTriggerId();
        final CompletableFuture<Void> closeFuture = testingStatusHandler.closeAsync();
        // Serving an in-progress status must NOT complete the close future.
        testingStatusHandler.handleRequest(statusOperationRequest(triggerId), testingRestfulGateway).get();
        assertThat(closeFuture.isDone(), is(false));
        savepointFuture.complete("foobar");
        // Serving the final result completes the close future.
        testingStatusHandler.handleRequest(statusOperationRequest(triggerId), testingRestfulGateway).get();
        assertThat(closeFuture.isDone(), is(true));
    }
    // Builds a parameterless trigger request (no path/query parameters).
    private static HandlerRequest<EmptyRequestBody, EmptyMessageParameters> triggerOperationRequest() throws HandlerRequestException {
        return new HandlerRequest<>(EmptyRequestBody.getInstance(), EmptyMessageParameters.getInstance());
    }
    // Builds a status request carrying the given trigger id as a path parameter.
    private static HandlerRequest<EmptyRequestBody, TriggerMessageParameters> statusOperationRequest(TriggerId triggerId) throws HandlerRequestException {
        return new HandlerRequest<>(
            EmptyRequestBody.getInstance(),
            new TriggerMessageParameters(),
            Collections.singletonMap(TriggerIdPathParameter.KEY, triggerId.toString()),
            Collections.emptyMap());
    }
    // Minimal OperationKey subtype keyed purely by the trigger id.
    private static final class TestOperationKey extends OperationKey {
        protected TestOperationKey(TriggerId triggerId) {
            super(triggerId);
        }
    }
    // Message parameters with exactly one path parameter (the trigger id) and
    // no query parameters.
    private static final class TriggerMessageParameters extends MessageParameters {
        private final TriggerIdPathParameter triggerIdPathParameter = new TriggerIdPathParameter();
        @Override
        public Collection<MessagePathParameter<?>> getPathParameters() {
            return Collections.singleton(triggerIdPathParameter);
        }
        @Override
        public Collection<MessageQueryParameter<?>> getQueryParameters() {
            return Collections.emptyList();
        }
    }
    // Result carrier: exactly one of value/throwable is expected to be set.
    private static final class OperationResult {
        @Nullable
        private final Throwable throwable;
        @Nullable
        private final String value;
        OperationResult(@Nullable String value, @Nullable Throwable throwable) {
            this.value = value;
            this.throwable = throwable;
        }
    }
    // Stub trigger headers: POST to "barfoo" with empty body and parameters.
    private static final class TestingTriggerMessageHeaders extends AsynchronousOperationTriggerMessageHeaders<EmptyRequestBody, EmptyMessageParameters> {
        static final TestingTriggerMessageHeaders INSTANCE = new TestingTriggerMessageHeaders();
        private TestingTriggerMessageHeaders() {}
        @Override
        public HttpResponseStatus getResponseStatusCode() {
            return HttpResponseStatus.OK;
        }
        @Override
        public String getDescription() {
            return "";
        }
        @Override
        public Class<EmptyRequestBody> getRequestClass() {
            return EmptyRequestBody.class;
        }
        @Override
        public EmptyMessageParameters getUnresolvedMessageParameters() {
            return EmptyMessageParameters.getInstance();
        }
        @Override
        public HttpMethodWrapper getHttpMethod() {
            return HttpMethodWrapper.POST;
        }
        @Override
        public String getTargetRestEndpointURL() {
            return "barfoo";
        }
    }
    // Stub status headers: GET from "foobar" returning an OperationResult.
    private static final class TestingStatusMessageHeaders extends AsynchronousOperationStatusMessageHeaders<OperationResult, TriggerMessageParameters> {
        private static final TestingStatusMessageHeaders INSTANCE = new TestingStatusMessageHeaders();
        private TestingStatusMessageHeaders() {}
        @Override
        protected Class<OperationResult> getValueClass() {
            return OperationResult.class;
        }
        @Override
        public HttpResponseStatus getResponseStatusCode() {
            return HttpResponseStatus.OK;
        }
        @Override
        public Class<EmptyRequestBody> getRequestClass() {
            return EmptyRequestBody.class;
        }
        @Override
        public TriggerMessageParameters getUnresolvedMessageParameters() {
            return new TriggerMessageParameters();
        }
        @Override
        public HttpMethodWrapper getHttpMethod() {
            return HttpMethodWrapper.GET;
        }
        @Override
        public String getTargetRestEndpointURL() {
            return "foobar";
        }
        @Override
        public String getDescription() {
            return "";
        }
    }
    // Concrete handler pair under test; delegates the actual asynchronous work
    // to the gateway's savepoint-trigger function supplied by each test.
    private static final class TestingAsynchronousOperationHandlers extends AbstractAsynchronousOperationHandlers<TestOperationKey, String> {
        class TestingTriggerHandler extends TriggerHandler<RestfulGateway, EmptyRequestBody, EmptyMessageParameters> {
            protected TestingTriggerHandler(CompletableFuture<String> localRestAddress, GatewayRetriever<? extends RestfulGateway> leaderRetriever, Time timeout, Map<String, String> responseHeaders, MessageHeaders<EmptyRequestBody, TriggerResponse, EmptyMessageParameters> messageHeaders) {
                super(localRestAddress, leaderRetriever, timeout, responseHeaders, messageHeaders);
            }
            @Override
            protected CompletableFuture<String> triggerOperation(HandlerRequest<EmptyRequestBody, EmptyMessageParameters> request, RestfulGateway gateway) throws RestHandlerException {
                return gateway.triggerSavepoint(new JobID(), null, false, timeout);
            }
            @Override
            protected TestOperationKey createOperationKey(HandlerRequest<EmptyRequestBody, EmptyMessageParameters> request) {
                return new TestOperationKey(new TriggerId());
            }
        }
        class TestingStatusHandler extends StatusHandler<RestfulGateway, OperationResult, TriggerMessageParameters> {
            protected TestingStatusHandler(CompletableFuture<String> localRestAddress, GatewayRetriever<? extends RestfulGateway> leaderRetriever, Time timeout, Map<String, String> responseHeaders, MessageHeaders<EmptyRequestBody, AsynchronousOperationResult<OperationResult>, TriggerMessageParameters> messageHeaders) {
                super(localRestAddress, leaderRetriever, timeout, responseHeaders, messageHeaders);
            }
            @Override
            protected TestOperationKey getOperationKey(HandlerRequest<EmptyRequestBody, TriggerMessageParameters> request) {
                final TriggerId triggerId = request.getPathParameter(TriggerIdPathParameter.class);
                return new TestOperationKey(triggerId);
            }
            @Override
            protected OperationResult exceptionalOperationResultResponse(Throwable throwable) {
                return new OperationResult(null, throwable);
            }
            @Override
            protected OperationResult operationResultResponse(String operationResult) {
                return new OperationResult(operationResult, null);
            }
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.gluedatabrew.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/databrew-2017-07-25/CreateRecipe" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateRecipeRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* A description for the recipe.
* </p>
*/
private String description;
/**
* <p>
* A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.), and
* space.
* </p>
*/
private String name;
/**
* <p>
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe action and
* (optionally) an array of condition expressions.
* </p>
*/
private java.util.List<RecipeStep> steps;
/**
* <p>
* Metadata tags to apply to this recipe.
* </p>
*/
private java.util.Map<String, String> tags;
/**
* <p>
* A description for the recipe.
* </p>
*
* @param description
* A description for the recipe.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* A description for the recipe.
* </p>
*
* @return A description for the recipe.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* A description for the recipe.
* </p>
*
* @param description
* A description for the recipe.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.), and
* space.
* </p>
*
* @param name
* A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.),
* and space.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.), and
* space.
* </p>
*
* @return A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.),
* and space.
*/
public String getName() {
return this.name;
}
/**
* <p>
* A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.), and
* space.
* </p>
*
* @param name
* A unique name for the recipe. Valid characters are alphanumeric (A-Z, a-z, 0-9), hyphen (-), period (.),
* and space.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest withName(String name) {
setName(name);
return this;
}
/**
* <p>
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe action and
* (optionally) an array of condition expressions.
* </p>
*
* @return An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe
* action and (optionally) an array of condition expressions.
*/
public java.util.List<RecipeStep> getSteps() {
return steps;
}
/**
* <p>
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe action and
* (optionally) an array of condition expressions.
* </p>
*
* @param steps
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe
* action and (optionally) an array of condition expressions.
*/
public void setSteps(java.util.Collection<RecipeStep> steps) {
if (steps == null) {
this.steps = null;
return;
}
this.steps = new java.util.ArrayList<RecipeStep>(steps);
}
/**
* <p>
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe action and
* (optionally) an array of condition expressions.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setSteps(java.util.Collection)} or {@link #withSteps(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param steps
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe
* action and (optionally) an array of condition expressions.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest withSteps(RecipeStep... steps) {
if (this.steps == null) {
setSteps(new java.util.ArrayList<RecipeStep>(steps.length));
}
for (RecipeStep ele : steps) {
this.steps.add(ele);
}
return this;
}
/**
* <p>
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe action and
* (optionally) an array of condition expressions.
* </p>
*
* @param steps
* An array containing the steps to be performed by the recipe. Each recipe step consists of one recipe
* action and (optionally) an array of condition expressions.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest withSteps(java.util.Collection<RecipeStep> steps) {
setSteps(steps);
return this;
}
/**
* <p>
* Metadata tags to apply to this recipe.
* </p>
*
* @return Metadata tags to apply to this recipe.
*/
public java.util.Map<String, String> getTags() {
return tags;
}
/**
* <p>
* Metadata tags to apply to this recipe.
* </p>
*
* @param tags
* Metadata tags to apply to this recipe.
*/
public void setTags(java.util.Map<String, String> tags) {
this.tags = tags;
}
/**
* <p>
* Metadata tags to apply to this recipe.
* </p>
*
* @param tags
* Metadata tags to apply to this recipe.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest withTags(java.util.Map<String, String> tags) {
setTags(tags);
return this;
}
/**
* Add a single Tags entry
*
* @see CreateRecipeRequest#withTags
* @returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest addTagsEntry(String key, String value) {
if (null == this.tags) {
this.tags = new java.util.HashMap<String, String>();
}
if (this.tags.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.tags.put(key, value);
return this;
}
/**
* Removes all the entries added into Tags.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateRecipeRequest clearTagsEntries() {
this.tags = null;
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getSteps() != null)
sb.append("Steps: ").append(getSteps()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof CreateRecipeRequest)) {
        return false;
    }
    CreateRecipeRequest other = (CreateRecipeRequest) obj;
    // Objects.equals treats "both null" as equal and "one null" as unequal,
    // matching the original XOR/null-guard comparisons field by field.
    return java.util.Objects.equals(getDescription(), other.getDescription())
            && java.util.Objects.equals(getName(), other.getName())
            && java.util.Objects.equals(getSteps(), other.getSteps())
            && java.util.Objects.equals(getTags(), other.getTags());
}
@Override
public int hashCode() {
    // Objects.hash applies the identical accumulation as the hand-rolled loop:
    // seed 1, multiplier 31, null contributes 0 — so hash values are unchanged.
    return java.util.Objects.hash(getDescription(), getName(), getSteps(), getTags());
}
@Override
public CreateRecipeRequest clone() {
    // Covariant override narrowing the superclass clone() return type.
    // NOTE(review): relies on the unseen superclass clone implementation —
    // presumably a shallow copy of request state; confirm against the base class.
    return (CreateRecipeRequest) super.clone();
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Seiji Sogabe, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.lifecycle;
import hudson.Functions;
import hudson.model.ManagementLink;
import jenkins.model.Jenkins;
import hudson.AbortException;
import hudson.Extension;
import hudson.util.StreamTaskListener;
import hudson.util.jna.DotNet;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.tools.ant.taskdefs.Move;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.DefaultLogger;
import org.apache.tools.ant.types.FileSet;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.net.URL;
/**
* {@link ManagementLink} that allows the installation as a Windows service.
*
* @author Kohsuke Kawaguchi
*/
public class WindowsInstallerLink extends ManagementLink {

    /**
     * Location of the jenkins.war.
     * In general case, we can't determine this value, yet having this is a requirement for the installer.
     */
    private final File hudsonWar;

    /**
     * If the installation is completed, this value holds the installation directory.
     * volatile: written by the request thread in doDoInstall and read from the
     * "terminator" thread and its shutdown hook in doRestart.
     */
    private volatile File installationDir;

    // Private: instances are created only through registerIfApplicable() below.
    private WindowsInstallerLink(File jenkinsWar) {
        this.hudsonWar = jenkinsWar;
    }

    public String getIconFileName() {
        return "installer.gif";
    }

    public String getUrlName() {
        return "install";
    }

    public String getDisplayName() {
        return Messages.WindowsInstallerLink_DisplayName();
    }

    public String getDescription() {
        return Messages.WindowsInstallerLink_Description();
    }

    /**
     * Is the installation successful?
     */
    public boolean isInstalled() {
        return installationDir!=null;
    }

    /**
     * Performs installation.
     *
     * Stapler-bound endpoint: copies the service wrapper (jenkins.exe / jenkins.xml)
     * and this jenkins.war into the requested directory, then runs
     * "jenkins.exe install" elevated. On success the directory is remembered in
     * {@link #installationDir} and the client is redirected back to this page.
     *
     * @param _dir target installation directory, from the "dir" query parameter.
     */
    public void doDoInstall(StaplerRequest req, StaplerResponse rsp, @QueryParameter("dir") String _dir) throws IOException, ServletException {
        if(installationDir!=null) {
            // installation already complete
            sendError("Installation is already complete",req,rsp);
            return;
        }
        if(!DotNet.isInstalled(2,0)) {
            // the service wrapper executable needs the .NET runtime to run
            sendError(".NET Framework 2.0 or later is required for this feature",req,rsp);
            return;
        }
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);
        File dir = new File(_dir).getAbsoluteFile();
        dir.mkdirs();
        if(!dir.exists()) {
            sendError("Failed to create installation directory: "+dir,req,rsp);
            return;
        }

        try {
            // copy files over there
            copy(req, rsp, dir, getClass().getResource("/windows-service/jenkins.exe"), "jenkins.exe");
            copy(req, rsp, dir, getClass().getResource("/windows-service/jenkins.xml"), "jenkins.xml");
            // canonical comparison avoids copying the war onto itself when
            // installing into the directory that already contains it
            if(!hudsonWar.getCanonicalFile().equals(new File(dir,"jenkins.war").getCanonicalFile()))
                copy(req, rsp, dir, hudsonWar.toURI().toURL(), "jenkins.war");

            // install as a service
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            StreamTaskListener task = new StreamTaskListener(baos);
            task.getLogger().println("Installing a service");
            int r = WindowsSlaveInstaller.runElevated(
                    new File(dir, "jenkins.exe"), "install", task, dir);
            if(r!=0) {
                // non-zero exit: surface the wrapper's captured output as the error page
                sendError(baos.toString(),req,rsp);
                return;
            }

            // installation was successful
            installationDir = dir;
            rsp.sendRedirect(".");
        } catch (AbortException e) {
            // this exception is used as a signal to terminate processing. the error should have been already reported
        } catch (InterruptedException e) {
            throw new ServletException(e);
        }
    }

    /**
     * Copies a single resource into the target folder, by the given name, and handle errors gracefully.
     * On failure the error is logged, reported to the client, and {@link AbortException}
     * is thrown to signal the caller to stop processing (see doDoInstall).
     */
    private void copy(StaplerRequest req, StaplerResponse rsp, File dir, URL src, String name) throws ServletException, IOException {
        try {
            FileUtils.copyURLToFile(src,new File(dir, name));
        } catch (IOException e) {
            LOGGER.log(Level.SEVERE, "Failed to copy "+name,e);
            sendError("Failed to copy "+name+": "+e.getMessage(),req,rsp);
            throw new AbortException();
        }
    }

    /**
     * Stapler-bound endpoint: renders the "_restart" view to the client, then shuts
     * down this in-process Jenkins and (via a JVM shutdown hook) moves the data
     * directory if needed and starts the freshly installed Windows service.
     */
    public void doRestart(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
        if(installationDir==null) {
            // if the user reloads the page after Hudson has restarted,
            // it comes back here. In such a case, don't let this restart Hudson.
            // so just send them back to the top page
            rsp.sendRedirect(req.getContextPath()+"/");
            return;
        }
        Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER);

        // respond to the client first, before the shutdown sequence begins
        rsp.forward(this,"_restart",req);
        final File oldRoot = Jenkins.getInstance().getRootDir();

        // initiate an orderly shutdown after we finished serving this request
        new Thread("terminator") {
            public void run() {
                try {
                    // brief delay so the response above can be flushed to the client
                    Thread.sleep(1000);

                    // let the service start after we close our sockets, to avoid conflicts
                    Runtime.getRuntime().addShutdownHook(new Thread("service starter") {
                        public void run() {
                            try {
                                // move the data only when installing into a different directory
                                if(!oldRoot.equals(installationDir)) {
                                    LOGGER.info("Moving data");
                                    Move mv = new Move();
                                    Project p = new Project();
                                    p.addBuildListener(createLogger());
                                    mv.setProject(p);
                                    FileSet fs = new FileSet();
                                    fs.setDir(oldRoot);
                                    fs.setExcludes("war/**"); // we can't really move the exploded war.
                                    mv.addFileset(fs);
                                    mv.setTodir(installationDir);
                                    mv.setFailOnError(false); // plugins can also fail to move
                                    mv.execute();
                                }
                                LOGGER.info("Starting a Windows service");
                                StreamTaskListener task = StreamTaskListener.fromStdout();
                                int r = WindowsSlaveInstaller.runElevated(
                                        new File(installationDir, "jenkins.exe"), "start", task, installationDir);
                                task.getLogger().println(r==0?"Successfully started":"start service failed. Exit code="+r);
                            } catch (IOException e) {
                                e.printStackTrace();
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }

                        // Ant logger echoing the Move task's progress to stdout/stderr.
                        private DefaultLogger createLogger() {
                            DefaultLogger logger = new DefaultLogger();
                            logger.setOutputPrintStream(System.out);
                            logger.setErrorPrintStream(System.err);
                            return logger;
                        }
                    });

                    // terminate the JVM; this fires the shutdown hook registered above
                    System.exit(0);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }.start();
    }

    /**
     * Displays the error in a page.
     */
    protected final void sendError(Exception e, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException {
        sendError(e.getMessage(),req,rsp);
    }

    // Forwards to Jenkins' shared "error" view with the message preformatted.
    protected final void sendError(String message, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException {
        req.setAttribute("message",message);
        req.setAttribute("pre",true);
        rsp.forward(Jenkins.getInstance(),"error",req);
    }

    /**
     * Decide if {@link WindowsInstallerLink} should show up in UI, and if so, register it.
     * Returns null (no link) unless running on Windows, not already installed as a
     * service, and launched via "java -jar jenkins.war" so the war location is known.
     */
    @Extension
    public static WindowsInstallerLink registerIfApplicable() {
        if(!Functions.isWindows())
            return null; // this is a Windows only feature
        if(Lifecycle.get() instanceof WindowsServiceLifecycle)
            return null; // already installed as Windows service

        // this system property is set by the launcher when we run "java -jar jenkins.war"
        // and this is how we know where is jenkins.war.
        String war = System.getProperty("executable-war");
        if(war!=null && new File(war).exists()) {
            WindowsInstallerLink link = new WindowsInstallerLink(new File(war));

            // in certain situations where we know the user is just trying Jenkins (like when Jenkins is launched
            // from JNLP from https://hudson.dev.java.net/), also put this link on the navigation bar to increase
            // visibility
            if(System.getProperty(WindowsInstallerLink.class.getName()+".prominent")!=null)
                Jenkins.getInstance().getActions().add(link);

            return link;
        }
        return null;
    }

    private static final Logger LOGGER = Logger.getLogger(WindowsInstallerLink.class.getName());
}
| |
package api.sc2geeks.entity.playerperson;
import api.sc2geeks.entity.EntityWithImageInfo;
import java.sql.Timestamp;
/**
* Created with IntelliJ IDEA.
* User: robert
* Date: 7/29/12
* Time: 10:11 AM
* To change this template use File | Settings | File Templates.
*/
/**
 * Value object describing a StarCraft II player: in-game identity, real-world
 * names, team, social/media links, and database audit columns.
 *
 * <p><b>Identity contract:</b> {@link #equals(Object)} compares only the
 * descriptive fields (gameId, altIds, names, wikiImageUrl, race, stream, team,
 * twitterHandle, wikiUrl, fanPage, country, birthday). The surrogate key
 * {@code personId} and the audit columns ({@code inDate}, {@code inUser},
 * {@code lastEditDate}, {@code lastEditUser}) are deliberately excluded, so two
 * records describing the same person are equal regardless of when or by whom
 * they were stored. {@link #hashCode()} now hashes exactly the same field set;
 * previously it seeded with {@code personId} and mixed in the audit columns
 * while omitting several compared fields, violating the equals/hashCode
 * contract (equal objects could hash differently, breaking HashMap/HashSet use).
 */
public class PlayerPerson extends EntityWithImageInfo
{
    // Surrogate database key; intentionally excluded from equals()/hashCode().
    private int personId;

    public int getPersonId()
    {
        return personId;
    }

    public void setPersonId(int personId)
    {
        this.personId = personId;
    }

    private String gameId;

    public String getGameId()
    {
        return gameId;
    }

    public void setGameId(String gameId)
    {
        this.gameId = gameId;
    }

    private String enFullName;

    public String getEnFullName()
    {
        return enFullName;
    }

    public void setEnFullName(String enFullName)
    {
        this.enFullName = enFullName;
    }

    private String nativeFullName;

    public String getNativeFullName()
    {
        return nativeFullName;
    }

    public void setNativeFullName(String nativeFullName)
    {
        this.nativeFullName = nativeFullName;
    }

    private String team;

    public String getTeam()
    {
        return team;
    }

    public void setTeam(String team)
    {
        this.team = team;
    }

    // Alternate in-game IDs, presumably a delimited list — TODO confirm format.
    private String altIds;

    public String getAltIds()
    {
        return altIds;
    }

    public void setAltIds(String altIds)
    {
        this.altIds = altIds;
    }

    private String race;

    public String getRace()
    {
        return race;
    }

    public void setRace(String race)
    {
        this.race = race;
    }

    private String wikiUrl;

    public String getWikiUrl()
    {
        return wikiUrl;
    }

    public void setWikiUrl(String wikiUrl)
    {
        this.wikiUrl = wikiUrl;
    }

    private String stream;

    public String getStream()
    {
        return stream;
    }

    public void setStream(String stream)
    {
        this.stream = stream;
    }

    private String twitterHandle;

    public String getTwitterHandle()
    {
        return twitterHandle;
    }

    public void setTwitterHandle(String twitterHandle)
    {
        this.twitterHandle = twitterHandle;
    }

    private String country;

    public String getCountry()
    {
        return country;
    }

    public void setCountry(String country)
    {
        this.country = country;
    }

    private String birthday;

    public String getBirthday()
    {
        return birthday;
    }

    public void setBirthday(String birthday)
    {
        this.birthday = birthday;
    }

    private String fanPage;

    public String getFanPage()
    {
        return fanPage;
    }

    public void setFanPage(String fanPage)
    {
        this.fanPage = fanPage;
    }

    // Audit column: record creation time; excluded from equals()/hashCode().
    private Timestamp inDate;

    public Timestamp getInDate()
    {
        return inDate;
    }

    public void setInDate(Timestamp inDate)
    {
        this.inDate = inDate;
    }

    // Audit column: record creator; excluded from equals()/hashCode().
    private String inUser;

    public String getInUser()
    {
        return inUser;
    }

    public void setInUser(String inUser)
    {
        this.inUser = inUser;
    }

    // Audit column: last modification time; excluded from equals()/hashCode().
    private Timestamp lastEditDate;

    public Timestamp getLastEditDate()
    {
        return lastEditDate;
    }

    public void setLastEditDate(Timestamp lastEditDate)
    {
        this.lastEditDate = lastEditDate;
    }

    // Audit column: last editor; excluded from equals()/hashCode().
    private String lastEditUser;

    public String getLastEditUser()
    {
        return lastEditUser;
    }

    public void setLastEditUser(String lastEditUser)
    {
        this.lastEditUser = lastEditUser;
    }

    private String wikiImageUrl;

    public String getWikiImageUrl()
    {
        return wikiImageUrl;
    }

    public void setWikiImageUrl(String wikiImageUrl)
    {
        this.wikiImageUrl = wikiImageUrl;
    }

    // Local image name; note: NOT part of equals()/hashCode().
    private String localImageName;

    public String getLocalImageName()
    {
        return localImageName;
    }

    public void setLocalImageName(String localImageName)
    {
        this.localImageName = localImageName;
    }

    // Derived statistic; NOT part of equals()/hashCode().
    private int replayCount;

    public int getReplayCount()
    {
        return replayCount;
    }

    public void setReplayCount(int replayCount)
    {
        this.replayCount = replayCount;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        PlayerPerson that = (PlayerPerson) o;

        // Descriptive fields only; personId and audit columns are intentionally skipped.
        if (gameId != null ? !gameId.equals(that.gameId) : that.gameId != null) return false;
        if (altIds != null ? !altIds.equals(that.altIds) : that.altIds != null) return false;
        if (enFullName != null ? !enFullName.equals(that.enFullName) : that.enFullName != null) return false;
        if (wikiImageUrl != null ? !wikiImageUrl.equals(that.wikiImageUrl) : that.wikiImageUrl != null) return false;
        if (nativeFullName != null ? !nativeFullName.equals(that.nativeFullName) : that.nativeFullName != null)
            return false;
        if (race != null ? !race.equals(that.race) : that.race != null) return false;
        if (stream != null ? !stream.equals(that.stream) : that.stream != null) return false;
        if (team != null ? !team.equals(that.team) : that.team != null) return false;
        if (twitterHandle != null ? !twitterHandle.equals(that.twitterHandle) : that.twitterHandle != null)
            return false;
        if (wikiUrl != null ? !wikiUrl.equals(that.wikiUrl) : that.wikiUrl != null) return false;
        if (fanPage != null ? !fanPage.equals(that.fanPage) : that.fanPage != null) return false;
        if (country != null ? !country.equals(that.country) : that.country != null) return false;
        if (birthday != null ? !birthday.equals(that.birthday) : that.birthday != null) return false;

        return true;
    }

    @Override
    public int hashCode()
    {
        // FIX: hash exactly the fields compared in equals(). The previous
        // implementation seeded with personId and included the audit columns
        // while omitting gameId, wikiImageUrl, fanPage, country and birthday,
        // so equal objects could produce different hash codes.
        int result = gameId != null ? gameId.hashCode() : 0;
        result = 31 * result + (altIds != null ? altIds.hashCode() : 0);
        result = 31 * result + (enFullName != null ? enFullName.hashCode() : 0);
        result = 31 * result + (wikiImageUrl != null ? wikiImageUrl.hashCode() : 0);
        result = 31 * result + (nativeFullName != null ? nativeFullName.hashCode() : 0);
        result = 31 * result + (race != null ? race.hashCode() : 0);
        result = 31 * result + (stream != null ? stream.hashCode() : 0);
        result = 31 * result + (team != null ? team.hashCode() : 0);
        result = 31 * result + (twitterHandle != null ? twitterHandle.hashCode() : 0);
        result = 31 * result + (wikiUrl != null ? wikiUrl.hashCode() : 0);
        result = 31 * result + (fanPage != null ? fanPage.hashCode() : 0);
        result = 31 * result + (country != null ? country.hashCode() : 0);
        result = 31 * result + (birthday != null ? birthday.hashCode() : 0);
        return result;
    }

    @Override
    public String toString()
    {
        return "id" + personId + ", FullName:" + enFullName + ", nativeFullName:" + nativeFullName;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.master;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.MasterNodeChangePredicate;
import org.elasticsearch.cluster.NotMasterException;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException;
import org.elasticsearch.discovery.MasterNotDiscoveredException;
import org.elasticsearch.node.NodeClosedException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportService;
import java.util.function.Predicate;
import java.util.function.Supplier;
/**
* A base class for operations that needs to be performed on the master node.
*/
/**
 * A base class for operations that needs to be performed on the master node.
 *
 * <p>Execution strategy (see {@link AsyncSingleAction#doStart}): if the local node
 * is the elected master — or {@link #localExecute} opts in — the request runs
 * locally on the {@link #executor} thread pool after cluster blocks pass;
 * otherwise it is forwarded to the current master. Retryable cluster blocks,
 * master failover, and connection failures cause the action to wait for a
 * relevant cluster-state change and retry, bounded by the request's
 * master-node timeout.
 */
public abstract class TransportMasterNodeAction<Request extends MasterNodeRequest<Request>, Response extends ActionResponse>
    extends HandledTransportAction<Request, Response> {

    protected final ThreadPool threadPool;
    protected final TransportService transportService;
    protected final ClusterService clusterService;
    protected final IndexNameExpressionResolver indexNameExpressionResolver;

    // Thread-pool name used for local execution of the master operation;
    // resolved once from the subclass's executor() implementation.
    private final String executor;

    // Convenience overload (Supplier-based request factory); canTripCircuitBreaker defaults to true.
    protected TransportMasterNodeAction(Settings settings, String actionName, TransportService transportService,
                                        ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters,
                                        IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> request) {
        this(settings, actionName, true, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, request);
    }

    // Convenience overload (Writeable.Reader-based request factory); canTripCircuitBreaker defaults to true.
    protected TransportMasterNodeAction(Settings settings, String actionName, TransportService transportService,
                                        ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters,
                                        Writeable.Reader<Request> request, IndexNameExpressionResolver indexNameExpressionResolver) {
        this(settings, actionName, true, transportService, clusterService, threadPool, actionFilters, request, indexNameExpressionResolver);
    }

    // Full constructor, Supplier-based request factory.
    protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker,
                                        TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                                        ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                        Supplier<Request> request) {
        super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request);
        this.transportService = transportService;
        this.clusterService = clusterService;
        this.threadPool = threadPool;
        this.indexNameExpressionResolver = indexNameExpressionResolver;
        this.executor = executor();
    }

    // Full constructor, Writeable.Reader-based request factory.
    protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker,
                                        TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                                        ActionFilters actionFilters, Writeable.Reader<Request> request,
                                        IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request);
        this.transportService = transportService;
        this.clusterService = clusterService;
        this.threadPool = threadPool;
        this.indexNameExpressionResolver = indexNameExpressionResolver;
        this.executor = executor();
    }

    /** Name of the thread pool on which the master operation executes locally. */
    protected abstract String executor();

    /** Creates an empty response instance, used when deserializing a forwarded reply. */
    protected abstract Response newResponse();

    /** The actual operation, invoked only once this node holds (or may act on) the master state. */
    protected abstract void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws Exception;

    /**
     * Override this operation if access to the task parameter is needed
     */
    protected void masterOperation(Task task, Request request, ClusterState state, ActionListener<Response> listener) throws Exception {
        masterOperation(request, state, listener);
    }

    // Subclasses may return true to run the operation locally even when this node is not the master.
    protected boolean localExecute(Request request) {
        return false;
    }

    /** Returns a blocking exception if the request cannot proceed under the given state, else null. */
    protected abstract ClusterBlockException checkBlock(Request request, ClusterState state);

    @Override
    protected void doExecute(Task task, final Request request, ActionListener<Response> listener) {
        new AsyncSingleAction(task, request, listener).start();
    }

    /**
     * Per-request state machine: observes cluster state, chooses between local
     * execution and forwarding to the master, and retries on retryable failures.
     */
    class AsyncSingleAction {

        private final ActionListener<Response> listener;
        private final Request request;
        // Created in start(); used by retry() to wait for relevant state changes.
        private volatile ClusterStateObserver observer;
        private final Task task;

        AsyncSingleAction(Task task, Request request, ActionListener<Response> listener) {
            this.task = task;
            this.request = request;
            if (task != null) {
                request.setParentTask(clusterService.localNode().getId(), task.getId());
            }
            this.listener = listener;
        }

        public void start() {
            ClusterState state = clusterService.state();
            // the observer bounds all retries by the request's master-node timeout
            this.observer
                = new ClusterStateObserver(state, clusterService, request.masterNodeTimeout(), logger, threadPool.getThreadContext());
            doStart(state);
        }

        // Invoked initially and again on every retry with the freshly observed state.
        protected void doStart(ClusterState clusterState) {
            try {
                final Predicate<ClusterState> masterChangePredicate = MasterNodeChangePredicate.build(clusterState);
                final DiscoveryNodes nodes = clusterState.nodes();
                if (nodes.isLocalNodeElectedMaster() || localExecute(request)) {
                    // check for block, if blocked, retry, else, execute locally
                    final ClusterBlockException blockException = checkBlock(request, clusterState);
                    if (blockException != null) {
                        if (!blockException.retryable()) {
                            listener.onFailure(blockException);
                        } else {
                            logger.trace("can't execute due to a cluster block, retrying", blockException);
                            // retry once a state arrives in which the block is gone (or non-retryable)
                            retry(blockException, newState -> {
                                try {
                                    ClusterBlockException newException = checkBlock(request, newState);
                                    return (newException == null || !newException.retryable());
                                } catch (Exception e) {
                                    // accept state as block will be rechecked by doStart() and listener.onFailure() then called
                                    logger.trace("exception occurred during cluster block checking, accepting state", e);
                                    return true;
                                }
                            });
                        }
                    } else {
                        // wrap the caller's listener so master-failover errors trigger a retry
                        ActionListener<Response> delegate = new ActionListener<Response>() {
                            @Override
                            public void onResponse(Response response) {
                                listener.onResponse(response);
                            }

                            @Override
                            public void onFailure(Exception t) {
                                if (t instanceof FailedToCommitClusterStateException || t instanceof NotMasterException) {
                                    logger.debug(() -> new ParameterizedMessage("master could not publish cluster state or " +
                                        "stepped down before publishing action [{}], scheduling a retry", actionName), t);
                                    retry(t, masterChangePredicate);
                                } else {
                                    listener.onFailure(t);
                                }
                            }
                        };
                        // run the operation off the transport thread, on the subclass-chosen executor
                        threadPool.executor(executor).execute(new ActionRunnable<Response>(delegate) {
                            @Override
                            protected void doRun() throws Exception {
                                masterOperation(task, request, clusterState, delegate);
                            }
                        });
                    }
                } else {
                    if (nodes.getMasterNode() == null) {
                        logger.debug("no known master node, scheduling a retry");
                        retry(null, masterChangePredicate);
                    } else {
                        // not the master: forward the request to the current master node
                        DiscoveryNode masterNode = nodes.getMasterNode();
                        final String actionName = getMasterActionName(masterNode);
                        transportService.sendRequest(masterNode, actionName, request, new ActionListenerResponseHandler<Response>(listener,
                            TransportMasterNodeAction.this::newResponse) {
                            @Override
                            public void handleException(final TransportException exp) {
                                Throwable cause = exp.unwrapCause();
                                if (cause instanceof ConnectTransportException) {
                                    // we want to retry here a bit to see if a new master is elected
                                    logger.debug("connection exception while trying to forward request with action name [{}] to " +
                                            "master node [{}], scheduling a retry. Error: [{}]",
                                        actionName, nodes.getMasterNode(), exp.getDetailedMessage());
                                    retry(cause, masterChangePredicate);
                                } else {
                                    listener.onFailure(exp);
                                }
                            }
                        });
                    }
                }
            } catch (Exception e) {
                listener.onFailure(e);
            }
        }

        // Waits for a cluster state matching statePredicate, then re-runs doStart();
        // fails the listener on timeout or node shutdown.
        private void retry(final Throwable failure, final Predicate<ClusterState> statePredicate) {
            observer.waitForNextChange(
                new ClusterStateObserver.Listener() {
                    @Override
                    public void onNewClusterState(ClusterState state) {
                        doStart(state);
                    }

                    @Override
                    public void onClusterServiceClose() {
                        listener.onFailure(new NodeClosedException(clusterService.localNode()));
                    }

                    @Override
                    public void onTimeout(TimeValue timeout) {
                        logger.debug(() -> new ParameterizedMessage("timed out while retrying [{}] after failure (timeout [{}])",
                            actionName, timeout), failure);
                        listener.onFailure(new MasterNotDiscoveredException(failure));
                    }
                }, statePredicate
            );
        }
    }

    /**
     * Allows to conditionally return a different master node action name in the case an action gets renamed.
     * This mainly for backwards compatibility should be used rarely
     */
    protected String getMasterActionName(DiscoveryNode node) {
        return actionName;
    }
}
| |
/*-------------------------------------------------------------------------------------------------------------------*\
| Copyright (C) 2014-15 PayPal |
| |
| Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance |
| with the License. |
| |
| You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software distributed under the License is distributed |
| on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for |
| the specific language governing permissions and limitations under the License. |
\*-------------------------------------------------------------------------------------------------------------------*/
package com.paypal.selion.platform.dataprovider.impl;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.jxpath.JXPathContext;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.yaml.snakeyaml.Yaml;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
import com.google.gson.Gson;
import com.paypal.selion.logger.SeLionLogger;
import com.paypal.selion.platform.dataprovider.DataProviderException;
import com.paypal.selion.platform.dataprovider.DataProviderFactory;
import com.paypal.selion.platform.dataprovider.DataResource;
import com.paypal.selion.platform.dataprovider.SeLionDataProvider;
import com.paypal.selion.platform.dataprovider.filter.DataProviderFilter;
import com.paypal.test.utilities.logging.SimpleLogger;
/**
* A utility class intended to serve as a helper class for miscellaneous operations being done by
* {@link ExcelDataProviderImpl} and {@link YamlDataProviderImpl} and {@link XmlDataProviderImpl}.
*
*/
public final class DataProviderHelper {
private static SimpleLogger logger = SeLionLogger.getLogger();
// Hidden default constructor for class that provides static methods.
private DataProviderHelper() {
    // Utility class: never instantiated.
}
/**
 * This function will parse the index string into separated individual indexes as needed. Calling the method with a
 * string containing "1, 3, 5-7, 11, 12-14, 8" would return a list of integers {1, 3, 5, 6, 7, 11, 12, 13, 14, 8}.
 * Use ',' to separate values, and use '-' to specify a continuous range. Presence of an invalid character would
 * result in {@link DataProviderException}.
 *
 * @param value
 *            the input string represent the indexes to be parse.
 * @return a list of indexes as an integer array
 */
public static int[] parseIndexString(String value) {
    logger.entering(value);
    List<Integer> indexes = new ArrayList<>();

    for (String index : value.split(",")) {
        try {
            if (index.contains("-")) {
                // Expand a continuous range such as "5-7" into 5, 6, 7.
                String[] bounds = index.split("-");
                int begin = Integer.parseInt(bounds[0].trim());
                int end = Integer.parseInt(bounds[1].trim());
                for (int i = begin; i <= end; i++) {
                    indexes.add(i);
                }
            } else {
                indexes.add(Integer.parseInt(index.trim()));
            }
        } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) {
            // FIX: malformed ranges ("5-", "a-b", "-5") previously leaked raw
            // NumberFormatException / ArrayIndexOutOfBoundsException; wrap them so
            // all invalid input surfaces as the documented DataProviderException.
            String msg = new StringBuilder("Index '").append(index)
                    .append("' is invalid. Please provide either individual numbers or ranges.")
                    .append("\n Range needs to be de-marked by '-'").toString();
            throw new DataProviderException(msg, e);
        }
    }

    int[] indexArray = Ints.toArray(indexes);
    logger.exiting(indexArray);
    return indexArray;
}
/**
* Converts any object into 2 dimensional array representing TestNG DataProvider.
*
* Following are the results for various types represented by {@code object}, in the order the {@code object} is
* processed:
*
* <pre>
* Object - By default, an object is returned at position [0][0] of Object[1][1].
* </pre>
*
* <pre>
* LinkedHashMap - When
 * the root type of object is a {@link LinkedHashMap} (with <i>n</i> number of key-value mappings), the value at each
* <i>index</i> of the map is returned at position [index][0] of Object[<i>n</i>][1].
*
* </pre>
*
* <pre>
* ArrayList - When
 * the root type of object is an {@link ArrayList} (with <i>n</i> number of items), the item at each
* <i>index</i> of the list is returned at position [index][0] of Object[<i>n</i>][1].
*
* When the value is {@link LinkedHashMap} having child value as type of {@link LinkedHashMap} or {@link ArrayList}, the child value is returned instead.
* </pre>
*
* <pre>
* Array of primitive types - When
 * the root type of object is single dimensional {@link Array} (with <i>n</i> number of items), the value at each
* <i>index</i> of the array is returned at position [index][0] of Object[<i>n</i>][1].
* </pre>
*
* <pre>
* Array of Object types - When
 * the root type of object is single dimensional {@link Array} (with <i>n</i> number of objects), the object at each
* <i>index</i> of the array is returned at position [index][0] of Object[<i>n</i>][1].
* </pre>
*
* @param object
* Object of any type.
* @return Object[][] two dimensional object to be used with TestNG DataProvider
*/
public static Object[][] convertToObjectArray(Object object) {
    logger.entering(object);
    // Default: wrap the single instance at position [0][0] of an Object[1][1].
    Object[][] objArray = new Object[][] { { object } };
    Class<?> rootClass = object.getClass();
    // Convert a LinkedHashMap (e.g. Yaml Associative Array) to an Object double array.
    if (rootClass.equals(LinkedHashMap.class)) { // NOSONAR
        LinkedHashMap<?, ?> objAsLinkedHashMap = (LinkedHashMap<?, ?>) object;
        Collection<?> allValues = objAsLinkedHashMap.values();
        objArray = new Object[allValues.size()][1];
        int i = 0;
        for (Object eachValue : allValues) {
            objArray[i][0] = eachValue;
            i++;
        }
    }
    // Converts an ArrayList (e.g. Yaml List) to an Object double array.
    else if (rootClass.equals(ArrayList.class)) { // NOSONAR
        ArrayList<?> objAsArrayList = (ArrayList<?>) object;
        objArray = new Object[objAsArrayList.size()][1];
        int i = 0;
        for (Object eachArrayListObject : objAsArrayList) {
            /*
             * Handles LinkedHashMap nested in a LinkedHashMap (e.g. Yaml associative array). This block removes the
             * first mapping since that data serves as visual organization of data within a Yaml. If the parent is a
             * LinkedHashMap and the child is a LinkedHashMap or an ArrayList, then assign the child to the Object
             * double array instead of the parent.
             */
            objArray[i][0] = eachArrayListObject;
            if (eachArrayListObject.getClass().equals(LinkedHashMap.class)) { // NOSONAR
                LinkedHashMap<?, ?> eachArrayListObjectAsHashMap = (LinkedHashMap<?, ?>) eachArrayListObject;
                for (Object eachEntry : eachArrayListObjectAsHashMap.values()) {
                    if (eachEntry.getClass().equals(LinkedHashMap.class) // NOSONAR
                            || eachEntry.getClass().equals(ArrayList.class)) { // NOSONAR
                        objArray[i][0] = eachEntry;
                    }
                }
            }
            i++;
        }
    }
    // Converts any array (primitive-typed or Object-typed) to an Object double array.
    else if (rootClass.isArray()) {
        // java.lang.reflect.Array uniformly handles Object arrays and all eight primitive
        // array types, boxing primitive elements on access. This replaces the previous
        // eight near-identical instanceof branches (int[], char[], short[], boolean[],
        // long[], double[], float[], byte[]) plus the Object[] branch with one loop,
        // producing the same boxed values in the same positions.
        int length = java.lang.reflect.Array.getLength(object);
        objArray = new Object[length][1];
        for (int i = 0; i < length; i++) {
            objArray[i][0] = java.lang.reflect.Array.get(object, i);
        }
    }
    // Passing no arguments to exiting() because implementation to print 2D array could be highly recursive.
    logger.exiting();
    return objArray;
}
/**
 * Converts a LinkedHashMap of Objects, an ArrayList of Objects, or an array (of Objects or primitives) into an
 * ArrayList of single-element {@code Object[]} rows after applying the given filter.
 *
 * @param object
 *            Object of any type.
 * @param dataFilter
 *            The {@link DataProviderFilter} deciding which entries are retained.
 * @return List&lt;Object[]&gt; an ArrayList of objects to be used with TestNG DataProvider
 */
public static List<Object[]> filterToListOfObjects(Object object, DataProviderFilter dataFilter) {
    logger.entering(object);
    List<Object[]> objs = new ArrayList<>();
    Class<?> rootClass = object.getClass();
    // Convert a LinkedHashMap (e.g. Yaml Associative Array) to an array list after applying filter.
    if (rootClass.equals(LinkedHashMap.class)) { // NOSONAR
        LinkedHashMap<?, ?> objAsLinkedHashMap = (LinkedHashMap<?, ?>) object;
        for (Object eachValue : objAsLinkedHashMap.values()) {
            if (dataFilter.filter(eachValue)) {
                objs.add(new Object[] { eachValue });
            }
        }
    }
    // Converts an ArrayList to an array list after applying filter.
    else if (rootClass.equals(ArrayList.class)) { // NOSONAR
        ArrayList<?> objAsArrayList = (ArrayList<?>) object;
        for (Object eachArrayListObject : objAsArrayList) {
            /*
             * Handles LinkedHashMap nested in a LinkedHashMap (e.g. Yaml/xml associative array). This block removes
             * the first mapping since that data serves as visual organization of data within a Yaml/xml. If the
             * parent is a LinkedHashMap and the child is a LinkedHashMap or an ArrayList, then assign the child to
             * the Object double array instead of the parent.
             */
            if (dataFilter.filter(eachArrayListObject)) {
                objs.add(new Object[] { eachArrayListObject });
            }
            if (eachArrayListObject.getClass().equals(LinkedHashMap.class)) { // NOSONAR
                LinkedHashMap<?, ?> eachArrayListObjectAsHashMap = (LinkedHashMap<?, ?>) eachArrayListObject;
                for (Object eachEntry : eachArrayListObjectAsHashMap.values()) {
                    if (eachEntry.getClass().equals(LinkedHashMap.class)
                            || eachEntry.getClass().equals(ArrayList.class)) { // NOSONAR
                        if (dataFilter.filter(eachEntry)) {
                            objs.add(new Object[] { eachEntry });
                        }
                    }
                }
            }
        }
    }
    // Converts any array (primitive-typed or Object-typed) to an array list after applying filter.
    else if (rootClass.isArray()) {
        // java.lang.reflect.Array uniformly handles Object arrays and all eight primitive
        // array types, boxing primitive elements on access. This replaces the previous
        // per-primitive-type instanceof branches with one loop producing the same
        // boxed values in the same order.
        int length = java.lang.reflect.Array.getLength(object);
        for (int i = 0; i < length; i++) {
            Object item = java.lang.reflect.Array.get(object, i);
            if (dataFilter.filter(item)) {
                objs.add(new Object[] { item });
            }
        }
    }
    logger.exiting(objs);
    return objs;
}
/**
 * Filters a map down to the mappings for the requested keys and packages them for TestNG.
 *
 * @param map
 *            The Map containing keys.
 * @param keys
 *            Non-empty array of string keys.
 * @return Object[][] two dimensional object to be used with TestNG DataProvider.
 * @throws IllegalArgumentException
 *             When the argument to {@code keys} is null, or any keys is not contained by the {@code map}.
 */
public static Object[][] getDataByKeys(Map<?, ?> map, String[] keys) {
    logger.entering(new Object[] { map, keys });
    if (ArrayUtils.isEmpty(keys)) {
        throw new IllegalArgumentException("Keys cannot be null or empty.");
    }
    // Collect the requested mappings, preserving the order in which keys were supplied.
    Map<String, Object> requestedMap = new LinkedHashMap<>();
    for (String key : keys) {
        Object value = map.get(key);
        if (value == null) {
            throw new IllegalArgumentException("Key not found, returned null value: " + key);
        }
        requestedMap.put(key, value);
    }
    // Delegate the 2D-array packaging to the common conversion helper.
    Object[][] result = DataProviderHelper.convertToObjectArray(requestedMap);
    // Passing no arguments to exiting() because implementation to print 2D array could be highly recursive.
    logger.exiting();
    return result;
}
/**
 * Converts multiple data from multiple 2D DataProvider arrays of various types into one DataProvider 2D array. This
 * helps when test data is managed in multiple resource files.
 *
 * <br>
 * Example DataProvider:
 *
 * <pre>
 * public static Object[][] dataProviderGetMultipleArguments() throws IOException {
 *     Object[][] data = null;
 *     FileSystemResource resource1 = new FileSystemResource(pathName, listOfUsersInYaml, User.class);
 *     FileSystemResource resource2 = new FileSystemResource(pathName, listOfAddressesInXml, Address.class);
 *
 *     Object[][] data1 = YamlDataProvider.getAllData(resource1);
 *     Object[][] data2 = XmlDataProvider.getAllData(resource2);
 *
 *     data = DataProviderHelper.getAllDataMultipleArgs(data1, data2);
 *
 *     return data;
 * }
 * </pre>
 *
 *
 * Test method signature example 1:
 *
 * <pre>
 * public void testExample(User user, Address address)
 * </pre>
 *
 * Test method signature example 2:
 *
 * <pre>
 * public void testExample(User fromUser, User toUser)
 * </pre>
 *
 * Test method signature example 3:
 *
 * <pre>
 * public void testExample(User user, Address address, CreditCardInfo creditCard)
 * </pre>
 *
 *
 * @param dataproviders
 *            An array of multiple 2D DataProvider arrays of various types that are to be clubbed together.
 * @return Object[][] Two dimensional object to be used with TestNG DataProvider; when the input providers have
 *         different lengths, shorter columns are padded with {@code null}.
 */
public static Object[][] getAllDataMultipleArgs(Object[][]... dataproviders) {
    logger.entering();
    // The combined provider has one row per row of the LONGEST input provider.
    int maxLength = 0;
    for (Object[][] d : dataproviders) {
        maxLength = Math.max(maxLength, d.length);
    }
    Object[][] data = new Object[maxLength][dataproviders.length];
    int i = 0;
    for (Object[][] d : dataproviders) {
        for (int j = 0; j < maxLength; j++) {
            // Pad shorter providers with null via an explicit bounds check instead of
            // catching ArrayIndexOutOfBoundsException as control flow.
            data[j][i] = (j < d.length) ? d[j][0] : null;
        }
        i++;
    }
    // Passing no arguments to exiting() because implementation to print 2D array could be highly recursive.
    logger.exiting();
    return data;
}
/**
 * Utility method to convert raw Json strings into a type.
 *
 * @param jsonString
 *            The Json data as a {@link String}
 * @param typeToMap
 *            The type to which the jsonString must be mapped to
 * @return An {@link Object} that corresponds to the type specified.
 */
public static Object convertJsonStringToObject(String jsonString, Type typeToMap) {
    // Validate inputs before any logging or parsing work.
    Preconditions.checkArgument(typeToMap != null, "typeToMap argument cannot be null");
    Preconditions.checkArgument(!StringUtils.isEmpty(jsonString),
            "A valid string is required to convert the Json to Object");
    logger.entering(new Object[] { jsonString, typeToMap });
    Object parsedData = new Gson().fromJson(jsonString, typeToMap);
    logger.exiting(parsedData);
    return parsedData;
}
/**
 * Traverses the object graph by following an XPath expression and returns the desired type from object matched at
 * the XPath.
 *
 * Supports single object retrieval. Also see {@link DataProviderHelper#readListByXpath(Object, Class, String)}.
 *
 * Note: Need {@code object} and {@code cls} to have getter and setter properties defined to allow object graph
 * traversal.
 *
 * @param object
 *            An object of any type
 * @param cls
 *            Type of the property being evaluated at the given {@code xpath}.
 * @param xpath
 *            The XPath expression equivalent to the object graph.
 * @return An object of desired type.
 */
@SuppressWarnings("unchecked")
public static <T> T readObjectByXpath(Object object, Class<T> cls, String xpath) {
    logger.entering(new Object[] { object, cls, xpath });
    // JXPath evaluates the expression against the object graph's bean properties.
    T result = (T) JXPathContext.newContext(object).getValue(xpath);
    logger.exiting(result);
    return result;
}
/**
 * Traverses the object graph by following an XPath expression and returns a list of desired type from object
 * matched at the XPath.
 *
 * Only supports multiple object retrieval as a list. See
 * {@link DataProviderHelper#readObjectByXpath(Object, Class, String)} for single object retrieval.
 *
 * Note: Need {@code object} and {@code cls} to have getter and setter properties defined to allow object graph
 * traversal.
 *
 * @param object
 *            An object of any type
 * @param cls
 *            Type of the property being evaluated at the given {@code xpath}.
 * @param xpath
 *            The XPath expression equivalent to the object graph.
 * @return A list of objects of the desired type, in XPath match order.
 */
@SuppressWarnings("unchecked")
public static <T> List<T> readListByXpath(Object object, Class<T> cls, String xpath) {
    logger.entering(new Object[] { object, cls, xpath });
    JXPathContext context = JXPathContext.newContext(object);
    List<T> values = new ArrayList<>();
    // Iterator<?> instead of the raw Iterator removes the need for a "rawtypes" suppression.
    for (Iterator<?> iter = context.iterate(xpath); iter.hasNext();) {
        values.add((T) iter.next());
    }
    logger.exiting(values);
    return values;
}
/**
 * Use this utility method to print and return a yaml string to help serialize the object passed in.
 *
 * @param object
 *            The Object that is to be serialised.
 * @return a yaml string representation of the object passed in
 */
public static String serializeObjectToYamlString(Object object) {
    logger.entering(object);
    // A fresh Yaml instance per call: SnakeYAML's Yaml is not thread-safe for shared use.
    String dumped = new Yaml().dump(object);
    logger.exiting(dumped);
    return dumped;
}
/**
 * Use this utility method to print and return a yaml string to help serialize the object passed in as an ArrayList.
 *
 * @param objects
 *            One or more objects that are to be serialised.
 * @return a yaml string representation of the object(s) passed in
 */
public static String serializeObjectToYamlStringAsList(Object... objects) {
    logger.entering(new Object[] { objects });
    // Dump the objects as an actual list. The previous implementation passed
    // Arrays.asList(objects).iterator() to Yaml.dump(), but dump() serializes a single
    // object and has no list representation for an Iterator (iterator dumping is what
    // dumpAll() — used by serializeObjectToYamlStringAsDocuments — is for).
    String output = serializeObjectToYamlString(new ArrayList<>(Arrays.asList(objects)));
    logger.exiting(output);
    return output;
}
/**
 * Use this utility method to print and return a yaml string to help serialize the object(s) passed in as multiple
 * yaml documents (one document per object, via SnakeYAML {@code dumpAll}).
 *
 * @param objects
 *            One or more objects that are to be serialised.
 * @return a yaml string representation of the object(s) passed in
 */
public static String serializeObjectToYamlStringAsDocuments(Object... objects) {
    logger.entering(new Object[] { objects });
    Yaml yaml = new Yaml();
    // dumpAll emits each element of the iterator as its own "---"-separated yaml document.
    String output = yaml.dumpAll(Arrays.asList(objects).iterator());
    logger.exiting(output);
    return output;
}
/**
 * Use this utility method to print and return a yaml string to help serialize the object(s) passed in as a
 * LinkedHashMap keyed by generated keys ("uniqueKey0", "uniqueKey1", ...).
 *
 * @param objects
 *            The objects that are to be serialised.
 * @return a yaml string representation of the object(s) passed in
 */
public static String serializeObjectToYamlStringAsMap(Object... objects) {
    logger.entering(new Object[] { objects });
    // LinkedHashMap preserves the argument order in the emitted yaml.
    Map<String, Object> objMap = new LinkedHashMap<>();
    int i = 0;
    for (Object obj : objects) {
        objMap.put("uniqueKey" + i, obj);
        i++;
    }
    String output = serializeObjectToYamlString(objMap);
    logger.exiting(output);
    return output;
}
/**
 * Gets yaml data for tests that require multiple arguments. Saves a tester from needing to define another JavaBean
 * just to get multiple arguments passed in as one.
 *
 * <br>
 * <br>
 * Example dataprovider:
 *
 * <pre>
 * public static Object[][] dataProviderGetMultipleArguments() throws IOException {
 *     Object[][] data = null;
 *     List&lt;YamlResource&gt; yamlResources = new ArrayList&lt;YamlResource&gt;();
 *     yamlResources.add(new YamlResource(pathName, userDocuments, USER.class));
 *     yamlResources.add(new YamlResource(pathName, user2Documents, USER.class));
 *
 *     data = DataProviderHelper.getAllDataMultipleArgsFromYAML(yamlResources);
 *
 *     return data;
 * }
 * </pre>
 *
 * Test method signature example:
 *
 * <pre>
 * public void testExample(USER user1, USER user2)
 * </pre>
 *
 * @param resources
 *            The list of {@link DataResource}s whose data is to be combined column-wise.
 * @return Object[][] Two dimensional object to be used with TestNG DataProvider; shorter resources are padded
 *         with {@code null}.
 * @throws IOException
 *             When reading any of the underlying data resources fails.
 */
public static Object[][] getAllDataMultipleArgsFromYAML(List<DataResource> resources) throws IOException {
    logger.entering(resources);
    if (resources == null) {
        throw new DataProviderException("Resource can not be null");
    }
    // Load every resource once, tracking the longest so all columns fit.
    List<Object[][]> dataproviders = new ArrayList<>();
    int maxLength = 0;
    for (DataResource r : resources) {
        SeLionDataProvider dataProvider = DataProviderFactory.getDataProvider(r);
        Object[][] resourceData = dataProvider.getAllData();
        dataproviders.add(resourceData);
        maxLength = Math.max(maxLength, resourceData.length);
    }
    Object[][] data = new Object[maxLength][resources.size()];
    int i = 0;
    for (Object[][] d : dataproviders) {
        for (int j = 0; j < maxLength; j++) {
            // Pad shorter providers with null via an explicit bounds check instead of
            // catching ArrayIndexOutOfBoundsException as control flow.
            data[j][i] = (j < d.length) ? d[j][0] : null;
        }
        i++;
    }
    logger.exiting((Object[]) data);
    return data;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.