gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ilm;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.Version;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.component.Lifecycle.State;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata;
import org.elasticsearch.xpack.core.ilm.LifecycleExecutionState;
import org.elasticsearch.xpack.core.ilm.LifecyclePolicy;
import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata;
import org.elasticsearch.xpack.core.ilm.LifecycleSettings;
import org.elasticsearch.xpack.core.ilm.MockAction;
import org.elasticsearch.xpack.core.ilm.OperationMode;
import org.elasticsearch.xpack.core.ilm.Phase;
import org.elasticsearch.xpack.core.ilm.ShrinkAction;
import org.elasticsearch.xpack.core.ilm.ShrinkStep;
import org.elasticsearch.xpack.core.ilm.Step;
import org.elasticsearch.xpack.core.scheduler.SchedulerEngine;
import org.junit.After;
import org.junit.Before;
import org.mockito.Mockito;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Collections;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import static org.elasticsearch.node.Node.NODE_MASTER_SETTING;
import static org.elasticsearch.xpack.core.ilm.AbstractStepTestCase.randomStepKey;
import static org.elasticsearch.xpack.core.ilm.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY;
import static org.elasticsearch.xpack.core.ilm.LifecyclePolicyTestsUtils.newTestLifecyclePolicy;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class IndexLifecycleServiceTests extends ESTestCase {

    // Mocked ClusterService the ILM service registers itself with; re-stubbed per test.
    private ClusterService clusterService;
    // Service under test; built in prepareServices() and closed in cleanup().
    private IndexLifecycleService indexLifecycleService;
    // Random node id used as both the local node id and the master node id.
    private String nodeId;
    // Local discovery node configured as master-eligible so the service acts as master.
    private DiscoveryNode masterNode;
    // Mocked indices admin client; verified untouched in testTriggeredDifferentJob().
    private IndicesAdminClient indicesClient;
    // Fixed "current time" (epoch millis) backing the fixed Clock handed to the service.
    private long now;
    private ThreadPool threadPool;

    /**
     * Wires a fully mocked cluster/client environment, constructs the
     * {@link IndexLifecycleService} under test, and verifies that it registered
     * itself as both a cluster-state listener and applier.
     */
    @Before
    public void prepareServices() {
        nodeId = randomAlphaOfLength(10);
        ExecutorService executorService = mock(ExecutorService.class);
        clusterService = mock(ClusterService.class);
        masterNode = DiscoveryNode.createLocal(settings(Version.CURRENT)
            .put(NODE_MASTER_SETTING.getKey(), true).build(),
            new TransportAddress(TransportAddress.META_ADDRESS, 9300), nodeId);
        now = randomNonNegativeLong();
        // Fixed clock at a random instant/zone so time-dependent logic is deterministic.
        Clock clock = Clock.fixed(Instant.ofEpochMilli(now), ZoneId.of(randomFrom(ZoneId.getAvailableZoneIds())));
        // Listener registration is a no-op; executor tasks run inline on the calling
        // thread so asynchronous work happens synchronously within the test.
        doAnswer(invocationOnMock -> null).when(clusterService).addListener(any());
        doAnswer(invocationOnMock -> {
            Runnable runnable = (Runnable) invocationOnMock.getArguments()[0];
            runnable.run();
            return null;
        }).when(executorService).execute(any());
        Settings settings = Settings.builder().put(LifecycleSettings.LIFECYCLE_POLL_INTERVAL, "1s").build();
        when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings,
            Collections.singleton(LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING)));
        when(clusterService.lifecycleState()).thenReturn(State.STARTED);
        // Client -> AdminClient -> IndicesAdminClient mock chain.
        Client client = mock(Client.class);
        AdminClient adminClient = mock(AdminClient.class);
        indicesClient = mock(IndicesAdminClient.class);
        when(client.admin()).thenReturn(adminClient);
        when(adminClient.indices()).thenReturn(indicesClient);
        when(client.settings()).thenReturn(Settings.EMPTY);
        threadPool = new TestThreadPool("test");
        indexLifecycleService = new IndexLifecycleService(Settings.EMPTY, client, clusterService, threadPool,
            clock, () -> now, null);
        // The service must have subscribed to cluster-state changes on construction.
        Mockito.verify(clusterService).addListener(indexLifecycleService);
        Mockito.verify(clusterService).addStateApplier(indexLifecycleService);
    }

    /**
     * Puts the mocked cluster service into a stopped/closed lifecycle state before
     * closing the ILM service, then tears down the thread pool.
     */
    @After
    public void cleanup() {
        when(clusterService.lifecycleState()).thenReturn(randomFrom(State.STOPPED, State.CLOSED));
        indexLifecycleService.close();
        threadPool.shutdownNow();
    }

    /**
     * When ILM is in {@link OperationMode#STOPPED}, triggering policies must not
     * execute any steps, even for an index that is managed by a policy.
     */
    public void testStoppedModeSkip() {
        String policyName = randomAlphaOfLengthBetween(1, 20);
        IndexLifecycleRunnerTests.MockClusterStateActionStep mockStep =
            new IndexLifecycleRunnerTests.MockClusterStateActionStep(randomStepKey(), randomStepKey());
        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
        SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Collections.emptyMap(),
            randomNonNegativeLong(), randomNonNegativeLong()));
        Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
        IndexMetaData indexMetadata = IndexMetaData.builder(index.getName())
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), policyName))
            .numberOfShards(randomIntBetween(1, 5)).numberOfReplicas(randomIntBetween(0, 5)).build();
        ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.<String, IndexMetaData> builder()
            .fPut(index.getName(), indexMetadata);
        MetaData metaData = MetaData.builder()
            .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(policyMap, OperationMode.STOPPED))
            .indices(indices.build())
            .persistentSettings(settings(Version.CURRENT).build())
            .build();
        ClusterState currentState = ClusterState.builder(ClusterName.DEFAULT)
            .metaData(metaData)
            .nodes(DiscoveryNodes.builder().localNodeId(nodeId).masterNodeId(nodeId).add(masterNode).build())
            .build();
        ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE);
        indexLifecycleService.applyClusterState(event);
        indexLifecycleService.triggerPolicies(currentState, randomBoolean());
        // STOPPED mode: no step may run.
        assertThat(mockStep.getExecuteCount(), equalTo(0L));
    }

    /**
     * While an index sits on the actual shrink step, a requested stop (STOPPING)
     * must NOT transition ILM to STOPPED — the shrink must be allowed to finish.
     */
    public void testRequestedStopOnShrink() {
        Step.StepKey mockShrinkStep = new Step.StepKey(randomAlphaOfLength(4), ShrinkAction.NAME, ShrinkStep.NAME);
        String policyName = randomAlphaOfLengthBetween(1, 20);
        IndexLifecycleRunnerTests.MockClusterStateActionStep mockStep =
            new IndexLifecycleRunnerTests.MockClusterStateActionStep(mockShrinkStep, randomStepKey());
        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
        SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Collections.emptyMap(),
            randomNonNegativeLong(), randomNonNegativeLong()));
        Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
        // Pin the index's execution state to the shrink step.
        LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
        lifecycleState.setPhase(mockShrinkStep.getPhase());
        lifecycleState.setAction(mockShrinkStep.getAction());
        lifecycleState.setStep(mockShrinkStep.getName());
        IndexMetaData indexMetadata = IndexMetaData.builder(index.getName())
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), policyName))
            .putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.build().asMap())
            .numberOfShards(randomIntBetween(1, 5)).numberOfReplicas(randomIntBetween(0, 5)).build();
        ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.<String, IndexMetaData> builder()
            .fPut(index.getName(), indexMetadata);
        MetaData metaData = MetaData.builder()
            .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(policyMap, OperationMode.STOPPING))
            .indices(indices.build())
            .persistentSettings(settings(Version.CURRENT).build())
            .build();
        ClusterState currentState = ClusterState.builder(ClusterName.DEFAULT)
            .metaData(metaData)
            .nodes(DiscoveryNodes.builder().localNodeId(nodeId).masterNodeId(nodeId).add(masterNode).build())
            .build();
        ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE);
        SetOnce<Boolean> changedOperationMode = new SetOnce<>();
        doAnswer(invocationOnMock -> {
            changedOperationMode.set(true);
            return null;
        }).when(clusterService).submitStateUpdateTask(eq("ilm_operation_mode_update"), any(OperationModeUpdateTask.class));
        indexLifecycleService.applyClusterState(event);
        indexLifecycleService.triggerPolicies(currentState, true);
        // No operation-mode update may have been submitted while shrinking.
        assertNull(changedOperationMode.get());
    }

    /**
     * Exercises every step of the shrink action except the actual shrink step and
     * asserts ILM can be stopped on each of them.
     */
    public void testRequestedStopInShrinkActionButNotShrinkStep() {
        // test all the shrink action steps that ILM can be stopped during (basically all of them minus the actual shrink)
        ShrinkAction action = new ShrinkAction(1);
        action.toSteps(mock(Client.class), "warm", randomStepKey()).stream()
            .map(sk -> sk.getKey().getName())
            .filter(name -> name.equals(ShrinkStep.NAME) == false)
            .forEach(this::verifyCanStopWithStep);
    }

    // Check that ILM can stop when in the shrink action on the provided step
    private void verifyCanStopWithStep(String stoppableStep) {
        Step.StepKey mockShrinkStep = new Step.StepKey(randomAlphaOfLength(4), ShrinkAction.NAME, stoppableStep);
        String policyName = randomAlphaOfLengthBetween(1, 20);
        IndexLifecycleRunnerTests.MockClusterStateActionStep mockStep =
            new IndexLifecycleRunnerTests.MockClusterStateActionStep(mockShrinkStep, randomStepKey());
        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
        SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Collections.emptyMap(),
            randomNonNegativeLong(), randomNonNegativeLong()));
        Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
        LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
        lifecycleState.setPhase(mockShrinkStep.getPhase());
        lifecycleState.setAction(mockShrinkStep.getAction());
        lifecycleState.setStep(mockShrinkStep.getName());
        IndexMetaData indexMetadata = IndexMetaData.builder(index.getName())
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), policyName))
            .putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.build().asMap())
            .numberOfShards(randomIntBetween(1, 5)).numberOfReplicas(randomIntBetween(0, 5)).build();
        ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.<String, IndexMetaData> builder()
            .fPut(index.getName(), indexMetadata);
        MetaData metaData = MetaData.builder()
            .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(policyMap, OperationMode.STOPPING))
            .indices(indices.build())
            .persistentSettings(settings(Version.CURRENT).build())
            .build();
        ClusterState currentState = ClusterState.builder(ClusterName.DEFAULT)
            .metaData(metaData)
            .nodes(DiscoveryNodes.builder().localNodeId(nodeId).masterNodeId(nodeId).add(masterNode).build())
            .build();
        ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE);
        SetOnce<Boolean> changedOperationMode = new SetOnce<>();
        doAnswer(invocationOnMock -> {
            changedOperationMode.set(true);
            return null;
        }).when(clusterService).submitStateUpdateTask(eq("ilm_operation_mode_update"), any(OperationModeUpdateTask.class));
        indexLifecycleService.applyClusterState(event);
        indexLifecycleService.triggerPolicies(currentState, true);
        // Stopping is safe on this step: an operation-mode update task must be submitted.
        assertTrue(changedOperationMode.get());
    }

    /**
     * With a requested stop (STOPPING) on a "safe" (non-shrink) step, ILM must not
     * run the policy but must submit the transition to STOPPED.
     */
    public void testRequestedStopOnSafeAction() {
        String policyName = randomAlphaOfLengthBetween(1, 20);
        Step.StepKey currentStepKey = randomStepKey();
        IndexLifecycleRunnerTests.MockClusterStateActionStep mockStep =
            new IndexLifecycleRunnerTests.MockClusterStateActionStep(currentStepKey, randomStepKey());
        MockAction mockAction = new MockAction(Collections.singletonList(mockStep));
        Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction));
        LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase));
        SortedMap<String, LifecyclePolicyMetadata> policyMap = new TreeMap<>();
        policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Collections.emptyMap(),
            randomNonNegativeLong(), randomNonNegativeLong()));
        Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
        LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder();
        lifecycleState.setPhase(currentStepKey.getPhase());
        lifecycleState.setAction(currentStepKey.getAction());
        lifecycleState.setStep(currentStepKey.getName());
        IndexMetaData indexMetadata = IndexMetaData.builder(index.getName())
            .settings(settings(Version.CURRENT).put(LifecycleSettings.LIFECYCLE_NAME_SETTING.getKey(), policyName))
            .putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.build().asMap())
            .numberOfShards(randomIntBetween(1, 5)).numberOfReplicas(randomIntBetween(0, 5)).build();
        ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.<String, IndexMetaData> builder()
            .fPut(index.getName(), indexMetadata);
        MetaData metaData = MetaData.builder()
            .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(policyMap, OperationMode.STOPPING))
            .indices(indices.build())
            .persistentSettings(settings(Version.CURRENT).build())
            .build();
        ClusterState currentState = ClusterState.builder(ClusterName.DEFAULT)
            .metaData(metaData)
            .nodes(DiscoveryNodes.builder().localNodeId(nodeId).masterNodeId(nodeId).add(masterNode).build())
            .build();
        ClusterChangedEvent event = new ClusterChangedEvent("_source", currentState, ClusterState.EMPTY_STATE);
        SetOnce<Boolean> ranPolicy = new SetOnce<>();
        SetOnce<Boolean> moveToMaintenance = new SetOnce<>();
        // Any attempt to execute policy steps is a test failure.
        doAnswer(invocationOnMock -> {
            ranPolicy.set(true);
            throw new AssertionError("invalid invocation");
        }).when(clusterService).submitStateUpdateTask(anyString(), any(ExecuteStepsUpdateTask.class));
        doAnswer(invocationOnMock -> {
            OperationModeUpdateTask task = (OperationModeUpdateTask) invocationOnMock.getArguments()[1];
            // The submitted task must complete the STOPPING -> STOPPED transition.
            assertThat(task.getILMOperationMode(), equalTo(OperationMode.STOPPED));
            moveToMaintenance.set(true);
            return null;
        }).when(clusterService).submitStateUpdateTask(eq("ilm_operation_mode_update"), any(OperationModeUpdateTask.class));
        indexLifecycleService.applyClusterState(event);
        indexLifecycleService.triggerPolicies(currentState, randomBoolean());
        assertNull(ranPolicy.get());
        assertTrue(moveToMaintenance.get());
    }

    /**
     * A scheduler event for an unrelated job id must be ignored entirely.
     */
    public void testTriggeredDifferentJob() {
        Mockito.reset(clusterService);
        SchedulerEngine.Event schedulerEvent = new SchedulerEngine.Event("foo", randomLong(), randomLong());
        indexLifecycleService.triggered(schedulerEvent);
        Mockito.verifyZeroInteractions(indicesClient, clusterService);
    }

    /**
     * With the parse-origination-date setting enabled, an index name that does not
     * match the expected date format must be rejected before being added to the
     * cluster; with the setting off, no validation is performed.
     */
    public void testParsingOriginationDateBeforeIndexCreation() {
        Settings indexSettings = Settings.builder().put(LifecycleSettings.LIFECYCLE_PARSE_ORIGINATION_DATE, true).build();
        Index index = new Index("invalid_index_name", UUID.randomUUID().toString());
        expectThrows(IllegalArgumentException.class,
            "The parse origination date setting was configured for index " + index.getName() +
                " but the index name did not match the expected format",
            () -> indexLifecycleService.beforeIndexAddedToCluster(index, indexSettings)
        );
        // disabling the parsing origination date setting should prevent the validation from throwing exception
        try {
            indexLifecycleService.beforeIndexAddedToCluster(index, Settings.EMPTY);
        } catch (Exception e) {
            fail("Did not expect the before index validation to throw an exception as the parse origination date setting was not set");
        }
    }
}
| |
/*
* Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.spot.goci.service.junidecode;
/**
* Character map for Unicode characters with codepoint U+CExx.
* @author Giuseppe Cardone
* @version 0.1
*/
class Xce {

    // Transliteration table for the 256 code points U+CE00..U+CEFF (Hangul
    // syllables). Entry map[i] is the ASCII romanization of code point
    // U+CE00 + i; callers index directly with the low byte of the code point,
    // so the array must stay exactly 256 entries long and in code-point order.
    // NOTE(review): the array itself is mutable (public static final String[]);
    // callers are trusted not to write to it — changing the type would break the
    // index-based lookup interface used by the other Xnn tables.
    public static final String[] map = new String[]{
        "cwik", // 0x00
        "cwit", // 0x01
        "cwip", // 0x02
        "cwih", // 0x03
        // Syllables based on "cyu" (U+CE04..U+CE1F)
        "cyu", // 0x04
        "cyug", // 0x05
        "cyugg", // 0x06
        "cyugs", // 0x07
        "cyun", // 0x08
        "cyunj", // 0x09
        "cyunh", // 0x0a
        "cyud", // 0x0b
        "cyul", // 0x0c
        "cyulg", // 0x0d
        "cyulm", // 0x0e
        "cyulb", // 0x0f
        "cyuls", // 0x10
        "cyult", // 0x11
        "cyulp", // 0x12
        "cyulh", // 0x13
        "cyum", // 0x14
        "cyub", // 0x15
        "cyubs", // 0x16
        "cyus", // 0x17
        "cyuss", // 0x18
        "cyung", // 0x19
        "cyuj", // 0x1a
        "cyuc", // 0x1b
        "cyuk", // 0x1c
        "cyut", // 0x1d
        "cyup", // 0x1e
        "cyuh", // 0x1f
        // Syllables based on "ceu" (U+CE20..U+CE3B)
        "ceu", // 0x20
        "ceug", // 0x21
        "ceugg", // 0x22
        "ceugs", // 0x23
        "ceun", // 0x24
        "ceunj", // 0x25
        "ceunh", // 0x26
        "ceud", // 0x27
        "ceul", // 0x28
        "ceulg", // 0x29
        "ceulm", // 0x2a
        "ceulb", // 0x2b
        "ceuls", // 0x2c
        "ceult", // 0x2d
        "ceulp", // 0x2e
        "ceulh", // 0x2f
        "ceum", // 0x30
        "ceub", // 0x31
        "ceubs", // 0x32
        "ceus", // 0x33
        "ceuss", // 0x34
        "ceung", // 0x35
        "ceuj", // 0x36
        "ceuc", // 0x37
        "ceuk", // 0x38
        "ceut", // 0x39
        "ceup", // 0x3a
        "ceuh", // 0x3b
        // Syllables based on "cyi" (U+CE3C..U+CE57)
        "cyi", // 0x3c
        "cyig", // 0x3d
        "cyigg", // 0x3e
        "cyigs", // 0x3f
        "cyin", // 0x40
        "cyinj", // 0x41
        "cyinh", // 0x42
        "cyid", // 0x43
        "cyil", // 0x44
        "cyilg", // 0x45
        "cyilm", // 0x46
        "cyilb", // 0x47
        "cyils", // 0x48
        "cyilt", // 0x49
        "cyilp", // 0x4a
        "cyilh", // 0x4b
        "cyim", // 0x4c
        "cyib", // 0x4d
        "cyibs", // 0x4e
        "cyis", // 0x4f
        "cyiss", // 0x50
        "cying", // 0x51
        "cyij", // 0x52
        "cyic", // 0x53
        "cyik", // 0x54
        "cyit", // 0x55
        "cyip", // 0x56
        "cyih", // 0x57
        // Syllables based on "ci" (U+CE58..U+CE73)
        "ci", // 0x58
        "cig", // 0x59
        "cigg", // 0x5a
        "cigs", // 0x5b
        "cin", // 0x5c
        "cinj", // 0x5d
        "cinh", // 0x5e
        "cid", // 0x5f
        "cil", // 0x60
        "cilg", // 0x61
        "cilm", // 0x62
        "cilb", // 0x63
        "cils", // 0x64
        "cilt", // 0x65
        "cilp", // 0x66
        "cilh", // 0x67
        "cim", // 0x68
        "cib", // 0x69
        "cibs", // 0x6a
        "cis", // 0x6b
        "ciss", // 0x6c
        "cing", // 0x6d
        "cij", // 0x6e
        "cic", // 0x6f
        "cik", // 0x70
        "cit", // 0x71
        "cip", // 0x72
        "cih", // 0x73
        // Syllables based on "ka" (U+CE74..U+CE8F)
        "ka", // 0x74
        "kag", // 0x75
        "kagg", // 0x76
        "kags", // 0x77
        "kan", // 0x78
        "kanj", // 0x79
        "kanh", // 0x7a
        "kad", // 0x7b
        "kal", // 0x7c
        "kalg", // 0x7d
        "kalm", // 0x7e
        "kalb", // 0x7f
        "kals", // 0x80
        "kalt", // 0x81
        "kalp", // 0x82
        "kalh", // 0x83
        "kam", // 0x84
        "kab", // 0x85
        "kabs", // 0x86
        "kas", // 0x87
        "kass", // 0x88
        "kang", // 0x89
        "kaj", // 0x8a
        "kac", // 0x8b
        "kak", // 0x8c
        "kat", // 0x8d
        "kap", // 0x8e
        "kah", // 0x8f
        // Syllables based on "kae" (U+CE90..U+CEAB)
        "kae", // 0x90
        "kaeg", // 0x91
        "kaegg", // 0x92
        "kaegs", // 0x93
        "kaen", // 0x94
        "kaenj", // 0x95
        "kaenh", // 0x96
        "kaed", // 0x97
        "kael", // 0x98
        "kaelg", // 0x99
        "kaelm", // 0x9a
        "kaelb", // 0x9b
        "kaels", // 0x9c
        "kaelt", // 0x9d
        "kaelp", // 0x9e
        "kaelh", // 0x9f
        "kaem", // 0xa0
        "kaeb", // 0xa1
        "kaebs", // 0xa2
        "kaes", // 0xa3
        "kaess", // 0xa4
        "kaeng", // 0xa5
        "kaej", // 0xa6
        "kaec", // 0xa7
        "kaek", // 0xa8
        "kaet", // 0xa9
        "kaep", // 0xaa
        "kaeh", // 0xab
        // Syllables based on "kya" (U+CEAC..U+CEC7)
        "kya", // 0xac
        "kyag", // 0xad
        "kyagg", // 0xae
        "kyags", // 0xaf
        "kyan", // 0xb0
        "kyanj", // 0xb1
        "kyanh", // 0xb2
        "kyad", // 0xb3
        "kyal", // 0xb4
        "kyalg", // 0xb5
        "kyalm", // 0xb6
        "kyalb", // 0xb7
        "kyals", // 0xb8
        "kyalt", // 0xb9
        "kyalp", // 0xba
        "kyalh", // 0xbb
        "kyam", // 0xbc
        "kyab", // 0xbd
        "kyabs", // 0xbe
        "kyas", // 0xbf
        "kyass", // 0xc0
        "kyang", // 0xc1
        "kyaj", // 0xc2
        "kyac", // 0xc3
        "kyak", // 0xc4
        "kyat", // 0xc5
        "kyap", // 0xc6
        "kyah", // 0xc7
        // Syllables based on "kyae" (U+CEC8..U+CEE3)
        "kyae", // 0xc8
        "kyaeg", // 0xc9
        "kyaegg", // 0xca
        "kyaegs", // 0xcb
        "kyaen", // 0xcc
        "kyaenj", // 0xcd
        "kyaenh", // 0xce
        "kyaed", // 0xcf
        "kyael", // 0xd0
        "kyaelg", // 0xd1
        "kyaelm", // 0xd2
        "kyaelb", // 0xd3
        "kyaels", // 0xd4
        "kyaelt", // 0xd5
        "kyaelp", // 0xd6
        "kyaelh", // 0xd7
        "kyaem", // 0xd8
        "kyaeb", // 0xd9
        "kyaebs", // 0xda
        "kyaes", // 0xdb
        "kyaess", // 0xdc
        "kyaeng", // 0xdd
        "kyaej", // 0xde
        "kyaec", // 0xdf
        "kyaek", // 0xe0
        "kyaet", // 0xe1
        "kyaep", // 0xe2
        "kyaeh", // 0xe3
        // Syllables based on "keo" (U+CEE4..U+CEFF)
        "keo", // 0xe4
        "keog", // 0xe5
        "keogg", // 0xe6
        "keogs", // 0xe7
        "keon", // 0xe8
        "keonj", // 0xe9
        "keonh", // 0xea
        "keod", // 0xeb
        "keol", // 0xec
        "keolg", // 0xed
        "keolm", // 0xee
        "keolb", // 0xef
        "keols", // 0xf0
        "keolt", // 0xf1
        "keolp", // 0xf2
        "keolh", // 0xf3
        "keom", // 0xf4
        "keob", // 0xf5
        "keobs", // 0xf6
        "keos", // 0xf7
        "keoss", // 0xf8
        "keong", // 0xf9
        "keoj", // 0xfa
        "keoc", // 0xfb
        "keok", // 0xfc
        "keot", // 0xfd
        "keop", // 0xfe
        "keoh" // 0xff
    };
}
| |
package com.pacificmetrics.orca.mbeans;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import org.apache.myfaces.custom.fileupload.UploadedFile;
import com.pacificmetrics.common.web.ManagerException;
import com.pacificmetrics.common.web.WebUtils;
import com.pacificmetrics.orca.ServerConfiguration;
import com.pacificmetrics.orca.ejb.ItemServices;
/**
* Managed bean to support Psychometrics starting page and Psychometrics Reports
* page
*
* @author amiliteev
* @modifier maumock
*
*/
@ManagedBean(name = "psychometrics")
@ViewScoped
public class PsychometricsManager extends AbstractManager {

    private static final long serialVersionUID = 1L;
    private static final Logger LOGGER = Logger
            .getLogger(PsychometricsManager.class.getName());

    // Identifiers of the three sub-pages this bean can display.
    private static final String REPORTS = "REPORTS";
    private static final String IMPORT = "IMPORT";
    private static final String SETTINGS = "SETTINGS";

    // Currently selected sub-page; one of REPORTS/IMPORT/SETTINGS (or null).
    private String selectedPageName;
    private String reportContent;
    // True while the upload panel is shown.
    private boolean uploading;
    // True while the invalid-items panel is shown.
    private boolean displayingInvalidItems;
    private UploadedFile uploadedFile;
    // Internal ids of items resolved from the uploaded external ids.
    private Collection<Long> itemIds;
    // Uploaded external ids that could not be resolved to items.
    private Collection<String> invalidItemIds;
    private String renderFlag = "";

    // transient: EJB proxies are re-injected on passivation/activation.
    @EJB
    private transient ItemServices itemServices;

    public PsychometricsManager() {
    }

    /** Selects the Reports sub-page. */
    public void setReportsPageSelected() {
        this.selectedPageName = REPORTS;
    }

    /** Selects the Import sub-page. */
    public void setImportPageSelected() {
        this.selectedPageName = IMPORT;
    }

    /** Selects the Settings sub-page. */
    public void setSettingsPageSelected() {
        this.selectedPageName = SETTINGS;
    }

    public boolean isReportsPageSelected() {
        return REPORTS.equals(this.selectedPageName);
    }

    public boolean isImportPageSelected() {
        return IMPORT.equals(this.selectedPageName);
    }

    public boolean isSettingsPageSelected() {
        return SETTINGS.equals(this.selectedPageName);
    }

    public String getSelectedPageName() {
        return this.selectedPageName;
    }

    public void setSelectedPageName(String selectedPageName) {
        this.selectedPageName = selectedPageName;
    }

    /** Reads the render flag from the request after the bean is constructed. */
    @PostConstruct
    public void load() {
        renderFlag = getParameter("psychoRenderFlag");
    }

    public String getRenderFlag() {
        return renderFlag;
    }

    public void setRenderFlag(String renderFlag) {
        this.renderFlag = renderFlag;
    }

    /**
     * Method returns URL to Jasper server that should be used to invoke
     * psychometrics report; URL constructed using server properties:
     * jasper.http.url, jasper.user.name, jasper.password,
     * jasper.reports.directory If item ids have been set, they are included in
     * respective parameter of the report
     *
     * @return the fully assembled Jasper report URL
     */
    public String getReportUrl() {
        String jasperHttpUrl = ServerConfiguration
                .getProperty("jasper.http.url");
        String directory = ServerConfiguration
                .getProperty("jasper.reports.directory");
        String itemViewURL = ServerConfiguration.getProperty("item.view.url");
        // The ItemIds parameter is the collection's toString() with brackets
        // and spaces stripped, i.e. a plain comma-separated list.
        return jasperHttpUrl
                + "/flow.html?standAlone=true&_flowId=viewReportFlow&reportUnit=/Reports"
                + directory
                + "/Psychometrics1&decorate=no"
                + "&ItemViewURL="
                + WebUtils.encodeURL(itemViewURL)
                + (this.itemIds != null ? "&ItemIds="
                        + this.itemIds.toString().replaceAll("\\[|\\]| ", "")
                        : "");
    }

    public String getReportContent() {
        return this.reportContent;
    }

    public boolean isUploading() {
        return this.uploading;
    }

    public void setUploading(boolean uploading) {
        this.uploading = uploading;
    }

    /** Shows the upload panel. */
    public void initUpload() {
        this.uploading = true;
    }

    /** Hides the upload panel without processing the file. */
    public void cancelUpload() {
        this.uploading = false;
    }

    /**
     * Method should be invoked to process uploaded file: read item id(s)
     * separated by comma/newline, check items' existence, populate itemIds and
     * invalidItemIds
     *
     * @throws ManagerException if reading the uploaded file fails
     */
    public void upload() throws ManagerException {
        LOGGER.info("Uploading file: " + this.uploadedFile);
        if (this.uploadedFile != null) {
            try {
                // Decode explicitly as UTF-8 so the split does not depend on the
                // platform default charset, then tokenize on spaces, commas and
                // Unix/Windows newlines.
                String[] externalIds = new String(this.uploadedFile.getBytes(),
                        java.nio.charset.StandardCharsets.UTF_8)
                        .split("( |,|\\n|\\r\\n)+");
                Map<Long, String> itemIdMap = this.itemServices
                        .getItemIds(Arrays.asList(externalIds));
                this.itemIds = itemIdMap.keySet();
                populateInvalidItemIds(externalIds, itemIdMap.values());
                // BUGFIX: previously logged the whole id collection where the
                // count was intended ("Uploaded [12, 13] item(s)...").
                LOGGER.info("Uploaded " + this.itemIds.size() + " item(s) from file "
                        + this.uploadedFile.getName() + "; "
                        + this.invalidItemIds.size() + " invalid item(s) found");
            } catch (IOException e) {
                throw new ManagerException(e);
            }
        } else {
            LOGGER.warning("uploadedFile is null");
        }
        setUploading(false);
    }

    /**
     * Fills {@link #invalidItemIds} with every uploaded id that is not present
     * (case-insensitively) in the collection of ids that were found.
     *
     * @param uploadedIds all external ids read from the uploaded file
     * @param foundIds    external ids that resolved to existing items
     */
    private void populateInvalidItemIds(String[] uploadedIds,
            Collection<String> foundIds) {
        // Upper-case the found ids once so membership checks are case-insensitive.
        Set<String> upperCasedItemIds = new HashSet<String>();
        for (String itemId : foundIds) {
            upperCasedItemIds.add(itemId.toUpperCase());
        }
        this.invalidItemIds = new ArrayList<String>(Arrays.asList(uploadedIds));
        for (Iterator<String> ii = this.invalidItemIds.iterator(); ii.hasNext();) {
            String itemId = ii.next();
            if (upperCasedItemIds.contains(itemId.toUpperCase())) {
                ii.remove();
            }
        }
    }

    public UploadedFile getUploadedFile() {
        return this.uploadedFile;
    }

    public void setUploadedFile(UploadedFile uploadedFile) {
        this.uploadedFile = uploadedFile;
    }

    public String getUploadedFileName() {
        return this.uploadedFile != null ? this.uploadedFile.getName() : null;
    }

    /** Discards the uploaded file and the resolved item ids. */
    // NOTE(review): invalidItemIds is intentionally left untouched here — the
    // invalid-items panel can still show its last result; confirm with the UI flow.
    public void clearUploaded() {
        this.uploadedFile = null;
        this.itemIds = null;
    }

    public void displayInvalidItems() {
        this.displayingInvalidItems = true;
    }

    public void hideInvalidItems() {
        this.displayingInvalidItems = false;
    }

    public Collection<Long> getItemIds() {
        return this.itemIds;
    }

    public int getItemCount() {
        return this.itemIds != null ? this.itemIds.size() : 0;
    }

    public int getInvalidItemCount() {
        return this.invalidItemIds != null ? this.invalidItemIds.size() : 0;
    }

    public void setItemIds(List<Long> itemIds) {
        this.itemIds = itemIds;
    }

    public boolean isDisplayingInvalidItems() {
        return this.displayingInvalidItems;
    }

    public void setDisplayingInvalidItems(boolean displayingInvalidItems) {
        this.displayingInvalidItems = displayingInvalidItems;
    }

    public Collection<String> getInvalidItemIds() {
        return this.invalidItemIds;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.security.query;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.cache.RegionShortcut.PARTITION;
import static org.apache.geode.cache.RegionShortcut.REPLICATE;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.security.query.data.QueryTestObject;
import org.apache.geode.test.junit.categories.OQLQueryTest;
import org.apache.geode.test.junit.categories.SecurityTest;
import org.apache.geode.test.junit.runners.CategoryWithParameterizedRunnerFactory;
@RunWith(Parameterized.class)
@Category({SecurityTest.class, OQLQueryTest.class})
@Parameterized.UseParametersRunnerFactory(CategoryWithParameterizedRunnerFactory.class)
public class QuerySecurityForbiddenQueriesDistributedTest
extends AbstractQuerySecurityDistributedTest {
@Parameterized.Parameters(name = "User:{0}, RegionType:{1}")
public static Object[] usersAndRegionTypes() {
    // Every user is exercised against both region types; each user contributes
    // its REPLICATE combination first, then its PARTITION one, preserving the
    // original parameter ordering.
    String[] users = {
        "super-user", "dataReader", "dataReaderRegion", "dataReaderRegionKey",
        "clusterManagerDataReader", "clusterManagerDataReaderRegion"
    };
    Object[][] parameters = new Object[users.length * 2][];
    int index = 0;
    for (String userName : users) {
        parameters[index++] = new Object[] {userName, REPLICATE};
        parameters[index++] = new Object[] {userName, PARTITION};
    }
    return parameters;
}
// First runner-injected parameter: the security principal the queries run as.
@Parameterized.Parameter
public String user;
// Second runner-injected parameter: the shortcut used to create the test region.
@Parameterized.Parameter(1)
public RegionShortcut regionShortcut;
// Matches either the generic unauthorized-access message or the specific
// denial raised for the dataReaderRegionKey user.
private String regexForExpectedExceptions =
    ".*Unauthorized access.*|.*dataReaderRegionKey not authorized for DATA:READ:region.*";
/**
 * Initializes the base test harness for the parameterized user/region type,
 * then seeds the region (as the super-user, who is always authorized) with
 * two test objects the forbidden queries will target.
 */
@Before
public void setUp() throws Exception {
    super.setUp(user, regionShortcut);
    keys = new Object[] {"key-0", "key-1"};
    values = new Object[] {
        new QueryTestObject(1, "John"),
        new QueryTestObject(3, "Beth")
    };
    putIntoRegion(superUserClient, keys, values, regionName);
}
/* ----- Implicit Getter Tests ----- */
@Test
public void queryWithImplicitMethodInvocationOnWhereClauseShouldThrowSecurityException() {
    // "r.name" in the WHERE clause implicitly invokes the getter, which must be
    // rejected for every parameterized user.
    String whereClauseQuery =
        "SELECT * FROM " + SEPARATOR + regionName + " r WHERE r.name = 'Beth'";
    executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(
        specificUserClient, whereClauseQuery, regexForExpectedExceptions);
}
@Test
public void queryWithImplicitMethodInvocationOnSelectClauseShouldThrowSecurityException() {
    // Projecting "r.name" implicitly invokes the getter in the SELECT clause.
    String projectionQuery = "SELECT r.name FROM " + SEPARATOR + regionName + " r";
    executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(
        specificUserClient, projectionQuery, regexForExpectedExceptions);
}
@Test
public void queriesWithImplicitMethodInvocationUsedWithinAggregateFunctionsShouldThrowSecurityException() {
    // Each aggregate wraps the implicit getter invocation "r.name"; all five must
    // be rejected. Order (COUNT, MAX, MIN, AVG, SUM) matches the original test.
    for (String aggregateFunction : new String[] {"COUNT", "MAX", "MIN", "AVG", "SUM"}) {
        String aggregateQuery =
            "SELECT " + aggregateFunction + "(r.name) FROM " + SEPARATOR + regionName + " r";
        executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(
            specificUserClient, aggregateQuery, regexForExpectedExceptions);
    }
}
@Test
public void queryWithImplicitMethodInvocationUsedWithinDistinctClauseShouldThrowSecurityException() {
String query = "<TRACE> SELECT DISTINCT * from " + SEPARATOR + regionName
+ " r WHERE r.name IN SET('John', 'Beth') ORDER BY r.id asc LIMIT 2";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query,
regexForExpectedExceptions);
}
@Test
public void queryWithImplicitMethodInvocationOnInnerQueriesShouldThrowSecurityException() {
String query = "SELECT * FROM " + SEPARATOR + regionName
+ " r1 WHERE r1.name IN (SELECT r2.name FROM " + SEPARATOR
+ regionName + " r2)";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query,
regexForExpectedExceptions);
}
/* ----- Direct Getter Tests ----- */
@Test
public void queryWithExplicitMethodInvocationOnWhereClauseShouldThrowSecurityException() {
String query = "SELECT * FROM " + SEPARATOR + regionName + " r WHERE r.getName = 'Beth'";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query,
regexForExpectedExceptions);
}
@Test
public void queryWithExplicitMethodInvocationOnSelectClauseShouldThrowSecurityException() {
String query = "SELECT r.getName FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query,
regexForExpectedExceptions);
}
@Test
public void queriesWithExplicitMethodInvocationUsedWithinAggregateFunctionsShouldThrowSecurityException() {
String queryCount = "SELECT COUNT(r.getName) FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryCount,
regexForExpectedExceptions);
String queryMax = "SELECT MAX(r.getName) FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryMax,
regexForExpectedExceptions);
String queryMin = "SELECT MIN(r.getName) FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryMin,
regexForExpectedExceptions);
String queryAvg = "SELECT AVG(r.getName) FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryAvg,
regexForExpectedExceptions);
String querySum = "SELECT SUM(r.getName) FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, querySum,
regexForExpectedExceptions);
}
@Test
public void queryWithExplicitMethodInvocationUsedWithinDistinctClauseShouldThrowSecurityException() {
String query = "<TRACE> SELECT DISTINCT * from " + SEPARATOR + regionName
+ " r WHERE r.getName IN SET('John', 'Beth') ORDER BY r.id asc LIMIT 2";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query,
regexForExpectedExceptions);
}
@Test
public void queryWithExplicitMethodInvocationOnInnerQueriesShouldThrowSecurityException() {
String query = "SELECT * FROM " + SEPARATOR + regionName
+ " r1 WHERE r1.getName IN (SELECT r2.getName FROM " + SEPARATOR + regionName + " r2)";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query,
regexForExpectedExceptions);
}
/* ----- Region Methods ----- */
@Test
public void queriesWithAllowedRegionMethodInvocationsShouldThrowSecurityExceptionForNonAuthorizedUsers() {
Assume.assumeTrue(user.equals("dataReaderRegionKey"));
String queryValues = "SELECT * FROM " + SEPARATOR + regionName + ".values";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryValues,
regexForExpectedExceptions);
String queryKeySet = "SELECT * FROM " + SEPARATOR + regionName + ".keySet";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryKeySet,
regexForExpectedExceptions);
String queryContainsKey =
"SELECT * FROM " + SEPARATOR + regionName + ".containsKey('" + keys[0] + "')";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryContainsKey,
regexForExpectedExceptions);
String queryEntrySet = "SELECT * FROM " + SEPARATOR + regionName + ".get('" + keys[0] + "')";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryEntrySet,
regexForExpectedExceptions);
}
@Test
public void queriesWithRegionMutatorMethodInvocationsShouldThrowSecurityException() {
String queryCreate = "SELECT * FROM " + SEPARATOR + regionName + ".create('key2', 15)";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryCreate,
regexForExpectedExceptions);
assertRegionData(superUserClient, Arrays.asList(values));
String queryPut = "SELECT * FROM " + SEPARATOR + regionName + ".put('key-2', 'something')";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryPut,
regexForExpectedExceptions);
assertRegionData(superUserClient, Arrays.asList(values));
String queryRemove = "SELECT * FROM " + SEPARATOR + regionName + ".remove('key-0')";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryRemove,
regexForExpectedExceptions);
assertRegionData(superUserClient, Arrays.asList(values));
String queryDestroy = "SELECT * FROM " + SEPARATOR + regionName + ".destroyRegion()";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, queryDestroy,
regexForExpectedExceptions);
}
/* ----- Other Forbidden Methods ----- */
@Test
public void queryWithGetClassShouldThrowSecurityException() {
String query1 = "SELECT * FROM " + SEPARATOR + regionName + " r WHERE r.getClass != '1'";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query1,
regexForExpectedExceptions);
String query2 = "SELECT r.getClass FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query2,
regexForExpectedExceptions);
String query3 = "SELECT r.getClass() FROM " + SEPARATOR + regionName + " r";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query3,
regexForExpectedExceptions);
String query4 = "SELECT * FROM " + SEPARATOR + regionName + " r WHERE r.getClass != 'blah'";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query4,
regexForExpectedExceptions);
String query5 = "SELECT * FROM " + SEPARATOR + regionName + " r WHERE r.getClass() != '1'";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query5,
regexForExpectedExceptions);
String query6 = "SELECT * FROM " + SEPARATOR + regionName + " r WHERE r.Class != '1'";
executeQueryAndAssertThatNoAuthorizedExceptionWasThrown(specificUserClient, query6,
regexForExpectedExceptions);
}
@Test
public void queryWithExplicitNonExistingMethodInvocationShouldReturnUndefined() {
Assume.assumeFalse(user.equals("dataReaderRegionKey"));
String query = "SELECT r.getInterestListRegex() FROM " + SEPARATOR + regionName + " r";
List<Object> expectedResults = Arrays.asList(QueryService.UNDEFINED, QueryService.UNDEFINED);
executeQueryAndAssertExpectedResults(specificUserClient, query, expectedResults);
}
@Test
public void queryWithCloneMethodOnQRegionShouldReturnEmptyResult() {
Assume.assumeFalse(user.equals("dataReaderRegionKey"));
String query = "SELECT * FROM " + SEPARATOR + regionName + ".clone";
List<Object> expectedResults = Collections.emptyList();
executeQueryAndAssertExpectedResults(specificUserClient, query, expectedResults);
}
@Test
public void queryWithExplicitNonExistingMethodInvocationOnQRegionShouldReturnEmptyResult() {
Assume.assumeFalse(user.equals("dataReaderRegionKey"));
String query = "SELECT * FROM " + SEPARATOR + regionName + ".getKey('" + keys[0] + "')";
List<Object> expectedResults = Collections.emptyList();
executeQueryAndAssertExpectedResults(specificUserClient, query, expectedResults);
}
@Test
public void queryWithExplicitCreateMethodInvocationOnRegionShouldReturnUndefinedAndDoNotModifyRegion() {
Assume.assumeFalse(user.equals("dataReaderRegionKey"));
String query = "SELECT r.create('key2', 15) FROM " + SEPARATOR + regionName + " r";
List<Object> expectedResults = Arrays.asList(QueryService.UNDEFINED, QueryService.UNDEFINED);
executeQueryAndAssertExpectedResults(specificUserClient, query, expectedResults);
assertRegionData(specificUserClient, Arrays.asList(values));
}
@Test
public void queryWithExplicitMutatorMethodInvocationsOnRegionShouldReturnEmptyResultAndDoNotModifyRegion() {
Assume.assumeFalse(user.equals("dataReaderRegionKey"));
String queryDestroy =
"SELECT * FROM " + SEPARATOR + regionName + ".destroyKey('" + keys[0] + "')";
executeQueryAndAssertExpectedResults(specificUserClient, queryDestroy, Collections.emptyList());
assertRegionData(superUserClient, Arrays.asList(values));
String queryPutIfAbsent =
"SELECT * FROM " + SEPARATOR + regionName + ".putIfAbsent('key-2', 'something')";
executeQueryAndAssertExpectedResults(specificUserClient, queryPutIfAbsent,
Collections.emptyList());
assertRegionData(superUserClient, Arrays.asList(values));
String queryReplace =
"SELECT * FROM " + SEPARATOR + regionName + ".replace('key-0', 'something')";
executeQueryAndAssertExpectedResults(specificUserClient, queryReplace, Collections.emptyList());
assertRegionData(superUserClient, Arrays.asList(values));
}
}
| |
package com.radiofreederp.nodebbintegration;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import com.radiofreederp.nodebbintegration.bukkit.hooks.OnTimeHook;
import com.radiofreederp.nodebbintegration.bukkit.hooks.VanishNoPacketHook;
import com.radiofreederp.nodebbintegration.bukkit.hooks.VaultHook;
import com.radiofreederp.nodebbintegration.utils.Logger;
import org.bukkit.*;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.plugin.Plugin;
import org.bukkit.scoreboard.Objective;
import org.bukkit.scoreboard.Score;
import org.bukkit.scoreboard.Scoreboard;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Bukkit-specific implementation of the NodeBB integration server adapter.
 *
 * <p>Bridges plugin messaging, player/plugin/scoreboard enumeration and server metadata into
 * JSON structures consumed by the NodeBB forum side. All JSON building swallows
 * {@link JSONException} and logs the stack trace, returning whatever was assembled so far.
 */
public class BukkitServer extends MinecraftServerCommon {

    // Owning plugin instance; assigned once in the constructor.
    private final NodeBBIntegrationPlugin plugin;

    public BukkitServer(NodeBBIntegrationPlugin plugin) {
        this.plugin = plugin;
    }

    // Handle messaging.

    /** Sends a color-translated, plugin-prefixed chat message to the given CommandSender. */
    @Override
    public void sendMessage(Object receiver, String message) {
        ((CommandSender)receiver).sendMessage(translateColors("&f[&7n&9BB&7i&f] &r" + message));
    }

    /** Logs the message to the console with all color codes stripped. */
    @Override
    public void sendConsoleMessage(String message) {
        Logger.log(removeColors(message));
    }

    /** Broadcasts the message (in light purple) to every online player with nodebb.admin. */
    @Override
    public void sendMessageToOps(String message) {
        Bukkit.getOnlinePlayers().stream()
            .filter(player -> player.hasPermission("nodebb.admin"))
            .forEach(op -> sendMessage(op, "&d" + message));
    }

    // Handle color.

    /** Translates '&'-style color codes into Bukkit ChatColor codes. */
    @Override
    public String translateColors(String string) {
        return ChatColor.translateAlternateColorCodes('&', string);
    }

    /** Removes every color code (both '&' and section-sign forms) from the string. */
    @Override
    public String removeColors(String string) {
        return ChatColor.stripColor(translateColors(string));
    }

    // Get TPS
    // TODO: Replace hackery when a better method is found.
    // Reflectively cached handles into the NMS server; there is no public TPS API on this
    // Bukkit version, so the internal "recentTps" array is read directly.
    private static Object minecraftServer;
    private static Field recentTps;

    /**
     * Returns the most recent ticks-per-second sample as a string, or "0.000" when the
     * reflective lookup fails (e.g. on a server implementation without these fields).
     */
    public String getTPS() {
        try {
            if (minecraftServer == null) {
                Server server = Bukkit.getServer();
                Field consoleField = server.getClass().getDeclaredField("console");
                consoleField.setAccessible(true);
                minecraftServer = consoleField.get(server);
            }
            if (recentTps == null) {
                recentTps = minecraftServer.getClass().getSuperclass().getDeclaredField("recentTps");
                recentTps.setAccessible(true);
            }
            // recentTps[0] is assumed to be the most recent sample — depends on NMS internals.
            return String.valueOf(((double[]) recentTps.get(minecraftServer))[0]);
        } catch (IllegalAccessException | NoSuchFieldException ignored) {
            // Fall through to the safe default below; TPS is best-effort only.
        }
        return "0.000";
    }

    /**
     * Returns JSON descriptions of all online players, skipping players hidden by
     * VanishNoPacket when that hook is enabled.
     */
    @Override
    public ArrayList<JSONObject> getPlayerList() {
        final ArrayList<JSONObject> playerList = new ArrayList<>();
        for (Player player : Bukkit.getServer().getOnlinePlayers()) {
            if (VanishNoPacketHook.isEnabled()) {
                if (VanishNoPacketHook.isVanished(player.getName())) continue;
            }
            playerList.add(getPlayerJSON(player));
        }
        return playerList;
    }

    /**
     * Builds a JSON description (name, display name, UUID, and — when Vault is hooked —
     * group/prefix/suffix data) for the given online player.
     *
     * @param _player a Bukkit {@link Player}; passed as Object to satisfy the common interface
     */
    @Override
    public JSONObject getPlayerJSON (Object _player) {
        JSONObject playerObj = new JSONObject();
        Player player = (Player)_player;

        try {
            playerObj.put("name", player.getName());
            playerObj.put("displayName", player.getDisplayName());
            playerObj.put("id", player.getUniqueId());

            if (VaultHook.chat != null && VaultHook.permission != null) {
                playerObj.put("primaryGroup", VaultHook.chat.getPrimaryGroup(player));
                playerObj.put("prefix", VaultHook.chat.getPlayerPrefix(player));
                playerObj.put("suffix", VaultHook.chat.getPlayerSuffix(player));
                // null world: queries the player's global group membership via Vault.
                playerObj.put("groups", VaultHook.permission.getPlayerGroups(null, player));
            }

            if (OnTimeHook.isEnabled()) {
                OnTimeHook.onTimeCheckTime(player, playerObj);
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }

        return playerObj;
    }

    /** Returns name/version JSON entries for every plugin known to the plugin manager. */
    @Override
    public ArrayList<JSONObject> getPluginList() {
        final ArrayList<JSONObject> pluginList = new ArrayList<>();
        for (Plugin plugin : Bukkit.getServer().getPluginManager().getPlugins()) {
            JSONObject pluginObj = new JSONObject();
            try {
                pluginObj.put("name", plugin.getName());
                pluginObj.put("version", plugin.getDescription().getVersion());
                pluginList.add(pluginObj);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        return pluginList;
    }

    @Override
    public String getVersion() {
        return Bukkit.getVersion();
    }

    @Override
    public String getServerName() {
        return Bukkit.getServerName();
    }

    /**
     * Returns the server icon as a base64 data URI, or an empty string when
     * server-icon.png is absent or unreadable.
     */
    @Override
    public String getServerIcon() {
        String icon = "";
        File file = new File("server-icon.png");
        if (file.isFile()) {
            try {
                icon = "data:image/png;base64," + Base64.getEncoder().encodeToString(Files.toByteArray(file));
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return icon;
    }

    @Override
    public String getWorldType() {
        return Bukkit.getWorldType();
    }

    /** Returns the name of the default (first-loaded) world. */
    @Override
    public String getWorldName() {
        return Bukkit.getWorlds().get(0).getName();
    }

    @Override
    public String getMotd() {
        return Bukkit.getMotd();
    }

    /** Returns the Vault chat prefix for the player, or null when Vault chat is not hooked. */
    @Override
    public String getPlayerPrefix(Object _player) {
        Player player = (Player)_player;
        if (VaultHook.chat != null) {
            return VaultHook.chat.getPlayerPrefix(player);
        } else {
            return null;
        }
    }

    @Override
    public int getOnlinePlayers() {
        return Bukkit.getOnlinePlayers().size();
    }

    @Override
    public int getMaxPlayers() {
        return Bukkit.getMaxPlayers();
    }

    /**
     * Returns the player's Vault groups (looked up against the default world) as a JSON
     * array of {"name": ...} objects; empty when Vault permissions are not hooked.
     */
    private JSONArray getPlayerGroups(OfflinePlayer player) {
        JSONArray groups = new JSONArray();
        if (VaultHook.permission == null) return groups;
        Arrays.stream(VaultHook.permission.getPlayerGroups(Bukkit.getWorlds().get(0).getName(), player)).forEach(g -> {
            try {
                JSONObject group = new JSONObject();
                group.put("name", g);
                groups.put(group);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        });
        return groups;
    }

    /**
     * Returns every Vault group keyed by name, each with an (initially empty) "members"
     * array; empty when Vault permissions are not hooked.
     */
    @Override
    public JSONObject getGroups() {
        JSONObject groups = new JSONObject();
        if (VaultHook.permission == null) return groups;
        Arrays.stream(VaultHook.permission.getGroups()).forEach(g -> {
            JSONObject group = new JSONObject();
            try {
                group.put("name", g);
                group.put("members", new JSONArray());
                groups.put(g, group);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        });
        return groups;
    }

    /**
     * Returns all Vault groups with their member lists (id, name, last-played timestamp)
     * under a "ranks" key. Empty when Vault permissions are missing or group support is off.
     * Iterates every offline player, so this can be expensive on large servers.
     */
    @Override
    public JSONObject getGroupsWithMembers() {
        JSONObject data = new JSONObject();
        if (VaultHook.permission == null) return data;
        if (!VaultHook.permission.hasGroupSupport()) return data;

        JSONObject groupsObj = getGroups();

        Arrays.stream(Bukkit.getOfflinePlayers()).forEach(p -> Arrays.stream(VaultHook.permission.getPlayerGroups(Bukkit.getWorlds().get(0).getName(), p)).forEach(g -> {
            try {
                JSONObject player = new JSONObject();
                player.put("id", p.getUniqueId());
                player.put("name", p.getName());
                player.put("lastplayed", p.getLastPlayed());
                groupsObj.getJSONObject(g).getJSONArray("members").put(player);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }));

        try {
            data.put("ranks", groupsObj.toJSONArray(groupsObj.names()));
        } catch (JSONException e) {
            e.printStackTrace();
        }

        return data;
    }

    /** Returns every known offline player (id, name, lastPlayed, groups) under "players". */
    @Override
    public JSONObject getOfflinePlayers() {
        JSONObject data = new JSONObject();
        JSONArray players = new JSONArray();

        Arrays.stream(Bukkit.getOfflinePlayers()).forEach(offlinePlayer -> {
            JSONObject player = new JSONObject();
            try {
                player.put("id", offlinePlayer.getUniqueId());
                player.put("name", offlinePlayer.getName());
                player.put("lastPlayed", offlinePlayer.getLastPlayed());
                player.put("groups", getPlayerGroups(offlinePlayer));
                players.put(player);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        });

        try {
            data.put("players", players);
        } catch (JSONException e) {
            e.printStackTrace();
        }

        return data;
    }

    /**
     * Returns every objective on the main scoreboard, each with its metadata and the
     * per-player score entries for players that have a score on that objective.
     */
    @Override
    public JSONArray getScoreboards() {
        Set<OfflinePlayer> players = Bukkit.getScoreboardManager().getMainScoreboard().getPlayers();
        Set<Objective> objectives = Bukkit.getScoreboardManager().getMainScoreboard().getObjectives();

        JSONArray jObjectives = new JSONArray();
        objectives.forEach(objective -> {
            JSONObject jObjective = new JSONObject();
            JSONArray jEntries = new JSONArray();

            players.forEach(player -> {
                if (objective.getScore(player) != null) {
                    JSONObject jEntry = new JSONObject();
                    try {
                        jEntry.put("id", player.getUniqueId().toString());
                        jEntry.put("name", player.getName());
                        jEntry.put("score", objective.getScore(player).getScore());
                        jEntries.put(jEntry);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }
            });

            try {
                jObjective.put("name", objective.getName());
                jObjective.put("displayname", objective.getDisplayName());
                jObjective.put("criteria", objective.getCriteria());
                jObjective.put("displayslot", objective.getDisplaySlot());
                jObjective.put("entries", jEntries);
                jObjectives.put(jObjective);
            } catch (JSONException e) {
                e.printStackTrace();
            }
        });

        return jObjectives;
    }
}
| |
package com.xero.api.client;
import org.junit.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
import com.xero.api.ApiClient;
import com.xero.models.accounting.*;
import org.threeten.bp.*;
import java.io.IOException;
import java.io.IOException;
import java.util.UUID;
public class AccountingApiReportsTest {
  // Client and API facade shared by every test; re-initialized in setUp() before each test.
  ApiClient defaultClient;
  AccountingApi accountingApi;
  // Mock credentials — presumably accepted as-is by the SwaggerHub sandbox; verify against setUp().
  String accessToken;
  String xeroTenantId;
  // Guards the one-time rate-limit pause in setUp(); static so it spans all tests in the class.
  private static boolean setUpIsDone = false;
@Before
public void setUp() {
// Set Access Token and Tenant Id
accessToken = "123";
xeroTenantId = "xyz";
// Init AccountingApi client
defaultClient =
new ApiClient(
"https://xero-accounting.getsandbox.com:443/api.xro/2.0", null, null, null, null);
accountingApi = AccountingApi.getInstance(defaultClient);
// ADDED TO MANAGE RATE LIMITS while using SwaggerHub to mock APIs
if (setUpIsDone) {
return;
}
try {
System.out.println("Sleep for 60 seconds");
Thread.sleep(60);
} catch (InterruptedException e) {
System.out.println(e);
}
// do the setup
setUpIsDone = true;
}
public void tearDown() {
accountingApi = null;
defaultClient = null;
}
  /**
   * Verifies GET Reports/AgedPayablesByContact against the sandbox mock: report metadata,
   * titles, and the header/section/summary row layout with drill-down invoice attributes.
   */
  @Test
  public void getReportAgedPayablesByContactTest() throws IOException {
    System.out.println("@Test - getReportAgedPayablesByContact");
    UUID contactId = UUID.fromString("8138a266-fb42-49b2-a104-014b7045753d");
    // Null date filters: the mock returns its canned full-range payload.
    LocalDate date = null;
    LocalDate fromDate = null;
    LocalDate toDate = null;
    ReportWithRows response =
        accountingApi.getReportAgedPayablesByContact(
            accessToken, xeroTenantId, contactId, date, fromDate, toDate);
    // Report-level metadata.
    assertThat(response.getReports().get(0).getReportID(), is(equalTo("AgedPayablesByContact")));
    assertThat(
        response.getReports().get(0).getReportName(), is(equalTo("Aged Payables By Contact")));
    assertThat(response.getReports().get(0).getReportType(), is("AgedPayablesByContact"));
    assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Invoices")));
    assertThat(response.getReports().get(0).getReportTitles().get(1), is(equalTo("ABC")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(2),
        is(equalTo("From 10 October 2017 to 22 April 2019")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(3),
        is(equalTo("Showing payments to 22 April 2019")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("22 April 2019")));
    // Row 0: column header row.
    assertThat(
        response.getReports().get(0).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.HEADER)));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("Date")));
    // Row 1: opening-balance section.
    assertThat(
        response.getReports().get(0).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("2017-10-10T00:00:00")));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(1).getValue(),
        is(equalTo("Opening Balance")));
    // Row 2: invoice detail section with a drill-down invoiceID attribute and a summary row.
    assertThat(
        response.getReports().get(0).getRows().get(2).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("2018-10-09T00:00:00")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(2)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getId(),
        is(equalTo("invoiceID")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(2)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getValue()
            .toString(),
        is(equalTo("1f3960ae-0537-4438-a4dd-76d785e6d7d8")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SUMMARYROW)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getCells().get(0).getValue(),
        is(equalTo("Total")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getCells().get(4).getValue(),
        is(equalTo("250.00")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getCells().get(7).getValue(),
        is(equalTo("250.00")));
    // System.out.println(response.getReports().toString());
  }
  /**
   * Verifies GET Reports/AgedReceivablesByContact against the sandbox mock: report metadata,
   * titles, detail rows with drill-down invoice attributes, and the closing-balance section.
   */
  @Test
  public void getReportAgedReceivablesByContactTest() throws IOException {
    System.out.println("@Test - getReportAgedReceivablesByContact");
    UUID contactId = UUID.fromString("8138a266-fb42-49b2-a104-014b7045753d");
    // Null date filters: the mock returns its canned full-range payload.
    LocalDate date = null;
    LocalDate fromDate = null;
    LocalDate toDate = null;
    ReportWithRows response =
        accountingApi.getReportAgedReceivablesByContact(
            accessToken, xeroTenantId, contactId, date, fromDate, toDate);
    // Report-level metadata.
    assertThat(response.getReports().get(0).getReportID(), is(equalTo("AgedReceivablesByContact")));
    assertThat(
        response.getReports().get(0).getReportName(), is(equalTo("Aged Receivables By Contact")));
    assertThat(response.getReports().get(0).getReportType(), is("AgedReceivablesByContact"));
    assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Invoices")));
    assertThat(response.getReports().get(0).getReportTitles().get(1), is(equalTo("ABC")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(2),
        is(equalTo("From 10 October 2017 to 23 April 2019")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(3),
        is(equalTo("Showing payments to 23 April 2019")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("23 April 2019")));
    // Row 0: column header row.
    assertThat(
        response.getReports().get(0).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.HEADER)));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("Date")));
    // Row 1: opening-balance section.
    assertThat(
        response.getReports().get(0).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("2017-10-10T00:00:00")));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(1).getValue(),
        is(equalTo("Opening Balance")));
    // Row 2: invoice detail section with a drill-down invoiceID attribute.
    assertThat(
        response.getReports().get(0).getRows().get(2).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("2018-05-13T00:00:00")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(2)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getId(),
        is(equalTo("invoiceID")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(2)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getValue()
            .toString(),
        is(equalTo("40ebad47-24e2-4dc9-a5f5-579df427671b")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getCells().get(0).getValue(),
        is(equalTo("2019-04-23T00:00:00")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getCells().get(4).getValue(),
        is(equalTo("50.00")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(1).getCells().get(7).getValue(),
        is(equalTo("50.00")));
    // Row 3: closing-balance summary section.
    assertThat(
        response.getReports().get(0).getRows().get(3).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(
        response.getReports().get(0).getRows().get(3).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SUMMARYROW)));
    assertThat(
        response.getReports().get(0).getRows().get(3).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("Closing Balance")));
    assertThat(
        response.getReports().get(0).getRows().get(3).getRows().get(0).getCells().get(4).getValue(),
        is(equalTo("150.00")));
    // System.out.println(response.getReports().get(0).toString());
  }
  /**
   * Verifies GET Reports/BalanceSheet against the sandbox mock: report metadata, the Assets
   * and Bank sections, and the account drill-down attribute on the first bank account row.
   */
  @Test
  public void getReportBalanceSheetTest() throws IOException {
    System.out.println("@Test - getReportBalanceSheet");
    // All query parameters null: the mock returns its canned default-layout payload.
    String date = null;
    Integer periods = null;
    String timeframe = null;
    String trackingOptionID1 = null;
    String trackingOptionID2 = null;
    Boolean standardLayout = null;
    Boolean paymentsOnly = null;
    ReportWithRows response =
        accountingApi.getReportBalanceSheet(
            accessToken,
            xeroTenantId,
            date,
            periods,
            timeframe,
            trackingOptionID1,
            trackingOptionID2,
            standardLayout,
            paymentsOnly);
    // Report-level metadata.
    assertThat(response.getReports().get(0).getReportID(), is(equalTo("BalanceSheet")));
    assertThat(response.getReports().get(0).getReportName(), is(equalTo("Balance Sheet")));
    assertThat(response.getReports().get(0).getReportType(), is(equalTo("BalanceSheet")));
    assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Balance Sheet")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(1),
        is(equalTo("Dev Evangelist - Sid Test 3 (NZ-2016-02)")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("12 April 2019")));
    // Row 0: column header row with the period date.
    assertThat(
        response.getReports().get(0).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.HEADER)));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(1).getValue(),
        is(equalTo("30 Apr 2019")));
    // Rows 1-2: Assets and Bank sections; the Bank row carries an account drill-down attribute.
    assertThat(
        response.getReports().get(0).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(response.getReports().get(0).getRows().get(1).getTitle(), is(equalTo("Assets")));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(response.getReports().get(0).getRows().get(2).getTitle(), is(equalTo("Bank")));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(
        response.getReports().get(0).getRows().get(2).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("Country Savings")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(2)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getId(),
        is(equalTo("account")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(2)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getValue()
            .toString(),
        is(equalTo("041207d2-3d61-4e5d-8c1a-b9236955a71c")));
    // System.out.println(response.getReports().toString());
  }
  /**
   * Verifies GET Reports/BankSummary against the sandbox mock: report metadata, the five
   * column headers, the first bank-account row with its accountID drill-down attribute, and
   * the section total.
   */
  @Test
  public void getReportBankSummaryTest() throws IOException {
    System.out.println("@Test - getReportBankSummary");
    // Null date filters: the mock returns its canned April 2019 payload.
    LocalDate toDate = null;
    LocalDate fromDate = null;
    ReportWithRows response =
        accountingApi.getReportBankSummary(accessToken, xeroTenantId, fromDate, toDate);
    // Report-level metadata.
    assertThat(response.getReports().get(0).getReportID(), is(equalTo("BankSummary")));
    assertThat(response.getReports().get(0).getReportName(), is(equalTo("Bank Summary")));
    assertThat(response.getReports().get(0).getReportType(), is("BankSummary"));
    assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Bank Summary")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(1),
        is(equalTo("MindBody Test 10 (AU-2016-02)")));
    assertThat(
        response.getReports().get(0).getReportTitles().get(2),
        is(equalTo("From 1 April 2019 to 30 April 2019")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("23 April 2019")));
    // Row 0: the five column headers of the summary table.
    assertThat(
        response.getReports().get(0).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.HEADER)));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("Bank Accounts")));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(1).getValue(),
        is(equalTo("Opening Balance")));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(2).getValue(),
        is(equalTo("Cash Received")));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(3).getValue(),
        is(equalTo("Cash Spent")));
    assertThat(
        response.getReports().get(0).getRows().get(0).getCells().get(4).getValue(),
        is(equalTo("Closing Balance")));
    // Row 1: account section with a drill-down accountID attribute and a summary total.
    assertThat(
        response.getReports().get(0).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(0).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(),
        is(equalTo("Big City Bank")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(1)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getId(),
        is(equalTo("accountID")));
    assertThat(
        response
            .getReports()
            .get(0)
            .getRows()
            .get(1)
            .getRows()
            .get(0)
            .getCells()
            .get(0)
            .getAttributes()
            .get(0)
            .getValue()
            .toString(),
        is(equalTo("03f9cf1e-2deb-4bf1-b0a8-b57f08672eb8")));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(1).getRowType(),
        is(equalTo(com.xero.models.accounting.RowType.SUMMARYROW)));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(1).getCells().get(0).getValue(),
        is(equalTo("Total")));
    assertThat(
        response.getReports().get(0).getRows().get(1).getRows().get(1).getCells().get(4).getValue(),
        is(equalTo("10.00")));
    // System.out.println(response.getReports().get(0).toString());
  }
@Test
public void getReportExecutiveSummaryTest() throws IOException {
// Disabled test: the API call and every assertion are commented out below, so this
// currently only logs the test name. Re-enable by uncommenting the block.
System.out.println("@Test - getReportExecutiveSummary");
// Parameter for the (disabled) call; null means "let the API use its default date".
LocalDate date = null;
/*
ReportWithRows response = accountingApi.getReportExecutiveSummary(accessToken,xeroTenantId,date);
assertThat(response.getReports().get(0).getReportID(), is(equalTo("ExecutiveSummary")));
assertThat(response.getReports().get(0).getReportName(), is(equalTo("Executive Summary")));
assertThat(response.getReports().get(0).getReportType(), is("ExecutiveSummary"));
assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Executive Summary")));
assertThat(response.getReports().get(0).getReportTitles().get(1), is(equalTo("Dev Evangelist - Sid Test 1 (US-2016-06)")));
assertThat(response.getReports().get(0).getReportTitles().get(2), is(equalTo("For the month of April 2019")));
assertThat(response.getReports().get(0).getReportDate(), is(equalTo("24 April 2019")));
assertThat(response.getReports().get(0).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.HEADER)));
assertThat(response.getReports().get(0).getRows().get(0).getCells().get(1).getValue(), is(equalTo("Apr 2019")));
assertThat(response.getReports().get(0).getRows().get(0).getCells().get(2).getValue(), is(equalTo("Mar 2019")));
assertThat(response.getReports().get(0).getRows().get(0).getCells().get(3).getValue(), is(equalTo("Variance")));
assertThat(response.getReports().get(0).getRows().get(1).getRowType(), is(equalTo(com.xero.models.accounting.RowType.SECTION)));
assertThat(response.getReports().get(0).getRows().get(1).getTitle(), is("Cash"));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.ROW)));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(), is(equalTo("Cash received")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(1).getValue(), is(equalTo("0.00")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(2).getValue(), is(equalTo("0.00")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(3).getValue(), is(equalTo("0.0%")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(1).getRowType(), is(equalTo(com.xero.models.accounting.RowType.ROW)));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(1).getCells().get(0).getValue(), is(equalTo("Cash spent")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(1).getCells().get(1).getValue(), is(equalTo("0.00")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(1).getCells().get(2).getValue(), is(equalTo("20.00")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(1).getCells().get(3).getValue(), is(equalTo("-100.0%")));
*/
// System.out.println(response.toString());
}
@Test
public void getReportTenNinetyNineTest() throws IOException {
    System.out.println("@Test - getReportTenNinetyNine");
    // A null report year exercises the API's default-year behavior.
    String reportYear = null;
    Reports response = accountingApi.getReportTenNinetyNine(accessToken, xeroTenantId, reportYear);
    // Report header.
    assertThat(response.getReports().get(0).getReportName(), is(equalTo("1099 report")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("1 Jan 2016 to 31 Dec 2016")));
    // First contact on the report.
    assertThat(response.getReports().get(0).getContacts().get(0).getBox3(), is(equalTo(1000.00)));
    assertThat(response.getReports().get(0).getContacts().get(0).getName(), is(equalTo("Bank West")));
    assertThat(response.getReports().get(0).getContacts().get(0).getFederalTaxIDType(), is(equalTo("SSN")));
    assertThat(response.getReports().get(0).getContacts().get(0).getCity(), is(equalTo("Pinehaven")));
    assertThat(response.getReports().get(0).getContacts().get(0).getZip(), is(equalTo("12345")));
    assertThat(response.getReports().get(0).getContacts().get(0).getState(), is(equalTo("CA")));
    assertThat(response.getReports().get(0).getContacts().get(0).getEmail(), is(equalTo("jack@bowest.com")));
    assertThat(response.getReports().get(0).getContacts().get(0).getTaxID(), is(equalTo("234-22-2223")));
    assertThat(response.getReports().get(0).getContacts().get(0).getContactId(), is(equalTo(UUID.fromString("81d5706a-8057-4338-8511-747cd85f4c68"))));
    // A later contact, to cover a different 1099 box field.
    assertThat(response.getReports().get(0).getContacts().get(2).getBox1(), is(equalTo(5543.75)));
}
@Test
public void getReportTrialBalanceTest() throws IOException {
    System.out.println("@Test - getReportTrialBalance");
    // Null parameters exercise the API defaults (current date, all payments).
    LocalDate date = null;
    Boolean paymentsOnly = null;
    ReportWithRows response = accountingApi.getReportTrialBalance(accessToken, xeroTenantId, date, paymentsOnly);
    // Report metadata.
    assertThat(response.getReports().get(0).getReportID(), is(equalTo("TrialBalance")));
    assertThat(response.getReports().get(0).getReportName(), is(equalTo("Trial Balance")));
    assertThat(response.getReports().get(0).getReportType(), is("TrialBalance"));
    assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Trial Balance")));
    assertThat(response.getReports().get(0).getReportTitles().get(1), is(equalTo("Dev Evangelist - Sid Test 1 (US-2016-06)")));
    assertThat(response.getReports().get(0).getReportTitles().get(2), is(equalTo("As at 24 April 2019")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("24 April 2019")));
    // Header row: type plus the five column labels, in order.
    assertThat(response.getReports().get(0).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.HEADER)));
    String[] headerLabels = {"Account", "Debit", "Credit", "YTD Debit", "YTD Credit"};
    for (int i = 0; i < headerLabels.length; i++) {
        assertThat(response.getReports().get(0).getRows().get(0).getCells().get(i).getValue(), is(equalTo(headerLabels[i])));
    }
    // Revenue section and its first detail row.
    assertThat(response.getReports().get(0).getRows().get(1).getRowType(), is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    assertThat(response.getReports().get(0).getRows().get(1).getTitle(), is("Revenue"));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(), is(equalTo("Big Expense (002)")));
    // The first cell carries an "account" attribute holding the account's UUID.
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getAttributes().get(0).getId(), is(equalTo("account")));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getAttributes().get(0).getValue().toString(), is(equalTo("da962997-a8bd-4dff-9616-01cdc199283f")));
}
@Test
public void getReportBudgetSummaryTest() throws IOException {
    System.out.println("@Test - getReportBudgetSummary");
    // Null parameters exercise the API defaults.
    LocalDate date = null;
    Integer period = null;
    Integer timeframe = null;
    ReportWithRows response = accountingApi.getReportBudgetSummary(accessToken, xeroTenantId, date, period, timeframe);
    // Report metadata.
    assertThat(response.getReports().get(0).getReportID(), is(equalTo("BudgetSummary")));
    assertThat(response.getReports().get(0).getReportName(), is(equalTo("Budget Summary")));
    assertThat(response.getReports().get(0).getReportType(), is("BudgetSummary"));
    assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Overall Budget")));
    assertThat(response.getReports().get(0).getReportTitles().get(1), is(equalTo("Budget Summary")));
    assertThat(response.getReports().get(0).getReportTitles().get(2), is(equalTo("Mind Body Online Test 11")));
    assertThat(response.getReports().get(0).getReportTitles().get(3), is(equalTo("November 2019 to October 2022")));
    assertThat(response.getReports().get(0).getReportDate(), is(equalTo("14 November 2019")));
    assertThat(response.getReports().get(0).getUpdatedDateUTCAsDate(), is(equalTo(OffsetDateTime.parse("2019-11-14T18:10:37.865Z"))));
    // Header row: type plus the ten quarterly column labels, in order.
    assertThat(response.getReports().get(0).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.HEADER)));
    String[] headerLabels = {"Account", "Jan-20", "Apr-20", "Jul-20", "Oct-20", "Jan-21", "Apr-21", "Jul-21", "Oct-21", "Jan-22"};
    for (int i = 0; i < headerLabels.length; i++) {
        assertThat(response.getReports().get(0).getRows().get(0).getCells().get(i).getValue(), is(equalTo(headerLabels[i])));
    }
    assertThat(response.getReports().get(0).getRows().get(1).getRowType(), is(equalTo(com.xero.models.accounting.RowType.SECTION)));
    // Section-content assertions below are disabled; uncomment once the mock data is stable.
    /*
    assertThat(response.getReports().get(0).getRows().get(1).getTitle(), is("Income"));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.ROW)));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(), is(equalTo("Big Expense")));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getAttributes().get(0).getId(), is(equalTo("account")));
    assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getAttributes().get(0).getValue().toString(), is(equalTo("da962997-a8bd-4dff-9616-01cdc199283f")));
    */
}
@Test
public void getReportProfitAndLossTest() throws IOException {
// Disabled test: the API call and all assertions are commented out below, so this
// currently only logs the test name and sets up the (unused) call parameters.
System.out.println("@Test - getReportProfitAndLoss");
// Parameters for the (disabled) call; nulls mean "use the API default".
LocalDate fromDate = null;
LocalDate toDate = null;
String timeframe = "MONTH";
Boolean standardLayout = true;
Boolean paymentsOnly = false;
Integer periods = null;
String trackingCategoryID = null;
String trackingCategoryID2 = null;
String trackingOptionID = null;
String trackingOptionID2 = null;
/*
ReportWithRows response = accountingApi.getReportProfitAndLoss(accessToken,xeroTenantId,fromDate, toDate, periods, timeframe, trackingCategoryID, trackingCategoryID2, trackingOptionID, trackingOptionID2, standardLayout, paymentsOnly);
assertThat(response.getReports().get(0).getReportID(), is(equalTo("ProfitAndLoss")));
assertThat(response.getReports().get(0).getReportName(), is(equalTo("Profit and Loss")));
assertThat(response.getReports().get(0).getReportType(), is("ProfitAndLoss"));
assertThat(response.getReports().get(0).getReportTitles().get(0), is(equalTo("Income Statement")));
assertThat(response.getReports().get(0).getReportTitles().get(1), is(equalTo("Dev Evangelist - Sid Test 1 (US-2016-06)")));
assertThat(response.getReports().get(0).getReportTitles().get(2), is(equalTo("10 October 2018 to 24 April 2019")));
assertThat(response.getReports().get(0).getReportDate(), is(equalTo("24 April 2019")));
assertThat(response.getReports().get(0).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.HEADER)));
assertThat(response.getReports().get(0).getRows().get(0).getCells().get(1).getValue(), is(equalTo("24 Apr 19")));
assertThat(response.getReports().get(0).getRows().get(1).getRowType(), is(equalTo(com.xero.models.accounting.RowType.SECTION)));
assertThat(response.getReports().get(0).getRows().get(1).getTitle(), is("Revenue"));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getRowType(), is(equalTo(com.xero.models.accounting.RowType.ROW)));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getValue(), is(equalTo("Big Expense")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getAttributes().get(0).getId(), is(equalTo("account")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(0).getAttributes().get(0).getValue().toString(), is(equalTo("da962997-a8bd-4dff-9616-01cdc199283f")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(1).getValue(), is(equalTo("480.00")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(1).getAttributes().get(0).getId(), is(equalTo("account")));
assertThat(response.getReports().get(0).getRows().get(1).getRows().get(0).getCells().get(1).getAttributes().get(0).getValue().toString(), is(equalTo("da962997-a8bd-4dff-9616-01cdc199283f")));
*/
// System.out.println(response.toString());
}
@Test
public void getReportBASorGSTTest() throws IOException {
// Endpoint not implemented by the SDK yet; this only records the skip in the log.
System.out.println("@Test - getReportBASorGST - not implemented");
// Parameter for the future call, kept as documentation of the intended signature.
String reportID = null;
// ReportWithRows response = accountingApi.getReportBASorGST(reportID);
// TODO: test validations
// System.out.println(response.getReports().get(0).toString());
}
@Test
public void getReportBASorGSTListTest() throws IOException {
// Endpoint not implemented by the SDK yet; this only records the skip in the log.
System.out.println("@Test - getReportBASorGSTList - not implemented");
// ReportWithRows response = accountingApi.getReportBASorGSTList();
// TODO: test validations
// System.out.println(response.getReports().get(0).toString());
}
}
| |
package com.thinkbiganalytics.feedmgr.nifi;
/*-
* #%L
* thinkbig-feed-manager-controller
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.db.PoolingDataSourceService;
import com.thinkbiganalytics.discovery.schema.TableSchema;
import com.thinkbiganalytics.jdbc.util.DatabaseType;
import com.thinkbiganalytics.kerberos.KerberosTicketConfiguration;
import com.thinkbiganalytics.metadata.rest.model.data.JdbcDatasource;
import com.thinkbiganalytics.schema.DBSchemaParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.web.api.dto.ControllerServiceDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.sql.DataSource;
/**
* Allow Kylo to use a NiFi database pool connection to display database metadata with tables and columns.
*/
@Service
public class DBCPConnectionPoolTableInfo {
private static final Logger log = LoggerFactory.getLogger(DBCPConnectionPoolTableInfo.class);
@Autowired
private NifiControllerServiceProperties nifiControllerServiceProperties;
@Inject
@Qualifier("kerberosHiveConfiguration")
private KerberosTicketConfiguration kerberosHiveConfiguration;
/**
* Returns a list of table names matching a pattern
*
* @param serviceId a NiFi controller service id
* @param serviceName a NiFi controller service name
* @param schema A schema pattern to look for
* @param tableName A table pattern to look for
* @return a list of schema.table names matching the pattern for the database
*/
/**
 * Lists "schema.table" names reachable through the given NiFi controller service.
 *
 * @param serviceId   NiFi controller service id (tried first)
 * @param serviceName NiFi controller service name (fallback when the id lookup misses)
 * @param schema      schema pattern to match
 * @param tableName   table pattern to match
 * @return matching "schema.table" names, or null when the controller service cannot be resolved
 */
public List<String> getTableNamesForControllerService(String serviceId, String serviceName, String schema, String tableName) {
    final ControllerServiceDTO controllerService = getControllerService(serviceId, serviceName);
    if (controllerService == null) {
        log.error("Cannot getTable Names for Controller Service. Unable to obtain Controller Service for serviceId or Name ({} , {})", serviceId, serviceName);
        return null;
    }
    final DescribeTableWithControllerService serviceProperties =
        new DescribeTableWithControllerServiceBuilder(controllerService).schemaName(schema).tableName(tableName).build();
    return getTableNamesForControllerService(serviceProperties);
}
/**
* Returns a list of table names for the specified data source.
*
* @param datasource the data source
* @param schema the schema name, or {@code null} for all schemas
* @param tableName a table pattern to look for
* @return a list of schema.table names, or {@code null} if not accessible
*/
/**
 * Lists "schema.table" names for a JDBC data source that is backed by a NiFi controller service.
 *
 * @param datasource the data source; its controller service id is used for the lookup
 * @param schema     the schema name, or {@code null} for all schemas
 * @param tableName  a table pattern to look for
 * @return a list of "schema.table" names, or {@code null} when the controller service is missing
 */
@Nullable
public List<String> getTableNamesForDatasource(@Nonnull final JdbcDatasource datasource, @Nullable final String schema, @Nullable final String tableName) {
    final String controllerServiceId = datasource.getControllerServiceId();
    final ControllerServiceDTO controllerService =
        (controllerServiceId != null) ? getControllerService(controllerServiceId, null) : null;
    if (controllerService == null) {
        log.error("Cannot get table names for data source: {}", datasource);
        return null;
    }
    // The data source supplies its own password, so environment-property overlays are disabled.
    final DescribeTableWithControllerService serviceProperties = new DescribeTableWithControllerServiceBuilder(controllerService)
        .schemaName(schema)
        .tableName(tableName)
        .password(datasource.getPassword())
        .useEnvironmentProperties(false)
        .build();
    return getTableNamesForControllerService(serviceProperties);
}
/**
* Describe the database table and fields available for a given NiFi controller service
*
* @param serviceId a NiFi controller service id
* @param serviceName a NiFi controller service name
* @param schema A schema to look for
* @param tableName A table to look for
* @return the database table and fields
*/
/**
 * Describes a table (columns and types) through the given NiFi controller service.
 *
 * @param serviceId   NiFi controller service id (tried first)
 * @param serviceName NiFi controller service name (fallback when the id lookup misses)
 * @param schema      the schema to look in
 * @param tableName   the table to describe
 * @return the table schema, or null when the controller service cannot be resolved
 */
public TableSchema describeTableForControllerService(String serviceId, String serviceName, String schema, String tableName) {
    final ControllerServiceDTO controllerService = getControllerService(serviceId, serviceName);
    if (controllerService == null) {
        log.error("Cannot describe Table for Controller Service. Unable to obtain Controller Service for serviceId or Name ({} , {})", serviceId, serviceName);
        return null;
    }
    final DescribeTableWithControllerService serviceProperties =
        new DescribeTableWithControllerServiceBuilder(controllerService).schemaName(schema).tableName(tableName).build();
    return describeTableForControllerService(serviceProperties);
}
/**
* Describes the specified database table accessed through the specified data source.
*
* @param datasource the data source
* @param schema the schema name, or {@code null} to search all schemas
* @param tableName the table name
* @return the database table and fields, or {@code null} if not found
*/
/**
 * Describes the specified table accessed through the specified data source.
 *
 * @param datasource the data source; its controller service id is used for the lookup
 * @param schema     the schema name, or {@code null} to search all schemas
 * @param tableName  the table name
 * @return the table schema, or {@code null} when the controller service is missing
 */
public TableSchema describeTableForDatasource(@Nonnull final JdbcDatasource datasource, @Nullable final String schema, @Nonnull final String tableName) {
    final String controllerServiceId = datasource.getControllerServiceId();
    final ControllerServiceDTO controllerService =
        (controllerServiceId != null) ? getControllerService(controllerServiceId, null) : null;
    if (controllerService == null) {
        log.error("Cannot describe table for data source: {}", datasource);
        return null;
    }
    // The data source supplies its own password, so environment-property overlays are disabled.
    final DescribeTableWithControllerService serviceProperties = new DescribeTableWithControllerServiceBuilder(controllerService)
        .schemaName(schema)
        .tableName(tableName)
        .password(datasource.getPassword())
        .useEnvironmentProperties(false)
        .build();
    return describeTableForControllerService(serviceProperties);
}
/**
* Return a list of schema.table_name
*
* @param serviceProperties properties describing where and what to look for
* @return a list of schema.table_name
*/
/**
 * Lists "schema.table" names using the connection settings carried by {@code serviceProperties}.
 *
 * @param serviceProperties where and what to look for; may be null
 * @return a list of "schema.table" names, or null when {@code serviceProperties} is null
 */
private List<String> getTableNamesForControllerService(DescribeTableWithControllerService serviceProperties) {
    if (serviceProperties == null) {
        return null;
    }
    // Optionally overlay Kylo environment properties (e.g. a locally configured password)
    // on top of the properties declared on the NiFi controller service.
    Map<String, String> properties = serviceProperties.useEnvironmentProperties()
        ? nifiControllerServiceProperties.mergeNifiAndEnvProperties(serviceProperties.getControllerServiceDTO().getProperties(),
                                                                    serviceProperties.getControllerServiceName())
        : serviceProperties.getControllerServiceDTO().getProperties();
    PoolingDataSourceService.DataSourceProperties dataSourceProperties = getDataSourceProperties(properties, serviceProperties);
    // A password starting with "**" indicates the sensitive value was masked/unresolved,
    // so the user must supply it via a Kylo configuration property instead.
    if (StringUtils.isNotBlank(dataSourceProperties.getPassword()) && dataSourceProperties.getPassword().startsWith("**")) {
        String propertyKey = nifiControllerServiceProperties.getEnvironmentControllerServicePropertyPrefix(serviceProperties.getControllerServiceName()) + ".password";
        String example = propertyKey + "=PASSWORD";
        // Fixed typo in the operator-facing message: "specifiy" -> "specify".
        log.error("Unable to connect to Controller Service {}, {}. You need to specify a configuration property as {} with the password for user: {}. ",
                  serviceProperties.getControllerServiceName(), serviceProperties.getControllerServiceId(), example, dataSourceProperties.getUser());
    }
    log.info("Search For Tables against Controller Service: {} ({}) with uri of {}. ", serviceProperties.getControllerServiceName(), serviceProperties.getControllerServiceId(),
             dataSourceProperties.getUrl());
    DataSource dataSource = PoolingDataSourceService.getDataSource(dataSourceProperties);
    DBSchemaParser schemaParser = new DBSchemaParser(dataSource, kerberosHiveConfiguration);
    return schemaParser.listTables(serviceProperties.getSchemaName(), serviceProperties.getTableName());
}
/**
* get the validation query from the db name that is parsed from the
*/
/**
 * Derives a JDBC validation query from the database type encoded in the connection string.
 *
 * @param connectionString a JDBC connection URL
 * @return the validation query, or null when the database type is not recognized
 */
private String parseValidationQueryFromConnectionString(String connectionString) {
    try {
        return DatabaseType.fromJdbcConnectionString(connectionString).getValidationQuery();
    } catch (IllegalArgumentException ignored) {
        // Unknown database type: no validation query is available, which is acceptable.
        return null;
    }
}
/**
 * Describes a table (columns and types) using the connection settings carried by
 * {@code serviceProperties}.
 *
 * @param serviceProperties where and what to describe
 * @return the table schema, or null when no controller service type is available
 */
private TableSchema describeTableForControllerService(DescribeTableWithControllerService serviceProperties) {
    // NOTE(review): the original guard compared the service type with itself
    // (type.equalsIgnoreCase(type)), which is always true for a non-null type.
    // Simplified to the equivalent null check; behavior is unchanged.
    if (serviceProperties.getControllerServiceType() != null) {
        // Optionally overlay Kylo environment properties on the NiFi-declared ones.
        Map<String, String> properties = serviceProperties.useEnvironmentProperties()
            ? nifiControllerServiceProperties.mergeNifiAndEnvProperties(serviceProperties.getControllerServiceDTO().getProperties(),
                                                                        serviceProperties.getControllerServiceName())
            : serviceProperties.getControllerServiceDTO().getProperties();
        PoolingDataSourceService.DataSourceProperties dataSourceProperties = getDataSourceProperties(properties, serviceProperties);
        log.info("describing Table {}.{} against Controller Service: {} ({}) with uri of {} ", serviceProperties.getSchemaName(), serviceProperties.getTableName(),
                 serviceProperties.getControllerServiceName(), serviceProperties.getControllerServiceId(), dataSourceProperties.getUrl());
        DataSource dataSource = PoolingDataSourceService.getDataSource(dataSourceProperties);
        DBSchemaParser schemaParser = new DBSchemaParser(dataSource, kerberosHiveConfiguration);
        return schemaParser.describeTable(serviceProperties.getSchemaName(), serviceProperties.getTableName());
    }
    return null;
}
/**
 * Resolves a NiFi controller service, preferring the id lookup and falling back to the name.
 *
 * @param serviceId   controller service id; tried first
 * @param serviceName controller service name; used only when the id lookup returns nothing
 * @return the controller service, or null when neither lookup succeeds
 */
private ControllerServiceDTO getControllerService(String serviceId, String serviceName) {
    final ControllerServiceDTO byId = nifiControllerServiceProperties.getControllerServiceById(serviceId);
    return (byId != null) ? byId : nifiControllerServiceProperties.getControllerServiceByName(serviceName);
}
/**
 * Builds pooled data-source properties from the controller service's resolved property map.
 *
 * <p>Password precedence: explicitly supplied on {@code serviceProperties} &gt; NiFi property
 * &gt; Kylo environment property. Driver class and validation query fall back to environment
 * properties, and the validation query may also be inferred from the JDBC URL.
 *
 * @param properties        resolved controller-service properties (possibly env-merged)
 * @param serviceProperties describes which property keys to read
 * @return properties suitable for {@code PoolingDataSourceService}
 */
public PoolingDataSourceService.DataSourceProperties getDataSourceProperties(Map<String, String> properties, DescribeTableWithControllerService serviceProperties) {
    final String uri = properties.get(serviceProperties.getConnectionStringPropertyKey());
    final String user = properties.get(serviceProperties.getUserNamePropertyKey());

    // Password: explicit value wins, then the NiFi property, then the environment property.
    String password = serviceProperties.getPassword();
    if (password == null) {
        password = properties.get(serviceProperties.getPasswordPropertyKey());
    }
    if (StringUtils.isBlank(password)) {
        password = nifiControllerServiceProperties.getEnvironmentPropertyValueForControllerService(serviceProperties.getControllerServiceName(), "password");
    }

    // Driver class: NiFi property first, environment property as fallback.
    String driverClassName = properties.get(serviceProperties.getDriverClassNamePropertyKey());
    if (StringUtils.isBlank(driverClassName)) {
        driverClassName = nifiControllerServiceProperties.getEnvironmentPropertyValueForControllerService(serviceProperties.getControllerServiceName(), "database_driver_class_name");
    }

    // Validation query: environment property first, otherwise inferred from the JDBC URL.
    String validationQuery = nifiControllerServiceProperties.getEnvironmentPropertyValueForControllerService(serviceProperties.getControllerServiceName(), "validationQuery");
    if (StringUtils.isBlank(validationQuery)) {
        validationQuery = parseValidationQueryFromConnectionString(uri);
    }
    // Only validate connections on borrow when there is actually a query to run.
    final boolean testOnBorrow = StringUtils.isNotBlank(validationQuery);
    return new PoolingDataSourceService.DataSourceProperties(user, password, uri, driverClassName, testOnBorrow, validationQuery);
}
/**
 * Fluent builder for {@link DescribeTableWithControllerService}.
 *
 * <p>Constructed from a NiFi {@link ControllerServiceDTO}; the connection-string, user,
 * password and driver property keys are pre-populated for the known pool types
 * (DBCP connection pool and the Sqoop connection service) and may be overridden
 * individually via the builder methods.
 */
private static class DescribeTableWithControllerServiceBuilder {
// NiFi property keys used to read connection settings from the controller service.
private String connectionStringPropertyKey;
private String userNamePropertyKey;
private String passwordPropertyKey;
private String driverClassNamePropertyKey;
// Identity of the controller service, copied from the DTO at construction time.
private String controllerServiceType;
private String controllerServiceName;
private String controllerServiceId;
// What to look up.
private String tableName;
private String schemaName;
private ControllerServiceDTO controllerServiceDTO;
// Optional explicit password, overriding the NiFi/environment-resolved one.
private String password;
// Environment-property overlay is on by default; data-source callers switch it off.
private boolean useEnvironmentProperties = true;
public DescribeTableWithControllerServiceBuilder(ControllerServiceDTO controllerServiceDTO) {
this.controllerServiceDTO = controllerServiceDTO;
this.controllerServiceType = controllerServiceDTO != null ? controllerServiceDTO.getType() : null;
this.controllerServiceId = controllerServiceDTO != null ? controllerServiceDTO.getId() : null;
this.controllerServiceName = controllerServiceDTO != null ? controllerServiceDTO.getName() : null;
initializePropertiesFromControllerServiceType();
}
public DescribeTableWithControllerServiceBuilder connectionStringPropertyKey(String connectionStringPropertyKey) {
this.connectionStringPropertyKey = connectionStringPropertyKey;
return this;
}
public DescribeTableWithControllerServiceBuilder userNamePropertyKey(String userNamePropertyKey) {
this.userNamePropertyKey = userNamePropertyKey;
return this;
}
public DescribeTableWithControllerServiceBuilder passwordPropertyKey(String passwordPropertyKey) {
this.passwordPropertyKey = passwordPropertyKey;
return this;
}
public DescribeTableWithControllerServiceBuilder driverClassNamePropertyKey(String driverClassNamePropertyKey) {
this.driverClassNamePropertyKey = driverClassNamePropertyKey;
return this;
}
public DescribeTableWithControllerServiceBuilder controllerServiceType(String controllerServiceType) {
this.controllerServiceType = controllerServiceType;
return this;
}
// Maps the known controller-service implementations to their NiFi property names.
// Unknown types leave the keys null; callers may set them explicitly.
private void initializePropertiesFromControllerServiceType() {
if ("org.apache.nifi.dbcp.DBCPConnectionPool".equalsIgnoreCase(controllerServiceType)) {
this.connectionStringPropertyKey = "Database Connection URL";
this.userNamePropertyKey = "Database User";
this.passwordPropertyKey = "Password";
this.driverClassNamePropertyKey = "Database Driver Class Name";
} else if ("com.thinkbiganalytics.nifi.v2.sqoop.StandardSqoopConnectionService".equalsIgnoreCase(controllerServiceType)) {
this.connectionStringPropertyKey = "Source Connection String";
this.userNamePropertyKey = "Source User Name";
this.passwordPropertyKey = "Password"; // users will need to add this as a different property to the application.properties file
}
}
// NOTE(review): replacing the DTO here does not re-derive type/id/name or the
// property keys that the constructor pre-populated -- confirm callers expect that.
public DescribeTableWithControllerServiceBuilder controllerService(ControllerServiceDTO controllerServiceDTO) {
this.controllerServiceDTO = controllerServiceDTO;
return this;
}
public DescribeTableWithControllerServiceBuilder controllerServiceName(String controllerServiceName) {
this.controllerServiceName = controllerServiceName;
return this;
}
public DescribeTableWithControllerServiceBuilder controllerServiceId(String controllerServiceId) {
this.controllerServiceId = controllerServiceId;
return this;
}
public DescribeTableWithControllerServiceBuilder tableName(String tableName) {
this.tableName = tableName;
return this;
}
public DescribeTableWithControllerServiceBuilder schemaName(String schemaName) {
this.schemaName = schemaName;
return this;
}
public DescribeTableWithControllerServiceBuilder password(String password) {
this.password = password;
return this;
}
public DescribeTableWithControllerServiceBuilder useEnvironmentProperties(boolean useEnvironmentProperties) {
this.useEnvironmentProperties = useEnvironmentProperties;
return this;
}
// Copies every builder field onto a new value object; no validation is performed.
public DescribeTableWithControllerService build() {
DescribeTableWithControllerService serviceProperties = new DescribeTableWithControllerService();
serviceProperties.setConnectionStringPropertyKey(this.connectionStringPropertyKey);
serviceProperties.setControllerServiceName(this.controllerServiceName);
serviceProperties.setControllerServiceId(this.controllerServiceId);
serviceProperties.setControllerServiceType(this.controllerServiceType);
serviceProperties.setSchemaName(schemaName);
serviceProperties.setTableName(tableName);
serviceProperties.setUserNamePropertyKey(userNamePropertyKey);
serviceProperties.setPasswordPropertyKey(passwordPropertyKey);
serviceProperties.setControllerServiceDTO(this.controllerServiceDTO);
serviceProperties.setDriverClassNamePropertyKey(this.driverClassNamePropertyKey);
serviceProperties.setPassword(password);
serviceProperties.setUseEnvironmentProperties(useEnvironmentProperties);
return serviceProperties;
}
}
public static class DescribeTableWithControllerService {
private String connectionStringPropertyKey;
private String userNamePropertyKey;
private String passwordPropertyKey;
private String driverClassNamePropertyKey;
private String controllerServiceType;
private String controllerServiceName;
private String controllerServiceId;
private String tableName;
private String schemaName;
private ControllerServiceDTO controllerServiceDTO;
private String password;
private boolean useEnvironmentProperties;
// --- Plain accessors; no validation or side effects. ---
public String getConnectionStringPropertyKey() {
return connectionStringPropertyKey;
}
public void setConnectionStringPropertyKey(String connectionStringPropertyKey) {
this.connectionStringPropertyKey = connectionStringPropertyKey;
}
public String getUserNamePropertyKey() {
return userNamePropertyKey;
}
public void setUserNamePropertyKey(String userNamePropertyKey) {
this.userNamePropertyKey = userNamePropertyKey;
}
public String getPasswordPropertyKey() {
return passwordPropertyKey;
}
public void setPasswordPropertyKey(String passwordPropertyKey) {
this.passwordPropertyKey = passwordPropertyKey;
}
public String getDriverClassNamePropertyKey() {
return driverClassNamePropertyKey;
}
public void setDriverClassNamePropertyKey(String driverClassNamePropertyKey) {
this.driverClassNamePropertyKey = driverClassNamePropertyKey;
}
public String getControllerServiceType() {
return controllerServiceType;
}
public void setControllerServiceType(String controllerServiceType) {
this.controllerServiceType = controllerServiceType;
}
public String getControllerServiceName() {
return controllerServiceName;
}
public void setControllerServiceName(String controllerServiceName) {
this.controllerServiceName = controllerServiceName;
}
public String getTableName() {
return tableName;
}
public void setTableName(String tableName) {
this.tableName = tableName;
}
public String getSchemaName() {
return schemaName;
}
public void setSchemaName(String schemaName) {
this.schemaName = schemaName;
}
public String getControllerServiceId() {
return controllerServiceId;
}
public void setControllerServiceId(String controllerServiceId) {
this.controllerServiceId = controllerServiceId;
}
public ControllerServiceDTO getControllerServiceDTO() {
return controllerServiceDTO;
}
public void setControllerServiceDTO(ControllerServiceDTO controllerServiceDTO) {
this.controllerServiceDTO = controllerServiceDTO;
}
// May be null; a non-null value overrides the NiFi/environment-resolved password.
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
// When true, Kylo environment properties are merged over the NiFi-declared ones.
public boolean useEnvironmentProperties() {
return useEnvironmentProperties;
}
public void setUseEnvironmentProperties(boolean useEnvironmentProperties) {
this.useEnvironmentProperties = useEnvironmentProperties;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DescribeTableWithControllerService that = (DescribeTableWithControllerService) o;
if (connectionStringPropertyKey != null ? !connectionStringPropertyKey.equals(that.connectionStringPropertyKey) : that.connectionStringPropertyKey != null) {
return false;
}
if (controllerServiceName != null ? !controllerServiceName.equals(that.controllerServiceName) : that.controllerServiceName != null) {
return false;
}
if (controllerServiceId != null ? !controllerServiceId.equals(that.controllerServiceId) : that.controllerServiceId != null) {
return false;
}
return schemaName != null ? schemaName.equals(that.schemaName) : that.schemaName == null;
}
@Override
public int hashCode() {
int result = connectionStringPropertyKey != null ? connectionStringPropertyKey.hashCode() : 0;
result = 31 * result + (controllerServiceName != null ? controllerServiceName.hashCode() : 0);
result = 31 * result + (controllerServiceId != null ? controllerServiceId.hashCode() : 0);
result = 31 * result + (schemaName != null ? schemaName.hashCode() : 0);
return result;
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.elasticsearch.painless.Definition.Method;
import org.elasticsearch.painless.Definition.RuntimeClass;
import java.lang.invoke.CallSite;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.invoke.MethodType;
import java.util.BitSet;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Support for dynamic type (def).
* <p>
* Dynamic types can invoke methods, load/store fields, and be passed as parameters to operators without
* compile-time type information.
* <p>
* Dynamic methods, loads, stores, and array/list/map load/stores involve locating the appropriate field
* or method depending on the receiver's class. For these, we emit an {@code invokedynamic} instruction that,
* for each new type encountered will query a corresponding {@code lookupXXX} method to retrieve the appropriate
* method. In most cases, the {@code lookupXXX} methods here will only be called once for a given call site, because
* caching ({@link DefBootstrap}) generally works: usually all objects at any call site will be consistently
* the same type (or just a few types). In extreme cases, if there is type explosion, they may be called every
* single time, but simplicity is still more valuable than performance in this code.
*/
public final class Def {
// TODO: Once Java has a factory for those in java.lang.invoke.MethodHandles, use it:
/** Helper class for isolating MethodHandles and methods to get the length of arrays
 * (to emulate a "arraystore" bytecode using MethodHandles).
 * See: https://bugs.openjdk.java.net/browse/JDK-8156915
 */
@SuppressWarnings("unused") // the getArrayLength() overloads are looked up reflectively below, javac just does not know :)
private static final class ArrayLengthHelper {
    private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();

    /** one length-reader handle per supported array type, resolved once at class load */
    private static final Map<Class<?>,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap(
        Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class,
                  char[].class, float[].class, double[].class, Object[].class)
              .collect(Collectors.toMap(Function.identity(), arrayClass -> {
                  try {
                      return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "getArrayLength",
                              MethodType.methodType(int.class, arrayClass));
                  } catch (ReflectiveOperationException e) {
                      // every overload exists right below; failure would be a JDK/codegen bug
                      throw new AssertionError(e);
                  }
              }))
    );
    private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class);

    static int getArrayLength(final boolean[] array) { return array.length; }
    static int getArrayLength(final byte[] array)    { return array.length; }
    static int getArrayLength(final short[] array)   { return array.length; }
    static int getArrayLength(final int[] array)     { return array.length; }
    static int getArrayLength(final long[] array)    { return array.length; }
    static int getArrayLength(final char[] array)    { return array.length; }
    static int getArrayLength(final float[] array)   { return array.length; }
    static int getArrayLength(final double[] array)  { return array.length; }
    static int getArrayLength(final Object[] array)  { return array.length; }

    /** Returns the length getter for {@code arrayType}; non-primitive arrays reuse the Object[] handle. */
    static MethodHandle arrayLengthGetter(Class<?> arrayType) {
        if (!arrayType.isArray()) {
            throw new IllegalArgumentException("type must be an array");
        }
        MethodHandle mapped = ARRAY_TYPE_MH_MAPPING.get(arrayType);
        if (mapped != null) {
            return mapped;
        }
        // e.g. String[]: adapt the Object[] handle to the precise array type
        return OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType));
    }

    private ArrayLengthHelper() {}
}
/** pointer to Map.get(Object) */
private static final MethodHandle MAP_GET;
/** pointer to Map.put(Object,Object) */
private static final MethodHandle MAP_PUT;
/** pointer to List.get(int) */
private static final MethodHandle LIST_GET;
/** pointer to List.set(int,Object) */
private static final MethodHandle LIST_SET;
/** pointer to Iterable.iterator() */
private static final MethodHandle ITERATOR;
/** pointer to {@link Def#mapIndexNormalize}. */
private static final MethodHandle MAP_INDEX_NORMALIZE;
/** pointer to {@link Def#listIndexNormalize}. */
private static final MethodHandle LIST_INDEX_NORMALIZE;
/** factory for arraylength MethodHandle (intrinsic) from Java 9 (pkg-private for tests) */
static final MethodHandle JAVA9_ARRAY_LENGTH_MH_FACTORY;
static {
final Lookup lookup = MethodHandles.publicLookup();
try {
// resolve the handles above once at class load; these are all public JDK methods,
// so a failure here can only mean an incompatible JDK and is fatal
MAP_GET = lookup.findVirtual(Map.class , "get", MethodType.methodType(Object.class, Object.class));
MAP_PUT = lookup.findVirtual(Map.class , "put", MethodType.methodType(Object.class, Object.class, Object.class));
LIST_GET = lookup.findVirtual(List.class, "get", MethodType.methodType(Object.class, int.class));
LIST_SET = lookup.findVirtual(List.class, "set", MethodType.methodType(Object.class, int.class, Object.class));
ITERATOR = lookup.findVirtual(Iterable.class, "iterator", MethodType.methodType(Iterator.class));
MAP_INDEX_NORMALIZE = lookup.findStatic(Def.class, "mapIndexNormalize",
MethodType.methodType(Object.class, Map.class, Object.class));
LIST_INDEX_NORMALIZE = lookup.findStatic(Def.class, "listIndexNormalize",
MethodType.methodType(int.class, List.class, int.class));
} catch (final ReflectiveOperationException roe) {
throw new AssertionError(roe);
}
// lookup up the factory for arraylength MethodHandle (intrinsic) from Java 9:
// https://bugs.openjdk.java.net/browse/JDK-8156915
MethodHandle arrayLengthMHFactory;
try {
arrayLengthMHFactory = lookup.findStatic(MethodHandles.class, "arrayLength",
MethodType.methodType(MethodHandle.class, Class.class));
} catch (final ReflectiveOperationException roe) {
// not running on Java 9+: leave the factory null so callers fall back to ArrayLengthHelper
arrayLengthMHFactory = null;
}
JAVA9_ARRAY_LENGTH_MH_FACTORY = arrayLengthMHFactory;
}
/** Hack to rethrow unknown Exceptions from {@link MethodHandle#invokeExact}: */
@SuppressWarnings("unchecked")
static <T extends Throwable> void rethrow(Throwable t) throws T {
// the unchecked cast to T is erased at runtime, so this rethrows t as-is while
// letting callers avoid declaring checked exceptions they cannot name
throw (T) t;
}
/** Returns an array length getter MethodHandle for the given array type */
static MethodHandle arrayLengthGetter(Class<?> arrayType) {
    if (JAVA9_ARRAY_LENGTH_MH_FACTORY == null) {
        // pre-Java 9: no intrinsic factory, use the reflective per-type helpers
        return ArrayLengthHelper.arrayLengthGetter(arrayType);
    }
    try {
        return (MethodHandle) JAVA9_ARRAY_LENGTH_MH_FACTORY.invokeExact(arrayType);
    } catch (Throwable t) {
        rethrow(t);
        throw new AssertionError(t); // unreachable: rethrow always throws
    }
}
/**
 * Looks up the method entry for a dynamic method call.
 * <p>
 * A dynamic method call for variable {@code x} of type {@code def} looks like:
 * {@code x.method(args...)}
 * <p>
 * This method traverses {@code receiverClass}'s class hierarchy (including directly
 * implemented interfaces) until it finds a matching whitelisted method. If one is not
 * found, it throws an exception. Otherwise it returns the matching method.
 * <p>
 * @param definition the whitelist
 * @param receiverClass Class of the object to invoke the method on.
 * @param name Name of the method.
 * @param arity arity of the method
 * @return matching method to invoke. never returns null.
 * @throws IllegalArgumentException if no matching whitelisted method was found.
 */
static Method lookupMethodInternal(Definition definition, Class<?> receiverClass, String name, int arity) {
    Definition.MethodKey key = new Definition.MethodKey(name, arity);
    // walk up the superclass chain; at each level check the class itself, then the
    // interfaces it directly implements
    Class<?> current = receiverClass;
    while (current != null) {
        RuntimeClass runtimeClass = definition.getRuntimeClass(current);
        if (runtimeClass != null) {
            Method candidate = runtimeClass.methods.get(key);
            if (candidate != null) {
                return candidate;
            }
        }
        for (Class<?> iface : current.getInterfaces()) {
            RuntimeClass ifaceClass = definition.getRuntimeClass(iface);
            if (ifaceClass != null) {
                Method candidate = ifaceClass.methods.get(key);
                if (candidate != null) {
                    return candidate;
                }
            }
        }
        current = current.getSuperclass();
    }
    throw new IllegalArgumentException("Unable to find dynamic method [" + name + "] with [" + arity + "] arguments " +
            "for class [" + receiverClass.getCanonicalName() + "].");
}
/**
 * Looks up handle for a dynamic method call, with lambda replacement
 * <p>
 * A dynamic method call for variable {@code x} of type {@code def} looks like:
 * {@code x.method(args...)}
 * <p>
 * This method traverses {@code receiverClass}'s class hierarchy (including interfaces)
 * until it finds a matching whitelisted method. If one is not found, it throws an exception.
 * Otherwise it returns a handle to the matching method.
 * <p>
 * @param definition the whitelist
 * @param lookup caller's lookup
 * @param callSiteType callsite's type
 * @param receiverClass Class of the object to invoke the method on.
 * @param name Name of the method.
 * @param args bootstrap args passed to callsite
 * @return pointer to matching method to invoke. never returns null.
 * @throws IllegalArgumentException if no matching whitelisted method was found.
 * @throws Throwable if a method reference cannot be converted to a functional interface
 */
static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodType callSiteType,
Class<?> receiverClass, String name, Object args[]) throws Throwable {
// args[0] is the lambda "recipe" built by the compiler; empty means no functional arguments here
String recipeString = (String) args[0];
int numArguments = callSiteType.parameterCount();
// simple case: no lambdas
if (recipeString.isEmpty()) {
return lookupMethodInternal(definition, receiverClass, name, numArguments - 1).handle;
}
// convert recipe string to a bitset for convenience (the code below should be refactored...)
// each recipe char's code point marks the (zero-based) argument position that holds a lambda/reference
BitSet lambdaArgs = new BitSet(recipeString.length());
for (int i = 0; i < recipeString.length(); i++) {
lambdaArgs.set(recipeString.charAt(i));
}
// otherwise: first we have to compute the "real" arity. This is because we have extra arguments:
// e.g. f(a, g(x), b, h(y), i()) looks like f(a, g, x, b, h, y, i).
int arity = callSiteType.parameterCount() - 1;
int upTo = 1;
for (int i = 1; i < numArguments; i++) {
if (lambdaArgs.get(i - 1)) {
// signature is of the form 'type.call,numCaptures': captured values don't count toward arity
String signature = (String) args[upTo++];
int numCaptures = Integer.parseInt(signature.substring(signature.indexOf(',')+1));
arity -= numCaptures;
}
}
// lookup the method with the proper arity, then we know everything (e.g. interface types of parameters).
// based on these we can finally link any remaining lambdas that were deferred.
Method method = lookupMethodInternal(definition, receiverClass, name, arity);
MethodHandle handle = method.handle;
int replaced = 0;
upTo = 1;
for (int i = 1; i < numArguments; i++) {
// its a functional reference, replace the argument with an impl
if (lambdaArgs.get(i - 1)) {
// decode signature of form 'type.call,2'
String signature = (String) args[upTo++];
int separator = signature.lastIndexOf('.');
int separator2 = signature.indexOf(',');
String type = signature.substring(1, separator);
String call = signature.substring(separator+1, separator2);
int numCaptures = Integer.parseInt(signature.substring(separator2+1));
Class<?> captures[] = new Class<?>[numCaptures];
for (int capture = 0; capture < captures.length; capture++) {
captures[capture] = callSiteType.parameterType(i + 1 + capture);
}
MethodHandle filter;
// method.arguments is indexed by the "real" parameter position, hence the -replaced correction
Definition.Type interfaceType = method.arguments.get(i - 1 - replaced);
if (signature.charAt(0) == 'S') {
// the implementation is strongly typed, now that we know the interface type,
// we have everything.
filter = lookupReferenceInternal(definition,
lookup,
interfaceType,
type,
call,
captures);
} else if (signature.charAt(0) == 'D') {
// the interface type is now known, but we need to get the implementation.
// this is dynamically based on the receiver type (and cached separately, underneath
// this cache). It won't blow up since we never nest here (just references)
MethodType nestedType = MethodType.methodType(interfaceType.clazz, captures);
CallSite nested = DefBootstrap.bootstrap(definition,
lookup,
call,
nestedType,
0,
DefBootstrap.REFERENCE,
interfaceType.name);
filter = nested.dynamicInvoker();
} else {
// first signature char must be 'S' (static) or 'D' (dynamic); anything else is a compiler bug
throw new AssertionError();
}
// the filter now ignores the signature (placeholder) on the stack
filter = MethodHandles.dropArguments(filter, 0, String.class);
handle = MethodHandles.collectArguments(handle, i, filter);
// skip past the captured arguments we just consumed
i += numCaptures;
replaced += numCaptures;
}
}
return handle;
}
/**
 * Returns an implementation of interfaceClass that calls receiverClass.name
 * <p>
 * This is just like LambdaMetaFactory, only with a dynamic type: the interface type is
 * already known, so we only need to find the matching implementation method on the
 * receiver's type.
 */
static MethodHandle lookupReference(Definition definition, Lookup lookup, String interfaceClass,
                                    Class<?> receiverClass, String name) throws Throwable {
    Definition.Type interfaceType = definition.getType(interfaceClass);
    Method interfaceMethod = interfaceType.struct.getFunctionalMethod();
    if (interfaceMethod == null) {
        throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
    }
    // the implementation must accept exactly as many arguments as the interface's single method declares
    Method implMethod = lookupMethodInternal(definition, receiverClass, name, interfaceMethod.arguments.size());
    return lookupReferenceInternal(definition, lookup, interfaceType,
            implMethod.owner.name, implMethod.name, receiverClass);
}
/** Returns a method handle to an implementation of clazz, given method reference signature. */
private static MethodHandle lookupReferenceInternal(Definition definition, Lookup lookup,
Definition.Type clazz, String type, String call, Class<?>... captures)
throws Throwable {
final FunctionRef ref;
if ("this".equals(type)) {
// user written method
Method interfaceMethod = clazz.struct.getFunctionalMethod();
if (interfaceMethod == null) {
throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
"to [" + clazz.name + "], not a functional interface");
}
int arity = interfaceMethod.arguments.size() + captures.length;
final MethodHandle handle;
try {
// user functions are exposed as static MethodHandle fields named via getUserFunctionHandleFieldName
MethodHandle accessor = lookup.findStaticGetter(lookup.lookupClass(),
getUserFunctionHandleFieldName(call, arity),
MethodHandle.class);
handle = (MethodHandle)accessor.invokeExact();
} catch (NoSuchFieldException | IllegalAccessException e) {
// is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail
// because the arity does not match the expected interface type.
if (call.contains("$")) {
throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name +
"] in [" + clazz.clazz + "]");
}
throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");
}
ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length);
} else {
// whitelist lookup
ref = new FunctionRef(definition, clazz, type, call, captures.length);
}
// link the reference exactly like javac's LambdaMetafactory would, then adapt the
// factory handle so its return/capture types match what the call site expects
final CallSite callSite = LambdaBootstrap.lambdaBootstrap(
lookup,
ref.interfaceMethodName,
ref.factoryMethodType,
ref.interfaceMethodType,
ref.delegateClassName,
ref.delegateInvokeType,
ref.delegateMethodName,
ref.delegateMethodType
);
return callSite.dynamicInvoker().asType(MethodType.methodType(clazz.clazz, captures));
}
/** Gets the field name used to look up the MethodHandle for a user-written function. */
public static String getUserFunctionHandleFieldName(String name, int arity) {
    // fields are named handle$<name>$<arity> so overloads with different arities coexist
    StringBuilder field = new StringBuilder("handle$");
    field.append(name).append('$').append(arity);
    return field.toString();
}
/**
 * Looks up handle for a dynamic field getter (field load)
 * <p>
 * A dynamic field load for variable {@code x} of type {@code def} looks like:
 * {@code y = x.field}
 * <p>
 * The following field loads are allowed:
 * <ul>
 * <li>Whitelisted {@code field} from receiver's class or any superclasses.
 * <li>Whitelisted method named {@code getField()} from receiver's class/superclasses/interfaces.
 * <li>Whitelisted method named {@code isField()} from receiver's class/superclasses/interfaces.
 * <li>The {@code length} field of an array.
 * <li>The value corresponding to a map key named {@code field} when the receiver is a Map.
 * <li>The value in a list at element {@code field} (integer) when the receiver is a List.
 * </ul>
 * <p>
 * This method traverses {@code receiverClass}'s class hierarchy (including interfaces)
 * until it finds a matching whitelisted getter. If one is not found, it throws an exception.
 * Otherwise it returns a handle to the matching getter.
 * <p>
 * @param definition the whitelist
 * @param receiverClass Class of the object to retrieve the field from.
 * @param name Name of the field.
 * @return pointer to matching field. never returns null.
 * @throws IllegalArgumentException if no matching whitelisted field was found.
 */
static MethodHandle lookupGetter(Definition definition, Class<?> receiverClass, String name) {
    // whitelisted getters always win over the array/map/list shortcuts below
    for (Class<?> current = receiverClass; current != null; current = current.getSuperclass()) {
        RuntimeClass runtimeClass = definition.getRuntimeClass(current);
        if (runtimeClass != null) {
            MethodHandle getter = runtimeClass.getters.get(name);
            if (getter != null) {
                return getter;
            }
        }
        for (final Class<?> iface : current.getInterfaces()) {
            RuntimeClass ifaceClass = definition.getRuntimeClass(iface);
            if (ifaceClass != null) {
                MethodHandle getter = ifaceClass.getters.get(name);
                if (getter != null) {
                    return getter;
                }
            }
        }
    }
    // special case: arrays, maps, and lists
    if (receiverClass.isArray() && "length".equals(name)) {
        // arrays expose .length as a read-only getter
        return arrayLengthGetter(receiverClass);
    }
    if (Map.class.isAssignableFrom(receiverClass)) {
        // maps allow access like mymap.key: bind the constant key as Map.get's argument
        return MethodHandles.insertArguments(MAP_GET, 1, name);
    }
    if (List.class.isAssignableFrom(receiverClass)) {
        // lists allow access like mylist.0: parse the constant index exactly once
        final int index;
        try {
            index = Integer.parseInt(name);
        } catch (NumberFormatException exception) {
            throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
        }
        return MethodHandles.insertArguments(LIST_GET, 1, index);
    }
    throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " +
            "for class [" + receiverClass.getCanonicalName() + "].");
}
/**
 * Looks up handle for a dynamic field setter (field store)
 * <p>
 * A dynamic field store for variable {@code x} of type {@code def} looks like:
 * {@code x.field = y}
 * <p>
 * The following field stores are allowed:
 * <ul>
 * <li>Whitelisted {@code field} from receiver's class or any superclasses.
 * <li>Whitelisted method named {@code setField()} from receiver's class/superclasses/interfaces.
 * <li>The value corresponding to a map key named {@code field} when the receiver is a Map.
 * <li>The value in a list at element {@code field} (integer) when the receiver is a List.
 * </ul>
 * <p>
 * This method traverses {@code receiverClass}'s class hierarchy (including interfaces)
 * until it finds a matching whitelisted setter. If one is not found, it throws an exception.
 * Otherwise it returns a handle to the matching setter.
 * <p>
 * @param definition the whitelist
 * @param receiverClass Class of the object to retrieve the field from.
 * @param name Name of the field.
 * @return pointer to matching field. never returns null.
 * @throws IllegalArgumentException if no matching whitelisted field was found.
 */
static MethodHandle lookupSetter(Definition definition, Class<?> receiverClass, String name) {
    // whitelisted setters always win over the map/list shortcuts below
    for (Class<?> current = receiverClass; current != null; current = current.getSuperclass()) {
        RuntimeClass runtimeClass = definition.getRuntimeClass(current);
        if (runtimeClass != null) {
            MethodHandle setter = runtimeClass.setters.get(name);
            if (setter != null) {
                return setter;
            }
        }
        for (final Class<?> iface : current.getInterfaces()) {
            RuntimeClass ifaceClass = definition.getRuntimeClass(iface);
            if (ifaceClass != null) {
                MethodHandle setter = ifaceClass.setters.get(name);
                if (setter != null) {
                    return setter;
                }
            }
        }
    }
    // special case: maps, and lists
    if (Map.class.isAssignableFrom(receiverClass)) {
        // maps allow access like mymap.key = v: bind the constant key as Map.put's argument
        return MethodHandles.insertArguments(MAP_PUT, 1, name);
    }
    if (List.class.isAssignableFrom(receiverClass)) {
        // lists allow access like mylist.0 = v: parse the constant index exactly once
        final int index;
        try {
            index = Integer.parseInt(name);
        } catch (final NumberFormatException exception) {
            throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
        }
        return MethodHandles.insertArguments(LIST_SET, 1, index);
    }
    throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " +
            "for class [" + receiverClass.getCanonicalName() + "].");
}
/**
 * Returns a method handle to normalize the index into an array. This is what makes lists and arrays stored in {@code def} support
 * negative offsets.
 * @param receiverClass Class of the array to store the value in
 * @return a MethodHandle that accepts the receiver as first argument, the index as second argument, and returns the normalized index
 * to use with array loads and array stores
 */
static MethodHandle lookupIndexNormalize(Class<?> receiverClass) {
    if (receiverClass.isArray()) {
        return ArrayIndexNormalizeHelper.arrayIndexNormalizer(receiverClass);
    }
    if (Map.class.isAssignableFrom(receiverClass)) {
        // identity: map keys must pass through untouched so mymap[key] doesn't do funny things with negative keys
        return MAP_INDEX_NORMALIZE;
    }
    if (List.class.isAssignableFrom(receiverClass)) {
        return LIST_INDEX_NORMALIZE;
    }
    throw new IllegalArgumentException("Attempting to address a non-array-like type " +
            "[" + receiverClass.getCanonicalName() + "] as an array.");
}
/**
 * Returns a method handle to do an array store.
 * @param receiverClass Class of the array to store the value in
 * @return a MethodHandle that accepts the receiver as first argument, the index as second argument,
 * and the value to set as 3rd argument. Return value is undefined and should be ignored.
 */
static MethodHandle lookupArrayStore(Class<?> receiverClass) {
    if (receiverClass.isArray()) {
        return MethodHandles.arrayElementSetter(receiverClass);
    }
    if (Map.class.isAssignableFrom(receiverClass)) {
        // maps allow access like mymap[key] = value
        return MAP_PUT;
    }
    if (List.class.isAssignableFrom(receiverClass)) {
        return LIST_SET;
    }
    throw new IllegalArgumentException("Attempting to address a non-array type " +
            "[" + receiverClass.getCanonicalName() + "] as an array.");
}
/**
 * Returns a method handle to do an array load.
 * @param receiverClass Class of the array to load the value from
 * @return a MethodHandle that accepts the receiver as first argument, the index as second argument.
 * It returns the loaded value.
 */
static MethodHandle lookupArrayLoad(Class<?> receiverClass) {
    if (receiverClass.isArray()) {
        return MethodHandles.arrayElementGetter(receiverClass);
    }
    if (Map.class.isAssignableFrom(receiverClass)) {
        // maps allow access like mymap[key]
        return MAP_GET;
    }
    if (List.class.isAssignableFrom(receiverClass)) {
        return LIST_GET;
    }
    throw new IllegalArgumentException("Attempting to address a non-array type " +
            "[" + receiverClass.getCanonicalName() + "] as an array.");
}
/** Helper class for isolating MethodHandles and methods to get iterators over arrays
 * (to emulate "enhanced for loop" using MethodHandles). These cause boxing, and are not as efficient
 * as they could be, but works.
 */
@SuppressWarnings("unused") // iterator() methods are actually used, javac just does not know :)
private static final class ArrayIteratorHelper {
private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();
// one iterator-factory handle per supported array type, resolved once at class load
private static final Map<Class<?>,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap(
Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class,
char[].class, float[].class, double[].class, Object[].class)
.collect(Collectors.toMap(Function.identity(), type -> {
try {
return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "iterator", MethodType.methodType(Iterator.class, type));
} catch (ReflectiveOperationException e) {
// every overload exists below; failure would be a codegen bug
throw new AssertionError(e);
}
}))
);
private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class);
// one boxing iterator overload per primitive array type plus Object[]; the names and
// signatures are load-bearing: they are looked up reflectively in the map above
static Iterator<Boolean> iterator(final boolean[] array) {
return new Iterator<Boolean>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Boolean next() { return array[index++]; }
};
}
static Iterator<Byte> iterator(final byte[] array) {
return new Iterator<Byte>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Byte next() { return array[index++]; }
};
}
static Iterator<Short> iterator(final short[] array) {
return new Iterator<Short>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Short next() { return array[index++]; }
};
}
static Iterator<Integer> iterator(final int[] array) {
return new Iterator<Integer>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Integer next() { return array[index++]; }
};
}
static Iterator<Long> iterator(final long[] array) {
return new Iterator<Long>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Long next() { return array[index++]; }
};
}
static Iterator<Character> iterator(final char[] array) {
return new Iterator<Character>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Character next() { return array[index++]; }
};
}
static Iterator<Float> iterator(final float[] array) {
return new Iterator<Float>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Float next() { return array[index++]; }
};
}
static Iterator<Double> iterator(final double[] array) {
return new Iterator<Double>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Double next() { return array[index++]; }
};
}
static Iterator<Object> iterator(final Object[] array) {
return new Iterator<Object>() {
int index = 0;
@Override public boolean hasNext() { return index < array.length; }
@Override public Object next() { return array[index++]; }
};
}
// returns the iterator factory for the given array type; non-primitive array types
// (e.g. String[]) reuse the Object[] handle, adapted to the precise type
static MethodHandle newIterator(Class<?> arrayType) {
if (!arrayType.isArray()) {
throw new IllegalArgumentException("type must be an array");
}
return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?
ARRAY_TYPE_MH_MAPPING.get(arrayType) :
OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType));
}
private ArrayIteratorHelper() {}
}
/**
 * Returns a method handle to do iteration (for enhanced for loop)
 * @param receiverClass Class of the array to load the value from
 * @return a MethodHandle that accepts the receiver as first argument, returns iterator
 */
static MethodHandle lookupIterator(Class<?> receiverClass) {
    // Iterable wins over arrays: a real iterator needs no emulation
    if (Iterable.class.isAssignableFrom(receiverClass)) {
        return ITERATOR;
    }
    if (receiverClass.isArray()) {
        return ArrayIteratorHelper.newIterator(receiverClass);
    }
    throw new IllegalArgumentException("Cannot iterate over [" + receiverClass.getCanonicalName() + "]");
}
// Conversion methods for Def to primitive types.
// Implicit conversions only accept widening: each method unboxes the exact source types
// that may legally widen to the target and lets Java's numeric promotion do the rest.
// Any other runtime type produces a ClassCastException from the final cast.
public static boolean DefToboolean(final Object value) {
return (boolean)value;
}
public static byte DefTobyteImplicit(final Object value) {
// only Byte itself can implicitly become byte
return (byte)value;
}
public static short DefToshortImplicit(final Object value) {
if (value instanceof Byte) {
return (byte)value;
} else {
return (short)value;
}
}
public static char DefTocharImplicit(final Object value) {
if (value instanceof Byte) {
// NOTE(review): byte widens to char here even though Java has no implicit byte->char
// conversion -- presumably a deliberate Painless relaxation; confirm against the spec
return (char)(byte)value;
} else {
return (char)value;
}
}
public static int DefTointImplicit(final Object value) {
if (value instanceof Byte) {
return (byte)value;
} else if (value instanceof Short) {
return (short)value;
} else if (value instanceof Character) {
return (char)value;
} else {
return (int)value;
}
}
public static long DefTolongImplicit(final Object value) {
if (value instanceof Byte) {
return (byte)value;
} else if (value instanceof Short) {
return (short)value;
} else if (value instanceof Character) {
return (char)value;
} else if (value instanceof Integer) {
return (int)value;
} else {
return (long)value;
}
}
public static float DefTofloatImplicit(final Object value) {
if (value instanceof Byte) {
return (byte)value;
} else if (value instanceof Short) {
return (short)value;
} else if (value instanceof Character) {
return (char)value;
} else if (value instanceof Integer) {
return (int)value;
} else if (value instanceof Long) {
return (long)value;
} else {
return (float)value;
}
}
public static double DefTodoubleImplicit(final Object value) {
if (value instanceof Byte) {
return (byte)value;
} else if (value instanceof Short) {
return (short)value;
} else if (value instanceof Character) {
return (char)value;
} else if (value instanceof Integer) {
return (int)value;
} else if (value instanceof Long) {
return (long)value;
} else if (value instanceof Float) {
return (float)value;
} else {
return (double)value;
}
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code byte},
 * narrowing {@link Character} or any {@link Number} as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the (possibly narrowed) byte value
 */
public static byte DefTobyteExplicit(final Object value) {
    return value instanceof Character ? (byte)((Character)value).charValue() : ((Number)value).byteValue();
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code short},
 * narrowing {@link Character} or any {@link Number} as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the (possibly narrowed) short value
 */
public static short DefToshortExplicit(final Object value) {
    return value instanceof Character ? (short)((Character)value).charValue() : ((Number)value).shortValue();
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code char},
 * narrowing any {@link Number} through its int value as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the (possibly narrowed) char value
 */
public static char DefTocharExplicit(final Object value) {
    return value instanceof Character ? ((Character)value).charValue() : (char)((Number)value).intValue();
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code int},
 * widening {@link Character} or converting any {@link Number} as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the converted int value
 */
public static int DefTointExplicit(final Object value) {
    return value instanceof Character ? ((Character)value).charValue() : ((Number)value).intValue();
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code long},
 * widening {@link Character} or converting any {@link Number} as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the converted long value
 */
public static long DefTolongExplicit(final Object value) {
    return value instanceof Character ? ((Character)value).charValue() : ((Number)value).longValue();
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code float},
 * widening {@link Character} or converting any {@link Number} as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the converted float value
 */
public static float DefTofloatExplicit(final Object value) {
    return value instanceof Character ? ((Character)value).charValue() : ((Number)value).floatValue();
}
/**
 * Explicitly converts a {@code def} value to a primitive {@code double},
 * widening {@link Character} or converting any {@link Number} as needed.
 *
 * @param value the boxed value; must be a {@link Character} or {@link Number}
 * @return the converted double value
 */
public static double DefTodoubleExplicit(final Object value) {
    return value instanceof Character ? ((Character)value).charValue() : ((Number)value).doubleValue();
}
/**
 * "Normalizes" an index used on a {@code Map}. Maps are keyed directly, so the
 * index is returned unchanged; this exists so maps and lists share a common
 * normalization call site.
 *
 * @param value the map being indexed (unused)
 * @param index the key used to index the map
 * @return {@code index}, untouched
 */
public static Object mapIndexNormalize(final Map<?, ?> value, Object index) {
    return index;
}
/**
 * "Normalizes" an index into a {@code List} by translating negative indexes to
 * positions counted from the end of the list.
 *
 * @param value the list being indexed
 * @param index the possibly negative index
 * @return the non-negative equivalent index
 */
public static int listIndexNormalize(final List<?> value, int index) {
    if (index < 0) {
        return value.size() + index;
    }
    return index;
}
/**
 * Methods to normalize array indices to support negative indices into arrays stored in {@code def}s.
 * Handles are resolved once at class-initialization time and cached per array type.
 */
@SuppressWarnings("unused") // normalizeIndex() methods are actually used through method handles, javac just does not know :)
private static final class ArrayIndexNormalizeHelper {
// Private lookup so findStatic can see the private normalizeIndex overloads below.
private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();
// Maps each supported array type to the handle of its matching normalizeIndex overload.
private static final Map<Class<?>,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap(
Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class,
char[].class, float[].class, double[].class, Object[].class)
.collect(Collectors.toMap(Function.identity(), type -> {
try {
return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "normalizeIndex",
MethodType.methodType(int.class, type, int.class));
} catch (ReflectiveOperationException e) {
// The overloads are declared right below, so lookup failure is a coding error.
throw new AssertionError(e);
}
}))
);
// Fallback handle: any reference-array type can be adapted from the Object[] overload.
private static final MethodHandle OBJECT_ARRAY_MH = ARRAY_TYPE_MH_MAPPING.get(Object[].class);
// Negative indices count back from the end of the array; non-negative pass through.
static int normalizeIndex(final boolean[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final byte[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final short[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final int[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final long[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final char[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final float[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final double[] array, final int index) { return index >= 0 ? index : index + array.length; }
static int normalizeIndex(final Object[] array, final int index) { return index >= 0 ? index : index + array.length; }
/**
 * Returns a handle that normalizes indices for the given array type.
 *
 * @param arrayType the array class; primitive array types use their exact handle,
 *                  other reference-array types reuse the adapted Object[] handle
 * @throws IllegalArgumentException if {@code arrayType} is not an array class
 */
static MethodHandle arrayIndexNormalizer(Class<?> arrayType) {
if (!arrayType.isArray()) {
throw new IllegalArgumentException("type must be an array");
}
return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?
ARRAY_TYPE_MH_MAPPING.get(arrayType) :
OBJECT_ARRAY_MH.asType(OBJECT_ARRAY_MH.type().changeParameterType(0, arrayType));
}
// Static utility holder; never instantiated.
private ArrayIndexNormalizeHelper() {}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.storage.file.datalake.specialized;
import com.azure.core.annotation.ServiceClientBuilder;
import com.azure.core.http.HttpPipeline;
import com.azure.storage.blob.BlobContainerClientBuilder;
import com.azure.storage.blob.specialized.BlobLeaseClientBuilder;
import com.azure.storage.blob.specialized.SpecializedBlobClientBuilder;
import com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient;
import com.azure.storage.file.datalake.DataLakeDirectoryClient;
import com.azure.storage.file.datalake.DataLakeFileAsyncClient;
import com.azure.storage.file.datalake.DataLakeFileClient;
import com.azure.storage.file.datalake.DataLakeFileSystemAsyncClient;
import com.azure.storage.file.datalake.DataLakeFileSystemClient;
import com.azure.storage.file.datalake.DataLakeServiceVersion;
import com.azure.storage.file.datalake.implementation.util.DataLakeImplUtils;
import com.azure.storage.file.datalake.implementation.util.TransformUtils;
import java.net.URL;
import java.util.Objects;
import java.util.UUID;
/**
 * This class provides a fluent builder API to help aid the configuration and instantiation of Storage Lease
 * clients. Lease clients are able to interact with both file system and path clients and act as a supplement client. A
 * new instance of {@link DataLakeLeaseClient} and {@link DataLakeLeaseAsyncClient} are constructed every time
 * {@link #buildClient() buildClient} and {@link #buildAsyncClient() buildAsyncClient} are called
 * respectively.
 *
 * <p>When a client is instantiated and a {@link #leaseId(String) leaseId} hasn't been set a {@link UUID} will be used
 * as the lease identifier.</p>
 *
 * <p><strong>Instantiating LeaseClients</strong></p>
 *
 * <!-- src_embed com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.syncInstantiationWithFileAndLeaseId -->
 * <pre>
 * DataLakeLeaseClient dataLakeLeaseClient = new DataLakeLeaseClientBuilder&#40;&#41;
 *     .fileClient&#40;fileClient&#41;
 *     .leaseId&#40;leaseId&#41;
 *     .buildClient&#40;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.syncInstantiationWithFileAndLeaseId -->
 *
 * <!-- src_embed com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.syncInstantiationWithDirectoryAndLeaseId -->
 * <pre>
 * DataLakeLeaseClient dataLakeLeaseClient = new DataLakeLeaseClientBuilder&#40;&#41;
 *     .directoryClient&#40;directoryClient&#41;
 *     .leaseId&#40;leaseId&#41;
 *     .buildClient&#40;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.syncInstantiationWithDirectoryAndLeaseId -->
 *
 * <!-- src_embed com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.syncInstantiationWithFileSystemAndLeaseId -->
 * <pre>
 * DataLakeLeaseClient dataLakeLeaseClient = new DataLakeLeaseClientBuilder&#40;&#41;
 *     .fileSystemClient&#40;dataLakeFileSystemClient&#41;
 *     .leaseId&#40;leaseId&#41;
 *     .buildClient&#40;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.syncInstantiationWithFileSystemAndLeaseId -->
 *
 * <p><strong>Instantiating LeaseAsyncClients</strong></p>
 *
 * <!-- src_embed com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.asyncInstantiationWithFileAndLeaseId -->
 * <pre>
 * DataLakeLeaseAsyncClient dataLakeLeaseAsyncClient = new DataLakeLeaseClientBuilder&#40;&#41;
 *     .fileAsyncClient&#40;fileAsyncClient&#41;
 *     .leaseId&#40;leaseId&#41;
 *     .buildAsyncClient&#40;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.asyncInstantiationWithFileAndLeaseId -->
 *
 * <!-- src_embed com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.asyncInstantiationWithDirectoryAndLeaseId -->
 * <pre>
 * DataLakeLeaseAsyncClient dataLakeLeaseAsyncClient = new DataLakeLeaseClientBuilder&#40;&#41;
 *     .directoryAsyncClient&#40;directoryAsyncClient&#41;
 *     .leaseId&#40;leaseId&#41;
 *     .buildAsyncClient&#40;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.asyncInstantiationWithDirectoryAndLeaseId -->
 *
 * <!-- src_embed com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.asyncInstantiationWithFileSystemAndLeaseId -->
 * <pre>
 * DataLakeLeaseAsyncClient dataLakeLeaseAsyncClient = new DataLakeLeaseClientBuilder&#40;&#41;
 *     .fileSystemAsyncClient&#40;dataLakeFileSystemAsyncClient&#41;
 *     .leaseId&#40;leaseId&#41;
 *     .buildAsyncClient&#40;&#41;;
 * </pre>
 * <!-- end com.azure.storage.file.datalake.specialized.DataLakeLeaseClientBuilder.asyncInstantiationWithFileSystemAndLeaseId -->
 *
 * @see DataLakeLeaseClient
 * @see DataLakeLeaseAsyncClient
 */
@ServiceClientBuilder(serviceClients = { DataLakeLeaseClient.class, DataLakeLeaseAsyncClient.class })
public final class DataLakeLeaseClientBuilder {
// All configuration and client construction is delegated to the equivalent blob lease builder,
// pointed at the "blob" translation of the Data Lake "dfs" endpoint.
final BlobLeaseClientBuilder blobLeaseClientBuilder;
/**
 * Creates a new instance of {@link DataLakeLeaseClientBuilder}.
 */
public DataLakeLeaseClientBuilder() {
blobLeaseClientBuilder = new BlobLeaseClientBuilder();
}
/**
 * Creates a {@link DataLakeLeaseClient} based on the configurations set in the builder.
 *
 * @return a {@link DataLakeLeaseClient} based on the configurations in this builder.
 */
public DataLakeLeaseClient buildClient() {
return new DataLakeLeaseClient(blobLeaseClientBuilder.buildClient());
}
/**
 * Creates a {@link DataLakeLeaseAsyncClient} based on the configurations set in the builder.
 *
 * @return a {@link DataLakeLeaseAsyncClient} based on the configurations in this builder.
 */
public DataLakeLeaseAsyncClient buildAsyncClient() {
return new DataLakeLeaseAsyncClient(blobLeaseClientBuilder.buildAsyncClient());
}
/**
 * Configures the builder based on the passed {@link DataLakeFileClient}. This will set the {@link HttpPipeline} and
 * {@link URL} that are used to interact with the service.
 *
 * @param dataLakeFileClient DataLakeFileClient used to configure the builder.
 * @return the updated DataLakeLeaseClientBuilder object
 * @throws NullPointerException If {@code DataLakeFileClient} is {@code null}.
 */
public DataLakeLeaseClientBuilder fileClient(DataLakeFileClient dataLakeFileClient) {
Objects.requireNonNull(dataLakeFileClient);
blobLeaseClientBuilder.blobClient(
getSpecializedBlobClientBuilder(dataLakeFileClient.getFileUrl(),
dataLakeFileClient.getHttpPipeline(), dataLakeFileClient.getServiceVersion())
.buildBlockBlobClient());
return this;
}
/**
 * Configures the builder based on the passed {@link DataLakeFileAsyncClient}. This will set the
 * {@link HttpPipeline} and {@link URL} that are used to interact with the service.
 *
 * @param dataLakeFileAsyncClient DataLakeFileAsyncClient used to configure the builder.
 * @return the updated DataLakeLeaseClientBuilder object
 * @throws NullPointerException If {@code DataLakeFileAsyncClient} is {@code null}.
 */
public DataLakeLeaseClientBuilder fileAsyncClient(DataLakeFileAsyncClient dataLakeFileAsyncClient) {
Objects.requireNonNull(dataLakeFileAsyncClient);
blobLeaseClientBuilder.blobAsyncClient(
getSpecializedBlobClientBuilder(dataLakeFileAsyncClient.getFileUrl(),
dataLakeFileAsyncClient.getHttpPipeline(), dataLakeFileAsyncClient.getServiceVersion())
.buildBlockBlobAsyncClient());
return this;
}
/**
 * Configures the builder based on the passed {@link DataLakeDirectoryClient}. This will set the
 * {@link HttpPipeline} and {@link URL} that are used to interact with the service.
 *
 * @param dataLakeDirectoryClient DataLakeDirectoryClient used to configure the builder.
 * @return the updated DataLakeLeaseClientBuilder object
 * @throws NullPointerException If {@code DataLakeDirectoryClient} is {@code null}.
 */
public DataLakeLeaseClientBuilder directoryClient(DataLakeDirectoryClient dataLakeDirectoryClient) {
Objects.requireNonNull(dataLakeDirectoryClient);
blobLeaseClientBuilder.blobClient(
getSpecializedBlobClientBuilder(dataLakeDirectoryClient.getDirectoryUrl(),
dataLakeDirectoryClient.getHttpPipeline(), dataLakeDirectoryClient.getServiceVersion())
.buildBlockBlobClient());
return this;
}
/**
 * Configures the builder based on the passed {@link DataLakeDirectoryAsyncClient}. This will set the
 * {@link HttpPipeline} and {@link URL} that are used to interact with the service.
 *
 * @param dataLakeDirectoryAsyncClient DataLakeDirectoryAsyncClient used to configure the builder.
 * @return the updated DataLakeLeaseClientBuilder object
 * @throws NullPointerException If {@code DataLakeDirectoryAsyncClient} is {@code null}.
 */
public DataLakeLeaseClientBuilder directoryAsyncClient(DataLakeDirectoryAsyncClient dataLakeDirectoryAsyncClient) {
Objects.requireNonNull(dataLakeDirectoryAsyncClient);
blobLeaseClientBuilder.blobAsyncClient(
getSpecializedBlobClientBuilder(dataLakeDirectoryAsyncClient.getDirectoryUrl(),
dataLakeDirectoryAsyncClient.getHttpPipeline(), dataLakeDirectoryAsyncClient.getServiceVersion())
.buildBlockBlobAsyncClient());
return this;
}
/**
 * Configures the builder based on the passed {@link DataLakeFileSystemClient}. This will set the
 * {@link HttpPipeline} and {@link URL} that are used to interact with the service.
 *
 * @param dataLakeFileSystemClient DataLakeFileSystemClient used to configure the builder.
 * @return the updated DataLakeLeaseClientBuilder object
 * @throws NullPointerException If {@code dataLakeFileSystemClient} is {@code null}.
 */
public DataLakeLeaseClientBuilder fileSystemClient(DataLakeFileSystemClient dataLakeFileSystemClient) {
Objects.requireNonNull(dataLakeFileSystemClient);
blobLeaseClientBuilder.containerClient(
getBlobContainerClientBuilder(dataLakeFileSystemClient.getFileSystemUrl(),
dataLakeFileSystemClient.getHttpPipeline(), dataLakeFileSystemClient.getServiceVersion())
.buildClient());
return this;
}
/**
 * Configures the builder based on the passed {@link DataLakeFileSystemAsyncClient}. This will set the {@link
 * HttpPipeline} and {@link URL} that are used to interact with the service.
 *
 * @param dataLakeFileSystemAsyncClient DataLakeFileSystemAsyncClient used to configure the builder.
 * @return the updated DataLakeLeaseClientBuilder object
 * @throws NullPointerException If {@code dataLakeFileSystemAsyncClient} is {@code null}.
 */
public DataLakeLeaseClientBuilder fileSystemAsyncClient(
DataLakeFileSystemAsyncClient dataLakeFileSystemAsyncClient) {
Objects.requireNonNull(dataLakeFileSystemAsyncClient);
blobLeaseClientBuilder.containerClient(
getBlobContainerClientBuilder(dataLakeFileSystemAsyncClient.getFileSystemUrl(),
dataLakeFileSystemAsyncClient.getHttpPipeline(),
dataLakeFileSystemAsyncClient.getServiceVersion()).buildClient());
return this;
}
/**
 * Sets the identifier for the lease.
 *
 * <p>If a lease ID isn't set then a {@link UUID} will be used.</p>
 *
 * @param leaseId Identifier for the lease.
 * @return the updated DataLakeLeaseClientBuilder object
 */
public DataLakeLeaseClientBuilder leaseId(String leaseId) {
blobLeaseClientBuilder.leaseId(leaseId);
return this;
}
/**
 * Initializes a {@link SpecializedBlobClientBuilder}
 * @param dfsEndpoint The endpoint for the {@link SpecializedBlobClientBuilder}
 * @param pipeline The {@link HttpPipeline} for the {@link SpecializedBlobClientBuilder}
 * @param version The {@link DataLakeServiceVersion} mapped to the matching blob service version
 * @return the {@link SpecializedBlobClientBuilder}
 */
private SpecializedBlobClientBuilder getSpecializedBlobClientBuilder(String dfsEndpoint, HttpPipeline pipeline,
DataLakeServiceVersion version) {
// Lease operations are served by the blob endpoint, so swap "dfs" for "blob" in the host.
String blobEndpoint = DataLakeImplUtils.endpointToDesiredEndpoint(dfsEndpoint, "blob", "dfs");
return new SpecializedBlobClientBuilder()
.pipeline(pipeline)
.endpoint(blobEndpoint)
.serviceVersion(TransformUtils.toBlobServiceVersion(version));
}
/**
 * Initializes a {@link BlobContainerClientBuilder}
 * @param dfsEndpoint The endpoint for the {@link BlobContainerClientBuilder}
 * @param pipeline The {@link HttpPipeline} for the {@link BlobContainerClientBuilder}
 * @param version The {@link DataLakeServiceVersion} mapped to the matching blob service version
 * @return the {@link BlobContainerClientBuilder}
 */
private BlobContainerClientBuilder getBlobContainerClientBuilder(String dfsEndpoint, HttpPipeline pipeline,
DataLakeServiceVersion version) {
// Lease operations are served by the blob endpoint, so swap "dfs" for "blob" in the host.
String blobEndpoint = DataLakeImplUtils.endpointToDesiredEndpoint(dfsEndpoint, "blob", "dfs");
return new BlobContainerClientBuilder()
.pipeline(pipeline)
.endpoint(blobEndpoint)
.serviceVersion(TransformUtils.toBlobServiceVersion(version));
}
}
| |
package com.atlassian.plugin.osgi.factory;
import com.atlassian.plugin.AutowireCapablePlugin;
import com.atlassian.plugin.IllegalPluginStateException;
import com.atlassian.plugin.ModuleDescriptor;
import com.atlassian.plugin.PluginArtifact;
import com.atlassian.plugin.PluginArtifactBackedPlugin;
import com.atlassian.plugin.PluginState;
import com.atlassian.plugin.event.PluginEventListener;
import com.atlassian.plugin.event.PluginEventManager;
import com.atlassian.plugin.event.events.PluginContainerFailedEvent;
import com.atlassian.plugin.event.events.PluginContainerRefreshedEvent;
import com.atlassian.plugin.event.events.PluginFrameworkShutdownEvent;
import com.atlassian.plugin.event.events.PluginFrameworkStartedEvent;
import com.atlassian.plugin.event.events.PluginRefreshedEvent;
import com.atlassian.plugin.impl.AbstractPlugin;
import com.atlassian.plugin.module.ContainerAccessor;
import com.atlassian.plugin.module.ContainerManagedPlugin;
import com.atlassian.plugin.osgi.container.OsgiContainerException;
import com.atlassian.plugin.osgi.container.OsgiContainerManager;
import com.atlassian.plugin.osgi.event.PluginServiceDependencyWaitEndedEvent;
import com.atlassian.plugin.osgi.event.PluginServiceDependencyWaitStartingEvent;
import com.atlassian.plugin.osgi.event.PluginServiceDependencyWaitTimedOutEvent;
import com.atlassian.plugin.osgi.external.ListableModuleDescriptorFactory;
import com.atlassian.plugin.util.PluginUtils;
import org.apache.commons.lang.Validate;
import org.dom4j.Element;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.BundleException;
import org.osgi.framework.BundleListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.SynchronousBundleListener;
import org.osgi.service.packageadmin.PackageAdmin;
import org.osgi.util.tracker.ServiceTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
/**
* Plugin that wraps an OSGi bundle that does contain a plugin descriptor. The actual bundle is not created until the
* {@link #install()} method is invoked. Any attempt to access a method that requires a bundle will throw an
* {@link com.atlassian.plugin.IllegalPluginStateException}.
*
* This class uses a {@link OsgiPluginHelper} to represent different behaviors of key methods in different states.
* {@link OsgiPluginUninstalledHelper} implements the methods when the plugin hasn't yet been installed into the
* OSGi container, while {@link OsgiPluginInstalledHelper} implements the methods when the bundle is available. This
* leaves this class to manage the {@link PluginState} and interactions with the event system.
*/
//@Threadsafe
public class OsgiPlugin extends AbstractPlugin implements AutowireCapablePlugin, ContainerManagedPlugin, PluginArtifactBackedPlugin
{
private final Map<String, Element> moduleElements = new HashMap<String, Element>();
private final PluginEventManager pluginEventManager;
private final PackageAdmin packageAdmin;
private final Set<OutstandingDependency> outstandingDependencies = new CopyOnWriteArraySet<OutstandingDependency>();
private volatile boolean treatSpringBeanFactoryCreationAsRefresh = false;
private volatile OsgiPluginHelper helper;
public static final String ATLASSIAN_PLUGIN_KEY = "Atlassian-Plugin-Key";
private final Logger log = LoggerFactory.getLogger(this.getClass());
private final BundleListener bundleStopListener;
private final PluginArtifact originalPluginArtifact;
// Until the framework is actually done starting we want to ignore @RequiresRestart. Where this comes into play
// is when we have one version of a plugin (e.g. via bundled-plugins.zip) installed but then discover a newer
// one in installed-plugins. Clearly we can't "require a restart" between those two stages. And since nothing has
// been published outside of plugins yet (and thus can't be cached by the host app) the @RequiresRestart is
// meaningless.
private volatile boolean frameworkStarted = false;
/**
 * Creates an OSGi-backed plugin in its uninstalled state. The bundle itself is not created until
 * {@link #install()} is invoked.
 *
 * @param key The plugin key, used to match container lifecycle events to this plugin
 * @param mgr The OSGi container manager the bundle will be installed into
 * @param artifact The (possibly transformed) artifact to install
 * @param originalPluginArtifact The original, untransformed artifact backing this plugin
 * @param pluginEventManager The event manager used for plugin lifecycle events
 */
public OsgiPlugin(final String key, final OsgiContainerManager mgr, final PluginArtifact artifact, final PluginArtifact originalPluginArtifact, final PluginEventManager pluginEventManager)
{
    Validate.notNull(key, "The plugin key is required");
    Validate.notNull(mgr, "The osgi container is required");
    // BUGFIX: these two messages were copy-pasted from the container check and
    // reported the wrong missing argument.
    Validate.notNull(artifact, "The plugin artifact is required");
    Validate.notNull(pluginEventManager, "The plugin event manager is required");
    this.originalPluginArtifact = originalPluginArtifact;
    this.helper = new OsgiPluginUninstalledHelper(key, mgr, artifact);
    this.pluginEventManager = pluginEventManager;
    this.packageAdmin = extractPackageAdminFromOsgi(mgr);
    // Synchronous listener so the plugin state flips to DISABLED as soon as the bundle stops
    this.bundleStopListener = new SynchronousBundleListener()
    {
        public void bundleChanged(final BundleEvent bundleEvent)
        {
            if ((bundleEvent.getBundle() == getBundle()) && (bundleEvent.getType() == BundleEvent.STOPPING))
            {
                helper.onDisable();
                setPluginState(PluginState.DISABLED);
            }
        }
    };
}
/**
 * Only used for testing: wires in a pre-built helper and skips OSGi setup entirely.
 *
 * @param pluginEventManager The event manager used for plugin lifecycle events
 * @param helper The helper to use
 */
OsgiPlugin(PluginEventManager pluginEventManager, OsgiPluginHelper helper)
{
    this.pluginEventManager = pluginEventManager;
    this.helper = helper;
    // No OSGi interaction in tests, so the OSGi-derived collaborators stay null
    this.packageAdmin = null;
    this.bundleStopListener = null;
    this.originalPluginArtifact = null;
}
/**
 * Returns the OSGi bundle backing this plugin, as exposed by the current helper.
 *
 * @return The active bundle
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public Bundle getBundle() throws IllegalPluginStateException
{
    return helper.getBundle();
}
/**
 * OSGi-backed plugins can always be uninstalled at runtime.
 *
 * @return always {@code true}
 */
public boolean isUninstallable()
{
    return true;
}
/**
 * OSGi-backed plugins are always loaded dynamically into the container.
 *
 * @return always {@code true}
 */
public boolean isDynamicallyLoaded()
{
    return true;
}
/**
 * OSGi-backed plugins can always have their artifact deleted.
 *
 * @return always {@code true}
 */
public boolean isDeleteable()
{
    return true;
}
/**
 * @return the original, untransformed artifact this plugin was created from
 */
public PluginArtifact getPluginArtifact()
{
    return originalPluginArtifact;
}
/**
 * Loads a class through the plugin's bundle.
 *
 * @param clazz The name of the class to be loaded
 * @param callingClass The class calling the loading (used to help find a classloader)
 * @param <T> The class type
 * @return The class instance, loaded from the OSGi bundle
 * @throws ClassNotFoundException If the class cannot be found
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public <T> Class<T> loadClass(final String clazz, final Class<?> callingClass) throws ClassNotFoundException, IllegalPluginStateException
{
    return helper.loadClass(clazz, callingClass);
}
/**
 * Resolves a resource through the plugin's bundle.
 *
 * @param name The resource name
 * @return The resource URL, null if not found
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public URL getResource(final String name) throws IllegalPluginStateException
{
    return helper.getResource(name);
}
/**
 * Opens a resource from the plugin's bundle as a stream.
 *
 * @param name The name of the resource to be loaded.
 * @return the resource stream, or null if not found
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public InputStream getResourceAsStream(final String name) throws IllegalPluginStateException
{
    return helper.getResourceAsStream(name);
}
/**
 * Returns the classloader used to load classes and resources from the bundle.
 *
 * @return the bundle-backed classloader
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public ClassLoader getClassLoader() throws IllegalPluginStateException
{
    return helper.getClassLoader();
}
/**
 * Called when the spring context for the bundle has failed to be created. The bundle itself is
 * still active, but without a Spring context the plugin cannot be considered enabled, so it is
 * moved to the disabled state.
 *
 * @param event The plugin container failed event
 * @throws com.atlassian.plugin.IllegalPluginStateException If the plugin key hasn't been set yet
 */
@PluginEventListener
public void onSpringContextFailed(final PluginContainerFailedEvent event) throws IllegalPluginStateException
{
    final String key = getKey();
    if (key == null)
    {
        throw new IllegalPluginStateException("Plugin key must be set");
    }
    if (!key.equals(event.getPluginKey()))
    {
        // Event belongs to some other plugin
        return;
    }
    logAndClearOustandingDependencies();
    // TODO: do something with the exception more than logging
    getLog().error("Unable to start the Spring context for plugin " + key, event.getCause());
    setPluginState(PluginState.DISABLED);
}
/**
 * Records that the plugin framework has finished starting; from this point on
 * {@code @RequiresRestart} handling is honored (see {@link #requiresRestart()}).
 *
 * @param event the framework-started event (unused beyond acting as the trigger)
 */
@PluginEventListener
public void onPluginFrameworkStartedEvent(final PluginFrameworkStartedEvent event)
{
    frameworkStarted = true;
}
/**
 * Records that the plugin framework is shutting down, re-disabling
 * {@code @RequiresRestart} handling (see {@link #requiresRestart()}).
 *
 * @param event the framework-shutdown event (unused beyond acting as the trigger)
 */
@PluginEventListener
public void onPluginFrameworkShutdownEvent(final PluginFrameworkShutdownEvent event)
{
    frameworkStarted = false;
}
/**
 * Tracks that this plugin has started waiting on an OSGi service dependency, so the
 * outstanding dependency can be reported if the plugin later fails to start.
 *
 * @param event the wait-starting event; only handled when it targets this plugin's key
 */
@PluginEventListener
public void onServiceDependencyWaitStarting(PluginServiceDependencyWaitStartingEvent event)
{
    final String key = getKey();
    if (key == null || !key.equals(event.getPluginKey()))
    {
        return;
    }
    final OutstandingDependency dep = new OutstandingDependency(event.getBeanName(), String.valueOf(event.getFilter()));
    outstandingDependencies.add(dep);
    getLog().info(generateOutstandingDependencyLogMessage(dep, "Waiting for"));
}
/**
 * Removes a previously tracked OSGi service dependency once it has been satisfied.
 *
 * @param event the wait-ended event; only handled when it targets this plugin's key
 */
@PluginEventListener
public void onServiceDependencyWaitEnded(PluginServiceDependencyWaitEndedEvent event)
{
    final String key = getKey();
    if (key == null || !key.equals(event.getPluginKey()))
    {
        return;
    }
    final OutstandingDependency dep = new OutstandingDependency(event.getBeanName(), String.valueOf(event.getFilter()));
    outstandingDependencies.remove(dep);
    getLog().info(generateOutstandingDependencyLogMessage(dep, "Found"));
}
/**
 * Removes a tracked OSGi service dependency whose wait timed out, logging it as an error.
 *
 * @param event the wait-timed-out event; only handled when it targets this plugin's key
 */
@PluginEventListener
public void onServiceDependencyWaitEnded(PluginServiceDependencyWaitTimedOutEvent event)
{
    if (event.getPluginKey() != null && event.getPluginKey().equals(getKey()))
    {
        OutstandingDependency dep = new OutstandingDependency(event.getBeanName(), String.valueOf(event.getFilter()));
        outstandingDependencies.remove(dep);
        // BUGFIX: the action previously carried a trailing space, and the message builder
        // appends its own separator, producing a double space in the log output.
        getLog().error(generateOutstandingDependencyLogMessage(dep, "Timeout waiting for"));
    }
}
/**
 * Builds a human-readable log line describing an outstanding service dependency.
 *
 * @param dep the dependency being reported
 * @param action leading verb phrase, e.g. "Waiting for" or "Found"
 * @return the formatted message
 */
private String generateOutstandingDependencyLogMessage(OutstandingDependency dep, String action)
{
    return action + " " + "service '" + dep.getBeanName() + "' for plugin '" + getKey() + "' with filter " + dep.getFilter();
}
/**
 * Called when the spring context for the bundle has been created or refreshed. If this is the first time the
 * context has been refreshed, then it is a new context. Otherwise, this means that the bundle has been reloaded,
 * usually due to a dependency upgrade, and a {@link PluginRefreshedEvent} is broadcast.
 *
 * @param event The event
 * @throws com.atlassian.plugin.IllegalPluginStateException If the plugin key hasn't been set yet
 */
@PluginEventListener
public void onSpringContextRefresh(final PluginContainerRefreshedEvent event) throws IllegalPluginStateException
{
    if (getKey() == null)
    {
        throw new IllegalPluginStateException("Plugin key must be set");
    }
    if (getKey().equals(event.getPluginKey()))
    {
        // A usable container means every tracked service dependency was satisfied
        outstandingDependencies.clear();
        helper.setPluginContainer(event.getContainer());
        // Atomically move ENABLING -> ENABLED; if the plugin is in any other non-ENABLED state
        // (e.g. already disabled due to a timeout) the fresh context must be discarded.
        if (!compareAndSetPluginState(PluginState.ENABLING, PluginState.ENABLED) &&
            getPluginState() != PluginState.ENABLED)
        {
            log.warn("Ignoring the Spring context that was just created for plugin " + getKey() + ". The plugin " +
                "is in an invalid state, " + getPluginState() + ", that doesn't support a transition to " +
                "enabled. Most likely, it was disabled due to a timeout.");
            helper.setPluginContainer(null);
            return;
        }
        // Only send refresh event on second creation
        if (treatSpringBeanFactoryCreationAsRefresh)
        {
            pluginEventManager.broadcast(new PluginRefreshedEvent(this));
        }
        else
        {
            // First creation: remember it so any later context creation is treated as a refresh
            treatSpringBeanFactoryCreationAsRefresh = true;
        }
    }
}
/**
 * Creates and autowires the class, using Spring's autodetection algorithm.
 *
 * @param clazz the class to instantiate and wire
 * @param <T> the created type
 * @return the wired instance
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public <T> T autowire(final Class<T> clazz) throws IllegalPluginStateException
{
    return autowire(clazz, AutowireStrategy.AUTOWIRE_AUTODETECT);
}
/**
 * Creates and autowires the class using the given strategy.
 *
 * @param clazz the class to instantiate and wire
 * @param autowireStrategy how dependencies should be resolved
 * @param <T> the created type
 * @return the wired instance
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public <T> T autowire(final Class<T> clazz, final AutowireStrategy autowireStrategy) throws IllegalPluginStateException
{
    return helper.autowire(clazz, autowireStrategy);
}
/**
 * Autowires the instance using Spring's autodetection algorithm.
 *
 * @param instance the existing object to wire
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
// CONSISTENCY FIX: previously declared `throws IllegalStateException`, unlike the javadoc and
// every sibling overload; IllegalPluginStateException is unchecked so callers are unaffected.
public void autowire(final Object instance) throws IllegalPluginStateException
{
    autowire(instance, AutowireStrategy.AUTOWIRE_AUTODETECT);
}
/**
 * Autowires the instance using the given strategy.
 *
 * @param instance the existing object to wire
 * @param autowireStrategy how dependencies should be resolved
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
public void autowire(final Object instance, final AutowireStrategy autowireStrategy) throws IllegalPluginStateException
{
    helper.autowire(instance, autowireStrategy);
}
/**
 * Determines which plugins are required for this one to operate based on tracing the "wires" or packages that
 * are imported by this plugin. Bundles that provide those packages are determined to be required plugins.
 *
 * @return A set of bundle symbolic names, or plugin keys. Empty set if none.
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 * @since 2.2.0
 */
@Override
public Set<String> getRequiredPlugins() throws IllegalPluginStateException
{
    return helper.getRequiredPlugins();
}
/**
 * @return the plugin key, which uniquely identifies this plugin
 */
@Override
public String toString()
{
    return getKey();
}
/**
 * Installs the plugin artifact into OSGi and swaps the uninstalled-state helper
 * for one backed by the live bundle.
 *
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
@Override
protected void installInternal() throws IllegalPluginStateException
{
    final Bundle installed = helper.install();
    helper = new OsgiPluginInstalledHelper(installed, packageAdmin);
}
/**
 * Enables the plugin by setting the OSGi bundle state to enabled.
 *
 * @return {@link PluginState#ENABLED} if spring isn't necessary or {@link PluginState#ENABLING} if we are waiting
 *         on a spring context
 * @throws OsgiContainerException If the underlying OSGi system threw an exception or we tried to enable the bundle
 *                                when it was in an invalid state
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
@Override
protected synchronized PluginState enableInternal() throws OsgiContainerException, IllegalPluginStateException
{
    PluginState stateResult;
    try
    {
        // ACTIVE means the bundle is already running, so nothing to do
        if (getBundle().getState() == Bundle.ACTIVE)
        {
            stateResult = PluginState.ENABLED;
        }
        else if ((getBundle().getState() == Bundle.RESOLVED) || (getBundle().getState() == Bundle.INSTALLED))
        {
            // Subscribe to plugin events so the Spring context refresh/failure callbacks fire
            pluginEventManager.register(this);
            if (!treatSpringBeanFactoryCreationAsRefresh)
            {
                stateResult = PluginState.ENABLING;
                // Set it immediately, since the Spring context refresh event could happen at any time
                setPluginState(stateResult);
            }
            else
            {
                // A context was created before, so starting the bundle alone is enough
                stateResult = PluginState.ENABLED;
            }
            getBundle().start();
            final BundleContext ctx = getBundle().getBundleContext();
            // Track module descriptors and descriptor factories published by this bundle
            helper.onEnable(
                new ServiceTracker(ctx, ModuleDescriptor.class.getName(),
                    new ModuleDescriptorServiceTrackerCustomizer(this, pluginEventManager)),
                new ServiceTracker(ctx, ListableModuleDescriptorFactory.class.getName(),
                    new UnrecognizedModuleDescriptorServiceTrackerCustomizer(this, pluginEventManager)));
            // ensure the bean factory is removed when the bundle is stopped
            ctx.addBundleListener(bundleStopListener);
        }
        else
        {
            throw new OsgiContainerException("Cannot enable the plugin '" + getKey() + "' when the bundle is not in the resolved or installed state: "
                + getBundle().getState() + "(" + getBundle().getBundleId() + ")");
        }
        // Only set state to enabling if it hasn't already been enabled via another thread notifying of a spring
        // application context creation during the execution of this method
        return (getPluginState() != PluginState.ENABLED ? stateResult : PluginState.ENABLED);
    }
    catch (final BundleException e)
    {
        log.error("Detected an error (BundleException) enabling the plugin '" + getKey() + "' : " + e.getMessage() + ". " +
            " This error usually occurs when your plugin imports a package from another bundle with a specific version constraint " +
            "and either the bundle providing that package doesn't meet those version constraints, or there is no bundle " +
            "available that provides the specified package. For more details on how to fix this, see " +
            "http://confluence.atlassian.com/x/1xy6D");
        throw new OsgiContainerException("Cannot start plugin: " + getKey(), e);
    }
}
/**
 * Disables the plugin by changing the bundle state back to resolved. Plugins that require a
 * restart keep their bundle running so the host application can finish the restart cycle.
 *
 * @throws OsgiContainerException If the OSGi system threw an exception
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
@Override
protected synchronized void disableInternal() throws OsgiContainerException, IllegalPluginStateException
{
    // Only disable underlying bundle if this is a truly dynamic plugin
    if (!requiresRestart())
    {
        try
        {
            if (getPluginState() == PluginState.DISABLING)
            {
                // Disabled mid-enable: report any dependencies it was still waiting on
                logAndClearOustandingDependencies();
            }
            helper.onDisable();
            pluginEventManager.unregister(this);
            getBundle().stop();
            // Next enable should treat the first new Spring context as a fresh creation
            treatSpringBeanFactoryCreationAsRefresh = false;
        }
        catch (final BundleException e)
        {
            throw new OsgiContainerException("Cannot stop plugin: " + getKey(), e);
        }
    }
}
/**
 * @return true only once the OSGi framework has started AND this plugin declares
 *         module types that can only take effect after a restart
 */
private boolean requiresRestart()
{
    if (!frameworkStarted)
    {
        return false;
    }
    return PluginUtils.doesPluginRequireRestart(this);
}
/**
 * Logs an error for every Spring dependency that was still unresolved when the plugin
 * was disabled, then forgets them all.
 */
private void logAndClearOustandingDependencies()
{
    for (final OutstandingDependency unresolved : outstandingDependencies)
    {
        getLog().error(generateOutstandingDependencyLogMessage(unresolved, "Never resolved"));
    }
    outstandingDependencies.clear();
}
/**
 * Uninstalls the bundle from the OSGi container, if it is not already uninstalled.
 *
 * @throws OsgiContainerException If the underlying OSGi system threw an exception
 * @throws IllegalPluginStateException if the bundle hasn't been created yet
 */
@Override
protected void uninstallInternal() throws OsgiContainerException, IllegalPluginStateException
{
    try
    {
        if (getBundle().getState() != Bundle.UNINSTALLED)
        {
            pluginEventManager.unregister(this);
            getBundle().uninstall();
            helper.onUninstall();
            setPluginState(PluginState.UNINSTALLED);
        }
    }
    catch (final BundleException e)
    {
        // Chain the BundleException as the cause (it was previously dropped, losing the
        // root-cause stack trace) — consistent with how enable/disable failures are reported.
        throw new OsgiContainerException("Cannot uninstall bundle " + getBundle().getSymbolicName(), e);
    }
}
/**
 * Records a module descriptor XML element for later processing; needed for dynamic module support.
 *
 * @param key The module key
 * @param element The module element
 */
void addModuleDescriptorElement(final String key, final Element element)
{
    this.moduleElements.put(key, element);
}
/**
 * Exposes {@link #removeModuleDescriptor(String)} to package-protected helper classes.
 *
 * @param key The module descriptor key
 */
void clearModuleDescriptor(String key)
{
    // Pure delegation — kept as a separate method purely for visibility reasons.
    this.removeModuleDescriptor(key);
}
/**
 * Gets the module elements used for dynamic module descriptor handling. Returns the live map
 * (no defensive copy) because it is only accessed by package-private helper classes.
 *
 * @return The map of module keys to module XML elements
 */
Map<String, Element> getModuleElements()
{
    return this.moduleElements;
}
/**
 * Extracts the {@link PackageAdmin} service from the OSGi container.
 *
 * @param mgr The OSGi container manager
 * @return The package admin instance, should never be null
 */
private PackageAdmin extractPackageAdminFromOsgi(OsgiContainerManager mgr)
{
    // The system bundle is always at index 0, and its context is assumed to
    // always expose the PackageAdmin service.
    final Bundle systemBundle = mgr.getBundles()[0];
    final BundleContext systemContext = systemBundle.getBundleContext();
    final ServiceReference ref = systemContext.getServiceReference(PackageAdmin.class.getName());
    return (PackageAdmin) systemContext.getService(ref);
}
/**
 * @return the container accessor supplied by the plugin helper
 */
public ContainerAccessor getContainerAccessor()
{
    return this.helper.getContainerAccessor();
}
/**
 * Value object describing a Spring service dependency that has not yet been resolved:
 * the bean awaiting injection and the OSGi service filter it is waiting on.
 * Immutable; equality is based on both fields.
 */
private static class OutstandingDependency
{
    private final String beanName;
    private final String filter;

    public OutstandingDependency(String beanName, String filter)
    {
        this.beanName = beanName;
        this.filter = filter;
    }

    public String getBeanName()
    {
        return beanName;
    }

    public String getFilter()
    {
        return filter;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
        {
            return true;
        }
        if (o == null || getClass() != o.getClass())
        {
            return false;
        }
        OutstandingDependency that = (OutstandingDependency) o;
        if (beanName != null ? !beanName.equals(that.beanName) : that.beanName != null)
        {
            return false;
        }
        // Null-safe comparison: previously filter.equals(...) would NPE for a null filter,
        // even though the constructor never forbids one. Now handled like beanName.
        if (filter != null ? !filter.equals(that.filter) : that.filter != null)
        {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode()
    {
        int result = beanName != null ? beanName.hashCode() : 0;
        // Null-safe, consistent with the null-safe equals above.
        result = 31 * result + (filter != null ? filter.hashCode() : 0);
        return result;
    }
}
}
| |
/*
* Copyright 2016 Federico Tomassetti
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.javaparser.symbolsolver.javaparsermodel.declarations;
import com.github.javaparser.JavaParser;
import com.github.javaparser.ParseException;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.symbolsolver.AbstractTest;
import com.github.javaparser.symbolsolver.javaparser.Navigator;
import com.github.javaparser.symbolsolver.model.declarations.AccessLevel;
import com.github.javaparser.symbolsolver.model.declarations.ConstructorDeclaration;
import com.github.javaparser.symbolsolver.model.declarations.FieldDeclaration;
import com.github.javaparser.symbolsolver.model.declarations.MethodDeclaration;
import com.github.javaparser.symbolsolver.model.resolution.SymbolReference;
import com.github.javaparser.symbolsolver.model.resolution.TypeSolver;
import com.github.javaparser.symbolsolver.model.resolution.UnsolvedSymbolException;
import com.github.javaparser.symbolsolver.model.methods.MethodUsage;
import com.github.javaparser.symbolsolver.model.typesystem.PrimitiveType;
import com.github.javaparser.symbolsolver.model.typesystem.ReferenceType;
import com.github.javaparser.symbolsolver.model.typesystem.ReferenceTypeImpl;
import com.github.javaparser.symbolsolver.reflectionmodel.ReflectionFactory;
import com.github.javaparser.symbolsolver.resolution.typesolvers.CombinedTypeSolver;
import com.github.javaparser.symbolsolver.resolution.typesolvers.JavaParserTypeSolver;
import com.github.javaparser.symbolsolver.resolution.typesolvers.ReflectionTypeSolver;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
public class JavaParserClassDeclarationTest extends AbstractTest {
private TypeSolver typeSolver;
private TypeSolver typeSolverNewCode;
private ReferenceType string;
private ReferenceType listOfBoolean;
@Before
public void setup() {
    // Solver over the "old" JavaParser sources, used by most tests.
    File oldSrc = adaptPath(new File("src/test/resources/javaparser_src/proper_source"));
    CombinedTypeSolver oldSolver = new CombinedTypeSolver();
    oldSolver.add(new ReflectionTypeSolver());
    oldSolver.add(new JavaParserTypeSolver(oldSrc));
    oldSolver.add(new JavaParserTypeSolver(adaptPath(new File("src/test/resources/javaparser_src/generated"))));
    typeSolver = oldSolver;

    // Solver over the "new" JavaParser sources (javaparser-core layout).
    File newSrc = adaptPath(new File("src/test/resources/javaparser_new_src/javaparser-core"));
    CombinedTypeSolver newSolver = new CombinedTypeSolver();
    newSolver.add(new ReflectionTypeSolver());
    newSolver.add(new JavaParserTypeSolver(newSrc));
    newSolver.add(new JavaParserTypeSolver(adaptPath(new File("src/test/resources/javaparser_new_src/javaparser-generated-sources"))));
    typeSolverNewCode = newSolver;

    // Canned reference types (String, List<Boolean>) used by the generic-field tests.
    TypeSolver reflection = new ReflectionTypeSolver();
    string = new ReferenceTypeImpl(reflection.solveType(String.class.getCanonicalName()), reflection);
    ReferenceType booleanType = new ReferenceTypeImpl(reflection.solveType(Boolean.class.getCanonicalName()), reflection);
    listOfBoolean = new ReferenceTypeImpl(reflection.solveType(List.class.getCanonicalName()), ImmutableList.of(booleanType), reflection);
}
///
/// Test misc
///
@Test
public void testIsClass() {
    // CompilationUnit is a concrete class declaration, so isClass() must be true.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(true, decl.isClass());
}
@Test
public void testIsInterface() {
    // A class declaration is never an interface.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(false, decl.isInterface());
}
@Test
public void testIsEnum() {
    // A class declaration is never an enum.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(false, decl.isEnum());
}
@Test
public void testIsTypeVariable() {
    // A class declaration is not a type parameter.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(false, decl.isTypeParameter());
}
@Test
public void testIsType() {
    // Every class declaration is a type.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(true, decl.isType());
}
@Test
public void testAsType() {
    // asType() is the identity conversion for a class declaration.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(decl, decl.asType());
}
@Test
public void testAsClass() {
    // asClass() is the identity conversion for a class declaration.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(decl, decl.asClass());
}
@Test(expected = UnsupportedOperationException.class)
public void testAsInterface() {
    // Converting a class declaration to an interface view must be rejected.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    decl.asInterface();
}
@Test(expected = UnsupportedOperationException.class)
public void testAsEnum() {
    // Converting a class declaration to an enum view must be rejected.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    decl.asEnum();
}
@Test
public void testGetPackageName() {
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals("com.github.javaparser.ast", decl.getPackageName());
}
@Test
public void testGetClassName() {
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals("CompilationUnit", decl.getClassName());
}
@Test
public void testGetQualifiedName() {
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals("com.github.javaparser.ast.CompilationUnit", decl.getQualifiedName());
}
///
/// Test ancestors
///
@Test
public void testGetSuperclassWithoutTypeParameters() {
    // CompilationUnit extends the non-generic Node class.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals("com.github.javaparser.ast.Node", decl.getSuperClass().getQualifiedName());
}
@Test
public void testGetSuperclassWithTypeParameters() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    ReferenceType superClass = ctor.getSuperClass();
    assertEquals("com.github.javaparser.ast.body.BodyDeclaration", superClass.getQualifiedName());
    // BodyDeclaration<T> has T bound to ConstructorDeclaration itself.
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration",
            superClass.typeParametersMap().getValueBySignature("com.github.javaparser.ast.body.BodyDeclaration.T").get().asReferenceType().getQualifiedName());
}
@Test
public void testGetAllSuperclassesWithoutTypeParameters() {
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    // The full superclass chain is Node then Object; order is irrelevant here, so compare as sets.
    Set<String> superClassNames = decl.getAllSuperClasses().stream()
            .map(s -> s.getQualifiedName())
            .collect(Collectors.toSet());
    assertEquals(ImmutableSet.of("com.github.javaparser.ast.Node", "java.lang.Object"), superClassNames);
}
@Test
public void testGetAllSuperclassesWithTypeParameters() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    assertEquals(3, ctor.getAllSuperClasses().size());
    assertEquals(true, ctor.getAllSuperClasses().stream().anyMatch(s -> s.getQualifiedName().equals("com.github.javaparser.ast.body.BodyDeclaration")));
    assertEquals(true, ctor.getAllSuperClasses().stream().anyMatch(s -> s.getQualifiedName().equals("com.github.javaparser.ast.Node")));
    assertEquals(true, ctor.getAllSuperClasses().stream().anyMatch(s -> s.getQualifiedName().equals("java.lang.Object")));

    // Nearest superclass first; BodyDeclaration keeps its T bound to ConstructorDeclaration.
    ReferenceType superClass = ctor.getAllSuperClasses().get(0);
    assertEquals("com.github.javaparser.ast.body.BodyDeclaration", superClass.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", superClass.typeParametersMap().getValueBySignature("com.github.javaparser.ast.body.BodyDeclaration.T").get().asReferenceType().getQualifiedName());
    superClass = ctor.getAllSuperClasses().get(1);
    assertEquals("com.github.javaparser.ast.Node", superClass.getQualifiedName());
    superClass = ctor.getAllSuperClasses().get(2);
    assertEquals("java.lang.Object", superClass.getQualifiedName());
}
@Test
public void testGetInterfacesWithoutParameters() {
    // CompilationUnit implements no interfaces directly.
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(ImmutableSet.of(),
            decl.getInterfaces().stream().map(i -> i.getQualifiedName()).collect(Collectors.toSet()));
    // ClassOrInterfaceDeclaration directly implements DocumentableNode only.
    JavaParserClassDeclaration coid =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.body.ClassOrInterfaceDeclaration");
    assertEquals(ImmutableSet.of("com.github.javaparser.ast.DocumentableNode"),
            coid.getInterfaces().stream().map(i -> i.getQualifiedName()).collect(Collectors.toSet()));
}
@Test
public void testGetInterfacesWithParameters() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    assertEquals(7, ctor.getInterfaces().size());

    // Each generic NodeWith* interface binds its T to ConstructorDeclaration itself;
    // NodeWithDeclaration is the only non-generic one.
    ReferenceType iface = ctor.getInterfaces().get(0);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getInterfaces().get(1);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithDeclaration", iface.getQualifiedName());
    iface = ctor.getInterfaces().get(2);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithName", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithName.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getInterfaces().get(3);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithModifiers", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithModifiers.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getInterfaces().get(4);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithParameters", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithParameters.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getInterfaces().get(5);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getInterfaces().get(6);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt.T").get().asReferenceType().getQualifiedName());
}
@Test
public void testGetAllInterfacesWithoutParameters() {
    // CompilationUnit only picks up Cloneable (via Node).
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    assertEquals(ImmutableSet.of("java.lang.Cloneable"),
            decl.getAllInterfaces().stream().map(i -> i.getQualifiedName()).collect(Collectors.toSet()));
    // ClassOrInterfaceDeclaration accumulates direct and inherited interfaces.
    JavaParserClassDeclaration coid =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.body.ClassOrInterfaceDeclaration");
    assertEquals(ImmutableSet.of("java.lang.Cloneable", "com.github.javaparser.ast.NamedNode", "com.github.javaparser.ast.body.AnnotableNode", "com.github.javaparser.ast.DocumentableNode"),
            coid.getAllInterfaces().stream().map(i -> i.getQualifiedName()).collect(Collectors.toSet()));
}
@Test
public void testGetAllInterfacesWithParameters() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    assertEquals(9, ctor.getAllInterfaces().size());

    // Direct interfaces come first (with T bound to ConstructorDeclaration where generic),
    // then the inherited Cloneable and NodeWithAnnotations.
    ReferenceType iface = ctor.getAllInterfaces().get(0);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getAllInterfaces().get(1);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithDeclaration", iface.getQualifiedName());
    iface = ctor.getAllInterfaces().get(2);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithName", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithName.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getAllInterfaces().get(3);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithModifiers", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithModifiers.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getAllInterfaces().get(4);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithParameters", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithParameters.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getAllInterfaces().get(5);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getAllInterfaces().get(6);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt.T").get().asReferenceType().getQualifiedName());
    iface = ctor.getAllInterfaces().get(7);
    assertEquals("java.lang.Cloneable", iface.getQualifiedName());
    iface = ctor.getAllInterfaces().get(8);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithAnnotations", iface.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", iface.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.T").get().asReferenceType().getQualifiedName());
}
@Test
public void testGetAncestorsWithTypeParameters() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    assertEquals(8, ctor.getAncestors().size());

    // Direct ancestors: the superclass first, then the implemented interfaces in
    // declaration order, each generic one binding T to ConstructorDeclaration.
    ReferenceType anc = ctor.getAncestors().get(0);
    assertEquals("com.github.javaparser.ast.body.BodyDeclaration", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.body.BodyDeclaration.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAncestors().get(1);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAncestors().get(2);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithDeclaration", anc.getQualifiedName());
    anc = ctor.getAncestors().get(3);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithName", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithName.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAncestors().get(4);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithModifiers", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithModifiers.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAncestors().get(5);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithParameters", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithParameters.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAncestors().get(6);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAncestors().get(7);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt.T").get().asReferenceType().getQualifiedName());
}
@Test
public void testGetAllAncestorsWithoutTypeParameters() {
    JavaParserClassDeclaration decl =
            (JavaParserClassDeclaration) typeSolver.solveType("com.github.javaparser.ast.CompilationUnit");
    // Full ancestry: Node, Cloneable (via Node) and Object; compared as a set.
    Set<String> ancestorNames = decl.getAllAncestors().stream()
            .map(i -> i.getQualifiedName())
            .collect(Collectors.toSet());
    assertEquals(ImmutableSet.of("java.lang.Cloneable", "com.github.javaparser.ast.Node", "java.lang.Object"), ancestorNames);
}
@Test
public void testGetAllAncestorsWithTypeParameters() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    assertEquals(12, ctor.getAllAncestors().size());

    // Transitive ancestry: classes and interfaces interleaved, generic ones with T
    // bound to ConstructorDeclaration itself.
    ReferenceType anc = ctor.getAllAncestors().get(0);
    assertEquals("com.github.javaparser.ast.body.BodyDeclaration", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.body.BodyDeclaration.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(1);
    assertEquals("com.github.javaparser.ast.Node", anc.getQualifiedName());
    anc = ctor.getAllAncestors().get(2);
    assertEquals("java.lang.Cloneable", anc.getQualifiedName());
    anc = ctor.getAllAncestors().get(3);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithAnnotations", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(4);
    assertEquals("java.lang.Object", anc.getQualifiedName());
    anc = ctor.getAllAncestors().get(5);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(6);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithDeclaration", anc.getQualifiedName());
    anc = ctor.getAllAncestors().get(7);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithName", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithName.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(8);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithModifiers", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithModifiers.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(9);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithParameters", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithParameters.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(10);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.T").get().asReferenceType().getQualifiedName());
    anc = ctor.getAllAncestors().get(11);
    assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt", anc.getQualifiedName());
    assertEquals("com.github.javaparser.ast.body.ConstructorDeclaration", anc.typeParametersMap().getValueBySignature("com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt.T").get().asReferenceType().getQualifiedName());
}
///
/// Test fields
///
@Test
public void testGetFieldForExistingField() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");

    // Field declared directly on ConstructorDeclaration.
    FieldDeclaration modifiersField = ctor.getField("modifiers");
    assertEquals("modifiers", modifiersField.getName());
    assertEquals("java.util.EnumSet", modifiersField.getType().asReferenceType().getQualifiedName());
    assertEquals(AccessLevel.PRIVATE, modifiersField.accessLevel());
    assertEquals(false, modifiersField.isStatic());

    // Field inherited from a superclass is also resolvable.
    FieldDeclaration annotationsField = ctor.getField("annotations");
    assertEquals("annotations", annotationsField.getName());
    assertEquals("java.util.List", annotationsField.getType().asReferenceType().getQualifiedName());
    assertEquals(AccessLevel.PRIVATE, annotationsField.accessLevel());
}
@Test(expected = UnsolvedSymbolException.class)
public void testGetFieldForUnexistingField() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    // Requesting a field absent from the whole hierarchy must throw.
    ctor.getField("unexisting");
}
@Test
public void testGetAllFields() {
    JavaParserClassDeclaration ctor =
            (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    List<FieldDeclaration> allFields = ctor.getAllFields();

    // Declared fields come first, followed by the ones inherited from the superclasses,
    // in a fixed order.
    List<String> expectedNames = ImmutableList.of(
            "modifiers", "typeParameters", "name", "parameters", "throws_", "body",
            "annotations", "NODE_BY_BEGIN_POSITION", "range", "parentNode", "childrenNodes",
            "orphanComments", "userData", "comment", "ABSOLUTE_BEGIN_LINE", "ABSOLUTE_END_LINE");
    assertEquals(expectedNames.size(), allFields.size());
    for (int i = 0; i < expectedNames.size(); i++) {
        assertEquals(expectedNames.get(i), allFields.get(i).getName());
    }
}
@Test
public void testGetAllGenericFields() throws IOException, ParseException {
    // Local reflection solver (intentionally independent of the shared fixtures).
    TypeSolver reflectionSolver = new ReflectionTypeSolver();
    CompilationUnit cu = JavaParser.parse(adaptPath(new File("src/test/resources/GenericFields.java.txt")));
    JavaParserClassDeclaration cb = new JavaParserClassDeclaration(Navigator.demandClass(cu, "CB"), reflectionSolver);
    assertEquals(3, cb.getAllFields().size());
    ReferenceTypeImpl cbType = new ReferenceTypeImpl(cb, reflectionSolver);

    // Plain String field: same type whether asked on the declaration or the reference type.
    assertEquals("s", cb.getAllFields().get(0).getName());
    assertEquals(string, cb.getAllFields().get(0).getType());
    assertEquals(string, cbType.getFieldType("s").get());

    // Generic field: the raw declaration keeps the type variable T,
    // while the reference type resolves it to List<Boolean>.
    assertEquals("t", cb.getAllFields().get(1).getName());
    assertEquals("T", cb.getAllFields().get(1).getType().describe());
    assertEquals(listOfBoolean, cbType.getFieldType("t").get());

    // Primitive field resolves to int in both views.
    assertEquals("i", cb.getAllFields().get(2).getName());
    assertEquals(PrimitiveType.INT, cb.getAllFields().get(2).getType());
    assertEquals(PrimitiveType.INT, cbType.getFieldType("i").get());
}
@Test
public void testGetAllStaticFields() {
    // Only the static fields of the hierarchy should be returned, in order.
    JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    List<FieldDeclaration> staticFields = constructorDeclaration.getAllStaticFields();
    String[] expectedOrder = {"NODE_BY_BEGIN_POSITION", "ABSOLUTE_BEGIN_LINE", "ABSOLUTE_END_LINE"};
    assertEquals(3, staticFields.size());
    for (int i = 0; i < expectedOrder.length; i++) {
        assertEquals(expectedOrder[i], staticFields.get(i).getName());
    }
}
@Test
public void testGetAllNonStaticFields() {
    // Only the instance (non-static) fields of the hierarchy should be returned, in order.
    JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    List<FieldDeclaration> instanceFields = constructorDeclaration.getAllNonStaticFields();
    String[] expectedOrder = {
            "modifiers", "typeParameters", "name", "parameters", "throws_", "body",
            "annotations", "range", "parentNode", "childrenNodes", "orphanComments",
            "userData", "comment"};
    assertEquals(13, instanceFields.size());
    for (int i = 0; i < expectedOrder.length; i++) {
        assertEquals(expectedOrder[i], instanceFields.get(i).getName());
    }
}
@Test
public void testGetDeclaredFields() {
    // Only the fields declared directly on ConstructorDeclaration (nothing inherited).
    JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    List<FieldDeclaration> declaredFields = constructorDeclaration.getDeclaredFields();
    String[] expectedOrder = {"modifiers", "typeParameters", "name", "parameters", "throws_", "body"};
    assertEquals(6, declaredFields.size());
    for (int i = 0; i < expectedOrder.length; i++) {
        assertEquals(expectedOrder[i], declaredFields.get(i).getName());
    }
}
///
/// Test methods
///
@Test
public void testGetDeclaredMethods() {
    // Methods declared directly on ConstructorDeclaration, compared by qualified
    // signature after sorting alphabetically (the set has no defined order).
    JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    Set<MethodDeclaration> declaredMethods = constructorDeclaration.getDeclaredMethods();
    assertEquals(20, declaredMethods.size());
    List<String> sortedSignatures = declaredMethods.stream()
            .map(MethodDeclaration::getQualifiedSignature)
            .sorted()
            .collect(Collectors.toList());
    String[] expectedSignatures = {
            "com.github.javaparser.ast.body.ConstructorDeclaration.accept(com.github.javaparser.ast.visitor.GenericVisitor<R, A>, A)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.accept(com.github.javaparser.ast.visitor.VoidVisitor<A>, A)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getBody()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getDeclarationAsString()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getDeclarationAsString(boolean, boolean)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getDeclarationAsString(boolean, boolean, boolean)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getJavaDoc()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getModifiers()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getName()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getNameExpr()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getParameters()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getThrows()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.getTypeParameters()",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setBody(com.github.javaparser.ast.stmt.BlockStmt)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setModifiers(java.util.EnumSet<com.github.javaparser.ast.Modifier>)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setName(java.lang.String)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setNameExpr(com.github.javaparser.ast.expr.NameExpr)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setParameters(java.util.List<com.github.javaparser.ast.body.Parameter>)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setThrows(java.util.List<com.github.javaparser.ast.type.ReferenceType>)",
            "com.github.javaparser.ast.body.ConstructorDeclaration.setTypeParameters(java.util.List<com.github.javaparser.ast.type.TypeParameter>)"};
    for (int i = 0; i < expectedSignatures.length; i++) {
        assertEquals(expectedSignatures[i], sortedSignatures.get(i));
    }
}
@Test
public void testGetAllMethods() {
// All methods visible on ConstructorDeclaration, including everything inherited
// from Node, BodyDeclaration, the NodeWith* mixin interfaces, and java.lang.Object.
// The expected list below is the complete set, sorted by qualified signature.
JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
Set<MethodUsage> allMethods = constructorDeclaration.getAllMethods();
// getAllMethods returns an unordered set; sort to make the comparison deterministic.
List<MethodUsage> sortedMethods = allMethods.stream()
.sorted((o1, o2) -> o1.getQualifiedSignature().compareTo(o2.getQualifiedSignature()))
.collect(Collectors.toList());
List<String> signatures = sortedMethods.stream().map(m -> m.getQualifiedSignature()).collect(Collectors.toList());
assertEquals(ImmutableList.of("com.github.javaparser.ast.Node.addOrphanComment(com.github.javaparser.ast.comments.Comment)",
"com.github.javaparser.ast.Node.clone()",
"com.github.javaparser.ast.Node.contains(com.github.javaparser.ast.Node)",
"com.github.javaparser.ast.Node.equals(java.lang.Object)",
"com.github.javaparser.ast.Node.getAllContainedComments()",
"com.github.javaparser.ast.Node.getBegin()",
"com.github.javaparser.ast.Node.getChildrenNodes()",
"com.github.javaparser.ast.Node.getComment()",
"com.github.javaparser.ast.Node.getEnd()",
"com.github.javaparser.ast.Node.getNodesByType(java.lang.Class<N>)",
"com.github.javaparser.ast.Node.getOrphanComments()",
"com.github.javaparser.ast.Node.getParentNode()",
"com.github.javaparser.ast.Node.getParentNodeOfType(java.lang.Class<T>)",
"com.github.javaparser.ast.Node.getRange()",
"com.github.javaparser.ast.Node.getUserData(com.github.javaparser.ast.UserDataKey<M>)",
"com.github.javaparser.ast.Node.hasComment()",
"com.github.javaparser.ast.Node.hashCode()",
"com.github.javaparser.ast.Node.isPositionedAfter(com.github.javaparser.Position)",
"com.github.javaparser.ast.Node.isPositionedBefore(com.github.javaparser.Position)",
"com.github.javaparser.ast.Node.setAsParentNodeOf(com.github.javaparser.ast.Node)",
"com.github.javaparser.ast.Node.setAsParentNodeOf(java.util.List<? extends com.github.javaparser.ast.Node>)",
"com.github.javaparser.ast.Node.setBegin(com.github.javaparser.Position)",
"com.github.javaparser.ast.Node.setBlockComment(java.lang.String)",
"com.github.javaparser.ast.Node.setComment(com.github.javaparser.ast.comments.Comment)",
"com.github.javaparser.ast.Node.setEnd(com.github.javaparser.Position)",
"com.github.javaparser.ast.Node.setLineComment(java.lang.String)",
"com.github.javaparser.ast.Node.setParentNode(com.github.javaparser.ast.Node)",
"com.github.javaparser.ast.Node.setRange(com.github.javaparser.Range)",
"com.github.javaparser.ast.Node.setUserData(com.github.javaparser.ast.UserDataKey<M>, M)",
"com.github.javaparser.ast.Node.toString()",
"com.github.javaparser.ast.Node.toStringWithoutComments()",
"com.github.javaparser.ast.Node.tryAddImportToParentCompilationUnit(java.lang.Class<?>)",
"com.github.javaparser.ast.body.BodyDeclaration.getAnnotations()",
"com.github.javaparser.ast.body.BodyDeclaration.setAnnotations(java.util.List<com.github.javaparser.ast.expr.AnnotationExpr>)",
"com.github.javaparser.ast.body.ConstructorDeclaration.accept(com.github.javaparser.ast.visitor.GenericVisitor<R, A>, A)",
"com.github.javaparser.ast.body.ConstructorDeclaration.accept(com.github.javaparser.ast.visitor.VoidVisitor<A>, A)",
"com.github.javaparser.ast.body.ConstructorDeclaration.getBody()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getDeclarationAsString()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getDeclarationAsString(boolean, boolean)",
"com.github.javaparser.ast.body.ConstructorDeclaration.getDeclarationAsString(boolean, boolean, boolean)",
"com.github.javaparser.ast.body.ConstructorDeclaration.getJavaDoc()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getModifiers()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getName()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getNameExpr()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getParameters()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getThrows()",
"com.github.javaparser.ast.body.ConstructorDeclaration.getTypeParameters()",
"com.github.javaparser.ast.body.ConstructorDeclaration.setBody(com.github.javaparser.ast.stmt.BlockStmt)",
"com.github.javaparser.ast.body.ConstructorDeclaration.setModifiers(java.util.EnumSet<com.github.javaparser.ast.Modifier>)",
"com.github.javaparser.ast.body.ConstructorDeclaration.setName(java.lang.String)",
"com.github.javaparser.ast.body.ConstructorDeclaration.setNameExpr(com.github.javaparser.ast.expr.NameExpr)",
"com.github.javaparser.ast.body.ConstructorDeclaration.setParameters(java.util.List<com.github.javaparser.ast.body.Parameter>)",
"com.github.javaparser.ast.body.ConstructorDeclaration.setThrows(java.util.List<com.github.javaparser.ast.type.ReferenceType>)",
"com.github.javaparser.ast.body.ConstructorDeclaration.setTypeParameters(java.util.List<com.github.javaparser.ast.type.TypeParameter>)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.addAnnotation(java.lang.Class<? extends java.lang.annotation.Annotation>)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.addAnnotation(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.addMarkerAnnotation(java.lang.Class<? extends java.lang.annotation.Annotation>)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.addMarkerAnnotation(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.addSingleMemberAnnotation(java.lang.Class<? extends java.lang.annotation.Annotation>, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.addSingleMemberAnnotation(java.lang.String, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.getAnnotationByClass(java.lang.Class<? extends java.lang.annotation.Annotation>)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.getAnnotationByName(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.isAnnotationPresent(java.lang.Class<? extends java.lang.annotation.Annotation>)",
"com.github.javaparser.ast.nodeTypes.NodeWithAnnotations.isAnnotationPresent(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithBlockStmt.createBody()",
"com.github.javaparser.ast.nodeTypes.NodeWithJavaDoc.setJavaDocComment(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.addModifier(com.github.javaparser.ast.Modifier...)",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isAbstract()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isFinal()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isNative()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isPrivate()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isProtected()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isPublic()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isStatic()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isStrictfp()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isSynchronized()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isTransient()",
"com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isVolatile()",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addAndGetParameter(com.github.javaparser.ast.body.Parameter)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addAndGetParameter(com.github.javaparser.ast.type.Type, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addAndGetParameter(java.lang.Class<?>, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addAndGetParameter(java.lang.String, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addParameter(com.github.javaparser.ast.body.Parameter)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addParameter(com.github.javaparser.ast.type.Type, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addParameter(java.lang.Class<?>, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.addParameter(java.lang.String, java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.getParamByName(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.getParamByType(java.lang.Class<?>)",
"com.github.javaparser.ast.nodeTypes.NodeWithParameters.getParamByType(java.lang.String)",
"com.github.javaparser.ast.nodeTypes.NodeWithThrowable.addThrows(com.github.javaparser.ast.type.ReferenceType)",
"com.github.javaparser.ast.nodeTypes.NodeWithThrowable.addThrows(java.lang.Class<? extends java.lang.Throwable>)",
"com.github.javaparser.ast.nodeTypes.NodeWithThrowable.isThrows(java.lang.Class<? extends java.lang.Throwable>)",
"com.github.javaparser.ast.nodeTypes.NodeWithThrowable.isThrows(java.lang.String)",
"java.lang.Object.finalize()",
"java.lang.Object.getClass()",
"java.lang.Object.notify()",
"java.lang.Object.notifyAll()",
"java.lang.Object.registerNatives()",
"java.lang.Object.wait()",
"java.lang.Object.wait(long)",
"java.lang.Object.wait(long, int)"), signatures);
}
///
/// Test constructors
///
@Test
public void testGetConstructors() {
// The four constructors declared on ConstructorDeclaration, identified by their
// (unqualified) signatures and checked in declaration order.
JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
List<ConstructorDeclaration> constructors = constructorDeclaration.getConstructors();
assertEquals(4, constructors.size());
// No-arg constructor.
assertEquals("ConstructorDeclaration()", constructors.get(0).getSignature());
// Convenience constructor: modifiers + name only.
assertEquals("ConstructorDeclaration(java.util.EnumSet<com.github.javaparser.ast.Modifier>, java.lang.String)", constructors.get(1).getSignature());
// Full constructor without position information.
assertEquals("ConstructorDeclaration(java.util.EnumSet<com.github.javaparser.ast.Modifier>, java.util.List<com.github.javaparser.ast.expr.AnnotationExpr>, java.util.List<com.github.javaparser.ast.type.TypeParameter>, java.lang.String, java.util.List<com.github.javaparser.ast.body.Parameter>, java.util.List<com.github.javaparser.ast.type.ReferenceType>, com.github.javaparser.ast.stmt.BlockStmt)", constructors.get(2).getSignature());
// Full constructor with a leading Range (source position).
assertEquals("ConstructorDeclaration(com.github.javaparser.Range, java.util.EnumSet<com.github.javaparser.ast.Modifier>, java.util.List<com.github.javaparser.ast.expr.AnnotationExpr>, java.util.List<com.github.javaparser.ast.type.TypeParameter>, java.lang.String, java.util.List<com.github.javaparser.ast.body.Parameter>, java.util.List<com.github.javaparser.ast.type.ReferenceType>, com.github.javaparser.ast.stmt.BlockStmt)", constructors.get(3).getSignature());
}
///
/// Resolution
///
//SymbolReference<MethodDeclaration> solveMethod(String name, List<Type> parameterTypes);
@Test
public void testSolveMethodExisting() {
// Overload resolution on methods that do exist somewhere in the hierarchy.
JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
SymbolReference<MethodDeclaration> res = null;
// No-arg method inherited from the NodeWithModifiers mixin.
res = constructorDeclaration.solveMethod("isStatic", ImmutableList.of());
assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithModifiers.isStatic()", res.getCorrespondingDeclaration().getQualifiedSignature());
// NOTE(review): RuntimeException.class.getClass() evaluates to java.lang.Class itself
// (not RuntimeException), so this argument is a Class type — confirm the extra
// .getClass() call is intentional and not meant to be just RuntimeException.class.
res = constructorDeclaration.solveMethod("isThrows", ImmutableList.of(ReflectionFactory.typeUsageFor(RuntimeException.class.getClass(), typeSolverNewCode)));
assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.isThrows(java.lang.Class<? extends java.lang.Throwable>)", res.getCorrespondingDeclaration().getQualifiedSignature());
// A String argument selects the isThrows(String) overload.
res = constructorDeclaration.solveMethod("isThrows", ImmutableList.of(ReflectionFactory.typeUsageFor(String.class, typeSolverNewCode)));
assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.isThrows(java.lang.String)", res.getCorrespondingDeclaration().getQualifiedSignature());
// This is solved because it is raw
res = constructorDeclaration.solveMethod("isThrows", ImmutableList.of(ReflectionFactory.typeUsageFor(Class.class, typeSolverNewCode)));
assertEquals("com.github.javaparser.ast.nodeTypes.NodeWithThrowable.isThrows(java.lang.Class<? extends java.lang.Throwable>)", res.getCorrespondingDeclaration().getQualifiedSignature());
}
@Test
public void testSolveMethodNotExisting() {
    JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    // A method name absent from the entire hierarchy is not solved.
    SymbolReference<MethodDeclaration> unknownName = constructorDeclaration.solveMethod("unexistingMethod", ImmutableList.of());
    assertEquals(false, unknownName.isSolved());
    // A known name with a non-matching parameter list is not solved either.
    SymbolReference<MethodDeclaration> wrongArguments = constructorDeclaration.solveMethod("isStatic", ImmutableList.of(PrimitiveType.BOOLEAN));
    assertEquals(false, wrongArguments.isSolved());
}
@Test
public void testSolveMethodNotExistingBecauseOfTypeParameters() {
    JavaParserClassDeclaration constructorDeclaration = (JavaParserClassDeclaration) typeSolverNewCode.solveType("com.github.javaparser.ast.body.ConstructorDeclaration");
    // Build Class<String> by substituting Class's type variable with String;
    // it does not satisfy Class<? extends Throwable>, so resolution must fail.
    ReferenceType stringType = (ReferenceType) ReflectionFactory.typeUsageFor(String.class, typeSolverNewCode);
    ReferenceType rawClassType = (ReferenceType) ReflectionFactory.typeUsageFor(Class.class, typeSolverNewCode);
    ReferenceType classOfStringType = (ReferenceType) rawClassType.replaceTypeVariables(rawClassType.getTypeDeclaration().getTypeParameters().get(0), stringType);
    SymbolReference<MethodDeclaration> resolved = constructorDeclaration.solveMethod("isThrows", ImmutableList.of(classOfStringType));
    assertEquals(false, resolved.isSolved());
}
///
/// Assignability
///
// boolean isAssignableBy(Type type);
// boolean canBeAssignedTo(TypeDeclaration other)
// boolean isAssignableBy(TypeDeclaration other);
///
/// Annotations
///
// hasDirectlyAnnotation
@Test
public void testHasDirectlyAnnotation() throws IOException, ParseException {
    // hasDirectlyAnnotation only looks at annotations written on the class itself
    // (no inheritance) and requires the fully qualified annotation name.
    TypeSolver typeSolver = new ReflectionTypeSolver();
    CompilationUnit compilationUnit = JavaParser.parse(adaptPath(new File("src/test/resources/Annotations.java.txt")));
    // CA is annotated with @MyAnnotation only.
    JavaParserClassDeclaration classA = new JavaParserClassDeclaration(Navigator.demandClass(compilationUnit, "CA"), typeSolver);
    assertEquals(true, classA.hasDirectlyAnnotation("foo.bar.MyAnnotation"));
    assertEquals(false, classA.hasDirectlyAnnotation("foo.bar.MyAnnotation2"));
    assertEquals(false, classA.hasDirectlyAnnotation("MyAnnotation"));
    assertEquals(false, classA.hasDirectlyAnnotation("foo.bar.MyUnexistingAnnotation"));
    // CB is annotated with @MyAnnotation2 only.
    JavaParserClassDeclaration classB = new JavaParserClassDeclaration(Navigator.demandClass(compilationUnit, "CB"), typeSolver);
    assertEquals(false, classB.hasDirectlyAnnotation("foo.bar.MyAnnotation"));
    assertEquals(true, classB.hasDirectlyAnnotation("foo.bar.MyAnnotation2"));
    assertEquals(false, classB.hasDirectlyAnnotation("MyAnnotation"));
    assertEquals(false, classB.hasDirectlyAnnotation("foo.bar.MyUnexistingAnnotation"));
}
// hasAnnotation
@Test
public void testHasAnnotation() throws IOException, ParseException {
TypeSolver typeSolver = new ReflectionTypeSolver();
CompilationUnit cu = JavaParser.parse(adaptPath(new File("src/test/resources/Annotations.java.txt")));
JavaParserClassDeclaration ca = new JavaParserClassDeclaration(Navigator.demandClass(cu, "CA"), typeSolver);
assertEquals(true, ca.hasAnnotation("foo.bar.MyAnnotation"));
assertEquals(false, ca.hasAnnotation("foo.bar.MyAnnotation2"));
assertEquals(false, ca.hasAnnotation("MyAnnotation"));
assertEquals(false, ca.hasAnnotation("foo.bar.MyUnexistingAnnotation"));
JavaParserClassDeclaration cb = new JavaParserClassDeclaration(Navigator.demandClass(cu, "CB"), typeSolver);
assertEquals(true, cb.hasAnnotation("foo.bar.MyAnnotation"));
assertEquals(true, cb.hasAnnotation("foo.bar.MyAnnotation2"));
assertEquals(false, cb.hasAnnotation("MyAnnotation"));
assertEquals(false, cb.hasAnnotation("foo.bar.MyUnexistingAnnotation"));
}
///
///
///
// List<TypeParameterDeclaration> getTypeParameters();
// AccessLevel accessLevel();
///
/// Containment
///
// Set<TypeDeclaration> internalTypes()
// Optional<TypeDeclaration> containerType()
}
| |
package org.ebayopensource.turmeric.tools.codegen;
import java.io.File;
import java.util.Properties;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
// Codegen tests for common-type namespace handling in generated TypeMappings.xml:
// verifies which namespace the error/element types receive depending on the
// enabledNamespaceFolding interface property, for single- and multi-namespace WSDLs.
public class CommonTypenNSTMTest extends AbstractServiceGeneratorTestCase{
@Rule public TestName name = new TestName();
File destDir = null;
File binDir = null;
final String INTF_PROPERTIES = "service_intf_project.properties";
final String IMPL_PROPERTIES = "service_impl_project.properties";
ServiceGenerator gen = null;
Properties intfProps = new Properties();
File intfProperty = null;
// Creates a fresh working dir and writes the interface project properties that
// every test reads; individual tests override enabledNamespaceFolding as needed.
@Before
public void init() throws Exception{
testingdir.ensureEmpty();
destDir = testingdir.getDir();
binDir = testingdir.getFile("bin");
intfProperty = createPropertyFile(destDir.getAbsolutePath(), INTF_PROPERTIES);
//enter values to property file
intfProps.put("sipp_version","1.1");
intfProps.put("service_interface_class_name","org.ebayopensource.test.soaframework.tools.codegen.AdcommerceConfigGroupMarketV2");
intfProps.put("service_layer","COMMON");
intfProps.put("original_wsdl_uri","Vanilla-Codegen\\ServiceInputFiles\\AdcommerceConfigGroupMarketV2.wsdl");
intfProps.put("service_version","1.0.0");
intfProps.put("admin_name","AdcommerceConfigGroupMarketV2");
intfProps.put("service_namespace_part","billing");
intfProps.put("domainName","Billing");
intfProps.put("enabledNamespaceFolding","true");
fillProperties(intfProps, intfProperty);
}
/*
* Single namespace wsdl with namespace folding = true. The Error type to have wsdl namespace.
*/
@Test
public void testTypeMappingCommonNamespace() throws Exception{
File path = getCodegenQEDataFileInput("AdcommerceConfigGroupMarketV2.wsdl");
String [] testArgs1 = {"-serviceName","ConfigGroupMarket",
"-genType","ServiceFromWSDLIntf",
"-wsdl",path.getAbsolutePath(),
"-gip","org.ebayopensource.test.soaframework.tools.codegen",
"-dest",destDir.getAbsolutePath(),
"-src",destDir.getAbsolutePath(),
"-slayer","INTERMEDIATE",
"-scv","1.0.0",
"-bin",binDir.getAbsolutePath(),
"-pr",destDir.getAbsolutePath()};
performDirectCodeGen(testArgs1,binDir);
// With folding on, every xml-element-name should carry the (folded) wsdl namespace.
File typeMappingFile = new File(getTypeMappingsFile(destDir.getAbsolutePath(),"AdcommerceConfigGroupMarketV2"));
Document typeDoc = XmlUtility.getXmlDoc(typeMappingFile.getAbsolutePath());
NodeList nodeList = typeDoc.getElementsByTagName("xml-element-name");
for(int i =0; i < nodeList.getLength();i++){
Assert.assertTrue(nodeList.item(i).getFirstChild().getNodeValue().contains("{http://www.ebayopensource.com/marketplace/services}"));
}
}
/*
* Single namespace wsdl with namespace folding = false. The Error type to have wsdl namespace.
*/
@Test
public void testTypeMappingNSFoldingFalse() throws Exception{
// Rewrite the property file with folding disabled before generating.
intfProps.put("enabledNamespaceFolding","false");
fillProperties(intfProps, intfProperty);
File path = getCodegenQEDataFileInput("AdcommerceConfigGroupMarketV2.wsdl");
String [] testArgs1 = {"-serviceName","ConfigGroupMarket",
"-genType","ServiceFromWSDLIntf",
"-wsdl",path.getAbsolutePath(),
"-gip","org.ebayopensource.test.soaframework.tools.codegen",
"-dest",destDir.getAbsolutePath(),
"-src",destDir.getAbsolutePath(),
"-slayer","INTERMEDIATE",
"-bin",binDir.getAbsolutePath(),
"-scv","1.0.0",
"-pr",destDir.getAbsolutePath()};
performDirectCodeGen(testArgs1,binDir);
// With folding off, the error-message element names keep the turmeric common-types namespace.
File typeMappingFile = new File(getTypeMappingsFile(destDir.getAbsolutePath(),"AdcommerceConfigGroupMarketV2"));
Document typeDoc = XmlUtility.getXmlDoc(typeMappingFile.getAbsolutePath());
NodeList nodeList = typeDoc.getElementsByTagName("error-message");
for(int i =0; i < nodeList.getLength();i++){
NodeList childNodes = nodeList.item(i).getChildNodes();
for(int j= 0;j < childNodes.getLength();j++) {
if(childNodes.item(j).getNodeName().equals("xml-element-name")){
Assert.assertTrue(childNodes.item(j).getFirstChild().getNodeValue().contains("{http://www.ebayopensource.org/turmeric/common/v1/types}"));
}
}
}
}
/*
* multi namespace wsdl with namespace folding = false. The Error type to have wsdl namespace.
*/
@Test
public void testTypeMappingNSFoldingFalseForMnsWSDL() throws Exception{
intfProps.put("enabledNamespaceFolding","false");
fillProperties(intfProps, intfProperty);
File path = getCodegenQEDataFileInput("AccountService.wsdl");
String [] testArgs1 = {"-serviceName","AccountService",
"-genType","ServiceFromWSDLIntf",
"-wsdl",path.getAbsolutePath(),
"-gip","org.ebayopensource.test.soaframework.tools.codegen",
"-dest",destDir.getAbsolutePath(),
"-src",destDir.getAbsolutePath(),
"-slayer","INTERMEDIATE",
"-scv","1.0.0",
"-bin",binDir.getAbsolutePath(),
"-pr",destDir.getAbsolutePath()};
performDirectCodeGen(testArgs1,binDir);
// NOTE(review): the TypeMappings path uses "AdcommerceConfigGroupMarketV2" even though
// this test generates "AccountService"; the admin_name property still says
// AdcommerceConfigGroupMarketV2, so this may be intentional — confirm against the
// generated directory layout.
File typeMappingFile = new File(getTypeMappingsFile(destDir.getAbsolutePath(),"AdcommerceConfigGroupMarketV2"));
Document typeDoc = XmlUtility.getXmlDoc(typeMappingFile.getAbsolutePath());
NodeList nodeList = typeDoc.getElementsByTagName("error-message");
for(int i =0; i < nodeList.getLength();i++){
NodeList childNodes = nodeList.item(i).getChildNodes();
for(int j= 0;j < childNodes.getLength();j++) {
if(childNodes.item(j).getNodeName().equals("xml-element-name")){
Assert.assertTrue(childNodes.item(j).getFirstChild().getNodeValue().contains("{http://www.ebayopensource.org/turmeric/common/v1/types}"));
}
}
}
}
// Smoke test: multi-namespace wsdl with a fault, folding disabled — generation must succeed.
@Test
public void testMnsWsdlWithFault() throws Exception{
intfProps.put("enabledNamespaceFolding","false");
fillProperties(intfProps, intfProperty);
File path = getCodegenQEDataFileInput("AccountService1.wsdl");
String [] testArgs1 = {"-serviceName","NewService",
"-genType","ServiceFromWSDLIntf",
"-wsdl",path.getAbsolutePath(),
"-gip","org.ebayopensource.test.soaframework.tools.codegen",
"-dest",destDir.getAbsolutePath(),
"-src",destDir.getAbsolutePath(),
"-slayer","INTERMEDIATE",
"-scv","1.0.0",
"-bin",binDir.getAbsolutePath(),
"-pr",destDir.getAbsolutePath()};
performDirectCodeGen(testArgs1,binDir);
}
// Smoke test: single-namespace wsdl with a fault, folding enabled — generation must succeed.
@Test
public void testSingleNSWsdlWithFault() throws Exception{
intfProps.put("enabledNamespaceFolding","true");
fillProperties(intfProps, intfProperty);
File path = getCodegenQEDataFileInput("BlogsServiceV1.wsdl");
String [] testArgs1 = {"-serviceName","NewService",
"-genType","ServiceFromWSDLIntf",
"-wsdl",path.getAbsolutePath(),
"-gip","org.ebayopensource.test.soaframework.tools.codegen",
"-dest",destDir.getAbsolutePath(),
"-src",destDir.getAbsolutePath(),
"-slayer","INTERMEDIATE",
"-bin",binDir.getAbsolutePath(),
"-scv","1.0.0",
"-pr",destDir.getAbsolutePath()};
performDirectCodeGen(testArgs1,binDir);
}
// Path of the generated TypeMappings.xml for the given admin/service name under destDir.
public String getTypeMappingsFile(String destDir,String serviceName){
return destDir + File.separator +"gen-meta-src/META-INF/soa/common/config/"+serviceName+"/TypeMappings.xml";
}
}
| |
/* Copyright (c) 2001-2011, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.types;
import org.hsqldb.OpTypes;
import org.hsqldb.Session;
import org.hsqldb.SessionInterface;
import org.hsqldb.Tokens;
import org.hsqldb.error.Error;
import org.hsqldb.error.ErrorCode;
import org.hsqldb.store.BitMap;
/**
* Type implementation for BOOLEAN.<p>
*
* @author Fred Toussi (fredt@users dot sourceforge.net)
* @version 2.2.1
* @since 1.9.0
*/
public final class BooleanType extends Type {

    // Shared singleton: SQL BOOLEAN has no precision/scale variants, so one
    // instance (exposed via getBooleanType()) serves every use.
    static final BooleanType booleanType = new BooleanType();

    private BooleanType() {
        super(Types.SQL_BOOLEAN, Types.SQL_BOOLEAN, 0, 0);
    }

    // Widest textual form is "FALSE" - five characters.
    public int displaySize() {
        return 5;
    }

    public int getJDBCTypeCode() {
        return Types.BOOLEAN;
    }

    public Class getJDBCClass() {
        return Boolean.class;
    }

    public String getJDBCClassName() {
        return "java.lang.Boolean";
    }

    public String getNameString() {
        return Tokens.T_BOOLEAN;
    }

    public String getDefinition() {
        return Tokens.T_BOOLEAN;
    }

    public boolean isBooleanType() {
        return true;
    }

    /**
     * Returns the common type for this type and {@code other}. BOOLEAN
     * aggregates with itself, with the untyped NULL literal
     * (SQL_ALL_TYPES), and with character types (delegated so the
     * character side decides); any other pairing raises X_42562.
     */
    public Type getAggregateType(Type other) {

        if (other == null) {
            return this;
        }

        if (other == SQL_ALL_TYPES) {
            return this;
        }

        if (typeCode == other.typeCode) {
            return this;
        }

        if (other.isCharacterType()) {
            return other.getAggregateType(this);
        }

        throw Error.error(ErrorCode.X_42562);
    }

    /**
     * Result type of a binary operation on BOOLEAN operands. Only
     * equality between two booleans is supported; everything else
     * raises X_42562.
     */
    public Type getCombinedType(Session session, Type other, int operation) {

        switch (operation) {

            case OpTypes.EQUAL :
                if (other.isBooleanType()) {
                    return this;
                }
        }

        throw Error.error(ErrorCode.X_42562);
    }

    /**
     * Ordering for BOOLEAN values: null sorts lowest, then FALSE,
     * then TRUE.
     */
    public int compare(Session session, Object a, Object b) {

        if (a == b) {
            return 0;
        }

        if (a == null) {
            return -1;
        }

        if (b == null) {
            return 1;
        }

        boolean boola = ((Boolean) a).booleanValue();
        boolean boolb = ((Boolean) b).booleanValue();

        // values differ here, so boolb == true means a is FALSE -> a < b
        return (boola == boolb) ? 0
                                : (boolb ? -1
                                         : 1);
    }

    // BOOLEAN has no length/precision limits to enforce; identity.
    public Object convertToTypeLimits(SessionInterface session, Object a) {
        return a;
    }

    /**
     * Converts {@code a}, typed as {@code otherType}, to a Boolean.
     * Accepted sources: BOOLEAN (identity); BIT / BIT VARYING of exactly
     * one bit; CLOB (first converted to VARCHAR) and character types,
     * trimmed of spaces then matched case-insensitively against TRUE /
     * FALSE / UNKNOWN, where UNKNOWN yields SQL NULL; exact numeric and
     * integral types, where zero maps to FALSE and non-zero to TRUE.
     * Any other source raises X_22018.
     */
    public Object convertToType(SessionInterface session, Object a,
                                Type otherType) {

        if (a == null) {
            return a;
        }

        switch (otherType.typeCode) {

            case Types.SQL_BOOLEAN :
                return a;

            case Types.SQL_BIT :
            case Types.SQL_BIT_VARYING : {
                BinaryData b = (BinaryData) a;

                // only a single-bit value has a boolean interpretation
                if (b.bitLength(session) == 1) {
                    return BitMap.isSet(b.getBytes(), 0) ? Boolean.TRUE
                                                         : Boolean.FALSE;
                }

                break;
            }
            case Types.SQL_CLOB :
                a = Type.SQL_VARCHAR.convertToType(session, a, otherType);

            // fall through
            case Types.SQL_CHAR :
            case Types.SQL_VARCHAR :
            case Types.VARCHAR_IGNORECASE : {
                a = ((CharacterType) otherType).trim(session, a, (int) ' ',
                                                     true, true);

                if (((String) a).equalsIgnoreCase(Tokens.T_TRUE)) {
                    return Boolean.TRUE;
                } else if (((String) a).equalsIgnoreCase(Tokens.T_FALSE)) {
                    return Boolean.FALSE;
                } else if (((String) a).equalsIgnoreCase(Tokens.T_UNKNOWN)) {
                    // SQL's three-valued logic: UNKNOWN is represented as null
                    return null;
                }

                break;
            }
            case Types.SQL_NUMERIC :
            case Types.SQL_DECIMAL :
                return NumberType.isZero(a) ? Boolean.FALSE
                                            : Boolean.TRUE;

            case Types.TINYINT :
            case Types.SQL_SMALLINT :
            case Types.SQL_INTEGER :
            case Types.SQL_BIGINT : {
                if (((Number) a).longValue() == 0) {
                    return Boolean.FALSE;
                } else {
                    return Boolean.TRUE;
                }
            }
        }

        throw Error.error(ErrorCode.X_22018);
    }

    /**
     * ResultSet getBoolean support. Same as convertToType, but character
     * values "0" and "1" are additionally accepted (JDBC convention),
     * and LOB sources are rejected with X_42561.
     */
    public Object convertToTypeJDBC(SessionInterface session, Object a,
                                    Type otherType) {

        if (a == null) {
            return a;
        }

        switch (otherType.typeCode) {

            case Types.SQL_BOOLEAN :
                return a;

            default :
                if (otherType.isLobType()) {
                    throw Error.error(ErrorCode.X_42561);
                }

                if (otherType.isCharacterType()) {
                    if ("0".equals(a)) {
                        return Boolean.FALSE;
                    } else if ("1".equals(a)) {
                        return Boolean.TRUE;
                    }
                }

                return convertToType(session, a, otherType);
        }
    }

    /**
     * Converts a Java value of unknown SQL type: Boolean passes through,
     * String is parsed via the VARCHAR rules, Number maps zero/non-zero
     * to FALSE/TRUE. Anything else raises X_42561.
     */
    public Object convertToDefaultType(SessionInterface session, Object a) {

        if (a == null) {
            return null;
        }

        if (a instanceof Boolean) {
            return a;
        } else if (a instanceof String) {
            return convertToType(session, a, Type.SQL_VARCHAR);
        } else if (a instanceof Number) {
            return NumberType.isZero(a) ? Boolean.FALSE
                                        : Boolean.TRUE;
        }

        throw Error.error(ErrorCode.X_42561);
    }

    public Object convertJavaToSQL(SessionInterface session, Object a) {
        return convertToDefaultType(session, a);
    }

    // Plain-text form; SQL NULL renders as Java null here.
    public String convertToString(Object a) {

        if (a == null) {
            return null;
        }

        return ((Boolean) a).booleanValue() ? Tokens.T_TRUE
                                            : Tokens.T_FALSE;
    }

    // SQL-literal form; unlike convertToString, null renders as UNKNOWN.
    public String convertToSQLString(Object a) {

        if (a == null) {
            return Tokens.T_UNKNOWN;
        }

        return ((Boolean) a).booleanValue() ? Tokens.T_TRUE
                                            : Tokens.T_FALSE;
    }

    /**
     * True for sources convertible to BOOLEAN: the untyped NULL literal,
     * boolean, character and integral types, and single-bit BIT types.
     */
    public boolean canConvertFrom(Type otherType) {
        return otherType.typeCode == Types.SQL_ALL_TYPES
               || otherType.isBooleanType() || otherType.isCharacterType()
               || otherType.isIntegralType()
               || (otherType.isBitType() && otherType.precision == 1);
    }

    // 0 = values can be reused without conversion, -1 = conversion needed.
    public int canMoveFrom(Type otherType) {
        return otherType.isBooleanType() ? 0
                                         : -1;
    }

    /** Accessor for the shared BOOLEAN type singleton. */
    public static BooleanType getBooleanType() {
        return booleanType;
    }
}
| |
package org.ovirt.engine.ui.uicommonweb.models.userportal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmPool;
import org.ovirt.engine.core.common.businessentities.comparators.NameableComparator;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.utils.PairFirstComparator;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.uicommonweb.ConsoleOptionsFrontendPersister.ConsoleContext;
import org.ovirt.engine.ui.uicommonweb.IconUtils;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.UICommand;
import org.ovirt.engine.ui.uicommonweb.help.HelpTag;
import org.ovirt.engine.ui.uicommonweb.models.ConsolePopupModel;
import org.ovirt.engine.ui.uicommonweb.models.ConsolesFactory;
import org.ovirt.engine.ui.uicommonweb.models.ListWithDetailsModel;
import org.ovirt.engine.ui.uicommonweb.models.Model;
import org.ovirt.engine.ui.uicommonweb.models.VmConsoles;
import org.ovirt.engine.ui.uicommonweb.models.vms.IconCache;
import org.ovirt.engine.ui.uicompat.Event;
import org.ovirt.engine.ui.uicompat.EventArgs;
import org.ovirt.engine.ui.uicompat.FrontendMultipleQueryAsyncResult;
import org.ovirt.engine.ui.uicompat.ICancelable;
import org.ovirt.engine.ui.uicompat.IFrontendMultipleQueryAsyncCallback;
/**
 * Base list model for user-portal views that present the user's VMs and VM
 * pools as {@code UserPortalItemModel} items, and that host the
 * "edit console options" popup flow.
 */
public abstract class AbstractUserPortalListModel extends ListWithDetailsModel<Void, /* VmOrPool */ Object, UserPortalItemModel> implements ICancelable {

    private UICommand editConsoleCommand;

    // Builds VmConsoles for items; expected to be set by concrete subclasses.
    protected ConsolesFactory consolesFactory;

    public AbstractUserPortalListModel() {
        // NOTE(review): the command name "NewServer" looks historical; it
        // actually triggers the edit-console flow - confirm before renaming.
        setEditConsoleCommand(new UICommand("NewServer", this)); //$NON-NLS-1$
    }

    // Returns only the VM instances contained in the given mixed VM/pool list.
    protected Iterable filterVms(List all) {
        List<VM> result = new LinkedList<>();
        for (Object o : all) {
            if (o instanceof VM) {
                result.add((VM) o);
            }
        }
        return result;
    }

    /**
     * Collects consoles of non-pool items that can currently be connected to.
     * Returns an empty list when no items are loaded yet.
     */
    public List<VmConsoles> getAutoConnectableConsoles() {
        List<VmConsoles> autoConnectableConsoles = new LinkedList<>();
        if (items != null) {
            for (UserPortalItemModel upItem : items) {
                if (!upItem.isPool() && upItem.getVmConsoles().canConnectToConsole()) {
                    autoConnectableConsoles.add(upItem.getVmConsoles());
                }
            }
        }
        return autoConnectableConsoles;
    }

    // Auto-connect only makes sense when there is exactly one candidate.
    public boolean getCanConnectAutomatically() {
        return getAutoConnectableConsoles().size() == 1;
    }

    public UICommand getEditConsoleCommand() {
        return editConsoleCommand;
    }

    private void setEditConsoleCommand(UICommand editConsoleCommand) {
        this.editConsoleCommand = editConsoleCommand;
    }

    @Override
    protected Object provideDetailModelEntity(UserPortalItemModel selectedItem) {
        // Each item in this list model is not a business entity,
        // therefore select an Entity property to provide it to
        // the detail models.
        if (selectedItem == null) {
            return null;
        }
        return selectedItem.getEntity();
    }

    /**
     * Dispatches the edit-console popup commands; all other commands are
     * handled by the superclass.
     */
    @Override
    public void executeCommand(UICommand command) {
        super.executeCommand(command);
        if (command == getEditConsoleCommand()) {
            editConsole();
        } else if ("OnEditConsoleSave".equals(command.getName())) { //$NON-NLS-1$
            onEditConsoleSave();
        } else if (Model.CANCEL_COMMAND.equals(command.getName())) {
            cancel();
        }
    }

    private void onEditConsoleSave() {
        cancel();
    }

    // Opens the edit-console popup for the selected item, unless a window is
    // already open or the item has no consoles.
    private void editConsole() {
        if (getWindow() != null || getSelectedItem().getVmConsoles() == null) {
            return;
        }
        ConsolePopupModel model = new ConsolePopupModel();
        model.setVmConsoles(getSelectedItem().getVmConsoles());
        model.setHelpTag(HelpTag.editConsole);
        model.setHashName("editConsole"); //$NON-NLS-1$
        setWindow(model);
        UICommand saveCommand = UICommand.createDefaultOkUiCommand("OnEditConsoleSave", this); //$NON-NLS-1$
        model.getCommands().add(saveCommand);
        UICommand cancelCommand = UICommand.createCancelUiCommand("Cancel", this); //$NON-NLS-1$
        model.getCommands().add(cancelCommand);
    }

    protected abstract ConsoleContext getConsoleContext();

    // Whether the view needs large icons prefetched (subclass decides).
    protected abstract boolean fetchLargeIcons();

    protected abstract Event<EventArgs> getSearchCompletedEvent();

    /**
     * Merges the user's VMs and pools into list items. Pools for which the
     * user already holds the maximum number of VMs are dropped; remaining
     * pools get a representative VM fetched per pool (for console data)
     * before the item list is built. Icons are prefetched in both paths so
     * the view can render without further round-trips.
     */
    public void onVmAndPoolLoad(final List<VM> vms, List<VmPool> pools) {
        if (vms == null || pools == null) {
            return;
        }
        // Remove pools that has provided VMs.
        final ArrayList<VmPool> filteredPools = new ArrayList<>();
        for (VmPool pool : pools) {
            // Add pool to map.
            int attachedVmsCount = 0;
            for (VM vm : vms) {
                if (vm.getVmPoolId() != null && vm.getVmPoolId().equals(pool.getVmPoolId())) {
                    attachedVmsCount++;
                }
            }
            // keep only pools where the user can still take another VM
            if (attachedVmsCount < pool.getMaxAssignedVmsPerUser()) {
                filteredPools.add(pool);
            }
        }
        final List<Object> vmsObjectList = Collections.<Object>unmodifiableList(vms);
        final List<Pair<Object, VM>> vmPairs = Linq.wrapAsFirst(vmsObjectList, VM.class);
        if (filteredPools.isEmpty()) {
            IconUtils.prefetchIcons(vms, true, fetchLargeIcons(), new IconCache.IconsCallback() {
                @Override
                public void onSuccess(Map<Guid, String> idToIconMap) {
                    finishSearch(vmPairs);
                }
            });
        } else { // if we have pools we have to update their console cache and THEN finish search
            List<VdcQueryType> poolQueryList = new ArrayList<>();
            List<VdcQueryParametersBase> poolParamList = new ArrayList<>();
            for (VmPool p : filteredPools) {
                poolQueryList.add(VdcQueryType.GetVmDataByPoolId);
                poolParamList.add(new IdQueryParameters(p.getVmPoolId()));
            }
            Frontend.getInstance().runMultipleQueries(
                    poolQueryList, poolParamList,
                    new IFrontendMultipleQueryAsyncCallback() {
                        @Override
                        public void executed(FrontendMultipleQueryAsyncResult result) {
                            // one representative VM per queried pool, in order
                            List<VM> poolRepresentants = new LinkedList<>();
                            List<VdcQueryReturnValue> poolRepresentantsRetval = result.getReturnValues();
                            for (VdcQueryReturnValue poolRepresentant : poolRepresentantsRetval) { // extract from return value
                                poolRepresentants.add((VM) poolRepresentant.getReturnValue());
                            }
                            final List<Pair<Object, VM>> poolsPairs =
                                    Linq.zip(Collections.<Object>unmodifiableList(filteredPools),
                                            poolRepresentants);
                            final List<Pair<Object, VM>> all = Linq.concat(vmPairs, poolsPairs);
                            final List<VM> vmsAndPoolRepresentants = Linq.concat(vms, poolRepresentants);
                            IconUtils.prefetchIcons(vmsAndPoolRepresentants, true, fetchLargeIcons(),
                                    new IconCache.IconsCallback() {
                                        @Override
                                        public void onSuccess(Map<Guid, String> idToIconMap) {
                                            finishSearch(all);
                                        }
                                    });
                        }
                    });
        }
    }

    // Sorts (VM-or-pool, representative VM) pairs by name, wraps each pair in
    // an item model, publishes the items and raises the search-completed event.
    private void finishSearch(List<Pair<Object, VM>> vmOrPoolAndPoolRepresentants) {
        // raw cast: the pair's first element is typed Object, while the
        // comparator is declared over Nameable instances
        Collections.sort((List) vmOrPoolAndPoolRepresentants, new PairFirstComparator<>(new NameableComparator()));
        ArrayList<UserPortalItemModel> items = new ArrayList<>();
        for (Pair<Object, VM> item : vmOrPoolAndPoolRepresentants) {
            UserPortalItemModel model = new UserPortalItemModel(item.getFirst(), item.getSecond(), consolesFactory);
            model.setEntity(item.getFirst());
            items.add(model);
        }
        setItems(items);
        getSearchCompletedEvent().raise(this, EventArgs.EMPTY);
    }

    // Closes both the popup window and any confirmation window.
    public void cancel() {
        setWindow(null);
        setConfirmWindow(null);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.paho.mqtt5;
import java.util.Map;
import java.util.Properties;
import javax.net.SocketFactory;
import javax.net.ssl.HostnameVerifier;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.eclipse.paho.mqttv5.common.packet.MqttProperties;
/**
 * Configuration options for the Paho MQTT 5 component/endpoint. Every field
 * is exposed as an endpoint URI parameter via {@link UriParam}; defaults in
 * the annotations mirror the field initializers. Instances are duplicated
 * per endpoint through the shallow {@link #copy()} method.
 */
@UriParams
public class PahoMqtt5Configuration implements Cloneable {

    @UriParam
    private String clientId;
    @UriParam(defaultValue = PahoMqtt5Constants.DEFAULT_BROKER_URL)
    private String brokerUrl = PahoMqtt5Constants.DEFAULT_BROKER_URL;
    @UriParam(defaultValue = "2")
    private int qos = PahoMqtt5Constants.DEFAULT_QOS;
    @UriParam
    private boolean retained;
    @UriParam(defaultValue = "MEMORY")
    private PahoMqtt5Persistence persistence = PahoMqtt5Persistence.MEMORY;
    @UriParam
    private String filePersistenceDirectory;
    @UriParam(defaultValue = "60")
    private int keepAliveInterval = 60;
    @UriParam(defaultValue = "65535")
    private int receiveMaximum = 65535;
    @UriParam
    private String willTopic;
    @UriParam
    private String willPayload;
    @UriParam(defaultValue = "1")
    private int willQos = 1;
    @UriParam(defaultValue = "false")
    private boolean willRetained;
    @UriParam
    private MqttProperties willMqttProperties;
    @UriParam(label = "security")
    @Metadata(secret = true)
    private String userName;
    @UriParam(label = "security")
    @Metadata(secret = true)
    private String password;
    @UriParam(label = "security")
    private SocketFactory socketFactory;
    @UriParam(label = "security")
    private Properties sslClientProps;
    @UriParam(label = "security", defaultValue = "true")
    private boolean httpsHostnameVerificationEnabled = true;
    @UriParam(label = "security")
    private HostnameVerifier sslHostnameVerifier;
    @UriParam(defaultValue = "true")
    private boolean cleanStart = true;
    @UriParam(defaultValue = "30")
    private int connectionTimeout = 30;
    @UriParam
    private String serverURIs;
    @UriParam(defaultValue = "true")
    private boolean automaticReconnect = true;
    @UriParam(defaultValue = "128000")
    private int maxReconnectDelay = 128000;
    @UriParam(label = "advanced")
    private Map<String, String> customWebSocketHeaders;
    @UriParam(label = "advanced", defaultValue = "1")
    private int executorServiceTimeout = 1;
    @UriParam(defaultValue = "-1")
    private long sessionExpiryInterval = -1;

    public String getClientId() {
        return clientId;
    }

    /**
     * MQTT client identifier. The identifier must be unique.
     */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getBrokerUrl() {
        return brokerUrl;
    }

    /**
     * The URL of the MQTT broker.
     */
    public void setBrokerUrl(String brokerUrl) {
        this.brokerUrl = brokerUrl;
    }

    public int getQos() {
        return qos;
    }

    /**
     * Client quality of service level (0-2).
     */
    public void setQos(int qos) {
        this.qos = qos;
    }

    public boolean isRetained() {
        return retained;
    }

    /**
     * Retain option
     */
    public void setRetained(boolean retained) {
        this.retained = retained;
    }

    public PahoMqtt5Persistence getPersistence() {
        return persistence;
    }

    /**
     * Client persistence to be used - memory or file.
     */
    public void setPersistence(PahoMqtt5Persistence persistence) {
        this.persistence = persistence;
    }

    public String getFilePersistenceDirectory() {
        return filePersistenceDirectory;
    }

    /**
     * Base directory used by file persistence. Will by default use user directory.
     */
    public void setFilePersistenceDirectory(String filePersistenceDirectory) {
        this.filePersistenceDirectory = filePersistenceDirectory;
    }

    public String getUserName() {
        return userName;
    }

    /**
     * Username to be used for authentication against the MQTT broker
     */
    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getPassword() {
        return password;
    }

    /**
     * Password to be used for authentication against the MQTT broker
     */
    public void setPassword(String password) {
        this.password = password;
    }

    public int getKeepAliveInterval() {
        return keepAliveInterval;
    }

    /**
     * Sets the keep alive interval. This value, measured in seconds, defines the maximum time interval between messages
     * sent or received. It enables the client to detect if the server is no longer available, without having to wait
     * for the TCP/IP timeout. The client will ensure that at least one message travels across the network within each
     * keep alive period. In the absence of a data-related message during the time period, the client sends a very small
     * ping message, which the server will acknowledge. A value of 0 disables keepalive processing in the client.
     * <p>
     * The default value is 60 seconds
     * </p>
     */
    public void setKeepAliveInterval(int keepAliveInterval) {
        this.keepAliveInterval = keepAliveInterval;
    }

    public int getReceiveMaximum() {
        return receiveMaximum;
    }

    /**
     * Sets the Receive Maximum. This value represents the limit of QoS 1 and QoS 2 publications that the client is
     * willing to process concurrently. There is no mechanism to limit the number of QoS 0 publications that the Server
     * might try to send.
     * <p>
     * The default value is 65535
     * </p>
     */
    public void setReceiveMaximum(int receiveMaximum) {
        this.receiveMaximum = receiveMaximum;
    }

    public String getWillTopic() {
        return willTopic;
    }

    /**
     * Sets the "Last Will and Testament" (LWT) for the connection. In the event that this client unexpectedly loses its
     * connection to the server, the server will publish a message to itself using the supplied details.
     *
     * The topic to publish to.
     */
    public void setWillTopic(String willTopic) {
        this.willTopic = willTopic;
    }

    public String getWillPayload() {
        return willPayload;
    }

    /**
     * Sets the "Last Will and Testament" (LWT) for the connection. In the event that this client unexpectedly loses its
     * connection to the server, the server will publish a message to itself using the supplied details.
     *
     * The byte payload for the message.
     */
    public void setWillPayload(String willPayload) {
        this.willPayload = willPayload;
    }

    public int getWillQos() {
        return willQos;
    }

    /**
     * Sets the "Last Will and Testament" (LWT) for the connection. In the event that this client unexpectedly loses its
     * connection to the server, the server will publish a message to itself using the supplied details.
     *
     * The quality of service to publish the message at (0, 1 or 2).
     */
    public void setWillQos(int willQos) {
        this.willQos = willQos;
    }

    public boolean isWillRetained() {
        return willRetained;
    }

    /**
     * Sets the "Last Will and Testament" (LWT) for the connection. In the event that this client unexpectedly loses its
     * connection to the server, the server will publish a message to itself using the supplied details.
     *
     * Whether or not the message should be retained.
     */
    public void setWillRetained(boolean willRetained) {
        this.willRetained = willRetained;
    }

    public MqttProperties getWillMqttProperties() {
        return willMqttProperties;
    }

    /**
     * Sets the "Last Will and Testament" (LWT) for the connection. In the event that this client unexpectedly loses its
     * connection to the server, the server will publish a message to itself using the supplied details.
     *
     * The MQTT properties set for the message.
     */
    public void setWillMqttProperties(MqttProperties properties) {
        this.willMqttProperties = properties;
    }

    public SocketFactory getSocketFactory() {
        return socketFactory;
    }

    /**
     * Sets the SocketFactory to use. This allows an application to apply its own policies around the creation of
     * network sockets. If using an SSL connection, an SSLSocketFactory can be used to supply application-specific
     * security settings.
     */
    public void setSocketFactory(SocketFactory socketFactory) {
        this.socketFactory = socketFactory;
    }

    public Properties getSslClientProps() {
        return sslClientProps;
    }

    /**
     * Sets the SSL properties for the connection.
     * <p>
     * Note that these properties are only valid if an implementation of the Java Secure Socket Extensions (JSSE) is
     * available. These properties are <em>not</em> used if a custom SocketFactory has been set.
     *
     * The following properties can be used:
     * </p>
     * <dl>
     * <dt>com.ibm.ssl.protocol</dt>
     * <dd>One of: SSL, SSLv3, TLS, TLSv1, SSL_TLS.</dd>
     * <dt>com.ibm.ssl.contextProvider
     * <dd>Underlying JSSE provider. For example "IBMJSSE2" or "SunJSSE"</dd>
     *
     * <dt>com.ibm.ssl.keyStore</dt>
     * <dd>The name of the file that contains the KeyStore object that you want the KeyManager to use. For example
     * /mydir/etc/key.p12</dd>
     *
     * <dt>com.ibm.ssl.keyStorePassword</dt>
     * <dd>The password for the KeyStore object that you want the KeyManager to use. The password can either be in
     * plain-text, or may be obfuscated using the static method:
     * <code>com.ibm.micro.security.Password.obfuscate(char[] password)</code>. This obfuscates the password using a
     * simple and insecure XOR and Base64 encoding mechanism. Note that this is only a simple scrambler to obfuscate
     * clear-text passwords.</dd>
     *
     * <dt>com.ibm.ssl.keyStoreType</dt>
     * <dd>Type of key store, for example "PKCS12", "JKS", or "JCEKS".</dd>
     *
     * <dt>com.ibm.ssl.keyStoreProvider</dt>
     * <dd>Key store provider, for example "IBMJCE" or "IBMJCEFIPS".</dd>
     *
     * <dt>com.ibm.ssl.trustStore</dt>
     * <dd>The name of the file that contains the KeyStore object that you want the TrustManager to use.</dd>
     *
     * <dt>com.ibm.ssl.trustStorePassword</dt>
     * <dd>The password for the TrustStore object that you want the TrustManager to use. The password can either be in
     * plain-text, or may be obfuscated using the static method:
     * <code>com.ibm.micro.security.Password.obfuscate(char[] password)</code>. This obfuscates the password using a
     * simple and insecure XOR and Base64 encoding mechanism. Note that this is only a simple scrambler to obfuscate
     * clear-text passwords.</dd>
     *
     * <dt>com.ibm.ssl.trustStoreType</dt>
     * <dd>The type of KeyStore object that you want the default TrustManager to use. Same possible values as
     * "keyStoreType".</dd>
     *
     * <dt>com.ibm.ssl.trustStoreProvider</dt>
     * <dd>Trust store provider, for example "IBMJCE" or "IBMJCEFIPS".</dd>
     *
     * <dt>com.ibm.ssl.enabledCipherSuites</dt>
     * <dd>A list of which ciphers are enabled. Values are dependent on the provider, for example:
     * SSL_RSA_WITH_AES_128_CBC_SHA;SSL_RSA_WITH_3DES_EDE_CBC_SHA.</dd>
     *
     * <dt>com.ibm.ssl.keyManager</dt>
     * <dd>Sets the algorithm that will be used to instantiate a KeyManagerFactory object instead of using the default
     * algorithm available in the platform. Example values: "IbmX509" or "IBMJ9X509".</dd>
     *
     * <dt>com.ibm.ssl.trustManager</dt>
     * <dd>Sets the algorithm that will be used to instantiate a TrustManagerFactory object instead of using the default
     * algorithm available in the platform. Example values: "PKIX" or "IBMJ9X509".</dd>
     * </dl>
     */
    public void setSslClientProps(Properties sslClientProps) {
        this.sslClientProps = sslClientProps;
    }

    public boolean isHttpsHostnameVerificationEnabled() {
        return httpsHostnameVerificationEnabled;
    }

    /**
     * Whether SSL HostnameVerifier is enabled or not. The default value is true.
     */
    public void setHttpsHostnameVerificationEnabled(boolean httpsHostnameVerificationEnabled) {
        this.httpsHostnameVerificationEnabled = httpsHostnameVerificationEnabled;
    }

    public HostnameVerifier getSslHostnameVerifier() {
        return sslHostnameVerifier;
    }

    /**
     * Sets the HostnameVerifier for the SSL connection. Note that it will be used after handshake on a connection and
     * you should do actions by yourself when hostname is verified error.
     * <p>
     * There is no default HostnameVerifier
     * </p>
     */
    public void setSslHostnameVerifier(HostnameVerifier sslHostnameVerifier) {
        this.sslHostnameVerifier = sslHostnameVerifier;
    }

    public boolean isCleanStart() {
        return cleanStart;
    }

    /**
     * Sets whether the client and server should remember state across restarts and reconnects.
     * <ul>
     * <li>If set to false both the client and server will maintain state across restarts of the client, the server and
     * the connection. As state is maintained:
     * <ul>
     * <li>Message delivery will be reliable meeting the specified QOS even if the client, server or connection are
     * restarted.
     * <li>The server will treat a subscription as durable.
     * </ul>
     * <li>If set to true the client and server will not maintain state across restarts of the client, the server or the
     * connection. This means
     * <ul>
     * <li>Message delivery to the specified QOS cannot be maintained if the client, server or connection are restarted
     * <li>The server will treat a subscription as non-durable
     * </ul>
     * </ul>
     */
    public void setCleanStart(boolean cleanStart) {
        this.cleanStart = cleanStart;
    }

    public int getConnectionTimeout() {
        return connectionTimeout;
    }

    /**
     * Sets the connection timeout value. This value, measured in seconds, defines the maximum time interval the client
     * will wait for the network connection to the MQTT server to be established. The default timeout is 30 seconds. A
     * value of 0 disables timeout processing meaning the client will wait until the network connection is made
     * successfully or fails.
     */
    public void setConnectionTimeout(int connectionTimeout) {
        this.connectionTimeout = connectionTimeout;
    }

    public String getServerURIs() {
        return serverURIs;
    }

    /**
     * Set a list of one or more serverURIs the client may connect to. Multiple servers can be separated by comma.
     * <p>
     * Each <code>serverURI</code> specifies the address of a server that the client may connect to. Two types of
     * connection are supported <code>tcp://</code> for a TCP connection and <code>ssl://</code> for a TCP connection
     * secured by SSL/TLS. For example:
     * <ul>
     * <li><code>tcp://localhost:1883</code></li>
     * <li><code>ssl://localhost:8883</code></li>
     * </ul>
     * If the port is not specified, it will default to 1883 for <code>tcp://</code>" URIs, and 8883 for
     * <code>ssl://</code> URIs.
     * <p>
     * If serverURIs is set then it overrides the serverURI parameter passed in on the constructor of the MQTT client.
     * <p>
     * When an attempt to connect is initiated the client will start with the first serverURI in the list and work
     * through the list until a connection is established with a server. If a connection cannot be made to any of the
     * servers then the connect attempt fails.
     * <p>
     * Specifying a list of servers that a client may connect to has several uses:
     * <ol>
     * <li>High Availability and reliable message delivery
     * <p>
     * Some MQTT servers support a high availability feature where two or more "equal" MQTT servers share state. An MQTT
     * client can connect to any of the "equal" servers and be assured that messages are reliably delivered and durable
     * subscriptions are maintained no matter which server the client connects to.
     * </p>
     * <p>
     * The cleansession flag must be set to false if durable subscriptions and/or reliable message delivery is required.
     * </p>
     * </li>
     * <li>Hunt List
     * <p>
     * A set of servers may be specified that are not "equal" (as in the high availability option). As no state is
     * shared across the servers reliable message delivery and durable subscriptions are not valid. The cleansession
     * flag must be set to true if the hunt list mode is used
     * </p>
     * </li>
     * </ol>
     */
    public void setServerURIs(String serverURIs) {
        this.serverURIs = serverURIs;
    }

    public boolean isAutomaticReconnect() {
        return automaticReconnect;
    }

    /**
     * Sets whether the client will automatically attempt to reconnect to the server if the connection is lost.
     * <ul>
     * <li>If set to false, the client will not attempt to automatically reconnect to the server in the event that the
     * connection is lost.</li>
     * <li>If set to true, in the event that the connection is lost, the client will attempt to reconnect to the server.
     * It will initially wait 1 second before it attempts to reconnect, for every failed reconnect attempt, the delay
     * will double until it is at 2 minutes at which point the delay will stay at 2 minutes.</li>
     * </ul>
     */
    public void setAutomaticReconnect(boolean automaticReconnect) {
        this.automaticReconnect = automaticReconnect;
    }

    public int getMaxReconnectDelay() {
        return maxReconnectDelay;
    }

    /**
     * Get the maximum time (in millis) to wait between reconnects
     */
    public void setMaxReconnectDelay(int maxReconnectDelay) {
        this.maxReconnectDelay = maxReconnectDelay;
    }

    public Map<String, String> getCustomWebSocketHeaders() {
        return customWebSocketHeaders;
    }

    /**
     * Sets the Custom WebSocket Headers for the WebSocket Connection.
     */
    public void setCustomWebSocketHeaders(Map<String, String> customWebSocketHeaders) {
        this.customWebSocketHeaders = customWebSocketHeaders;
    }

    public int getExecutorServiceTimeout() {
        return executorServiceTimeout;
    }

    /**
     * Set the time in seconds that the executor service should wait when terminating before forcefully terminating. It
     * is not recommended to change this value unless you are absolutely sure that you need to.
     */
    public void setExecutorServiceTimeout(int executorServiceTimeout) {
        this.executorServiceTimeout = executorServiceTimeout;
    }

    public long getSessionExpiryInterval() {
        return sessionExpiryInterval;
    }

    /**
     * Sets the Session Expiry Interval.
     *
     * This value, measured in seconds, defines the maximum time that the broker will maintain the session for once the
     * client disconnects. Clients should only connect with a long Session Expiry interval if they intend to connect to
     * the server at some later point in time.
     *
     * By default this value is -1 and so will not be sent, in this case, the session will not expire. If a 0 is sent,
     * the session will end immediately once the Network Connection is closed. When the client has determined that it
     * has no longer any use for the session, it should disconnect with a Session Expiry Interval set to 0.
     */
    public void setSessionExpiryInterval(long sessionExpiryInterval) {
        this.sessionExpiryInterval = sessionExpiryInterval;
    }

    /**
     * Returns a shallow copy of this configuration.
     * NOTE(review): mutable members such as sslClientProps and
     * customWebSocketHeaders are shared between the original and the copy -
     * confirm callers do not mutate them after copying.
     */
    public PahoMqtt5Configuration copy() {
        try {
            return (PahoMqtt5Configuration) clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeCamelException(e);
        }
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.ui.laf.darcula.ui;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.impl.ToolbarComboWidget;
import com.intellij.util.ui.JBEmptyBorder;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import sun.swing.SwingUtilities2;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
public class ToolbarComboWidgetUI extends ComponentUI {
private static final int ELEMENTS_GAP = 5;
private static final int ICONS_GAP = 5;
private static final Icon EXPAND_ICON = AllIcons.General.ChevronDown;
private static final int MIN_TEXT_LENGTH = 5;
private static final int SEPARATOR_WIDTH = 1;
private final HoverAreaTracker hoverTracker = new HoverAreaTracker();
private final ClickListener clickListener = new ClickListener();
// Swing UI-delegate factory contract: looked up reflectively by the UIManager,
// hence the "overrides" of ComponentUI's static method.
@SuppressWarnings("MethodOverridesStaticMethodOfSuperclass")
public static ComponentUI createUI(JComponent c) {
    // stateless per-call delegate; a fresh instance is returned each time
    return new ToolbarComboWidgetUI();
}
@Override
public void installUI(JComponent c) {
    // Applies look-and-feel defaults and attaches the hover and click
    // trackers owned by this delegate to the widget.
    ToolbarComboWidget widget = (ToolbarComboWidget)c;
    setUIDefaults(widget);
    hoverTracker.installTo(widget);
    clickListener.installTo(widget);
}
@Override
public void uninstallUI(JComponent c) {
    // Detaches the trackers installed in installUI.
    hoverTracker.uninstall();
    clickListener.uninstall();
    // NOTE(review): if ClickListener.uninstall() already removes itself from
    // the component, this explicit removal is redundant - confirm against
    // ClickListener.installTo/uninstall (not visible here).
    c.removeMouseListener(clickListener);
}
private static void setUIDefaults(ToolbarComboWidget c) {
c.setBackground(UIManager.getColor("ToolbarComboWidget.background"));
c.setHoverBackground(UIManager.getColor("ToolbarComboWidget.hoverBackground"));
Insets insets = UIManager.getInsets("ToolbarComboWidget.borderInsets");
JBEmptyBorder border = JBUI.Borders.empty(insets.top, insets.left, insets.bottom, insets.right);
c.setBorder(border);
}
@Override
public void paint(Graphics g, JComponent c) {
ToolbarComboWidget combo = (ToolbarComboWidget)c;
if (c.isOpaque()) paintBackground(g, combo);
List<Icon> leftIcons = combo.getLeftIcons();
List<Icon> rightIcons = combo.getRightIcons();
Rectangle paintRect = SwingUtilities.calculateInnerArea(c, null);
Graphics2D g2 = (Graphics2D)g.create(paintRect.x, paintRect.y, paintRect.width, paintRect.height);
try {
if (!leftIcons.isEmpty()) {
Rectangle iconsRect = paintIcons(leftIcons, combo, g2);
doClip(g2, iconsRect.width + ELEMENTS_GAP);
}
String text = combo.getText();
if (!StringUtil.isEmpty(text)) {
int maxTextWidth = calcMaxTextWidth(combo, paintRect);
g2.setColor(c.getForeground());
Rectangle textRect = drawText(c, text, maxTextWidth, g2);
doClip(g2, textRect.width + ELEMENTS_GAP);
}
if (!rightIcons.isEmpty()) {
Rectangle iconsRect = paintIcons(rightIcons, combo, g2);
doClip(g2, iconsRect.width + ELEMENTS_GAP);
}
if (isSeparatorShown(combo)) {
g2.setColor(UIManager.getColor("Separator.separatorColor"));
Rectangle bounds = g2.getClipBounds();
g2.fillRect(bounds.x, bounds.y, SEPARATOR_WIDTH, bounds.height);
doClip(g2, SEPARATOR_WIDTH + ELEMENTS_GAP);
}
paintIcons(Collections.singletonList(EXPAND_ICON), combo, g2);
}
finally {
g2.dispose();
}
}
private void paintBackground(Graphics g, ToolbarComboWidget c) {
Graphics g2 = g.create();
try {
g2.setColor(c.getBackground());
Rectangle bounds = g2.getClipBounds();
g2.fillRect(bounds.x, bounds.y, bounds.width, bounds.height);
Rectangle hoverRect = hoverTracker.getHoverRect();
if (hoverRect != null) {
g2.setColor(c.getHoverBackground());
g2.fillRect(hoverRect.x, hoverRect.y, hoverRect.width, hoverRect.height);
}
}
finally {
g2.dispose();
}
}
private static Rectangle drawText(JComponent c, @NotNull String fullText, int maxWidth, Graphics2D g) {
FontMetrics metrics = c.getFontMetrics(c.getFont());
Rectangle clipBounds = g.getClipBounds();
clipBounds.width = maxWidth;
String text = calcShownText(fullText, metrics, maxWidth);
Rectangle strBounds = metrics.getStringBounds(text, g).getBounds();
strBounds.setLocation((int)(clipBounds.getCenterX() - strBounds.getCenterX()),
(int)(clipBounds.getCenterY() - strBounds.getCenterY()));
SwingUtilities2.drawString(c, g, text, strBounds.x, strBounds.y);
return clipBounds;
}
private static String calcShownText(String text, FontMetrics metrics, int maxWidth) {
int width = metrics.stringWidth(text);
if (width <= maxWidth) return text;
while (width > maxWidth && text.length() > MIN_TEXT_LENGTH) {
text = text.substring(0, text.length() - 1);
width = metrics.stringWidth(text + "...");
}
return text + "...";
}
private static int calcMaxTextWidth(ToolbarComboWidget c, Rectangle paintRect) {
int left = calcIconsWidth(c.getLeftIcons());
if (left > 0) left += ELEMENTS_GAP;
int right = calcIconsWidth(c.getRightIcons());
if (right > 0) right += ELEMENTS_GAP;
int separator = isSeparatorShown(c) ? ELEMENTS_GAP + SEPARATOR_WIDTH : 0;
int otherElementsWidth = left + right + separator + ELEMENTS_GAP + EXPAND_ICON.getIconWidth();
return paintRect.width - otherElementsWidth;
}
private static int calcIconsWidth(List<Icon> icons) {
int res = 0;
for (Icon icon : icons) {
if (res > 0) res += ICONS_GAP;
res += icon.getIconWidth();
}
return res;
}
private static void doClip(Graphics2D g, int shift) {
Rectangle bounds = g.getClipBounds();
g.clipRect(bounds.x + shift, bounds.y, bounds.width - shift, bounds.height);
}
private static Rectangle paintIcons(List<Icon> icons, JComponent c, Graphics g) {
if (icons.isEmpty()) return new Rectangle();
Rectangle bounds = g.getClipBounds();
int maxHeight = 0;
int shift = 0;
for (Icon icon : icons) {
if (shift != 0) shift += ICONS_GAP;
int x = bounds.x + shift;
int y = bounds.y + bounds.height / 2 - icon.getIconHeight() / 2;
icon.paintIcon(c, g, x, y);
shift += icon.getIconWidth();
maxHeight = Math.max(maxHeight, icon.getIconHeight());
}
return new Rectangle(shift, maxHeight);
}
@Override
public Dimension getPreferredSize(JComponent c) {
ToolbarComboWidget combo = (ToolbarComboWidget)c;
Dimension res = new Dimension();
List<Icon> icons = combo.getLeftIcons();
if (!icons.isEmpty()) {
res.width += calcIconsWidth(icons);
res.height = icons.stream().mapToInt(Icon::getIconHeight).max().orElse(0);
}
if (!StringUtil.isEmpty(combo.getText())) {
if (res.width > 0) res.width += ELEMENTS_GAP;
FontMetrics metrics = c.getFontMetrics(c.getFont());
res.width += metrics.stringWidth(combo.getText());
res.height = Math.max(res.height, metrics.getHeight());
}
icons = combo.getRightIcons();
if (!icons.isEmpty()) {
if (res.width > 0) res.width += ELEMENTS_GAP;
res.width += calcIconsWidth(icons);
res.height = Math.max(res.height, icons.stream().mapToInt(Icon::getIconHeight).max().orElse(0));
}
if (isSeparatorShown(combo)) {
if (res.width > 0) res.width += ELEMENTS_GAP;
res.width += SEPARATOR_WIDTH;
}
if (res.width > 0) res.width += ELEMENTS_GAP;
res.width += EXPAND_ICON.getIconWidth();
res.height = Math.max(res.height, EXPAND_ICON.getIconHeight());
Insets insets = c.getInsets();
res.height += insets.top + insets.bottom;
res.width += insets.left + insets.right;
return res;
}
private static boolean isSeparatorShown(ToolbarComboWidget widget) {
return !widget.getPressListeners().isEmpty();
}
//todo minimum size
//todo baseline
private static abstract class MyMouseTracker extends MouseAdapter {
protected ToolbarComboWidget comp;
public void installTo(ToolbarComboWidget c) {
comp = c;
c.addMouseListener(this);
c.addMouseMotionListener(this);
}
public void uninstall() {
comp.removeMouseListener(this);
comp.removeMouseMotionListener(this);
comp = null;
}
}
private static class HoverAreaTracker extends MyMouseTracker {
private Rectangle hoverRect;
private Rectangle getHoverRect() {
return hoverRect;
}
@Override
public void mouseEntered(MouseEvent e) {
calcHoverRect(e.getPoint());
}
@Override
public void mouseExited(MouseEvent e) {
updateHoverRect(null);
}
@Override
public void mouseMoved(MouseEvent e) {
calcHoverRect(e.getPoint());
}
private void calcHoverRect(Point mousePosition) {
Rectangle compBounds = comp.getVisibleRect();
if (!isSeparatorShown(comp)) {
updateHoverRect(compBounds);
return;
}
int rightPart = SEPARATOR_WIDTH + ELEMENTS_GAP + EXPAND_ICON.getIconWidth() + comp.getInsets().right;
Rectangle right = new Rectangle((int)(compBounds.getMaxX() - rightPart), compBounds.y, rightPart, compBounds.height);
Rectangle left = new Rectangle(compBounds.x, compBounds.y, compBounds.width - rightPart + SEPARATOR_WIDTH, compBounds.height);
updateHoverRect(left.contains(mousePosition) ? left : right);
}
private void updateHoverRect(Rectangle newRect) {
if (Objects.equals(hoverRect, newRect)) return;
hoverRect = newRect;
comp.repaint();
}
}
private static class ClickListener extends MyMouseTracker {
@Override
public void mouseClicked(MouseEvent e) {
if (!isSeparatorShown(comp)) {
comp.doExpand(e);
return;
}
int leftPartWidth = comp.getWidth() - (ELEMENTS_GAP + EXPAND_ICON.getIconWidth() + comp.getInsets().right);
if (e.getPoint().x <= leftPartWidth) notifyPressListeners(e);
else comp.doExpand(e);
}
private void notifyPressListeners(MouseEvent e) {
ActionEvent ae = new ActionEvent(comp, 0, null, System.currentTimeMillis(), e.getModifiersEx());
comp.getPressListeners().forEach(listener -> listener.actionPerformed(ae));
}
}
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package com.github.cxfplus.com.sun.xml.bind.v2.runtime;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import com.github.cxfplus.com.sun.xml.bind.api.AccessorException;
import com.github.cxfplus.com.sun.xml.bind.v2.model.core.PropertyKind;
import com.github.cxfplus.com.sun.xml.bind.v2.model.nav.Navigator;
import com.github.cxfplus.com.sun.xml.bind.v2.model.runtime.RuntimeElementInfo;
import com.github.cxfplus.com.sun.xml.bind.v2.model.runtime.RuntimePropertyInfo;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.property.Property;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.property.PropertyFactory;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.property.UnmarshallerChain;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.reflect.Accessor;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller.ChildLoader;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller.Discarder;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller.Intercepter;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller.Loader;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller.TagName;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller.UnmarshallingContext;
import com.github.cxfplus.com.sun.xml.bind.v2.util.QNameMap;
import org.xml.sax.SAXException;
/**
* {@link JaxBeanInfo} implementation for {@link RuntimeElementInfo}.
*
* @author Kohsuke Kawaguchi
*/
public final class ElementBeanInfoImpl extends JaxBeanInfo<JAXBElement> {

    // Built lazily by getLoader() to avoid cyclic references during context construction.
    private Loader loader;

    // Drives (un)marshalling of the element content.
    private final Property property;

    // used to create new instances of JAXBElement.
    private final QName tagName;
    public final Class expectedType;
    private final Class scope;

    /**
     * If non-null, use this to create an instance.
     * It takes one value.
     */
    private final Constructor<? extends JAXBElement> constructor;

    ElementBeanInfoImpl(JAXBContextImpl grammar, RuntimeElementInfo rei) {
        super(grammar,rei,(Class<JAXBElement>)rei.getType(),true,false,true);

        this.property = PropertyFactory.create(grammar,rei.getProperty());

        tagName = rei.getElementName();
        expectedType = Navigator.REFLECTION.erasure(rei.getContentInMemoryType());
        // Global elements use JAXBElement.GlobalScope; local elements carry their declaring class.
        scope = rei.getScope()==null ? JAXBElement.GlobalScope.class : rei.getScope().getClazz().getWrappered();

        Class type = Navigator.REFLECTION.erasure(rei.getType());
        if(type==JAXBElement.class)
            constructor = null;
        else {
            try {
                // A JAXBElement subclass is expected to expose a one-argument
                // constructor taking the content type.
                constructor = type.getConstructor(expectedType);
            } catch (NoSuchMethodException e) {
                // Missing constructor indicates broken generated code, so surface
                // it as an Error (with the original exception preserved as cause).
                NoSuchMethodError x = new NoSuchMethodError("Failed to find the constructor for " + type + " with " + expectedType);
                x.initCause(e);
                throw x;
            }
        }
    }

    /**
     * The constructor for the sole instanceof {@link JaxBeanInfo} for
     * handling user-created {@link JAXBElement}.
     *
     * Such {@link JaxBeanInfo} is used only for marshalling.
     *
     * This is a hack.
     */
    protected ElementBeanInfoImpl(final JAXBContextImpl grammar) {
        super(grammar,null,JAXBElement.class,true,false,true);
        // No element declaration backs this instance; all declaration-derived
        // state stays null and the Property below resolves everything per-element.
        tagName = null;
        expectedType = null;
        scope = null;
        constructor = null;

        this.property = new Property<JAXBElement>() {
            public void reset(JAXBElement o) {
                throw new UnsupportedOperationException();
            }

            public void serializeBody(JAXBElement e, XMLSerializer target, Object outerPeer) throws SAXException, IOException, XMLStreamException {
                // Look up the bean info for this particular element's scope and name.
                Class scope = e.getScope();
                if(e.isGlobalScope()) scope = null;
                QName n = e.getName();
                ElementBeanInfoImpl bi = grammar.getElement(scope,n);
                if(bi==null) {
                    // infer what to do from the type
                    JaxBeanInfo tbi;
                    try {
                        tbi = grammar.getBeanInfo(e.getDeclaredType(),true);
                    } catch (JAXBException x) {
                        // if e.getDeclaredType() isn't known to this JAXBContext
                        target.reportError(null,x);
                        return;
                    }
                    Object value = e.getValue();
                    target.startElement(n.getNamespaceURI(),n.getLocalPart(),n.getPrefix(),null);
                    if(value==null) {
                        // A null value is marshalled as xsi:nil="true".
                        target.writeXsiNilTrue();
                    } else {
                        target.childAsXsiType(value,"value",tbi, false);
                    }
                    target.endElement();
                } else {
                    // Known element declaration: delegate to its property.
                    try {
                        bi.property.serializeBody(e,target,e);
                    } catch (AccessorException x) {
                        target.reportError(null,x);
                    }
                }
            }

            public void serializeURIs(JAXBElement o, XMLSerializer target) {
            }

            public boolean hasSerializeURIAction() {
                return false;
            }

            public String getIdValue(JAXBElement o) {
                return null;
            }

            public PropertyKind getKind() {
                return PropertyKind.ELEMENT;
            }

            // Unmarshalling is not supported through this marshalling-only Property.
            public void buildChildElementUnmarshallers(UnmarshallerChain chain, QNameMap<ChildLoader> handlers) {
            }

            public Accessor getElementPropertyAccessor(String nsUri, String localName) {
                throw new UnsupportedOperationException();
            }

            public void wrapUp() {
            }

            public RuntimePropertyInfo getInfo() {
                return property.getInfo();
            }
        };
    }

    /**
     * Use the previous {@link UnmarshallingContext.State}'s target to store
     * {@link JAXBElement} object to be unmarshalled. This allows the property {@link Loader}
     * to correctly find the parent object.
     * This is a hack.
     */
    private final class IntercepterLoader extends Loader implements Intercepter {
        // The real loader that unmarshals the element content.
        private final Loader core;

        public IntercepterLoader(Loader core) {
            this.core = core;
        }

        public final void startElement(UnmarshallingContext.State state, TagName ea) throws SAXException {
            // Hand further events to the core loader, but register ourselves as
            // intercepter so intercept() runs when the element completes.
            state.loader = core;
            state.intercepter = this;

            // TODO: make sure there aren't too many duplicate of this code
            // create the object to unmarshal
            Object child;
            UnmarshallingContext context = state.getContext();

            // let's see if we can reuse the existing peer object
            child = context.getOuterPeer();

            if(child!=null && jaxbType!=child.getClass())
                child = null;   // unexpected type.

            if(child!=null)
                reset((JAXBElement)child,context);

            if(child==null)
                child = context.createInstance(ElementBeanInfoImpl.this);

            fireBeforeUnmarshal(ElementBeanInfoImpl.this, child, state);

            context.recordOuterPeer(child);
            // Stash the JAXBElement on the PARENT state so the property loader
            // finds it as its target; the original target is saved in `backup`
            // and restored in intercept().
            UnmarshallingContext.State p = state.prev;
            p.backup = p.target;
            p.target = child;

            core.startElement(state,ea);
        }

        public Object intercept(UnmarshallingContext.State state, Object o) throws SAXException {
            // Undo the target swap made in startElement() and finish the element.
            JAXBElement e = (JAXBElement)state.target;
            state.target = state.backup;
            state.backup = null;

            if (state.nil) {
                e.setNil(true);
                state.nil = false;
            }

            if(o!=null)
                // if the value is a leaf type, it's often already set to the element
                // through Accessor.
                e.setValue(o);

            fireAfterUnmarshal(ElementBeanInfoImpl.this, e, state);

            return e;
        }
    }

    public String getElementNamespaceURI(JAXBElement e) {
        return e.getName().getNamespaceURI();
    }

    public String getElementLocalName(JAXBElement e) {
        return e.getName().getLocalPart();
    }

    public Loader getLoader(JAXBContextImpl context, boolean typeSubstitutionCapable) {
        if(loader==null) {
            // this has to be done lazily to avoid cyclic reference issue
            UnmarshallerChain c = new UnmarshallerChain(context);
            QNameMap<ChildLoader> result = new QNameMap<ChildLoader>();
            property.buildChildElementUnmarshallers(c,result);
            if(result.size()==1)
                // for ElementBeanInfoImpl created from RuntimeElementInfo
                this.loader = new IntercepterLoader(result.getOne().getValue().loader);
            else
                // for special ElementBeanInfoImpl only used for marshalling
                this.loader = Discarder.INSTANCE;
        }
        return loader;
    }

    public final JAXBElement createInstance(UnmarshallingContext context) throws IllegalAccessException, InvocationTargetException, InstantiationException {
        // A fresh element starts with a null value; content is set later.
        return createInstanceFromValue(null);
    }

    public final JAXBElement createInstanceFromValue(Object o) throws IllegalAccessException, InvocationTargetException, InstantiationException {
        // Use the subclass constructor when one was resolved, otherwise the
        // generic JAXBElement constructor with the declaration's metadata.
        if(constructor==null)
            return new JAXBElement(tagName,expectedType,scope,o);
        else
            return constructor.newInstance(o);
    }

    public boolean reset(JAXBElement e, UnmarshallingContext context) {
        // Clearing the value is enough to reuse the element as an unmarshalling peer.
        e.setValue(null);
        return true;
    }

    public String getId(JAXBElement e, XMLSerializer target) {
        // TODO: is this OK? Should we be returning the ID value of the type property?
        /*
            There's one case where we JAXBElement needs to be designated as ID,
            and that is when there's a global element whose type is ID.
        */
        Object o = e.getValue();
        if(o instanceof String)
            return (String)o;
        else
            return null;
    }

    public void serializeBody(JAXBElement element, XMLSerializer target) throws SAXException, IOException, XMLStreamException {
        try {
            property.serializeBody(element,target,null);
        } catch (AccessorException x) {
            target.reportError(null,x);
        }
    }

    public void serializeRoot(JAXBElement e, XMLSerializer target) throws SAXException, IOException, XMLStreamException {
        serializeBody(e,target);
    }

    public void serializeAttributes(JAXBElement e, XMLSerializer target) {
        // noop
    }

    public void serializeURIs(JAXBElement e, XMLSerializer target) {
        // noop
    }

    public final Transducer<JAXBElement> getTransducer() {
        return null;
    }

    public void wrapUp() {
        super.wrapUp();
        property.wrapUp();
    }

    public void link(JAXBContextImpl grammar) {
        super.link(grammar);
        getLoader(grammar,true);    // make sure to build them, if we hadn't done so
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.lang.sqlpp.visitor;
import java.io.PrintWriter;
import java.util.List;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.clause.GroupbyClause;
import org.apache.asterix.lang.common.clause.LetClause;
import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
import org.apache.asterix.lang.common.statement.InsertStatement;
import org.apache.asterix.lang.common.visitor.FormatPrintVisitor;
import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
import org.apache.asterix.lang.sqlpp.clause.FromClause;
import org.apache.asterix.lang.sqlpp.clause.FromTerm;
import org.apache.asterix.lang.sqlpp.clause.HavingClause;
import org.apache.asterix.lang.sqlpp.clause.JoinClause;
import org.apache.asterix.lang.sqlpp.clause.NestClause;
import org.apache.asterix.lang.sqlpp.clause.Projection;
import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
import org.apache.asterix.lang.sqlpp.clause.SelectClause;
import org.apache.asterix.lang.sqlpp.clause.SelectElement;
import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
import org.apache.asterix.lang.sqlpp.visitor.base.ISqlppVisitor;
/**
 * Format-printing visitor for the SQL++ AST: renders each visited node as text
 * to the supplied {@link PrintWriter}, extending the base AQL
 * {@link FormatPrintVisitor} with the SQL++-specific clauses (FROM/JOIN/SELECT
 * blocks, set operations, etc.). The Integer argument is the current
 * indentation step passed down to child nodes.
 */
public class SqlppFormatPrintVisitor extends FormatPrintVisitor implements ISqlppVisitor<Void, Integer> {

    private final PrintWriter out;

    public SqlppFormatPrintVisitor() {
        this(new PrintWriter(System.out));
    }

    public SqlppFormatPrintVisitor(PrintWriter out) {
        super(out);
        this.out = out;
        // Initialize symbols
        // SQL++ output uses SQL-flavored keywords instead of the AQL defaults.
        dataverseSymbol = " database ";
        datasetSymbol = " table ";
        assignSymbol = "=";
    }

    @Override
    public Void visit(FromClause fromClause, Integer step) throws AsterixException {
        out.print(skip(step) + "from ");
        int index = 0;
        for (FromTerm fromTerm : fromClause.getFromTerms()) {
            // Separate terms with a comma and continue on an indented new line.
            if (index > 0) {
                out.print(COMMA + "\n" + skip(step + 2));
            }
            fromTerm.accept(this, step + 2);
            ++index;
        }
        out.println();
        return null;
    }

    @Override
    public Void visit(FromTerm fromTerm, Integer step) throws AsterixException {
        // <expr> as <var> [at <pos-var>] followed by any correlate clauses.
        fromTerm.getLeftExpression().accept(this, step + 2);
        out.print(" as ");
        fromTerm.getLeftVariable().accept(this, step + 2);
        if (fromTerm.hasPositionalVariable()) {
            out.print(" at ");
            fromTerm.getPositionalVariable().accept(this, step + 2);
        }
        if (fromTerm.hasCorrelateClauses()) {
            for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
                correlateClause.accept(this, step);
            }
        }
        return null;
    }

    @Override
    public Void visit(JoinClause joinClause, Integer step) throws AsterixException {
        // NOTE(review): no space is printed after the join type — confirm the
        // expression printer emits leading whitespace.
        out.print(joinClause.getJoinType());
        joinClause.getRightExpression().accept(this, step + 2);
        out.print(" as ");
        joinClause.getRightVariable().accept(this, step + 2);
        if (joinClause.hasPositionalVariable()) {
            out.print(" at ");
            joinClause.getPositionalVariable().accept(this, step + 2);
        }
        joinClause.getConditionExpression().accept(this, step + 2);
        return null;
    }

    @Override
    public Void visit(NestClause nestClause, Integer step) throws AsterixException {
        // Same shape as a join, but "as"/"at" are printed on indented new lines.
        out.print(nestClause.getJoinType());
        nestClause.getRightExpression().accept(this, step + 2);
        out.println(skip(step + 1) + " as ");
        nestClause.getRightVariable().accept(this, step + 2);
        if (nestClause.hasPositionalVariable()) {
            out.println(skip(step + 1) + " at ");
            nestClause.getPositionalVariable().accept(this, step + 2);
        }
        nestClause.getConditionExpression().accept(this, step + 2);
        return null;
    }

    @Override
    public Void visit(Projection projection, Integer step) throws AsterixException {
        projection.getExpression().accept(this, step);
        out.print(" as " + projection.getName());
        return null;
    }

    @Override
    public Void visit(SelectBlock selectBlock, Integer step) throws AsterixException {
        // Print in clause order: select, from, let, where, group by (+post-group
        // lets), having. Optional clauses are skipped when absent.
        selectBlock.getSelectClause().accept(this, step);
        if (selectBlock.hasFromClause()) {
            selectBlock.getFromClause().accept(this, step);
        }
        if (selectBlock.hasLetClauses()) {
            for (LetClause letClause : selectBlock.getLetList()) {
                letClause.accept(this, step);
            }
        }
        if (selectBlock.hasWhereClause()) {
            selectBlock.getWhereClause().accept(this, step);
        }
        if (selectBlock.hasGroupbyClause()) {
            selectBlock.getGroupbyClause().accept(this, step);
            if (selectBlock.hasLetClausesAfterGroupby()) {
                for (LetClause letClause : selectBlock.getLetListAfterGroupby()) {
                    letClause.accept(this, step);
                }
            }
        }
        if (selectBlock.hasHavingClause()) {
            selectBlock.getHavingClause().accept(this, step);
        }
        return null;
    }

    @Override
    public Void visit(SelectClause selectClause, Integer step) throws AsterixException {
        // A select clause is either "select ..." (regular) or "select element ...".
        if (selectClause.selectRegular()) {
            selectClause.getSelectRegular().accept(this, step);
        }
        if (selectClause.selectElement()) {
            selectClause.getSelectElement().accept(this, step);
        }
        out.println();
        return null;
    }

    @Override
    public Void visit(SelectElement selectElement, Integer step) throws AsterixException {
        out.print("select element ");
        selectElement.getExpression().accept(this, step);
        return null;
    }

    @Override
    public Void visit(SelectRegular selectRegular, Integer step) throws AsterixException {
        out.print("select ");
        int index = 0;
        for (Projection projection : selectRegular.getProjections()) {
            // Comma-separate the projection list.
            if (index > 0) {
                out.print(COMMA);
            }
            projection.accept(this, step);
            ++index;
        }
        return null;
    }

    @Override
    public Void visit(SelectSetOperation selectSetOperation, Integer step) throws AsterixException {
        selectSetOperation.getLeftInput().accept(this, step);
        if (selectSetOperation.hasRightInputs()) {
            for (SetOperationRight right : selectSetOperation.getRightInputs()) {
                // Set semantics prints just the operator; bag semantics adds "all".
                String all = right.isSetSemantics() ? " " : " all ";
                out.print(right.getSetOpType() + all);
                right.getSetOperationRightInput().accept(this, step);
            }
        }
        return null;
    }

    @Override
    public Void visit(SelectExpression selectStatement, Integer step) throws AsterixException {
        // Subqueries are parenthesized and indented two extra steps.
        if (selectStatement.isSubquery()) {
            out.print("(");
        }
        int selectStep = selectStatement.isSubquery() ? step + 2 : step;
        if (selectStatement.hasLetClauses()) {
            for (LetClause letClause : selectStatement.getLetList()) {
                letClause.accept(this, selectStep);
            }
        }
        selectStatement.getSelectSetOperation().accept(this, selectStep);
        if (selectStatement.hasOrderby()) {
            selectStatement.getOrderbyClause().accept(this, selectStep);
        }
        if (selectStatement.hasLimit()) {
            selectStatement.getLimitClause().accept(this, selectStep);
        }
        if (selectStatement.isSubquery()) {
            out.print(skip(step) + " )");
        }
        return null;
    }

    @Override
    public Void visit(UnnestClause unnestClause, Integer step) throws AsterixException {
        // NOTE(review): like JoinClause, no space follows the join type here.
        out.print(unnestClause.getJoinType());
        unnestClause.getRightExpression().accept(this, step + 2);
        out.print(" as ");
        unnestClause.getRightVariable().accept(this, step + 2);
        if (unnestClause.hasPositionalVariable()) {
            out.print(" at ");
            unnestClause.getPositionalVariable().accept(this, step + 2);
        }
        return null;
    }

    @Override
    public Void visit(HavingClause havingClause, Integer step) throws AsterixException {
        out.print(skip(step) + " having ");
        havingClause.getFilterExpression().accept(this, step + 2);
        out.println();
        return null;
    }

    @Override
    public Void visit(GroupbyClause gc, Integer step) throws AsterixException {
        // The hash-group-by hint is emitted as a comment before the clause.
        if (gc.hasHashGroupByHint()) {
            out.println(skip(step) + "/* +hash */");
        }
        out.print(skip(step) + "group by ");
        printDelimitedGbyExpressions(gc.getGbyPairList(), step + 2);
        out.println();
        return null;
    }

    @Override
    public Void visit(InsertStatement insert, Integer step) throws AsterixException {
        out.print(skip(step) + "insert into " + datasetSymbol
                + generateFullName(insert.getDataverseName(), insert.getDatasetName()) + "\n");
        insert.getQuery().accept(this, step);
        out.println(SEMICOLON);
        return null;
    }

    @Override
    public Void visit(LetClause lc, Integer step) throws AsterixException {
        // SQL++ renders a let binding as: with <expr> as <var>
        out.print(skip(step) + "with ");
        Expression bindingExpr = lc.getBindingExpr();
        bindingExpr.accept(this, step + 2);
        out.print(" as ");
        lc.getVarExpr().accept(this, step + 2);
        out.println();
        return null;
    }

    @Override
    protected void printDelimitedGbyExpressions(List<GbyVariableExpressionPair> gbyList, int step)
            throws AsterixException {
        // Prints "<expr> [as <var>]" pairs, comma-separated (no trailing comma).
        int gbySize = gbyList.size();
        int gbyIndex = 0;
        for (GbyVariableExpressionPair pair : gbyList) {
            pair.getExpr().accept(this, step);
            if (pair.getVar() != null) {
                out.print(" as ");
                pair.getVar().accept(this, step);
            }
            if (++gbyIndex < gbySize) {
                out.print(COMMA);
            }
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2014-2-28 @author <a href="mailto:iffiff1@hotmail.com">Tyler Chen</a>.
* All rights reserved.
*
* Contributors:
* <a href="mailto:iffiff1@hotmail.com">Tyler Chen</a> - initial API and implementation
******************************************************************************/
package com.foreveross.common.module.security.application.impl;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.jasig.cas.client.validation.Assertion;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.ConfigAttribute;
import org.springframework.security.access.SecurityConfig;
import org.springframework.security.access.SecurityMetadataSource;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.cas.authentication.CasAssertionAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.web.FilterInvocation;
import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource;
import com.foreveross.common.module.security.application.SecurityAllInOneApplication;
import com.foreveross.common.module.security.application.SecurityAuthorizationDataApplication;
import com.foreveross.infra.util.Assert;
import com.foreveross.infra.util.MapHelper;
/**
* @author <a href="mailto:iffiff1@hotmail.com">Tyler Chen</a>
* @since 2014-2-28
*/
@SuppressWarnings("unchecked")
public class SecurityAllInOneApplicationImpl implements
SecurityAllInOneApplication {
SecurityAuthorizationDataApplication securityAuthorizationDataApplication;
Map<String, Object> resourceCache = new HashMap<String, Object>(1024);
Map<String, Object> userCache = new HashMap<String, Object>(1024);
/** Replaces the resource→roles cache (e.g. to plug in an externally managed map). */
public void setResourceCache(Map<String, Object> resourceCache) {
    this.resourceCache = resourceCache;
}
/** Replaces the username→UserDetails cache populated during authentication. */
public void setUserCache(Map<String, Object> userCache) {
    this.userCache = userCache;
}
/** Injects the data application that supplies accounts, roles and resource mappings. */
public void setSecurityAuthorizationDataApplication(
        SecurityAuthorizationDataApplication securityAuthorizationDataApplication) {
    this.securityAuthorizationDataApplication = securityAuthorizationDataApplication;
}
/** Delegates the account lookup (as a column→value map) to the data application. */
public Map<String, Object> findAccountByUsernameInMap(String username) {
    return securityAuthorizationDataApplication
            .findAccountByUsernameInMap(username);
}
/**
 * Wraps the raw account map and its granted authorities into a
 * {@link CustomUserDetails} holder.
 *
 * The unchecked casts mirror the shapes produced by the data application.
 */
public UserDetails getUserDetail(Map<?, ?> userInMap,
        Collection<?> grantedAuthorities) {
    CustomUserDetails details = new CustomUserDetails();
    details.userMap = (Map<String, Object>) userInMap;
    details.grantedAuthorities = (Collection<? extends GrantedAuthority>) grantedAuthorities;
    return details;
}
/** Delegates the role-name lookup for a user to the data application. */
public List<String> findRoleNameByUsername(String username) {
    return securityAuthorizationDataApplication
            .findRoleNameByUsername(username);
}
/** Delegates the full resource-name→role-names mapping lookup to the data application. */
public Map<String, List<String>> findAllResourceNameAndRoleNameInResourceRolesMap() {
    return securityAuthorizationDataApplication
            .findAllResourceNameAndRoleNameInResourceRolesMap();
}
//===================================
public Authentication authenticate_AuthenticationProvider(
Authentication authentication) throws AuthenticationException {
String username = authentication.getPrincipal().toString();
String password = authentication.getCredentials().toString();
UserDetails userDetails = null;
{
Map<String, Object> account = findAccountByUsernameInMap(username);
if (userDetails == null || account.isEmpty()) {
throw new UsernameNotFoundException("Username not found.");
}
if (!password.equals(encode_PasswordEncoder((String) account
.get("PASSWORD")))) {
throw new BadCredentialsException("Password is not correct.");
}
List<GrantedAuthority> grantedAuthorities = new ArrayList<GrantedAuthority>();
for (String role : findRoleNameByUsername(username)) {
SimpleGrantedAuthority gai = new SimpleGrantedAuthority(role);
grantedAuthorities.add(gai);
}
userDetails = getUserDetail(account, grantedAuthorities);
userCache.put(username, userDetails);
}
{
UsernamePasswordAuthenticationToken result = new UsernamePasswordAuthenticationToken(
userDetails, authentication.getCredentials(), userDetails
.getAuthorities());
result.setDetails(authentication.getDetails());
return result;
}
}
public boolean supports_AuthenticationProvider(Class<?> paramClass) {
return true;
}
//===================================
public void afterPropertiesSet_InitializingBean() throws Exception {
}
//===================================
public String encode_PasswordEncoder(String password) {
return password;
}
//===================================org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource
public Collection<ConfigAttribute> getAllConfigAttributes_FilterInvocationSecurityMetadataSource() {
return null;
}
public Collection<ConfigAttribute> getAttributes_FilterInvocationSecurityMetadataSource(
Object object) throws IllegalArgumentException {
if (resourceCache.isEmpty()) {
Map<String, List<String>> map = findAllResourceNameAndRoleNameInResourceRolesMap();
resourceCache.putAll(map);
}
String url = ((FilterInvocation) object).getRequestUrl();
StringBuilder tempUrl = new StringBuilder(url);
{
int position = tempUrl.indexOf("?");
if (position != -1) {
url = url.substring(0, position);
tempUrl.delete(position - 1, tempUrl.length());
}
}
Collection<ConfigAttribute> attris = new ArrayList<ConfigAttribute>();
while (true) {
List<String> roles = (List<String>) resourceCache.get(tempUrl
.toString());
if (roles != null) {
for (String role : roles) {
attris.add(new SecurityConfig(role));
}
return attris;
} else {
if (tempUrl.charAt(tempUrl.length() - 1) == '*') {// process "/*" and "/**" situation
if (tempUrl.charAt(tempUrl.length() - 2) == '/') {// process "/??/*" -> "/??/**"
tempUrl.append('*');
continue;
} else {// process "/??/**" -> "/**"
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1
&& (lastSpash = tempUrl.lastIndexOf("/",
lastSpash - 1)) > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(),
"**");
continue;
}
}
} else {// process "/??/url" -> "/??/*"
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(), "*");
continue;
}
}
}
break;
}
if (attris.isEmpty()) {
attris.add(new SecurityConfig("ROLE_NOBODY"));
}
return attris;
}
public boolean supports_FilterInvocationSecurityMetadataSource(
Class<?> clazz) {
return true;
}
//=======================================org.springframework.security.access.intercept.AbstractSecurityInterceptor
FilterInvocationSecurityMetadataSource securityMetadataSource;
public void setSecurityMetadataSource(
FilterInvocationSecurityMetadataSource securityMetadataSource) {
this.securityMetadataSource = securityMetadataSource;
}
public Class<? extends Object> getSecureObjectClass_AbstractSecurityInterceptor() {
return FilterInvocation.class;
}
public SecurityMetadataSource obtainSecurityMetadataSource_AbstractSecurityInterceptor() {
return this.securityMetadataSource;
}
//=======================================javax.servlet.Filter
public void destroy_Filter() {
}
public void doFilter_Filter(ServletRequest request,
ServletResponse response, FilterChain chain) throws IOException,
ServletException {
FilterInvocation fi = new FilterInvocation(request, response, chain);
//InterceptorStatusToken token = null;
try {
//token = super.beforeInvocation(fi);
fi.getChain().doFilter(fi.getRequest(), fi.getResponse());
} finally {
//super.afterInvocation(token, null);
}
}
public void init_Filter(FilterConfig config) throws ServletException {
}
//=======================================org.springframework.security.access.AccessDecisionManager
public void decide_AccessDecisionManager(Authentication authentication,
Object url, Collection<ConfigAttribute> configAttributes) {
if (configAttributes == null || configAttributes.isEmpty()) {
return;
}
Object principal = authentication.getPrincipal();
if (principal == null || !(principal instanceof UserDetails)) {
throw new AccessDeniedException(MessageFormat.format(
"Denied to access [{0}][{1}]", url, principal));
}
UserDetails userDetails = (UserDetails) principal;
for (ConfigAttribute configAttribute : configAttributes) {
for (GrantedAuthority gAuthority : userDetails.getAuthorities()) {
if (configAttribute.getAttribute().trim().equals(
gAuthority.getAuthority().trim())) {
return;
}
}
}
for (ConfigAttribute configAttribute : configAttributes) {
for (GrantedAuthority gAuthority : authentication.getAuthorities()) {
if (configAttribute.getAttribute().trim().equals(
gAuthority.getAuthority().trim())) {
return;
}
}
}
throw new AccessDeniedException(MessageFormat.format(
"Denied to access [{0}]", url));
}
public boolean supports_AccessDecisionManager(
ConfigAttribute configAttribute) {
return true;
}
public boolean supports_AccessDecisionManager(Class<?> clazz) {
return true;
}
//===================================cas
//===================================org.springframework.security.core.userdetails.UserDetailsService
//===================================org.springframework.security.core.userdetails.AuthenticationUserDetailsService<CasAssertionAuthenticationToken>
public UserDetails loadUserDetails_UserDetailsService_AuthenticationUserDetailsService(
Object param) {
Assert.notNull(param);
Assert.notBlank(param.toString());
String username = param.toString();
UserDetails userDetails = null;
{
Map<String, Object> account = findAccountByUsernameInMap(username);
if (account == null || account.isEmpty()) {
return userDetails;
}
List<GrantedAuthority> grantedAuthorities = new ArrayList<GrantedAuthority>();
for (String role : findRoleNameByUsername(username)) {
SimpleGrantedAuthority gai = new SimpleGrantedAuthority(role);
grantedAuthorities.add(gai);
}
userDetails = getUserDetail(account, grantedAuthorities);
}
return userDetails;
}
//===================================org.springframework.security.cas.authentication.CasAuthenticationProvider
public UserDetails loadUserByAssertion_CasAuthenticationProvider(
Assertion assertion) {//after CAS has valid
CasAssertionAuthenticationToken token = new CasAssertionAuthenticationToken(
assertion, "");
UserDetails userDetails = loadUserDetails_UserDetailsService_AuthenticationUserDetailsService(token
.getName());
// userDetails may be null
if (userDetails == null) {
userDetails = getUserDetail(MapHelper.toMap("USERNAME", token
.getName()), Collections.EMPTY_LIST);
}
return userDetails;
}
@SuppressWarnings("serial")
public class CustomUserDetails implements UserDetails {
Map<String, Object> userMap = new HashMap<String, Object>();
Collection<? extends GrantedAuthority> grantedAuthorities = new ArrayList<GrantedAuthority>();
public boolean isEnabled() {
return true;
}
public boolean isCredentialsNonExpired() {
return true;
}
public boolean isAccountNonLocked() {
return true;
}
public boolean isAccountNonExpired() {
return true;
}
public String getUsername() {
return (String) userMap.get("USERNAME");
}
public String getPassword() {
return (String) userMap.get("PASSWORD");
}
public Collection<? extends GrantedAuthority> getAuthorities() {
return grantedAuthorities;
}
}
public static void main(String[] args) {
String url = "/a/b/c/d/e?a=1";
System.out.println(Arrays.toString(url.split("/")));
System.out.println(url.substring(0, url.lastIndexOf('/', url
.lastIndexOf('/') - 1)));
StringBuilder tempUrl = new StringBuilder(url);
while (true) {
System.out.println(tempUrl);
{
if (tempUrl.charAt(tempUrl.length() - 1) == '*') {// process "/*" and "/**" situation
if (tempUrl.charAt(tempUrl.length() - 2) == '/') {// process "/*"
tempUrl.append('*');
continue;
} else {// process "/**" situation
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1
&& (lastSpash = tempUrl.lastIndexOf("/",
lastSpash - 1)) > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(),
"**");
continue;
}
}
} else {// process "/url" situation
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(), "*");
continue;
}
}
}
break;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.marshal;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.cassandra.serializers.TypeSerializer;
import org.apache.cassandra.serializers.BytesSerializer;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
* A class avoiding class duplication between CompositeType and
* DynamicCompositeType.
* Those two differs only in that for DynamicCompositeType, the comparators
* are in the encoded column name at the front of each component.
*/
/**
 * A class avoiding class duplication between CompositeType and
 * DynamicCompositeType.
 * Those two differ only in that for DynamicCompositeType, the comparators
 * are in the encoded column name at the front of each component.
 *
 * Serialized layout per component:
 * [optional comparator info][2-byte length][value bytes][1-byte end-of-component].
 */
public abstract class AbstractCompositeType extends AbstractType<ByteBuffer>
{
    /**
     * Compares two serialized composites component by component: null/empty
     * buffers sort first, then static names, then each component's value
     * (via its comparator), with the end-of-component byte breaking ties,
     * and finally the shorter composite sorting first.
     */
    public int compare(ByteBuffer o1, ByteBuffer o2)
    {
        if (o1 == null || !o1.hasRemaining())
            return o2 == null || !o2.hasRemaining() ? 0 : -1;
        if (o2 == null || !o2.hasRemaining())
            return 1;
        // Work on duplicates so the callers' buffer positions are untouched.
        ByteBuffer bb1 = o1.duplicate();
        ByteBuffer bb2 = o2.duplicate();
        boolean isStatic1 = readIsStatic(bb1);
        boolean isStatic2 = readIsStatic(bb2);
        if (isStatic1 != isStatic2)
            return isStatic1 ? -1 : 1;
        int i = 0;
        ByteBuffer previous = null;
        while (bb1.remaining() > 0 && bb2.remaining() > 0)
        {
            AbstractType<?> comparator = getComparator(i, bb1, bb2);
            ByteBuffer value1 = ByteBufferUtil.readBytesWithShortLength(bb1);
            ByteBuffer value2 = ByteBufferUtil.readBytesWithShortLength(bb2);
            int cmp = comparator.compareCollectionMembers(value1, value2, previous);
            if (cmp != 0)
                return cmp;
            previous = value1;
            // end-of-component bytes break the tie when the values are equal
            byte b1 = bb1.get();
            byte b2 = bb2.get();
            if (b1 != b2)
                return b1 - b2;
            ++i;
        }
        if (bb1.remaining() == 0)
            return bb2.remaining() == 0 ? 0 : -1;
        // bb1.remaining() > 0 && bb2.remaining() == 0
        return 1;
    }

    // Check if the provided BB represents a static name and advance the
    // buffer to the real beginning if so.
    protected abstract boolean readIsStatic(ByteBuffer bb);

    /**
     * Split a composite column name into its components.
     */
    public ByteBuffer[] split(ByteBuffer name)
    {
        List<ByteBuffer> l = new ArrayList<ByteBuffer>();
        ByteBuffer bb = name.duplicate();
        readIsStatic(bb);
        int i = 0;
        while (bb.remaining() > 0)
        {
            getComparator(i++, bb);
            l.add(ByteBufferUtil.readBytesWithShortLength(bb));
            bb.get(); // skip end-of-component
        }
        return l.toArray(new ByteBuffer[l.size()]);
    }

    /** A (comparator, value) pair for one component of a composite name. */
    public static class CompositeComponent
    {
        public AbstractType<?> comparator;
        public ByteBuffer value;

        public CompositeComponent( AbstractType<?> comparator, ByteBuffer value )
        {
            this.comparator = comparator;
            this.value = value;
        }
    }

    /** Splits a serialized composite into its typed components. */
    public List<CompositeComponent> deconstruct( ByteBuffer bytes )
    {
        List<CompositeComponent> list = new ArrayList<CompositeComponent>();
        ByteBuffer bb = bytes.duplicate();
        readIsStatic(bb);
        int i = 0;
        while (bb.remaining() > 0)
        {
            // FIX: was a raw AbstractType; the wildcard type matches
            // CompositeComponent's constructor and removes the raw-type warning.
            AbstractType<?> comparator = getComparator(i, bb);
            ByteBuffer value = ByteBufferUtil.readBytesWithShortLength(bb);
            list.add( new CompositeComponent(comparator,value) );
            bb.get(); // skip the end-of-component byte; not relevant here
            ++i;
        }
        return list;
    }

    /*
     * Escapes all occurences of the ':' character from the input, replacing them by "\:".
     * Furthermore, if the last character is '\' or '!', a '!' is appended.
     */
    static String escape(String input)
    {
        if (input.isEmpty())
            return input;
        String res = input.replaceAll(":", "\\\\:");
        char last = res.charAt(res.length() - 1);
        return last == '\\' || last == '!' ? res + '!' : res;
    }

    /*
     * Reverses the effect of escape().
     * Replaces all occurences of "\:" by ":" and remove last character if it is '!'.
     */
    static String unescape(String input)
    {
        if (input.isEmpty())
            return input;
        String res = input.replaceAll("\\\\:", ":");
        char last = res.charAt(res.length() - 1);
        return last == '!' ? res.substring(0, res.length() - 1) : res;
    }

    /*
     * Split the input on character ':', unless the previous character is '\'.
     */
    static List<String> split(String input)
    {
        if (input.isEmpty())
            return Collections.<String>emptyList();
        List<String> res = new ArrayList<String>();
        int prev = 0;
        for (int i = 0; i < input.length(); i++)
        {
            if (input.charAt(i) != ':' || (i > 0 && input.charAt(i-1) == '\\'))
                continue;
            res.add(input.substring(prev, i));
            prev = i + 1;
        }
        res.add(input.substring(prev, input.length()));
        return res;
    }

    /**
     * Renders a serialized composite as a human-readable, colon-separated
     * string; a non-zero end-of-component byte is rendered as a trailing ":!".
     */
    public String getString(ByteBuffer bytes)
    {
        StringBuilder sb = new StringBuilder();
        ByteBuffer bb = bytes.duplicate();
        readIsStatic(bb);
        int i = 0;
        while (bb.remaining() > 0)
        {
            if (bb.remaining() != bytes.remaining())
                sb.append(":");
            AbstractType<?> comparator = getAndAppendComparator(i, bb, sb);
            ByteBuffer value = ByteBufferUtil.readBytesWithShortLength(bb);
            sb.append(escape(comparator.getString(value)));
            byte b = bb.get();
            if (b != 0)
            {
                sb.append(":!");
                break;
            }
            ++i;
        }
        return sb.toString();
    }

    /**
     * Inverse of getString(): parses the colon-separated form back into the
     * serialized composite layout, honoring a trailing "!" component.
     */
    public ByteBuffer fromString(String source)
    {
        List<String> parts = split(source);
        List<ByteBuffer> components = new ArrayList<ByteBuffer>(parts.size());
        List<ParsedComparator> comparators = new ArrayList<ParsedComparator>(parts.size());
        int totalLength = 0, i = 0;
        boolean lastByteIsOne = false;
        for (String part : parts)
        {
            if (part.equals("!"))
            {
                lastByteIsOne = true;
                break;
            }
            ParsedComparator p = parseComparator(i, part);
            AbstractType<?> type = p.getAbstractType();
            part = p.getRemainingPart();
            ByteBuffer component = type.fromString(unescape(part));
            // comparator info + 2-byte length + value + end-of-component byte
            totalLength += p.getComparatorSerializedSize() + 2 + component.remaining() + 1;
            components.add(component);
            comparators.add(p);
            ++i;
        }
        ByteBuffer bb = ByteBuffer.allocate(totalLength);
        i = 0;
        for (ByteBuffer component : components)
        {
            comparators.get(i).serializeComparator(bb);
            ByteBufferUtil.writeShortLength(bb, component.remaining());
            bb.put(component); // it's ok to consume component as we won't use it anymore
            bb.put((byte)0);
            ++i;
        }
        if (lastByteIsOne)
            bb.put(bb.limit() - 1, (byte)1);
        bb.rewind();
        return bb;
    }

    /**
     * Validates that the buffer is a well-formed composite: every component
     * has its length, value and end-of-component byte, and a non-zero
     * end-of-component only appears on the last component.
     */
    @Override
    public void validate(ByteBuffer bytes) throws MarshalException
    {
        ByteBuffer bb = bytes.duplicate();
        readIsStatic(bb);
        int i = 0;
        ByteBuffer previous = null;
        while (bb.remaining() > 0)
        {
            AbstractType<?> comparator = validateComparator(i, bb);
            if (bb.remaining() < 2)
                throw new MarshalException("Not enough bytes to read value size of component " + i);
            int length = ByteBufferUtil.readShortLength(bb);
            if (bb.remaining() < length)
                throw new MarshalException("Not enough bytes to read value of component " + i);
            ByteBuffer value = ByteBufferUtil.readBytes(bb, length);
            comparator.validateCollectionMember(value, previous);
            if (bb.remaining() == 0)
                throw new MarshalException("Not enough bytes to read the end-of-component byte of component" + i);
            byte b = bb.get();
            if (b != 0 && bb.remaining() != 0)
                throw new MarshalException("Invalid bytes remaining after an end-of-component at component" + i);
            previous = value;
            ++i;
        }
    }

    public abstract ByteBuffer decompose(Object... objects);

    public TypeSerializer<ByteBuffer> getSerializer()
    {
        return BytesSerializer.instance;
    }

    /**
     * @return the comparator for the given component. static CompositeType will consult
     * @param i DynamicCompositeType will read the type information from @param bb
     * @param bb name of type definition
     */
    abstract protected AbstractType<?> getComparator(int i, ByteBuffer bb);

    /**
     * Adds DynamicCompositeType type information from @param bb1 to @param bb2.
     * @param i is ignored.
     */
    abstract protected AbstractType<?> getComparator(int i, ByteBuffer bb1, ByteBuffer bb2);

    /**
     * Adds type information from @param bb to @param sb. @param i is ignored.
     */
    abstract protected AbstractType<?> getAndAppendComparator(int i, ByteBuffer bb, StringBuilder sb);

    /**
     * Like getComparator, but validates that @param i does not exceed the defined range
     */
    abstract protected AbstractType<?> validateComparator(int i, ByteBuffer bb) throws MarshalException;

    /**
     * Used by fromString
     */
    abstract protected ParsedComparator parseComparator(int i, String part);

    /** Result of parsing one textual component: its type plus serialization hooks. */
    protected static interface ParsedComparator
    {
        AbstractType<?> getAbstractType();
        String getRemainingPart();
        int getComparatorSerializedSize();
        void serializeComparator(ByteBuffer bb);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/scheduler/v1/cloudscheduler.proto
package com.google.cloud.scheduler.v1;
/**
*
*
* <pre>
* Request message for deleting a job using
* [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob].
* </pre>
*
* Protobuf type {@code google.cloud.scheduler.v1.DeleteJobRequest}
*/
public final class DeleteJobRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.scheduler.v1.DeleteJobRequest)
DeleteJobRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteJobRequest.newBuilder() to construct.
  /** Builder-based constructor used by {@code Builder.buildPartial()}. */
  private DeleteJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  /** No-arg constructor for the default instance; name_ defaults to "". */
  private DeleteJobRequest() {
    name_ = "";
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    // Fields not recognized during parsing are preserved here for round-tripping.
    return this.unknownFields;
  }
  /**
   * Wire-format parsing constructor: reads tag/value pairs from {@code input}
   * until end of stream (tag 0), preserving unrecognized fields.
   */
  private DeleteJobRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10: // field 1 (name), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          default:
            {
              // Unknown field: keep it so it survives re-serialization.
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  /** Returns the proto descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.scheduler.v1.SchedulerProto
        .internal_static_google_cloud_scheduler_v1_DeleteJobRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the descriptor's fields to this class's reflective accessors.
    return com.google.cloud.scheduler.v1.SchedulerProto
        .internal_static_google_cloud_scheduler_v1_DeleteJobRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.scheduler.v1.DeleteJobRequest.class,
            com.google.cloud.scheduler.v1.DeleteJobRequest.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted on first access.
  private volatile java.lang.Object name_;
  /**
   * <pre>
   * Required.
   * The job name. For example:
   * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // cache the decoded String form
      return s;
    }
  }
  /**
   * Returns the name as UTF-8 bytes.
   *
   * <pre>
   * Required.
   * The job name. For example:
   * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b; // cache the encoded ByteString form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2 fields, so a proto3 message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3: default (empty) values are not written to the wire.
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // memoized
    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.scheduler.v1.DeleteJobRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.scheduler.v1.DeleteJobRequest other =
        (com.google.cloud.scheduler.v1.DeleteJobRequest) obj;
    // Field-by-field comparison, including unknown fields.
    boolean result = true;
    result = result && getName().equals(other.getName());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode; // memoized; 0 means "not yet computed"
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Standard generated parseFrom overloads; all delegate to PARSER. ----
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.scheduler.v1.DeleteJobRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a fresh builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.cloud.scheduler.v1.DeleteJobRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for deleting a job using
* [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob].
* </pre>
*
* Protobuf type {@code google.cloud.scheduler.v1.DeleteJobRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.scheduler.v1.DeleteJobRequest)
com.google.cloud.scheduler.v1.DeleteJobRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.scheduler.v1.SchedulerProto
.internal_static_google_cloud_scheduler_v1_DeleteJobRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.scheduler.v1.SchedulerProto
.internal_static_google_cloud_scheduler_v1_DeleteJobRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.scheduler.v1.DeleteJobRequest.class,
com.google.cloud.scheduler.v1.DeleteJobRequest.Builder.class);
}
// Construct using com.google.cloud.scheduler.v1.DeleteJobRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.scheduler.v1.SchedulerProto
.internal_static_google_cloud_scheduler_v1_DeleteJobRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.scheduler.v1.DeleteJobRequest getDefaultInstanceForType() {
return com.google.cloud.scheduler.v1.DeleteJobRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.scheduler.v1.DeleteJobRequest build() {
com.google.cloud.scheduler.v1.DeleteJobRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.scheduler.v1.DeleteJobRequest buildPartial() {
com.google.cloud.scheduler.v1.DeleteJobRequest result =
new com.google.cloud.scheduler.v1.DeleteJobRequest(this);
result.name_ = name_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.scheduler.v1.DeleteJobRequest) {
return mergeFrom((com.google.cloud.scheduler.v1.DeleteJobRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.scheduler.v1.DeleteJobRequest other) {
if (other == com.google.cloud.scheduler.v1.DeleteJobRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.scheduler.v1.DeleteJobRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.scheduler.v1.DeleteJobRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required.
* The job name. For example:
* `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
* </pre>
*
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Required.
 * The job name. For example:
 * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
 * </pre>
 *
 * <code>string name = 1;</code>
 */
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof String) {
    // Mirror of getName(): encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Required.
 * The job name. For example:
 * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @throws NullPointerException if {@code value} is null (proto3 fields are
 *     null-hostile; clear with {@link #clearName} instead)
 */
public Builder setName(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  name_ = value;
  // Notify parent builders after the mutation, per builder contract.
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required.
 * The job name. For example:
 * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
 * </pre>
 *
 * <code>string name = 1;</code>
 */
public Builder clearName() {
  // Reset to the field's default value (empty string) from the default instance.
  name_ = getDefaultInstance().getName();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required.
 * The job name. For example:
 * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @throws NullPointerException if {@code value} is null
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Validate before assigning: proto3 string fields must hold valid UTF-8.
  checkByteStringIsUtf8(value);
  name_ = value;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // proto3 variant: replaces (rather than merges) the unknown field set.
  return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Accumulates unknown fields on top of any already present.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.scheduler.v1.DeleteJobRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.DeleteJobRequest)
// Shared immutable default instance; identity-compared in mergeFrom().
private static final com.google.cloud.scheduler.v1.DeleteJobRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.scheduler.v1.DeleteJobRequest();
}

/** Returns the singleton default (all-fields-default) instance. */
public static com.google.cloud.scheduler.v1.DeleteJobRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by all callers; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<DeleteJobRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteJobRequest>() {
      @java.lang.Override
      public DeleteJobRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteJobRequest(input, extensionRegistry);
      }
    };

/** Returns the shared parser for {@code DeleteJobRequest} messages. */
public static com.google.protobuf.Parser<DeleteJobRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteJobRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.scheduler.v1.DeleteJobRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.compose;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Objects;
import com.google.common.base.Predicate;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NoSuchElementException;
import java.util.Set;
import javax.annotation.Nullable;
/**
* An implementation of {@code RangeMap} based on a {@code TreeMap}, supporting
* all optional operations.
*
* <p>Like all {@code RangeMap} implementations, this supports neither null
* keys nor null values.
*
* @author Louis Wasserman
* @since 14.0
*/
@Beta
@GwtIncompatible("NavigableMap")
public final class TreeRangeMap<K extends Comparable, V> implements RangeMap<K, V> {

  /*
   * Invariant: stored ranges are non-empty and never overlap, so indexing
   * entries by their lower bound is sufficient to locate any entry.
   */
  private final NavigableMap<Cut<K>, RangeMapEntry<K, V>> entriesByLowerBound;

  /** Creates an empty {@code TreeRangeMap}. */
  public static <K extends Comparable, V> TreeRangeMap<K, V> create() {
    return new TreeRangeMap<K, V>();
  }

  private TreeRangeMap() {
    this.entriesByLowerBound = Maps.newTreeMap();
  }

  /** One (range, value) association, doubling as a {@code Map.Entry}. */
  private static final class RangeMapEntry<K extends Comparable, V>
      extends AbstractMapEntry<Range<K>, V> {
    private final Range<K> range;
    private final V value;

    RangeMapEntry(Cut<K> lowerBound, Cut<K> upperBound, V value) {
      this(Range.create(lowerBound, upperBound), value);
    }

    RangeMapEntry(Range<K> range, V value) {
      this.range = range;
      this.value = value;
    }

    @Override
    public Range<K> getKey() {
      return range;
    }

    @Override
    public V getValue() {
      return value;
    }

    /** Returns true if this entry's range contains {@code value}. */
    public boolean contains(K value) {
      return range.contains(value);
    }

    Cut<K> getLowerBound() {
      return range.lowerBound;
    }

    Cut<K> getUpperBound() {
      return range.upperBound;
    }
  }

  @Override
  @Nullable
  public V get(K key) {
    Entry<Range<K>, V> entry = getEntry(key);
    return (entry == null) ? null : entry.getValue();
  }

  @Override
  @Nullable
  public Entry<Range<K>, V> getEntry(K key) {
    // The only candidate is the entry with the greatest lower bound <= key;
    // since stored ranges never overlap, no other entry could contain key.
    Map.Entry<Cut<K>, RangeMapEntry<K, V>> mapEntry =
        entriesByLowerBound.floorEntry(Cut.belowValue(key));
    if (mapEntry != null && mapEntry.getValue().contains(key)) {
      return mapEntry.getValue();
    } else {
      return null;
    }
  }

  @Override
  public void put(Range<K> range, V value) {
    if (!range.isEmpty()) {
      checkNotNull(value);
      // Clear everything overlapping the new range first, preserving the
      // non-overlap invariant before inserting.
      remove(range);
      entriesByLowerBound.put(range.lowerBound, new RangeMapEntry<K, V>(range, value));
    }
  }

  @Override
  public void putAll(RangeMap<K, V> rangeMap) {
    for (Map.Entry<Range<K>, V> entry : rangeMap.asMapOfRanges().entrySet()) {
      put(entry.getKey(), entry.getValue());
    }
  }

  @Override
  public void clear() {
    entriesByLowerBound.clear();
  }

  /**
   * {@inheritDoc}
   *
   * @throws NoSuchElementException if this map is empty
   */
  @Override
  public Range<K> span() {
    Entry<Cut<K>, RangeMapEntry<K, V>> firstEntry = entriesByLowerBound.firstEntry();
    Entry<Cut<K>, RangeMapEntry<K, V>> lastEntry = entriesByLowerBound.lastEntry();
    if (firstEntry == null) {
      throw new NoSuchElementException();
    }
    return Range.create(
        firstEntry.getValue().getKey().lowerBound,
        lastEntry.getValue().getKey().upperBound);
  }

  // Inserts [lowerBound, upperBound) -> value without any overlap checking;
  // callers must already have ensured the non-overlap invariant holds.
  private void putRangeMapEntry(Cut<K> lowerBound, Cut<K> upperBound, V value) {
    entriesByLowerBound.put(lowerBound, new RangeMapEntry<K, V>(lowerBound, upperBound, value));
  }

  @Override
  public void remove(Range<K> rangeToRemove) {
    if (rangeToRemove.isEmpty()) {
      return;
    }
    /*
     * The comments for this method will use [ ] to indicate the bounds of rangeToRemove and ( ) to
     * indicate the bounds of ranges in the range map.
     */
    Map.Entry<Cut<K>, RangeMapEntry<K, V>> mapEntryBelowToTruncate =
        entriesByLowerBound.lowerEntry(rangeToRemove.lowerBound);
    if (mapEntryBelowToTruncate != null) {
      // we know ( [
      RangeMapEntry<K, V> rangeMapEntry = mapEntryBelowToTruncate.getValue();
      if (rangeMapEntry.getUpperBound().compareTo(rangeToRemove.lowerBound) > 0) {
        // we know ( [ )
        if (rangeMapEntry.getUpperBound().compareTo(rangeToRemove.upperBound) > 0) {
          // we know ( [ ] ), so insert the range ] ) back into the map --
          // it's being split apart
          putRangeMapEntry(rangeToRemove.upperBound, rangeMapEntry.getUpperBound(),
              mapEntryBelowToTruncate.getValue().getValue());
        }
        // overwrite mapEntryToTruncateBelow with a truncated range
        putRangeMapEntry(rangeMapEntry.getLowerBound(), rangeToRemove.lowerBound,
            mapEntryBelowToTruncate.getValue().getValue());
      }
    }
    Map.Entry<Cut<K>, RangeMapEntry<K, V>> mapEntryAboveToTruncate =
        entriesByLowerBound.lowerEntry(rangeToRemove.upperBound);
    if (mapEntryAboveToTruncate != null) {
      // we know ( ]
      RangeMapEntry<K, V> rangeMapEntry = mapEntryAboveToTruncate.getValue();
      if (rangeMapEntry.getUpperBound().compareTo(rangeToRemove.upperBound) > 0) {
        // we know ( ] ), and since we dealt with truncating below already,
        // we know [ ( ] )
        putRangeMapEntry(rangeToRemove.upperBound, rangeMapEntry.getUpperBound(),
            mapEntryAboveToTruncate.getValue().getValue());
        entriesByLowerBound.remove(rangeToRemove.lowerBound);
      }
    }
    // Finally, drop every remaining entry whose lower bound falls inside [ ].
    entriesByLowerBound.subMap(rangeToRemove.lowerBound, rangeToRemove.upperBound).clear();
  }

  @Override
  public Map<Range<K>, V> asMapOfRanges() {
    return new AsMapOfRanges();
  }

  /** Live view of this map as a {@code Map<Range<K>, V>}; lookups require exact range equality. */
  private final class AsMapOfRanges extends AbstractMap<Range<K>, V> {

    @Override
    public boolean containsKey(@Nullable Object key) {
      return get(key) != null;
    }

    @Override
    public V get(@Nullable Object key) {
      if (key instanceof Range) {
        Range<?> range = (Range<?>) key;
        // An exact-match lookup: the stored entry at this lower bound must
        // have precisely the queried range, not merely enclose it.
        RangeMapEntry<K, V> rangeMapEntry = entriesByLowerBound.get(range.lowerBound);
        if (rangeMapEntry != null && rangeMapEntry.getKey().equals(range)) {
          return rangeMapEntry.getValue();
        }
      }
      return null;
    }

    @Override
    public Set<Entry<Range<K>, V>> entrySet() {
      return new AbstractSet<Entry<Range<K>, V>>() {
        @SuppressWarnings("unchecked") // it's safe to upcast iterators
        @Override
        public Iterator<Entry<Range<K>, V>> iterator() {
          return (Iterator) entriesByLowerBound.values().iterator();
        }

        @Override
        public int size() {
          return entriesByLowerBound.size();
        }
      };
    }
  }

  @Override
  public RangeMap<K, V> subRangeMap(Range<K> subRange) {
    if (subRange.equals(Range.all())) {
      return this;
    } else {
      return new SubRangeMap(subRange);
    }
  }

  @SuppressWarnings("unchecked")
  private RangeMap<K, V> emptySubRangeMap() {
    return EMPTY_SUB_RANGE_MAP;
  }

  // Shared immutable "always empty" view returned for disconnected sub-ranges.
  private static final RangeMap EMPTY_SUB_RANGE_MAP =
      new RangeMap() {
        @Override
        @Nullable
        public Object get(Comparable key) {
          return null;
        }

        @Override
        @Nullable
        public Entry<Range, Object> getEntry(Comparable key) {
          return null;
        }

        @Override
        public Range span() {
          throw new NoSuchElementException();
        }

        @Override
        public void put(Range range, Object value) {
          checkNotNull(range);
          throw new IllegalArgumentException(
              "Cannot insert range " + range + " into an empty subRangeMap");
        }

        @Override
        public void putAll(RangeMap rangeMap) {
          if (!rangeMap.asMapOfRanges().isEmpty()) {
            throw new IllegalArgumentException(
                "Cannot putAll(nonEmptyRangeMap) into an empty " + "subRangeMap");
          }
        }

        @Override
        public void clear() {}

        @Override
        public void remove(Range range) {
          checkNotNull(range);
        }

        @Override
        public Map<Range, Object> asMapOfRanges() {
          return Collections.emptyMap();
        }

        @Override
        public RangeMap subRangeMap(Range range) {
          checkNotNull(range);
          return this;
        }
      };

  /** Live view of the portion of the enclosing map whose keys fall in {@code subRange}. */
  private class SubRangeMap implements RangeMap<K, V> {

    private final Range<K> subRange;

    SubRangeMap(Range<K> subRange) {
      this.subRange = subRange;
    }

    @Override
    @Nullable
    public V get(K key) {
      return subRange.contains(key)
          ? TreeRangeMap.this.get(key)
          : null;
    }

    @Override
    @Nullable
    public Entry<Range<K>, V> getEntry(K key) {
      if (subRange.contains(key)) {
        Entry<Range<K>, V> entry = TreeRangeMap.this.getEntry(key);
        if (entry != null) {
          // Clip the returned range to this view.
          return Maps.immutableEntry(entry.getKey().intersection(subRange), entry.getValue());
        }
      }
      return null;
    }

    /**
     * {@inheritDoc}
     *
     * @throws NoSuchElementException if no entries intersect {@code subRange}
     */
    @Override
    public Range<K> span() {
      Cut<K> lowerBound;
      // An entry starting at or before subRange may still reach into it.
      Entry<Cut<K>, RangeMapEntry<K, V>> lowerEntry =
          entriesByLowerBound.floorEntry(subRange.lowerBound);
      if (lowerEntry != null &&
          lowerEntry.getValue().getUpperBound().compareTo(subRange.lowerBound) > 0) {
        lowerBound = subRange.lowerBound;
      } else {
        lowerBound = entriesByLowerBound.ceilingKey(subRange.lowerBound);
        if (lowerBound == null || lowerBound.compareTo(subRange.upperBound) >= 0) {
          throw new NoSuchElementException();
        }
      }
      Cut<K> upperBound;
      Entry<Cut<K>, RangeMapEntry<K, V>> upperEntry =
          entriesByLowerBound.lowerEntry(subRange.upperBound);
      if (upperEntry == null) {
        throw new NoSuchElementException();
      } else if (upperEntry.getValue().getUpperBound().compareTo(subRange.upperBound) >= 0) {
        upperBound = subRange.upperBound;
      } else {
        upperBound = upperEntry.getValue().getUpperBound();
      }
      return Range.create(lowerBound, upperBound);
    }

    @Override
    public void put(Range<K> range, V value) {
      checkArgument(subRange.encloses(range),
          "Cannot put range %s into a subRangeMap(%s)", range, subRange);
      TreeRangeMap.this.put(range, value);
    }

    @Override
    public void putAll(RangeMap<K, V> rangeMap) {
      if (rangeMap.asMapOfRanges().isEmpty()) {
        return;
      }
      Range<K> span = rangeMap.span();
      checkArgument(subRange.encloses(span),
          "Cannot putAll rangeMap with span %s into a subRangeMap(%s)", span, subRange);
      TreeRangeMap.this.putAll(rangeMap);
    }

    @Override
    public void clear() {
      // Clearing the view removes only the portion inside subRange.
      TreeRangeMap.this.remove(subRange);
    }

    @Override
    public void remove(Range<K> range) {
      if (range.isConnected(subRange)) {
        TreeRangeMap.this.remove(range.intersection(subRange));
      }
    }

    @Override
    public RangeMap<K, V> subRangeMap(Range<K> range) {
      if (!range.isConnected(subRange)) {
        return emptySubRangeMap();
      } else {
        return TreeRangeMap.this.subRangeMap(range.intersection(subRange));
      }
    }

    @Override
    public Map<Range<K>, V> asMapOfRanges() {
      return new SubRangeMapAsMap();
    }

    @Override
    public boolean equals(@Nullable Object o) {
      if (o instanceof RangeMap) {
        RangeMap<?, ?> rangeMap = (RangeMap<?, ?>) o;
        return asMapOfRanges().equals(rangeMap.asMapOfRanges());
      }
      return false;
    }

    @Override
    public int hashCode() {
      return asMapOfRanges().hashCode();
    }

    @Override
    public String toString() {
      return asMapOfRanges().toString();
    }

    /** Map view of this sub-range map; ranges are clipped to {@code subRange}. */
    class SubRangeMapAsMap extends AbstractMap<Range<K>, V> {

      @Override
      public boolean containsKey(Object key) {
        return get(key) != null;
      }

      @Override
      public V get(Object key) {
        try {
          if (key instanceof Range) {
            @SuppressWarnings("unchecked") // we catch ClassCastExceptions
            Range<K> r = (Range<K>) key;
            if (!subRange.encloses(r) || r.isEmpty()) {
              return null;
            }
            RangeMapEntry<K, V> candidate = null;
            if (r.lowerBound.compareTo(subRange.lowerBound) == 0) {
              // r could be truncated on the left
              Entry<Cut<K>, RangeMapEntry<K, V>> entry =
                  entriesByLowerBound.floorEntry(r.lowerBound);
              if (entry != null) {
                candidate = entry.getValue();
              }
            } else {
              candidate = entriesByLowerBound.get(r.lowerBound);
            }
            // Only a match if the clipped backing range is exactly r.
            if (candidate != null && candidate.getKey().isConnected(subRange)
                && candidate.getKey().intersection(subRange).equals(r)) {
              return candidate.getValue();
            }
          }
        } catch (ClassCastException e) {
          return null;
        }
        return null;
      }

      @Override
      public V remove(Object key) {
        V value = get(key);
        if (value != null) {
          @SuppressWarnings("unchecked") // it's definitely in the map, so safe
          Range<K> range = (Range<K>) key;
          TreeRangeMap.this.remove(range);
          return value;
        }
        return null;
      }

      @Override
      public void clear() {
        SubRangeMap.this.clear();
      }

      // Removes every entry matching the predicate; returns true if any were removed.
      // Collects first, then removes, to avoid mutating during iteration.
      private boolean removeIf(Predicate<? super Entry<Range<K>, V>> predicate) {
        List<Range<K>> toRemove = Lists.newArrayList();
        for (Entry<Range<K>, V> entry : entrySet()) {
          if (predicate.apply(entry)) {
            toRemove.add(entry.getKey());
          }
        }
        for (Range<K> range : toRemove) {
          TreeRangeMap.this.remove(range);
        }
        return !toRemove.isEmpty();
      }

      @Override
      public Set<Range<K>> keySet() {
        return new Maps.KeySet<Range<K>, V>(SubRangeMapAsMap.this) {
          @Override
          public boolean remove(@Nullable Object o) {
            return SubRangeMapAsMap.this.remove(o) != null;
          }

          @Override
          public boolean retainAll(Collection<?> c) {
            return removeIf(compose(not(in(c)), Maps.<Range<K>>keyFunction()));
          }
        };
      }

      @Override
      public Set<Entry<Range<K>, V>> entrySet() {
        return new Maps.EntrySet<Range<K>, V>() {
          @Override
          Map<Range<K>, V> map() {
            return SubRangeMapAsMap.this;
          }

          @Override
          public Iterator<Entry<Range<K>, V>> iterator() {
            if (subRange.isEmpty()) {
              return Iterators.emptyIterator();
            }
            // Start from the last entry at or before subRange's lower bound;
            // it may straddle the boundary and still intersect the view.
            Cut<K> cutToStart = Objects.firstNonNull(
                entriesByLowerBound.floorKey(subRange.lowerBound),
                subRange.lowerBound);
            final Iterator<RangeMapEntry<K, V>> backingItr =
                entriesByLowerBound.tailMap(cutToStart, true).values().iterator();
            return new AbstractIterator<Entry<Range<K>, V>>() {
              @Override
              protected Entry<Range<K>, V> computeNext() {
                while (backingItr.hasNext()) {
                  RangeMapEntry<K, V> entry = backingItr.next();
                  if (entry.getLowerBound().compareTo(subRange.upperBound) >= 0) {
                    break;
                  } else if (entry.getUpperBound().compareTo(subRange.lowerBound) > 0) {
                    // this might not be true e.g. at the start of the iteration
                    return Maps.immutableEntry(
                        entry.getKey().intersection(subRange), entry.getValue());
                  }
                }
                return endOfData();
              }
            };
          }

          @Override
          public boolean retainAll(Collection<?> c) {
            return removeIf(not(in(c)));
          }

          @Override
          public int size() {
            return Iterators.size(iterator());
          }

          @Override
          public boolean isEmpty() {
            return !iterator().hasNext();
          }
        };
      }

      @Override
      public Collection<V> values() {
        return new Maps.Values<Range<K>, V>(this) {
          @Override
          public boolean removeAll(Collection<?> c) {
            return removeIf(compose(in(c), Maps.<V>valueFunction()));
          }

          @Override
          public boolean retainAll(Collection<?> c) {
            return removeIf(compose(not(in(c)), Maps.<V>valueFunction()));
          }
        };
      }
    }
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (o instanceof RangeMap) {
      RangeMap<?, ?> rangeMap = (RangeMap<?, ?>) o;
      return asMapOfRanges().equals(rangeMap.asMapOfRanges());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return asMapOfRanges().hashCode();
  }

  @Override
  public String toString() {
    return entriesByLowerBound.values().toString();
  }
}
| |
/*
* ja, a Java-bytecode translator toolkit.
* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. Alternatively, the contents of this file may be used under
* the terms of the GNU Lesser General Public License Version 2.1 or later,
* or the Apache License Version 2.0.
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*/
package ja.util.proxy;
import java.lang.reflect.Method;
import java.io.Serializable;
/**
* Runtime support routines that the classes generated by ProxyFactory use.
*
* @see ProxyFactory
*/
public class RuntimeSupport {
    /**
     * A method handler that only executes the original (overridden) method,
     * ignoring any interception logic.
     */
    public static MethodHandler default_interceptor = new DefaultMethodHandler();

    /** Serializable so proxies using the default handler can be serialized. */
    static class DefaultMethodHandler implements MethodHandler, Serializable {
        public Object invoke(Object self, Method m, Method proceed,
                Object[] args) throws Exception {
            return proceed.invoke(self, args);
        }
    };

    /**
     * Finds two methods specified by the parameters and stores them into the
     * given array: the super-class method at {@code index}, the method declared
     * in {@code clazz} itself (or null) at {@code index + 1}.
     *
     * @throws RuntimeException
     *              if the methods are not found.
     * @see ja.util.proxy.ProxyFactory
     */
    public static void find2Methods(Class clazz, String superMethod,
                                    String thisMethod, int index, String desc,
                                    java.lang.reflect.Method[] methods) {
        methods[index + 1] = thisMethod == null ? null : findMethod(clazz,
                thisMethod, desc);
        methods[index] = findSuperClassMethod(clazz, superMethod, desc);
    }

    /**
     * Finds two methods specified by the parameters and stores them into the
     * given array.
     *
     * <p>
     * Added back for JBoss Seam. See JASSIST-206.
     * </p>
     *
     * @throws RuntimeException
     *              if the methods are not found.
     * @see ja.util.proxy.ProxyFactory
     * @deprecated replaced by
     *             {@link #find2Methods(Class, String, String, int, String, Method[])}
     */
    public static void find2Methods(Object self, String superMethod,
                                    String thisMethod, int index, String desc,
                                    java.lang.reflect.Method[] methods) {
        methods[index + 1] = thisMethod == null ? null : findMethod(self,
                thisMethod, desc);
        methods[index] = findSuperMethod(self, superMethod, desc);
    }

    /**
     * Finds a method with the given name and descriptor. It searches only the
     * class of self.
     *
     * <p>
     * Added back for JBoss Seam. See JASSIST-206.
     * </p>
     *
     * @throws RuntimeException
     *              if the method is not found.
     * @deprecated replaced by {@link #findMethod(Class, String, String)}
     */
    public static Method findMethod(Object self, String name, String desc) {
        Method m = findMethod2(self.getClass(), name, desc);
        if (m == null)
            error(self.getClass(), name, desc);
        return m;
    }

    /**
     * Finds a method with the given name and descriptor. It searches only the
     * declared methods of {@code clazz} (no superclasses).
     *
     * @throws RuntimeException
     *              if the method is not found.
     */
    public static Method findMethod(Class clazz, String name, String desc) {
        Method m = findMethod2(clazz, name, desc);
        if (m == null)
            error(clazz, name, desc);
        return m;
    }

    /**
     * Finds a method that has the given name and descriptor and is declared in
     * the super class.
     *
     * @throws RuntimeException
     *              if the method is not found.
     */
    public static Method findSuperMethod(Object self, String name, String desc) {
        // for JBoss Seam. See JASSIST-183.
        Class clazz = self.getClass();
        return findSuperClassMethod(clazz, name, desc);
    }

    /**
     * Finds a method that has the given name and descriptor and is declared in
     * the super class (searched transitively) or in an implemented interface.
     *
     * <p>NOTE(review): assumes {@code clazz} has a superclass; passing an
     * interface or {@code Object} itself would hand {@code null} to
     * {@code findSuperMethod2} — callers generated by ProxyFactory always pass
     * a proxy class, so this does not occur in practice.</p>
     *
     * @throws RuntimeException
     *              if the method is not found.
     */
    public static Method findSuperClassMethod(Class clazz, String name,
                                              String desc) {
        Method m = findSuperMethod2(clazz.getSuperclass(), name, desc);
        if (m == null)
            m = searchInterfaces(clazz, name, desc);
        if (m == null)
            error(clazz, name, desc);
        return m;
    }

    // Uniform failure path: report the class, name, and descriptor searched for.
    private static void error(Class clazz, String name, String desc) {
        throw new RuntimeException("not found " + name + ":" + desc + " in "
                + clazz.getName());
    }

    // Depth-first search: this class, then superclasses, then interfaces.
    private static Method findSuperMethod2(Class clazz, String name, String desc) {
        Method m = findMethod2(clazz, name, desc);
        if (m != null)
            return m;
        Class superClass = clazz.getSuperclass();
        if (superClass != null) {
            m = findSuperMethod2(superClass, name, desc);
            if (m != null)
                return m;
        }
        return searchInterfaces(clazz, name, desc);
    }

    // Searches each directly implemented interface (and, recursively, its
    // super-interfaces) for a matching method; returns null if none match.
    private static Method searchInterfaces(Class clazz, String name, String desc) {
        Method m = null;
        Class[] interfaces = clazz.getInterfaces();
        for (int i = 0; i < interfaces.length; i++) {
            m = findSuperMethod2(interfaces[i], name, desc);
            if (m != null)
                return m;
        }
        return m;
    }

    // Linear scan over the declared methods of exactly this class; matches on
    // both the simple name and the JVM descriptor (so overloads are resolved).
    private static Method findMethod2(Class clazz, String name, String desc) {
        Method[] methods = SecurityActions.getDeclaredMethods(clazz);
        int n = methods.length;
        for (int i = 0; i < n; i++)
            if (methods[i].getName().equals(name)
                    && makeDescriptor(methods[i]).equals(desc))
                return methods[i];
        return null;
    }

    /**
     * Makes a JVM method descriptor (e.g. {@code (ILjava/lang/String;)V})
     * for a given method.
     */
    public static String makeDescriptor(Method m) {
        Class[] params = m.getParameterTypes();
        return makeDescriptor(params, m.getReturnType());
    }

    /**
     * Makes a JVM method descriptor.
     *
     * @param params
     *              parameter types.
     * @param retType
     *              return type.
     */
    public static String makeDescriptor(Class[] params, Class retType) {
        // StringBuilder: purely local, single-threaded use — no need for
        // StringBuffer's synchronization.
        StringBuilder sbuf = new StringBuilder();
        sbuf.append('(');
        for (int i = 0; i < params.length; i++)
            makeDesc(sbuf, params[i]);
        sbuf.append(')');
        if (retType != null)
            makeDesc(sbuf, retType);
        return sbuf.toString();
    }

    /**
     * Makes a JVM method descriptor.
     *
     * @param params
     *              the descriptor of parameter types, including the parentheses
     *              (e.g. {@code "(I)"}).
     * @param retType
     *              return type.
     */
    public static String makeDescriptor(String params, Class retType) {
        StringBuilder sbuf = new StringBuilder(params);
        makeDesc(sbuf, retType);
        return sbuf.toString();
    }

    // Appends the JVM descriptor of a single type: '[' per array dimension,
    // one-letter codes for primitives, and L<internal-name>; for references.
    private static void makeDesc(StringBuilder sbuf, Class type) {
        if (type.isArray()) {
            sbuf.append('[');
            makeDesc(sbuf, type.getComponentType());
        } else if (type.isPrimitive()) {
            if (type == Void.TYPE)
                sbuf.append('V');
            else if (type == Integer.TYPE)
                sbuf.append('I');
            else if (type == Byte.TYPE)
                sbuf.append('B');
            else if (type == Long.TYPE)
                sbuf.append('J');
            else if (type == Double.TYPE)
                sbuf.append('D');
            else if (type == Float.TYPE)
                sbuf.append('F');
            else if (type == Character.TYPE)
                sbuf.append('C');
            else if (type == Short.TYPE)
                sbuf.append('S');
            else if (type == Boolean.TYPE)
                sbuf.append('Z');
            else
                throw new RuntimeException("bad type: " + type.getName());
        } else
            sbuf.append('L').append(type.getName().replace('.', '/'))
                    .append(';');
    }

    /**
     * Converts a proxy object to an object that is writable to an object
     * stream. This method is called by <code>writeReplace()</code> in a proxy
     * class.
     *
     * @since 3.4
     */
    public static SerializedProxy makeSerializedProxy(Object proxy)
            throws java.io.InvalidClassException {
        Class clazz = proxy.getClass();
        MethodHandler methodHandler = null;
        if (proxy instanceof ProxyObject)
            methodHandler = ((ProxyObject) proxy).getHandler();
        else if (proxy instanceof Proxy)
            methodHandler = ProxyFactory.getHandler((Proxy) proxy);
        return new SerializedProxy(clazz,
                ProxyFactory.getFilterSignature(clazz), methodHandler);
    }
}
| |
package amazon;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Scanner;
public class Solution
{
    /** A movie node in an undirected similarity graph. */
    public static class Movie
    {
        // Sentinel returned by topRating when it is handed a null list.
        private static final Movie DUMMY_MOVIE = new Movie(-0, -0.0F);
        private final int movieId;
        private final float rating;
        // Similarity is bidirectional: addSimilarMovie links both endpoints.
        private List<Movie> similarMovies;

        public Movie(int movieId, float rating)
        {
            this.movieId = movieId;
            this.rating = rating;
            similarMovies = new ArrayList<Movie>();
        }

        public int getId()
        {
            return movieId;
        }

        public float getRating()
        {
            return rating;
        }

        /** Links this movie and {@code movie} as similar (both directions). */
        public void addSimilarMovie(Movie movie)
        {
            similarMovies.add(movie);
            movie.similarMovies.add(this);
        }

        public List<Movie> getSimilarMovies()
        {
            return similarMovies;
        }

        @Override
        public String toString()
        {
            return String.valueOf(getId());
        }
    }

    /*
     * Sample Input
     movie 1 1.0
     movie 2 2.3
     movie 3 3.5
     movie 4 4.5
     movie 5 5.5
     similar 1 2
     similar 1 3
     similar 1 4
     similar 2 5
     similar 3 2
     params 1 1
     e
     *
     * Expected: "result 5" (movie 5 has the top rating among movies
     * transitively similar to movie 1).
     */

    /**
     * Selects the {@code n} highest-rated movies by repeatedly extracting the
     * maximum (selection-style, O(n^2)). Retained for reference; the O(n log n)
     * {@link #findTops2(List, int)} is what the traversal actually uses.
     * Note: mutates {@code rmvr} by removing the selected movies.
     */
    private static List<Movie> findTops(List<Movie> rmvr, int n)
    {
        if (rmvr == null)
            return Collections.emptyList();
        if (n == 0)
            return rmvr;
        if (rmvr.size() < n)
            return rmvr;
        return findTops(rmvr, new ArrayList<Movie>(), n, 0);
    }

    // Recursive step: move the current maximum from rmvr into tmvr, n times.
    private static List<Movie> findTops(List<Movie> rmvr, List<Movie> tmvr, int n, int i)
    {
        if (n == i)
            return tmvr;
        Movie tmv = topRating(rmvr);
        rmvr.remove(tmv);
        tmvr.add(tmv);
        return findTops(rmvr, tmvr, n, i + 1);
    }

    // Linear scan for the highest-rated movie; DUMMY_MOVIE if the list is null.
    private static Movie topRating(List<Movie> rmvr)
    {
        if (rmvr == null || rmvr.isEmpty())
            return Solution.Movie.DUMMY_MOVIE;
        Movie maxm = rmvr.get(0);
        final Iterator<Movie> iter = rmvr.iterator();
        while (iter.hasNext())
        {
            Movie nextMv = iter.next();
            if (nextMv.getRating() > maxm.getRating())
                maxm = nextMv;
        }
        return maxm;
    }

    /**
     * Returns the {@code n} highest-rated movies from {@code rmvr}.
     *
     * <p>Time: O(n log n) (sort) + O(n) (copy). Space: O(n).
     * Note: sorts {@code rmvr} in place.
     */
    private static List<Movie> findTops2(List<Movie> rmvr, int n)
    {
        if (rmvr == null)
            return Collections.emptyList();
        if (n == 0)
            return rmvr;
        if (rmvr.size() < n)
            return rmvr;
        Collections.sort(rmvr, new Comparator<Movie>()
        {
            @Override
            public int compare(Movie m1, Movie m2)
            {
                // Descending by rating. Float.compare returns 0 on ties; the
                // previous subtraction-based comparator never returned 0 and
                // so violated the Comparator contract (TimSort can throw
                // "Comparison method violates its general contract!").
                return Float.compare(m2.getRating(), m1.getRating());
            }
        });
        List<Movie> tmvs = new ArrayList<>(n);
        for (int i = 0; i < n; i++)
        {
            tmvs.add(rmvr.get(i));
        }
        return tmvs;
    }

    /**
     * Recommends the {@code topN} highest-rated movies that are transitively
     * similar to {@code movie} (the movie itself is excluded).
     *
     * @param movie the movie to base recommendations on; may be null.
     * @param topN  maximum number of recommendations to return.
     * @return the top-rated similar movies, sorted by descending rating;
     *         empty if {@code movie} is null or {@code topN} is 0.
     */
    static List<Movie> getMovieRecommendations(Movie movie, int topN)
    {
        if (movie == null)
            return Collections.emptyList();
        if (topN == 0)
            return Collections.emptyList();
        // Work on copies so the traversal never mutates the similarity lists
        // of the movie graph itself.
        List<Movie> frontier = new ArrayList<Movie>(movie.getSimilarMovies());
        List<Movie> pending = new ArrayList<Movie>(movie.getSimilarMovies());
        return getMovieRecommendations(movie, frontier, topN,
                pending, new ArrayList<Movie>(), new ArrayList<Movie>());
    }

    /**
     * Recursive breadth-style traversal of the similarity graph.
     *
     * <p>Time: O(V * E) worst case; space: O(V + E) across the working lists.
     * Recursion depth grows with the number of visited edges, so extremely
     * large graphs could overflow the stack.
     *
     * @param mv              movie currently being expanded.
     * @param mvs             unprocessed direct similars of {@code mv}.
     * @param topN            maximum number of recommendations.
     * @param fdss            frontier of movies still to expand.
     * @param traveresedSoFar movies already expanded (excluded from revisits).
     * @param output          all movies reached so far (may contain duplicates).
     */
    private static List<Movie> getMovieRecommendations(Movie mv, List<Movie> mvs, int topN, List<Movie> fdss, List<Movie> traveresedSoFar,
            List<Movie> output)
    {
        if (fdss == null || fdss.isEmpty()) /** Base case: frontier exhausted. */
            // HashSet dedups by identity (Movie doesn't override equals),
            // which is correct here: duplicates are the same instances.
            return findTops2(new ArrayList<Movie>(new HashSet<Movie>(output)), topN);
        if (mvs == null || mvs.isEmpty())
        {
            Movie cmv = fdss.remove(0);
            // Copy before filtering: the original code called removeAll()
            // directly on cmv.getSimilarMovies(), destructively deleting
            // edges from the shared graph.
            List<Movie> cmvs = new ArrayList<Movie>(cmv.getSimilarMovies());
            cmvs.removeAll(traveresedSoFar);
            fdss.addAll(cmvs);
            return getMovieRecommendations(cmv, cmvs, topN, fdss, traveresedSoFar, output);
        }
        output.add(mvs.remove(0));
        traveresedSoFar.add(mv);
        return getMovieRecommendations(mv, mvs, topN, fdss, traveresedSoFar, output);
    }

    /**
     * Reads the movie graph and query from stdin (see the sample input above)
     * and prints the recommendation ids, sorted ascending, as
     * {@code "result id1 id2 ..."}.
     */
    public static void main(String[] args) throws IOException
    {
        final Map<Integer, Movie> movieMap = new HashMap<Integer, Movie>();
        Movie rootMovie = null;
        int numTopRatedSimilarMovies = 0;
        final Scanner in = new Scanner(System.in);
        in.useLocale(new Locale("en", "US"));
        while (in.hasNextLine())
        {
            final String type = in.next();
            if (type.equals("movie"))
            {
                final int id = in.nextInt();
                final float rating = in.nextFloat();
                movieMap.put(id, new Movie(id, rating));
            }
            else if (type.equals("similar"))
            {
                final Movie movie1 = movieMap.get(in.nextInt());
                final Movie movie2 = movieMap.get(in.nextInt());
                movie1.addSimilarMovie(movie2);
            }
            else if (type.equals("params"))
            {
                rootMovie = movieMap.get(in.nextInt());
                numTopRatedSimilarMovies = in.nextInt();
            }
            else if (type.equals("e"))
            {
                break;
            }
        }
        final List<Movie> result = getMovieRecommendations(rootMovie, numTopRatedSimilarMovies);
        // StringBuilder instead of repeated String concatenation in the loop.
        final StringBuilder output = new StringBuilder("result");
        if (result == null)
        {
            output.append(" <null>");
        }
        else
        {
            // Typed comparator (the original used a raw Comparator with casts).
            Collections.sort(result, new Comparator<Movie>()
            {
                @Override
                public int compare(Movie m1, Movie m2)
                {
                    return Integer.compare(m1.getId(), m2.getId());
                }
            });
            for (Movie m : result)
            {
                output.append(' ').append(m.getId());
            }
        }
        System.out.println(output.toString());
    }
}
/*private static Movie topRating2(List<Movie> rmvr)
{
if (rmvr == null)
return Solution.Movie.DUMMY_MOVIE;
Collections.sort(rmvr, new Comparator<Movie>()
{
@Override
public int compare(Movie m1, Movie m2)
{
*//**!!! Warning buggy - when case of 10.21 and 10.11 */
/*
return (int) (m1.getRating() - m2.getRating());
}
});
return rmvr.get(rmvr.size() - 1);
}
*/
| |
/*******************************************************************************
* Copyright (c) 2013, SAP AG
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* - Neither the name of the SAP AG nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v3.0-03/04/2009 09:20 AM(valikov)-fcs
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2010.11.03 at 09:58:45 AM CET
//
package eu.primelife.ppl.policy.xacml.impl;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
import eu.primelife.ppl.policy.credential.impl.CredentialAttributeDesignatorType;
import eu.primelife.ppl.policy.credential.impl.PrimelifeApplyType;
import eu.primelife.ppl.policy.credential.impl.UndisclosedExpressionType;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.jvnet.hyperjaxb3.item.ItemUtils;
import org.jvnet.jaxb2_commons.lang.Equals;
import org.jvnet.jaxb2_commons.lang.HashCode;
import org.jvnet.jaxb2_commons.lang.ToString;
import org.jvnet.jaxb2_commons.lang.builder.JAXBEqualsBuilder;
import org.jvnet.jaxb2_commons.lang.builder.JAXBHashCodeBuilder;
import org.jvnet.jaxb2_commons.lang.builder.JAXBToStringBuilder;
/**
* <p>Java class for ApplyType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ApplyType">
* <complexContent>
* <extension base="{urn:oasis:names:tc:xacml:2.0:policy:schema:os}ExpressionType">
* <sequence>
* <element ref="{urn:oasis:names:tc:xacml:2.0:policy:schema:os}Expression" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute name="FunctionId" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ApplyType", propOrder = {
    "expression"
})
@XmlSeeAlso({
    PrimelifeApplyType.class
})
@Entity(name = "eu.primelife.ppl.policy.xacml.impl.ApplyType")
@Table(name = "APPLYTYPE")
public class ApplyType
    extends ExpressionType
    implements Serializable, Equals, HashCode, ToString
{

    // JAXB view of the nested XACML <Expression> elements. Not mapped directly by JPA;
    // persistence goes through the expressionItems wrapper list (hyperjaxb pattern).
    @XmlElementRef(name = "Expression", namespace = "urn:oasis:names:tc:xacml:2.0:policy:schema:os", type = JAXBElement.class)
    protected List<JAXBElement<?>> expression;
    // Required XACML FunctionId attribute (xs:anyURI) naming the function to apply.
    @XmlAttribute(name = "FunctionId", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String functionId;
    // JPA-persisted item wrappers mirroring 'expression'; transient for JAXB,
    // kept in sync lazily by getExpressionItems()/setExpressionItems().
    protected transient List<ApplyTypeExpressionItem> expressionItems;

    /**
     * Gets the value of the expression property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the expression property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getExpression().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link JAXBElement }{@code <}{@link SubjectAttributeDesignatorType }{@code >}
     * {@link JAXBElement }{@code <}{@link AttributeSelectorType }{@code >}
     * {@link JAXBElement }{@code <}{@link ResourceAttributeDesignatorType }{@code >}
     * {@link JAXBElement }{@code <}{@link ExpressionType }{@code >}
     * {@link JAXBElement }{@code <}{@link FunctionType }{@code >}
     * {@link JAXBElement }{@code <}{@link ApplyType }{@code >}
     * {@link JAXBElement }{@code <}{@link PrimelifeApplyType }{@code >}
     * {@link JAXBElement }{@code <}{@link AttributeValueType }{@code >}
     * {@link JAXBElement }{@code <}{@link CredentialAttributeDesignatorType }{@code >}
     * {@link JAXBElement }{@code <}{@link ActionAttributeDesignatorType }{@code >}
     * {@link JAXBElement }{@code <}{@link VariableReferenceType }{@code >}
     * {@link JAXBElement }{@code <}{@link UndisclosedExpressionType }{@code >}
     * {@link JAXBElement }{@code <}{@link EnvironmentAttributeDesignatorType }{@code >}
     *
     * @return the live (lazily created, never {@code null}) list of nested expressions
     */
    @Transient
    public List<JAXBElement<?>> getExpression() {
        if (expression == null) {
            expression = new ArrayList<JAXBElement<?>>();
        }
        return this.expression;
    }

    /**
     * Replaces the whole expression list. Note: does NOT resynchronize
     * {@code expressionItems}; that happens lazily in {@link #getExpressionItems()}.
     *
     * @param expression the new list (may be {@code null})
     */
    public void setExpression(List<JAXBElement<?>> expression) {
        this.expression = expression;
    }

    /**
     * Gets the value of the functionId property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    @Basic
    @Column(name = "FUNCTIONID")
    public String getFunctionId() {
        return functionId;
    }

    /**
     * Sets the value of the functionId property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setFunctionId(String value) {
        this.functionId = value;
    }

    /**
     * JPA accessor for the persisted item wrappers. Lazily creates the wrapper list and,
     * when needed, wraps the JAXB {@code expression} list so both views share storage
     * (hyperjaxb {@code ItemUtils} bridging).
     *
     * @return the (never {@code null}) wrapper list backing persistence
     */
    @OneToMany(targetEntity = ApplyTypeExpressionItem.class, cascade = {
        CascadeType.ALL
    })
    @JoinColumn(name = "EXPRESSIONITEMS_APPLYTYPE_HJ_0")
    public List<ApplyTypeExpressionItem> getExpressionItems() {
        if (this.expressionItems == null) {
            this.expressionItems = new ArrayList<ApplyTypeExpressionItem>();
        }
        if (ItemUtils.shouldBeWrapped(this.expression)) {
            this.expression = ItemUtils.wrap(this.expression, this.expressionItems, ApplyTypeExpressionItem.class);
        }
        return this.expressionItems;
    }

    /**
     * JPA mutator for the persisted item wrappers. Clears the JAXB view first, then
     * re-wraps it around the new wrapper list so both views stay consistent.
     * The statement order here is significant - do not reorder.
     *
     * @param value the new wrapper list (a {@code null} is replaced by an empty list)
     */
    public void setExpressionItems(List<ApplyTypeExpressionItem> value) {
        this.expression = null;
        this.expressionItems = null;
        this.expressionItems = value;
        if (this.expressionItems == null) {
            this.expressionItems = new ArrayList<ApplyTypeExpressionItem>();
        }
        if (ItemUtils.shouldBeWrapped(this.expression)) {
            this.expression = ItemUtils.wrap(this.expression, this.expressionItems, ApplyTypeExpressionItem.class);
        }
    }

    /**
     * Appends this object's state to the given builder (generated hyperjaxb equals
     * strategy). On identity equality it returns early, leaving the builder's
     * default (equal) result intact.
     */
    public void equals(Object object, EqualsBuilder equalsBuilder) {
        if (!(object instanceof ApplyType)) {
            equalsBuilder.appendSuper(false);
            return ;
        }
        if (this == object) {
            return ;
        }
        super.equals(object, equalsBuilder);
        final ApplyType that = ((ApplyType) object);
        equalsBuilder.append(this.getExpression(), that.getExpression());
        equalsBuilder.append(this.getFunctionId(), that.getFunctionId());
    }

    /** Value equality over superclass state plus expression and functionId. */
    public boolean equals(Object object) {
        if (!(object instanceof ApplyType)) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final EqualsBuilder equalsBuilder = new JAXBEqualsBuilder();
        equals(object, equalsBuilder);
        return equalsBuilder.isEquals();
    }

    /** Appends the same fields used by equals to the hash builder (contract kept). */
    public void hashCode(HashCodeBuilder hashCodeBuilder) {
        super.hashCode(hashCodeBuilder);
        hashCodeBuilder.append(this.getExpression());
        hashCodeBuilder.append(this.getFunctionId());
    }

    public int hashCode() {
        final HashCodeBuilder hashCodeBuilder = new JAXBHashCodeBuilder();
        hashCode(hashCodeBuilder);
        return hashCodeBuilder.toHashCode();
    }

    /** Appends expression and functionId to the builder for diagnostics. */
    public void toString(ToStringBuilder toStringBuilder) {
        super.toString(toStringBuilder);
        {
            List<JAXBElement<?>> theExpression;
            theExpression = this.getExpression();
            toStringBuilder.append("expression", theExpression);
        }
        {
            String theFunctionId;
            theFunctionId = this.getFunctionId();
            toStringBuilder.append("functionId", theFunctionId);
        }
    }

    public String toString() {
        final ToStringBuilder toStringBuilder = new JAXBToStringBuilder(this);
        toString(toStringBuilder);
        return toStringBuilder.toString();
    }

}
| |
/*
* Copyright 2016 Naver Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.jdk7.activemq.client;
import com.navercorp.pinpoint.bootstrap.plugin.test.ExpectedTrace;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifierHolder;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.ActiveMQClientITHelper;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.AssertTextMessageListener;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.MessageConsumerBuilder;
import com.navercorp.pinpoint.plugin.jdk7.activemq.client.util.MessageProducerBuilder;
import org.apache.activemq.ActiveMQMessageConsumer;
import org.apache.activemq.ActiveMQSession;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.command.MessageDispatch;
import org.junit.Assert;
import org.junit.Test;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.TextMessage;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.Collection;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.annotation;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.event;
import static com.navercorp.pinpoint.bootstrap.plugin.test.Expectations.root;
/**
 * Base integration test for the ActiveMQ client plugin. Concrete subclasses supply the
 * broker names and connection URLs for the producer and consumer sides so the same
 * pull/push scenarios can run against different broker set-ups.
 *
 * @author HyunGil Jeong
 */
public abstract class ActiveMQClientITBase {

    public static final String ACTIVEMQ_CLIENT = "ACTIVEMQ_CLIENT";
    public static final String ACTIVEMQ_CLIENT_INTERNAL = "ACTIVEMQ_CLIENT_INTERNAL";

    protected abstract String getProducerBrokerName();

    protected abstract String getProducerBrokerUrl();

    protected abstract String getConsumerBrokerName();

    protected abstract String getConsumerBrokerUrl();

    /**
     * Sends a text message to a queue and synchronously receives it, then verifies the
     * producer send trace (1) and the consumer pull traces (4).
     */
    @Test
    public void testQueuePull() throws Exception {
        // Given
        final String testQueueName = "TestPullQueue";
        final ActiveMQQueue testQueue = new ActiveMQQueue(testQueueName);
        final String testMessage = "Hello World for Queue!";
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = producerSession.createProducer(testQueue);
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // When
        ActiveMQSession consumerSession = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer = consumerSession.createConsumer(testQueue);
        // Then
        producer.send(expectedTextMessage);
        Message message = consumer.receive(1000L);
        Assert.assertEquals(testMessage, ((TextMessage) message).getText());
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(5, 5000L);
        verifyProducerSendEvent(testQueue, producerSession); // trace count : 1
        verifyConsumerPullEvent(testQueue, consumerSession, consumer, expectedTextMessage); // trace count : 4
    }

    /**
     * Sends a text message to a topic consumed by two synchronous consumers, then verifies
     * the producer send trace (1) and each consumer's pull traces (4 each).
     */
    @Test
    public void testTopicPull() throws Exception {
        // Given
        final String testTopicName = "TestPullTopic";
        final ActiveMQTopic testTopic = new ActiveMQTopic(testTopicName);
        final String testMessage = "Hello World for Topic!";
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = new MessageProducerBuilder(producerSession, testTopic).waitTillStarted().build();
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // create 2 consumers
        ActiveMQSession consumer1Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer1 = new MessageConsumerBuilder(consumer1Session, testTopic).waitTillStarted().build();
        ActiveMQSession consumer2Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer2 = new MessageConsumerBuilder(consumer2Session, testTopic).waitTillStarted().build();
        // When
        producer.send(expectedTextMessage);
        Message message1 = consumer1.receive(1000L);
        Message message2 = consumer2.receive(1000L);
        Assert.assertEquals(testMessage, ((TextMessage) message1).getText());
        Assert.assertEquals(testMessage, ((TextMessage) message2).getText());
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(9, 5000L);
        verifyProducerSendEvent(testTopic, producerSession); // trace count : 1
        verifyConsumerPullEvent(testTopic, consumer1Session, consumer1, expectedTextMessage); // trace count : 4
        verifyConsumerPullEvent(testTopic, consumer2Session, consumer2, expectedTextMessage); // trace count : 4
    }

    /**
     * Sends a text message to a queue consumed asynchronously via a MessageListener, then
     * verifies the producer send trace (1) and the consumer push trace (1).
     */
    @Test
    public void testQueuePush() throws Exception {
        // Given
        final String testQueueName = "TestPushQueue";
        final ActiveMQQueue testQueue = new ActiveMQQueue(testQueueName);
        final String testMessage = "Hello World for Queue!";
        final CountDownLatch consumerLatch = new CountDownLatch(1);
        final Collection<Throwable> consumerThrowables = new CopyOnWriteArrayList<Throwable>();
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = producerSession.createProducer(testQueue);
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // create consumer
        ActiveMQSession consumerSession = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        MessageConsumer consumer = consumerSession.createConsumer(testQueue);
        consumer.setMessageListener(new AssertTextMessageListener(consumerLatch, consumerThrowables, expectedTextMessage));
        // When
        producer.send(expectedTextMessage);
        consumerLatch.await(1L, TimeUnit.SECONDS);
        // Then
        assertNoConsumerError(consumerThrowables);
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(2, 5000L);
        verifyProducerSendEvent(testQueue, producerSession); // trace count : 1
        verifyConsumerPushEvent(testQueue, consumerSession); // trace count : 1
    }

    /**
     * Sends a text message to a topic consumed asynchronously by two MessageListeners, then
     * verifies the producer send trace (1) and each consumer's push trace (1 each).
     */
    @Test
    public void testTopicPush() throws Exception {
        // Given
        final String testTopicName = "TestPushTopic";
        final ActiveMQTopic testTopic = new ActiveMQTopic(testTopicName);
        final String testMessage = "Hello World for Topic!";
        final int numMessageConsumers = 2;
        final CountDownLatch consumerConsumeLatch = new CountDownLatch(numMessageConsumers);
        final Collection<Throwable> consumerThrowables = new CopyOnWriteArrayList<Throwable>();
        // create producer
        ActiveMQSession producerSession = ActiveMQClientITHelper.createSession(getProducerBrokerName(), getProducerBrokerUrl());
        MessageProducer producer = new MessageProducerBuilder(producerSession, testTopic).waitTillStarted().build();
        final TextMessage expectedTextMessage = producerSession.createTextMessage(testMessage);
        // create 2 consumers
        ActiveMQSession consumer1Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        new MessageConsumerBuilder(consumer1Session, testTopic)
                .withMessageListener(new AssertTextMessageListener(consumerConsumeLatch, consumerThrowables, expectedTextMessage))
                .waitTillStarted()
                .build();
        ActiveMQSession consumer2Session = ActiveMQClientITHelper.createSession(getConsumerBrokerName(), getConsumerBrokerUrl());
        new MessageConsumerBuilder(consumer2Session, testTopic)
                .withMessageListener(new AssertTextMessageListener(consumerConsumeLatch, consumerThrowables, expectedTextMessage))
                .waitTillStarted()
                .build();
        // When
        producer.send(expectedTextMessage);
        consumerConsumeLatch.await(1L, TimeUnit.SECONDS);
        // Then
        // FIX: the listeners' assertion failures were collected but never checked here,
        // unlike testQueuePush - surface them so a bad payload fails the test.
        assertNoConsumerError(consumerThrowables);
        // Wait till all traces are recorded (consumer traces are recorded from another thread)
        awaitAndVerifyTraceCount(3, 1000L);
        verifyProducerSendEvent(testTopic, producerSession); // trace count : 1
        verifyConsumerPushEvent(testTopic, consumer1Session); // trace count : 1
        verifyConsumerPushEvent(testTopic, consumer2Session); // trace count : 1
    }

    /**
     * Verifies traced span event for when {@link org.apache.activemq.ActiveMQMessageProducer ActiveMQMessageProducer}
     * sends the message. (trace count : 1)
     *
     * @param destination the destination to which the producer is sending the message
     * @param session the producer's session, used to resolve the broker's remote address
     * @throws Exception
     */
    private void verifyProducerSendEvent(ActiveMQDestination destination, ActiveMQSession session) throws Exception {
        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        verifier.printCache();
        Class<?> messageProducerClass = Class.forName("org.apache.activemq.ActiveMQMessageProducer");
        Method send = messageProducerClass.getDeclaredMethod("send", Destination.class, Message.class, int.class, int.class, long.class);
        // The transport's remote address is the authoritative endpoint (broker URL strings
        // may differ from the actual connected address, e.g. failover URIs).
        String expectedEndPoint = session.getConnection().getTransport().getRemoteAddress();
        verifier.verifyDiscreteTrace(event(
                ACTIVEMQ_CLIENT, // serviceType
                send, // method
                null, // rpc
                expectedEndPoint, // endPoint
                destination.getPhysicalName(), // destinationId
                annotation("message.queue.url", destination.getQualifiedName()),
                annotation("activemq.broker.address", expectedEndPoint)
        ));
    }

    /**
     * Verifies spans and span events for when {@link ActiveMQMessageConsumer} receives the message and enqueues it to
     * the {@link org.apache.activemq.MessageDispatchChannel MessageDispatchChannel}. The client then invokes any of
     * {@link ActiveMQMessageConsumer#receive() receive()}, {@link ActiveMQMessageConsumer#receive(long) receive(long)},
     * or {@link ActiveMQMessageConsumer#receiveNoWait() receiveNoWait()} to retrieve the message. (trace count : 4)
     *
     * @param destination the destination from which the consumer is receiving the message
     * @param session the consumer's session, used to resolve the broker's remote address
     * @param consumer the consumer whose internal dispatch channel is inspected
     * @param expectedMessage the message the consumer is expected to receive
     * @throws Exception
     */
    private void verifyConsumerPullEvent(ActiveMQDestination destination, ActiveMQSession session, MessageConsumer consumer, Message expectedMessage) throws Exception {
        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        verifier.printCache();
        Class<?> messageConsumerClass = Class.forName("org.apache.activemq.ActiveMQMessageConsumer");
        Method receiveWithTimeout = messageConsumerClass.getDeclaredMethod("receive", long.class);
        String expectedEndPoint = session.getConnection().getTransport().getRemoteAddress();
        ExpectedTrace consumerDispatchTrace = root(ACTIVEMQ_CLIENT, // serviceType
                "ActiveMQ Consumer Invocation", // method
                destination.getQualifiedName(), // rpc
                null, // endPoint (collected but there's no easy way to retrieve local address)
                expectedEndPoint);
        ExpectedTrace consumerReceiveTrace = event(ACTIVEMQ_CLIENT_INTERNAL, // serviceType
                receiveWithTimeout, // method
                annotation("activemq.message", getMessageAsString(expectedMessage)));
        Class<?> messageDispatchChannel = getMessageDispatchChannelClass(consumer);
        if (messageDispatchChannel != null) {
            Method enqueue = messageDispatchChannel.getDeclaredMethod("enqueue", MessageDispatch.class);
            Method dequeueWithTimeout = messageDispatchChannel.getDeclaredMethod("dequeue", long.class);
            // Consumer dispatches and enqueues the message to dispatch channel automatically
            verifier.verifyDiscreteTrace(consumerDispatchTrace, event(ACTIVEMQ_CLIENT_INTERNAL, enqueue));
            // Client receives the message by dequeueing it from the dispatch channel
            verifier.verifyDiscreteTrace(consumerReceiveTrace, event(ACTIVEMQ_CLIENT_INTERNAL, dequeueWithTimeout));
        } else {
            // Consumer dispatches and enqueues the message to dispatch channel automatically
            verifier.verifyDiscreteTrace(consumerDispatchTrace);
            // Client receives the message by dequeueing it from the dispatch channel
            verifier.verifyDiscreteTrace(consumerReceiveTrace);
        }
    }

    /**
     * Verifies spans and span events for when {@link ActiveMQMessageConsumer} receives the message and invokes it's
     * {@link javax.jms.MessageListener MessageListener}. (trace count : 1)
     *
     * @param destination the destination from which the consumer is receiving the message
     * @param session the consumer's session, used to resolve the broker's remote address
     * @throws Exception
     */
    private void verifyConsumerPushEvent(ActiveMQDestination destination, ActiveMQSession session) throws Exception {
        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        String expectedRemoteAddress = session.getConnection().getTransport().getRemoteAddress();
        verifier.verifyDiscreteTrace(root(
                ACTIVEMQ_CLIENT, // serviceType
                "ActiveMQ Consumer Invocation", // method
                destination.getQualifiedName(), // rpc
                null, // endPoint (collected but there's no easy way to retrieve local address so skip check)
                expectedRemoteAddress // remoteAddress
        ));
    }

    /**
     * Returns the runtime class of the consumer's internal dispatch channel (the
     * {@code unconsumedMessages} field), or {@code null} if that field holds no value.
     *
     * @param consumer the consumer to inspect reflectively
     * @return the dispatch channel's class, or {@code null} if absent
     * @throws NoSuchFieldException if the field name changed in the ActiveMQ client version under test
     * @throws IllegalAccessException if reflective access is denied
     */
    private Class<?> getMessageDispatchChannelClass(MessageConsumer consumer) throws NoSuchFieldException, IllegalAccessException {
        final String messageDispatchChannelFieldName = "unconsumedMessages";
        Class<?> consumerClass = consumer.getClass();
        // Need a better way as field names could change in future versions. Comparing classes or class names doesn't
        // work due to class loading issue, and some versions may not have certain implementations of
        // MessageDispatchChannel.
        // Test should be fixed if anything changes in future ActiveMQClient library
        Field messageDispatchChannelField = consumerClass.getDeclaredField(messageDispatchChannelFieldName);
        messageDispatchChannelField.setAccessible(true);
        Object messageDispatchChannel = messageDispatchChannelField.get(consumer);
        // FIX: guard against a null field value - callers already treat a null return as
        // "no dispatch channel to verify"; previously this line threw NullPointerException.
        return messageDispatchChannel == null ? null : messageDispatchChannel.getClass();
    }

    /**
     * Renders a message as {@code SimpleClassName{text}} for trace annotations;
     * non-text messages render as just the class name.
     */
    private String getMessageAsString(Message message) throws JMSException {
        StringBuilder messageStringBuilder = new StringBuilder(message.getClass().getSimpleName());
        if (message instanceof TextMessage) {
            messageStringBuilder.append('{').append(((TextMessage) message).getText()).append('}');
        }
        return messageStringBuilder.toString();
    }

    /** Fails the test if any listener recorded an assertion failure or error. */
    protected final void assertNoConsumerError(Collection<Throwable> consumerThrowables) {
        Assert.assertTrue("Consumer Error : " + consumerThrowables.toString(), consumerThrowables.isEmpty());
    }

    /**
     * Polls the verifier every 100ms until the expected trace count is reached or
     * {@code maxWaitMs} elapses; on timeout, dumps the cache and asserts once more
     * so the failure message reflects the final state.
     *
     * @param expectedTraceCount the number of traces expected to be recorded
     * @param maxWaitMs maximum time to wait (clamped up to one polling interval)
     */
    protected final void awaitAndVerifyTraceCount(int expectedTraceCount, long maxWaitMs) throws InterruptedException {
        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        final long waitIntervalMs = 100L;
        long maxWaitTime = maxWaitMs;
        if (maxWaitMs < waitIntervalMs) {
            maxWaitTime = waitIntervalMs;
        }
        long startTime = System.currentTimeMillis();
        while (System.currentTimeMillis() - startTime < maxWaitTime) {
            try {
                verifier.verifyTraceCount(expectedTraceCount);
                return;
            } catch (AssertionError e) {
                // ignore and retry
                Thread.sleep(waitIntervalMs);
            }
        }
        verifier.printCache();
        verifier.verifyTraceCount(expectedTraceCount);
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.repository;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.repository.Deployment;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.repository.ProcessDefinitionQuery;
import org.camunda.bpm.engine.runtime.Incident;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import java.util.List;
/**
* @author Joram Barrez
*/
public class ProcessDefinitionQueryTest extends AbstractDefinitionQueryTest {
    // Id of the extra deployment created in setUp(); deleted again in tearDown().
    private String deploymentThreeId;

    // Classpath locations of the BPMN 2.0 resources; presumably consumed by
    // AbstractDefinitionQueryTest during its setUp - TODO confirm against the superclass.
    protected String getResourceOnePath() {
        return "org/camunda/bpm/engine/test/repository/one.bpmn20.xml";
    }

    protected String getResourceTwoPath() {
        return "org/camunda/bpm/engine/test/repository/two.bpmn20.xml";
    }

    // Note the underscore in "three_" - the *Like tests rely on it to exercise LIKE-escaping.
    protected String getResourceThreePath() {
        return "org/camunda/bpm/engine/test/repository/three_.bpmn20.xml";
    }
    @Override
    protected void setUp() throws Exception {
        // Create a third deployment on top of those managed by the superclass
        // (super.setUp() presumably deploys resources one and two - see AbstractDefinitionQueryTest).
        deploymentThreeId = repositoryService.createDeployment().name("thirdDeployment").addClasspathResource(getResourceThreePath()).deploy().getId();
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        // Second argument true requests a cascading delete - NOTE(review): verify
        // against RepositoryService#deleteDeployment semantics.
        repositoryService.deleteDeployment(deploymentThreeId, true);
    }
public void testProcessDefinitionProperties() {
List<ProcessDefinition> processDefinitions = repositoryService
.createProcessDefinitionQuery()
.orderByProcessDefinitionName().asc()
.orderByProcessDefinitionVersion().asc()
.orderByProcessDefinitionCategory().asc()
.list();
ProcessDefinition processDefinition = processDefinitions.get(0);
assertEquals("one", processDefinition.getKey());
assertEquals("One", processDefinition.getName());
assertEquals("Desc one", processDefinition.getDescription());
assertTrue(processDefinition.getId().startsWith("one:1"));
assertEquals("Examples", processDefinition.getCategory());
processDefinition = processDefinitions.get(1);
assertEquals("one", processDefinition.getKey());
assertEquals("One", processDefinition.getName());
assertEquals("Desc one", processDefinition.getDescription());
assertTrue(processDefinition.getId().startsWith("one:2"));
assertEquals("Examples", processDefinition.getCategory());
processDefinition = processDefinitions.get(2);
assertEquals("two", processDefinition.getKey());
assertEquals("Two", processDefinition.getName());
assertNull(processDefinition.getDescription());
assertTrue(processDefinition.getId().startsWith("two:1"));
assertEquals("Examples2", processDefinition.getCategory());
}
public void testQueryByDeploymentId() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().deploymentId(deploymentOneId);
verifyQueryResults(query, 2);
}
public void testQueryByInvalidDeploymentId() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().deploymentId("invalid");
verifyQueryResults(query, 0);
try {
repositoryService.createProcessDefinitionQuery().deploymentId(null);
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
}
public void testQueryByName() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionName("Two");
verifyQueryResults(query, 1);
query = repositoryService.createProcessDefinitionQuery().processDefinitionName("One");
verifyQueryResults(query, 2);
}
public void testQueryByInvalidName() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionName("invalid");
verifyQueryResults(query, 0);
try {
repositoryService.createProcessDefinitionQuery().processDefinitionName(null);
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
}
public void testQueryByNameLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionNameLike("%w%");
verifyQueryResults(query, 1);
query = query.processDefinitionNameLike("%z\\_%");
verifyQueryResults(query, 1);
}
public void testQueryByInvalidNameLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionNameLike("%invalid%");
verifyQueryResults(query, 0);
}
public void testQueryByKey() {
// process one
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionKey("one");
verifyQueryResults(query, 2);
// process two
query = repositoryService.createProcessDefinitionQuery().processDefinitionKey("two");
verifyQueryResults(query, 1);
}
public void testQueryByInvalidKey() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionKey("invalid");
verifyQueryResults(query, 0);
try {
repositoryService.createProcessDefinitionQuery().processDefinitionKey(null);
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
}
public void testQueryByKeyLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionKeyLike("%o%");
verifyQueryResults(query, 3);
query = query.processDefinitionKeyLike("%z\\_%");
verifyQueryResults(query, 1);
}
public void testQueryByInvalidKeyLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionKeyLike("%invalid%");
verifyQueryResults(query, 0);
try {
repositoryService.createProcessDefinitionQuery().processDefinitionKeyLike(null);
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
}
public void testQueryByResourceNameLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionResourceNameLike("%ee\\_%");
verifyQueryResults(query, 1);
}
public void testQueryByInvalidResourceNameLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionResourceNameLike("%invalid%");
verifyQueryResults(query, 0);
try {
repositoryService.createProcessDefinitionQuery().processDefinitionResourceNameLike(null);
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
}
public void testQueryByCategory() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionCategory("Examples");
verifyQueryResults(query, 2);
}
public void testQueryByCategoryLike() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionCategoryLike("%Example%");
verifyQueryResults(query, 3);
query = repositoryService.createProcessDefinitionQuery().processDefinitionCategoryLike("%amples2");
verifyQueryResults(query, 1);
query = repositoryService.createProcessDefinitionQuery().processDefinitionCategoryLike("%z\\_%");
verifyQueryResults(query, 1);
}
public void testQueryByVersion() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionVersion(2);
verifyQueryResults(query, 1);
query = repositoryService.createProcessDefinitionQuery().processDefinitionVersion(1);
verifyQueryResults(query, 3);
}
public void testQueryByInvalidVersion() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionVersion(3);
verifyQueryResults(query, 0);
try {
repositoryService.createProcessDefinitionQuery().processDefinitionVersion(-1).list();
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
try {
repositoryService.createProcessDefinitionQuery().processDefinitionVersion(null).list();
fail();
} catch (ProcessEngineException e) {
// Expected Exception
}
}
public void testQueryByKeyAndVersion() {
ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().processDefinitionKey("one").processDefinitionVersion(1);
verifyQueryResults(query, 1);
query = repositoryService.createProcessDefinitionQuery().processDefinitionKey("one").processDefinitionVersion(2);
verifyQueryResults(query, 1);
query = repositoryService.createProcessDefinitionQuery().processDefinitionKey("one").processDefinitionVersion(3);
verifyQueryResults(query, 0);
}
public void testQueryByLatest() {
    // Without a key filter, latestVersion() returns one definition per key (3 keys).
    verifyQueryResults(
            repositoryService.createProcessDefinitionQuery().latestVersion(), 3);
    // With a key filter the result narrows to that key's single latest version.
    verifyQueryResults(
            repositoryService.createProcessDefinitionQuery().processDefinitionKey("one").latestVersion(), 1);
    verifyQueryResults(
            repositoryService.createProcessDefinitionQuery().processDefinitionKey("two").latestVersion(), 1);
}
public void testInvalidUsageOfLatest() {
    // latestVersion() may not be combined with a fixed definition id...
    try {
        repositoryService.createProcessDefinitionQuery().processDefinitionId("test").latestVersion().list();
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
    // ...nor with an explicit version number...
    try {
        repositoryService.createProcessDefinitionQuery().processDefinitionVersion(1).latestVersion().list();
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
    // ...nor with a deployment id filter.
    try {
        repositoryService.createProcessDefinitionQuery().deploymentId("test").latestVersion().list();
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
}
// Verifies each supported ordering clause runs (all four definitions returned),
// then pins the concrete key/version order for the typical key-asc + version-desc case.
// NOTE(review): only the first three of four sorted entries are asserted below.
public void testQuerySorting() {
    // asc
    ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionId().asc();
    verifyQueryResults(query, 4);
    query = repositoryService.createProcessDefinitionQuery().orderByDeploymentId().asc();
    verifyQueryResults(query, 4);
    query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionKey().asc();
    verifyQueryResults(query, 4);
    query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionVersion().asc();
    verifyQueryResults(query, 4);
    // desc
    query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionId().desc();
    verifyQueryResults(query, 4);
    query = repositoryService.createProcessDefinitionQuery().orderByDeploymentId().desc();
    verifyQueryResults(query, 4);
    query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionKey().desc();
    verifyQueryResults(query, 4);
    query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionVersion().desc();
    verifyQueryResults(query, 4);
    // Typical use case
    query = repositoryService.createProcessDefinitionQuery().orderByProcessDefinitionKey().asc().orderByProcessDefinitionVersion().desc();
    List<ProcessDefinition> processDefinitions = query.list();
    assertEquals(4, processDefinitions.size());
    // Key "one" sorts first; within the key, version 2 precedes version 1.
    assertEquals("one", processDefinitions.get(0).getKey());
    assertEquals(2, processDefinitions.get(0).getVersion());
    assertEquals("one", processDefinitions.get(1).getKey());
    assertEquals(1, processDefinitions.get(1).getVersion());
    assertEquals("two", processDefinitions.get(2).getKey());
    assertEquals(1, processDefinitions.get(2).getVersion());
}
// Definitions can be filtered by the name of their message event subscription.
public void testQueryByMessageSubscription() {
    Deployment deployment = repositoryService.createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/processWithNewBookingMessage.bpmn20.xml")
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/processWithNewInvoiceMessage.bpmn20.xml")
        .deploy();
    // Each of the two deployed processes subscribes to exactly one message.
    assertEquals(1,repositoryService.createProcessDefinitionQuery()
        .messageEventSubscriptionName("newInvoiceMessage")
        .count());
    assertEquals(1,repositoryService.createProcessDefinitionQuery()
        .messageEventSubscriptionName("newBookingMessage")
        .count());
    // An unknown message name matches nothing.
    assertEquals(0,repositoryService.createProcessDefinitionQuery()
        .messageEventSubscriptionName("bogus")
        .count());
    // Clean up the deployment made by this test.
    repositoryService.deleteDeployment(deployment.getId());
}
@org.camunda.bpm.engine.test.Deployment(resources={"org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentId() {
    // Sanity check: the failing process is deployed.
    assertEquals(1, repositoryService.createProcessDefinitionQuery()
        .processDefinitionKey("failingProcess")
        .count());
    // Start the process and execute its jobs so the expected incident is raised.
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingProcess");
    executeAvailableJobs();
    List<Incident> incidentList = runtimeService.createIncidentQuery().list();
    assertEquals(1, incidentList.size());
    Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
    // The definition is findable via the id of the incident raised against it.
    ProcessDefinitionQuery query = repositoryService
        .createProcessDefinitionQuery()
        .incidentId(incident.getId());
    verifyQueryResults(query, 1);
}
public void testQueryByInvalidIncidentId() {
    ProcessDefinitionQuery definitionQuery = repositoryService.createProcessDefinitionQuery();
    // An unknown incident id simply matches no definitions.
    verifyQueryResults(definitionQuery.incidentId("invalid"), 0);
    // A null incident id is an illegal argument.
    try {
        definitionQuery.incidentId(null);
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
}
@org.camunda.bpm.engine.test.Deployment(resources={"org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentType() {
    // Sanity check: the failing process is deployed.
    assertEquals(1, repositoryService.createProcessDefinitionQuery()
        .processDefinitionKey("failingProcess")
        .count());
    // Start the process and execute its jobs so the expected incident is raised.
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingProcess");
    executeAvailableJobs();
    List<Incident> incidentList = runtimeService.createIncidentQuery().list();
    assertEquals(1, incidentList.size());
    Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
    // The definition is findable via the incident's type.
    ProcessDefinitionQuery query = repositoryService
        .createProcessDefinitionQuery()
        .incidentType(incident.getIncidentType());
    verifyQueryResults(query, 1);
}
public void testQueryByInvalidIncidentType() {
    ProcessDefinitionQuery definitionQuery = repositoryService.createProcessDefinitionQuery();
    // An unknown incident type simply matches no definitions.
    verifyQueryResults(definitionQuery.incidentType("invalid"), 0);
    // A null incident type is an illegal argument.
    try {
        definitionQuery.incidentType(null);
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
}
@org.camunda.bpm.engine.test.Deployment(resources={"org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentMessage() {
    // Sanity check: the failing process is deployed.
    assertEquals(1, repositoryService.createProcessDefinitionQuery()
        .processDefinitionKey("failingProcess")
        .count());
    // Start the process and execute its jobs so the expected incident is raised.
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("failingProcess");
    executeAvailableJobs();
    List<Incident> incidentList = runtimeService.createIncidentQuery().list();
    assertEquals(1, incidentList.size());
    Incident incident = runtimeService.createIncidentQuery().processInstanceId(processInstance.getId()).singleResult();
    // The definition is findable via the incident's exact message text.
    ProcessDefinitionQuery query = repositoryService
        .createProcessDefinitionQuery()
        .incidentMessage(incident.getIncidentMessage());
    verifyQueryResults(query, 1);
}
public void testQueryByInvalidIncidentMessage() {
    ProcessDefinitionQuery definitionQuery = repositoryService.createProcessDefinitionQuery();
    // An unknown incident message simply matches no definitions.
    verifyQueryResults(definitionQuery.incidentMessage("invalid"), 0);
    // A null incident message is an illegal argument.
    try {
        definitionQuery.incidentMessage(null);
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
}
@org.camunda.bpm.engine.test.Deployment(resources={"org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml"})
public void testQueryByIncidentMessageLike() {
    // Sanity check: the failing process is deployed.
    assertEquals(1, repositoryService.createProcessDefinitionQuery()
        .processDefinitionKey("failingProcess")
        .count());
    // Start the process and execute its jobs so the expected incident is raised.
    runtimeService.startProcessInstanceByKey("failingProcess");
    executeAvailableJobs();
    List<Incident> incidentList = runtimeService.createIncidentQuery().list();
    assertEquals(1, incidentList.size());
    // Wildcard match against the incident message.
    ProcessDefinitionQuery query = repositoryService
        .createProcessDefinitionQuery()
        .incidentMessageLike("%expected%");
    verifyQueryResults(query, 1);
    // Escaped underscore matches a literal "_" in the message, not the LIKE wildcard.
    query = repositoryService
        .createProcessDefinitionQuery()
        .incidentMessageLike("%\\_expected%");
    verifyQueryResults(query, 1);
}
public void testQueryByInvalidIncidentMessageLike() {
    ProcessDefinitionQuery definitionQuery = repositoryService.createProcessDefinitionQuery();
    // A pattern matching no incident message yields an empty result.
    verifyQueryResults(definitionQuery.incidentMessageLike("invalid"), 0);
    // A null pattern is an illegal argument.
    try {
        definitionQuery.incidentMessageLike(null);
        fail();
    } catch (ProcessEngineException expected) {
        // expected
    }
}
// Verifies processDefinitionIdIn(): empty result for unknown ids, round-trip of
// all known ids, and that it is ANDed with processDefinitionId().
public void testQueryByProcessDefinitionIds() {
    // empty list: ids that do not exist match nothing
    assertTrue(repositoryService.createProcessDefinitionQuery().processDefinitionIdIn("a", "b").list().isEmpty());
    // collect all ids, also into a set for O(1) membership checks below
    List<ProcessDefinition> list = repositoryService.createProcessDefinitionQuery().list();
    String[] ids = new String[list.size()];
    java.util.Set<String> knownIds = new java.util.HashSet<String>();
    for (int i = 0; i < ids.length; i++) {
        ids[i] = list.get(i).getId();
        knownIds.add(ids[i]);
    }
    // every definition returned by the id-in query must be one of the collected ones
    List<ProcessDefinition> idInList = repositoryService.createProcessDefinitionQuery().processDefinitionIdIn(ids).list();
    for (ProcessDefinition processDefinition : idInList) {
        if (!knownIds.contains(processDefinition.getId())) {
            fail("Expected to find process definition " + processDefinition);
        }
    }
    // processDefinitionId() and processDefinitionIdIn() are ANDed: a bogus single id excludes everything
    assertEquals(0, repositoryService.createProcessDefinitionQuery().processDefinitionId("dummyId").processDefinitionIdIn(ids).count());
}
// Deploying the same resource twice produces version 2; latestVersion() combined
// with a name filter must return only that single latest definition.
public void testQueryByLatestAndName() {
    String firstDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/first-process.bpmn20.xml")
        .deploy()
        .getId();
    String secondDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/first-process.bpmn20.xml")
        .deploy()
        .getId();
    ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery();
    query
        .processDefinitionName("First Test Process")
        .latestVersion();
    verifyQueryResults(query, 1);
    ProcessDefinition result = query.singleResult();
    assertEquals("First Test Process", result.getName());
    assertEquals(2, result.getVersion());
    // Cascade-delete both deployments so this test leaves no state behind.
    repositoryService.deleteDeployment(firstDeployment, true);
    repositoryService.deleteDeployment(secondDeployment, true);
}
// The second deployment uses a differently-named resource (presumably with the same
// process key — TODO confirm), so "First Test Process" is no longer a latest version
// and the name + latestVersion() query must come back empty.
public void testQueryByLatestAndName_NotFound() {
    String firstDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/first-process.bpmn20.xml")
        .deploy()
        .getId();
    String secondDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/second-process.bpmn20.xml")
        .deploy()
        .getId();
    ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery();
    query
        .processDefinitionName("First Test Process")
        .latestVersion();
    verifyQueryResults(query, 0);
    // Cascade-delete both deployments so this test leaves no state behind.
    repositoryService.deleteDeployment(firstDeployment, true);
    repositoryService.deleteDeployment(secondDeployment, true);
}
// Name-pattern + latestVersion() queries: the second deployment (presumably sharing
// the same process key — TODO confirm) makes "Second Test Process" the only latest
// version, so every pattern below resolves to that single definition.
public void testQueryByLatestAndNameLike() {
    String firstDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/first-process.bpmn20.xml")
        .deploy()
        .getId();
    String secondDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/second-process.bpmn20.xml")
        .deploy()
        .getId();
    ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery();
    query
        .processDefinitionNameLike("%Test Process")
        .latestVersion();
    verifyQueryResults(query, 1);
    ProcessDefinition result = query.singleResult();
    assertEquals("Second Test Process", result.getName());
    assertEquals(2, result.getVersion());
    query
        .processDefinitionNameLike("%Test%")
        .latestVersion();
    verifyQueryResults(query, 1);
    result = query.singleResult();
    assertEquals("Second Test Process", result.getName());
    assertEquals(2, result.getVersion());
    query
        .processDefinitionNameLike("Second%")
        .latestVersion();
    // Fix: verify the result count here too, consistent with the two cases above.
    verifyQueryResults(query, 1);
    result = query.singleResult();
    assertEquals("Second Test Process", result.getName());
    assertEquals(2, result.getVersion());
    // Cascade-delete both deployments so this test leaves no state behind.
    repositoryService.deleteDeployment(firstDeployment, true);
    repositoryService.deleteDeployment(secondDeployment, true);
}
// Pattern "First%" matches no latest-version definition after the second deployment
// supersedes the first-named process; the query must come back empty.
public void testQueryByLatestAndNameLike_NotFound() {
    String firstDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/first-process.bpmn20.xml")
        .deploy()
        .getId();
    String secondDeployment = repositoryService
        .createDeployment()
        .addClasspathResource("org/camunda/bpm/engine/test/api/repository/second-process.bpmn20.xml")
        .deploy()
        .getId();
    ProcessDefinitionQuery query = repositoryService.createProcessDefinitionQuery();
    query
        .processDefinitionNameLike("First%")
        .latestVersion();
    verifyQueryResults(query, 0);
    // Cascade-delete both deployments so this test leaves no state behind.
    repositoryService.deleteDeployment(firstDeployment, true);
    repositoryService.deleteDeployment(secondDeployment, true);
}
@org.camunda.bpm.engine.test.Deployment(resources={"org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml"})
// The deployed process model carries version tag "ver_tag_2"; an exact-match
// version tag query must find exactly that one definition.
public void testQueryByVersionTag() {
    assertEquals(1, repositoryService.createProcessDefinitionQuery()
        .versionTag("ver_tag_2")
        .count());
}
@org.camunda.bpm.engine.test.Deployment(resources={"org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml"})
// LIKE query with escaped underscores: "ver\_tag\_%" matches a literal
// "ver_tag_" prefix rather than treating "_" as the single-char wildcard.
public void testQueryByVersionTagLike() {
    assertEquals(1, repositoryService.createProcessDefinitionQuery()
        .versionTagLike("ver\\_tag\\_%")
        .count());
}
@org.camunda.bpm.engine.test.Deployment(resources={
    "org/camunda/bpm/engine/test/api/repository/failingProcessCreateOneIncident.bpmn20.xml",
    "org/camunda/bpm/engine/test/api/repository/VersionTagTest.testParsingVersionTag.bpmn20.xml"
})
// Ascending order by version tag across the two tagged deployments.
// NOTE(review): only index 1 is asserted; assumes the other deployment's tag
// sorts before "ver_tag_2" — confirm against the second model's tag.
public void testQueryOrderByVersionTag() {
    List<ProcessDefinition> processDefinitionList = repositoryService.createProcessDefinitionQuery()
        .versionTagLike("ver%tag%")
        .orderByVersionTag()
        .asc()
        .list();
    assertEquals("ver_tag_2", processDefinitionList.get(1).getVersionTag());
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.controls.resultset;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.TextTransfer;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.widgets.Display;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.data.*;
import org.jkiss.dbeaver.model.exec.*;
import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLQuery;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.model.struct.rdb.DBSTable;
import org.jkiss.dbeaver.model.struct.rdb.DBSTableIndex;
import org.jkiss.dbeaver.model.virtual.DBVEntity;
import org.jkiss.dbeaver.model.virtual.DBVEntityConstraint;
import org.jkiss.dbeaver.model.virtual.DBVUtils;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.utils.CommonUtils;
import java.util.*;
/**
 * Static helper routines for the result set viewer: persisted dialog settings,
 * discovery of entity metadata for fetched attributes, row identifier (unique
 * key) detection, and clipboard access.
 */
public class ResultSetUtils
{
    private static final Log log = Log.getLog(ResultSetUtils.class);

    // Lazily initialized root settings for the viewer. volatile gives visibility;
    // NOTE(review): the null-check below is unsynchronized, so two threads could
    // initialize this twice — appears benign since both results are equivalent.
    private static volatile IDialogSettings viewerSettings;

    /**
     * Returns the named section of the viewer's dialog settings, creating the
     * root settings object on first use.
     */
    @NotNull
    public static IDialogSettings getViewerSettings(String section) {
        if (viewerSettings == null) {
            viewerSettings = UIUtils.getDialogSettings(ResultSetViewer.class.getSimpleName());
        }
        return UIUtils.getSettingsSection(viewerSettings, section);
    }

    /**
     * Resolves database metadata for the given attribute bindings: finds the
     * owning entity of each attribute, binds table columns (converting already
     * fetched row values when the value handler changes), detects row
     * identifiers per entity, performs late binding and loads transformers.
     *
     * @param sourceEntity known source entity, or null to discover it from the result set
     * @param resultSet    result set to discover metadata from (used when sourceEntity is null)
     * @param bindings     attribute bindings, updated in place
     * @param rows         already-fetched rows, fixed up in place; may be null
     * @throws DBException on metadata read errors
     */
    public static void bindAttributes(
        @NotNull DBCSession session,
        @Nullable DBSEntity sourceEntity,
        @Nullable DBCResultSet resultSet,
        @NotNull DBDAttributeBindingMeta[] bindings,
        @Nullable List<Object[]> rows) throws DBException
    {
        final DBRProgressMonitor monitor = session.getProgressMonitor();
        final DBPDataSource dataSource = session.getDataSource();
        boolean readMetaData = dataSource.getContainer().getPreferenceStore().getBoolean(ResultSetPreferences.RESULT_SET_READ_METADATA);
        if (!readMetaData && sourceEntity == null) {
            // Do not read metadata if source entity is not known
            return;
        }
        boolean readReferences = dataSource.getContainer().getPreferenceStore().getBoolean(ResultSetPreferences.RESULT_SET_READ_REFERENCES);
        // Cache of entity metadata -> resolved entity. Identity map: metadata
        // descriptors are compared by instance here.
        final Map<DBCEntityMetaData, DBSEntity> entityBindingMap = new IdentityHashMap<>();
        monitor.beginTask("Discover resultset metadata", 3);
        try {
            SQLQuery sqlQuery = null;
            DBSEntity entity = null;
            if (sourceEntity != null) {
                entity = sourceEntity;
            } else if (resultSet != null) {
                // Try to discover the owner entity from the statement's execution source.
                DBCStatement sourceStatement = resultSet.getSourceStatement();
                if (sourceStatement != null && sourceStatement.getStatementSource() != null) {
                    DBCExecutionSource executionSource = sourceStatement.getStatementSource();
                    monitor.subTask("Discover owner entity");
                    DBSDataContainer dataContainer = executionSource.getDataContainer();
                    if (dataContainer instanceof DBSEntity) {
                        entity = (DBSEntity) dataContainer;
                    }
                    DBCEntityMetaData entityMeta = null;
                    if (entity == null) {
                        // Discover from entity metadata
                        Object sourceDescriptor = executionSource.getSourceDescriptor();
                        if (sourceDescriptor instanceof SQLQuery) {
                            sqlQuery = (SQLQuery) sourceDescriptor;
                            entityMeta = sqlQuery.getSingleSource();
                        }
                        if (entityMeta != null) {
                            entity = getEntityFromMetaData(monitor, dataSource, entityMeta);
                            if (entity != null) {
                                entityBindingMap.put(entityMeta, entity);
                            }
                        }
                    }
                }
            }
            final Map<DBSEntity, DBDRowIdentifier> locatorMap = new IdentityHashMap<>();
            monitor.subTask("Discover attributes");
            for (DBDAttributeBindingMeta binding : bindings) {
                monitor.subTask("Discover attribute '" + binding.getName() + "'");
                DBCAttributeMetaData attrMeta = binding.getMetaAttribute();
                // We got table name and column name
                // To be editable we need this resultset contain set of columns from the same table
                // which construct any unique key
                DBSEntity attrEntity = null;
                final DBCEntityMetaData attrEntityMeta = attrMeta.getEntityMetaData();
                if (attrEntityMeta != null) {
                    attrEntity = entityBindingMap.get(attrEntityMeta);
                    if (attrEntity == null) {
                        if (entity != null && entity instanceof DBSTable && ((DBSTable) entity).isView()) {
                            // If this is a view then don't try to detect entity for each attribute
                            // MySQL returns source table name instead of view name. That's crazy.
                            attrEntity = entity;
                        } else {
                            attrEntity = getEntityFromMetaData(monitor, dataSource, attrEntityMeta);
                        }
                    }
                    if (attrEntity != null) {
                        entityBindingMap.put(attrEntityMeta, attrEntity);
                    }
                }
                if (attrEntity == null) {
                    // Fall back to the statement-level entity discovered above.
                    attrEntity = entity;
                }
                if (attrEntity == null) {
                    if (attrEntityMeta != null) {
                        log.debug("Table '" + DBUtils.getSimpleQualifiedName(attrEntityMeta.getCatalogName(), attrEntityMeta.getSchemaName(), attrEntityMeta.getEntityName()) + "' not found in metadata catalog");
                    }
                } else {
                    // Prefer a driver-reported pseudo attribute for this column name, if any.
                    DBDPseudoAttribute pseudoAttribute = DBUtils.getPseudoAttribute(attrEntity, attrMeta.getName());
                    if (pseudoAttribute != null) {
                        binding.setPseudoAttribute(pseudoAttribute);
                    }
                    DBSEntityAttribute tableColumn;
                    if (binding.getPseudoAttribute() != null) {
                        tableColumn = binding.getPseudoAttribute().createFakeAttribute(attrEntity, attrMeta);
                    } else {
                        tableColumn = attrEntity.getAttribute(monitor, attrMeta.getName());
                    }
                    if (sqlQuery != null) {
                        if (tableColumn != null && tableColumn.getTypeID() != attrMeta.getTypeID()) {
                            // !! Do not try to use table column handlers for custom queries if source data type
                            // differs from table data type.
                            // Query may have expressions with the same alias as underlying table column
                            // and this expression may return very different data type. It breaks fetch completely.
                            // There should be a better solution but for now let's just disable this too smart feature.
                            binding.setEntityAttribute(tableColumn, false);
                            continue;
                        }
                        /*
                        final SQLSelectItem selectItem = sqlQuery.getSelectItem(attrMeta.getName());
                        if (selectItem != null && !selectItem.isPlainColumn()) {
                            // It is not a column.
                            // It maybe an expression, function or anything else
                            continue;
                        }
                        */
                    }
                    if (tableColumn != null && binding.setEntityAttribute(tableColumn, true) && rows != null) {
                        // We have new type and new value handler.
                        // We have to fix already fetched values.
                        // E.g. we fetched strings and found out that we should handle them as LOBs or enums.
                        try {
                            int pos = attrMeta.getOrdinalPosition();
                            for (Object[] row : rows) {
                                row[pos] = binding.getValueHandler().getValueFromObject(session, tableColumn, row[pos], false);
                            }
                        } catch (DBCException e) {
                            // Conversion failure is non-fatal: keep the raw values.
                            log.warn("Error resolving attribute '" + binding.getName() + "' values", e);
                        }
                    }
                }
            }
            monitor.worked(1);
            {
                // Init row identifiers
                monitor.subTask("Detect unique identifiers");
                for (DBDAttributeBindingMeta binding : bindings) {
                    //monitor.subTask("Find attribute '" + binding.getName() + "' identifier");
                    DBSEntityAttribute attr = binding.getEntityAttribute();
                    if (attr == null) {
                        continue;
                    }
                    DBSEntity attrEntity = attr.getParentObject();
                    if (attrEntity != null) {
                        // One row identifier per entity, shared by all of that entity's bindings.
                        DBDRowIdentifier rowIdentifier = locatorMap.get(attrEntity);
                        if (rowIdentifier == null) {
                            DBSEntityReferrer entityIdentifier = getBestIdentifier(monitor, attrEntity, bindings, readMetaData);
                            if (entityIdentifier != null) {
                                rowIdentifier = new DBDRowIdentifier(
                                    attrEntity,
                                    entityIdentifier);
                                locatorMap.put(attrEntity, rowIdentifier);
                            }
                        }
                        binding.setRowIdentifier(rowIdentifier);
                    }
                }
                monitor.worked(1);
            }
            if (readMetaData && readReferences && rows != null) {
                monitor.subTask("Read results metadata");
                // Read nested bindings
                for (DBDAttributeBinding binding : bindings) {
                    binding.lateBinding(session, rows);
                }
            }
            monitor.subTask("Load transformers");
            // Load transformers
            for (DBDAttributeBinding binding : bindings) {
                binding.loadTransformers(session, rows);
            }
            monitor.subTask("Complete metadata load");
            // Reload attributes in row identifiers
            for (DBDRowIdentifier rowIdentifier : locatorMap.values()) {
                rowIdentifier.reloadAttributes(monitor, bindings);
            }
        }
        finally {
            monitor.done();
        }
    }

    /**
     * Resolves an entity from result set metadata: first with the names exactly
     * as reported, then retrying with case-transformed names.
     *
     * @return the entity, or null if the data source has no object container or
     *         the entity cannot be found
     */
    public static DBSEntity getEntityFromMetaData(DBRProgressMonitor monitor, DBPDataSource dataSource, DBCEntityMetaData entityMeta) throws DBException {
        final DBSObjectContainer objectContainer = DBUtils.getAdapter(DBSObjectContainer.class, dataSource);
        if (objectContainer != null) {
            DBSEntity entity = getEntityFromMetaData(monitor, objectContainer, entityMeta, false);
            if (entity == null) {
                entity = getEntityFromMetaData(monitor, objectContainer, entityMeta, true);
            }
            return entity;
        } else {
            return null;
        }
    }

    /**
     * Looks up an entity by catalog/schema/name inside the given container.
     * When transformName is true the names are normalized with the data
     * source's identifier case rules first. A non-entity alias (e.g. synonym)
     * is resolved to its target object.
     *
     * @return the resolved entity, or null when not found or not an entity
     */
    public static DBSEntity getEntityFromMetaData(DBRProgressMonitor monitor, DBSObjectContainer objectContainer, DBCEntityMetaData entityMeta, boolean transformName) throws DBException {
        final DBPDataSource dataSource = objectContainer.getDataSource();
        String catalogName = entityMeta.getCatalogName();
        String schemaName = entityMeta.getSchemaName();
        String entityName = entityMeta.getEntityName();
        if (transformName) {
            catalogName = DBObjectNameCaseTransformer.transformName(dataSource, catalogName);
            schemaName = DBObjectNameCaseTransformer.transformName(dataSource, schemaName);
            entityName = DBObjectNameCaseTransformer.transformName(dataSource, entityName);
        }
        DBSObject entityObject = DBUtils.getObjectByPath(monitor, objectContainer, catalogName, schemaName, entityName);
        if (entityObject instanceof DBSAlias && !(entityObject instanceof DBSEntity)) {
            // Follow the alias to the real object.
            entityObject = ((DBSAlias) entityObject).getTargetObject(monitor);
        }
        if (entityObject == null) {
            return null;
        } else if (entityObject instanceof DBSEntity) {
            return (DBSEntity) entityObject;
        } else {
            log.debug("Unsupported table class: " + entityObject.getClass().getName());
            return null;
        }
    }

    /**
     * Picks the best unique identifier for a table: a primary key/index first,
     * then a unique constraint or index, then a ROWID-style pseudo attribute,
     * and finally a virtual (user-defined) identifier as last resort.
     *
     * @return the chosen referrer, or null when none qualifies
     */
    private static DBSEntityReferrer getBestIdentifier(@NotNull DBRProgressMonitor monitor, @NotNull DBSEntity table, DBDAttributeBindingMeta[] bindings, boolean readMetaData)
        throws DBException
    {
        List<DBSEntityReferrer> identifiers = new ArrayList<>(2);
        if (readMetaData) {
            if (table instanceof DBSTable && ((DBSTable) table).isView()) {
                // Skip physical identifiers for views. There are nothing anyway
            } else {
                // Check indexes first.
                if (table instanceof DBSTable) {
                    try {
                        Collection<? extends DBSTableIndex> indexes = ((DBSTable) table).getIndexes(monitor);
                        if (!CommonUtils.isEmpty(indexes)) {
                            // First search for primary index
                            for (DBSTableIndex index : indexes) {
                                if (index.isPrimary() && DBUtils.isIdentifierIndex(monitor, index)) {
                                    identifiers.add(index);
                                    break;
                                }
                            }
                            // Then search for unique index
                            for (DBSTableIndex index : indexes) {
                                if (DBUtils.isIdentifierIndex(monitor, index)) {
                                    identifiers.add(index);
                                    break;
                                }
                            }
                        }
                    } catch (Exception e) {
                        // Indexes are not supported or not available
                        // Just skip them
                        log.debug(e);
                    }
                }
                {
                    // Check constraints
                    Collection<? extends DBSEntityConstraint> constraints = table.getConstraints(monitor);
                    if (constraints != null) {
                        for (DBSEntityConstraint constraint : constraints) {
                            if (DBUtils.isIdentifierConstraint(monitor, constraint)) {
                                identifiers.add((DBSEntityReferrer) constraint);
                            }
                        }
                    }
                }
            }
        }
        if (CommonUtils.isEmpty(identifiers)) {
            // Check for pseudo attrs (ROWID)
            // Do this after natural identifiers search (see #3829)
            for (DBDAttributeBindingMeta column : bindings) {
                DBDPseudoAttribute pseudoAttribute = column.getPseudoAttribute();
                if (pseudoAttribute != null && pseudoAttribute.getType() == DBDPseudoAttributeType.ROWID) {
                    identifiers.add(new DBDPseudoReferrer(table, column));
                    break;
                }
            }
        }
        if (CommonUtils.isEmpty(identifiers)) {
            // No physical identifiers or row ids
            // Make new or use existing virtual identifier
            DBVEntity virtualEntity = DBVUtils.findVirtualEntity(table, true);
            identifiers.add(virtualEntity.getBestIdentifier());
        }
        if (!CommonUtils.isEmpty(identifiers)) {
            // Find PK or unique key
            DBSEntityReferrer uniqueId = null;
            for (DBSEntityReferrer referrer : identifiers) {
                if (isGoodReferrer(monitor, bindings, referrer)) {
                    if (referrer.getConstraintType() == DBSEntityConstraintType.PRIMARY_KEY) {
                        return referrer;
                    } else if (uniqueId == null &&
                        (referrer.getConstraintType().isUnique() ||
                        (referrer instanceof DBSTableIndex && ((DBSTableIndex) referrer).isUnique())))
                    {
                        uniqueId = referrer;
                    }
                }
            }
            return uniqueId;
        }
        return null;
    }

    /**
     * A referrer qualifies when it is a pseudo referrer, a (possibly empty)
     * virtual constraint, or a real constraint/index with attribute references.
     * NOTE(review): the final statement returns true even when no reference
     * matched any binding — looks intentional but worth confirming.
     */
    private static boolean isGoodReferrer(DBRProgressMonitor monitor, DBDAttributeBinding[] bindings, DBSEntityReferrer referrer) throws DBException
    {
        if (referrer instanceof DBDPseudoReferrer) {
            return true;
        }
        Collection<? extends DBSEntityAttributeRef> references = referrer.getAttributeReferences(monitor);
        if (references == null || references.isEmpty()) {
            // Only virtual constraints are allowed to be attribute-less.
            return referrer instanceof DBVEntityConstraint;
        }
        for (DBSEntityAttributeRef ref : references) {
            for (DBDAttributeBinding binding : bindings) {
                if (binding.matches(ref.getAttribute(), false)) {
                    return true;
                }
            }
        }
        return true;
    }

    /**
     * Structural comparison of two attribute metadata descriptors: label, name,
     * entity, position, nullability, size/precision/scale and type.
     */
    public static boolean equalAttributes(DBCAttributeMetaData attr1, DBCAttributeMetaData attr2) {
        return
            SQLUtils.compareAliases(attr1.getLabel(), attr2.getLabel()) &&
            SQLUtils.compareAliases(attr1.getName(), attr2.getName()) &&
            CommonUtils.equalObjects(attr1.getEntityMetaData(), attr2.getEntityMetaData()) &&
            attr1.getOrdinalPosition() == attr2.getOrdinalPosition() &&
            attr1.isRequired() == attr2.isRequired() &&
            attr1.getMaxLength() == attr2.getMaxLength() &&
            CommonUtils.equalObjects(attr1.getPrecision(), attr2.getPrecision()) &&
            CommonUtils.equalObjects(attr1.getScale(), attr2.getScale()) &&
            attr1.getTypeID() == attr2.getTypeID() &&
            CommonUtils.equalObjects(attr1.getTypeName(), attr2.getTypeName());
    }

    /**
     * Reads the clipboard's plain-text contents and converts them to a value
     * for the given attribute using the attribute's value handler.
     * NOTE(review): uses Display.getCurrent(), so presumably must be called
     * from the UI thread — confirm with callers.
     */
    @Nullable
    public static Object getAttributeValueFromClipboard(DBDAttributeBinding attribute) throws DBCException
    {
        Clipboard clipboard = new Clipboard(Display.getCurrent());
        try (DBCSession session = DBUtils.openUtilSession(new VoidProgressMonitor(), attribute, "Copy from clipboard")) {
            String strValue = (String) clipboard.getContents(TextTransfer.getInstance());
            return attribute.getValueHandler().getValueFromObject(
                session, attribute.getAttribute(), strValue, true);
        } finally {
            clipboard.dispose();
        }
    }

    /** Puts a non-empty string on the system clipboard as plain text; no-op otherwise. */
    public static void copyToClipboard(String string) {
        if (string != null && string.length() > 0) {
            Clipboard clipboard = new Clipboard(Display.getCurrent());
            try {
                TextTransfer textTransfer = TextTransfer.getInstance();
                clipboard.setContents(
                    new Object[]{string},
                    new Transfer[]{textTransfer});
            } finally {
                clipboard.dispose();
            }
        }
    }

    /**
     * True when server-side ordering is enabled in preferences and either more
     * data remains on the server or an explicit order is set in the data filter.
     */
    public static boolean isServerSideFiltering(IResultSetController controller)
    {
        return
            controller.getPreferenceStore().getBoolean(ResultSetPreferences.RESULT_SET_ORDER_SERVER_SIDE) &&
            (controller.isHasMoreData() || !CommonUtils.isEmpty(controller.getModel().getDataFilter().getOrder()));
    }
}
| |
/* The contents of this file are subject to the license and copyright terms
* detailed in the license directory at the root of the source tree (also
* available online at http://fedora-commons.org/license/).
*/
package org.fcrepo.client.objecteditor;
import java.awt.BorderLayout;
import java.awt.CardLayout;
import java.awt.GridBagLayout;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.BorderFactory;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import org.fcrepo.client.Administrator;
import org.fcrepo.client.objecteditor.types.MethodDefinition;
import org.fcrepo.client.objecteditor.types.ParameterDefinition;
/**
 * Panel describing a service definition: a dropdown of its methods and,
 * per method, a tabbed view of that method's parameters.
 */
public class ServiceDescriptionPanel
extends JPanel {
private static final long serialVersionUID = 1L;
private final Map<String, JPanel> m_loadedPanels;
private final JComponent m_containerToValidate;
/**
 * Creates a panel describing the given service definition. A null sDefPID
 * leaves the panel empty until {@link #setSDef(String)} is called; a non-null
 * containerToValidate is revalidated whenever this panel's structure changes.
 */
public ServiceDescriptionPanel(String sDefPID,
                               JComponent containerToValidate)
        throws IOException {
    m_containerToValidate = containerToValidate;
    m_loadedPanels = new HashMap<String, JPanel>();
    setLayout(new BorderLayout());
    if (sDefPID != null) {
        setSDef(sDefPID);
    }
}
/**
 * Switches the display to the given service definition, or clears it when
 * null is passed. Panels are built once per sDef PID and cached.
 */
public void setSDef(String sDefPID) throws IOException {
    removeAll();
    if (sDefPID != null) {
        JPanel panel = m_loadedPanels.get(sDefPID);
        if (panel == null) {
            // First time we see this PID: build and cache its panel.
            panel = makePanel(sDefPID);
            m_loadedPanels.put(sDefPID, panel);
        }
        add(panel, BorderLayout.CENTER);
    }
    if (m_containerToValidate != null) {
        m_containerToValidate.revalidate();
        m_containerToValidate.repaint(new Rectangle(m_containerToValidate.getSize()));
    }
}
/**
 * Creates a new panel describing the service definition: a "defines method"
 * row with a dropdown of "methodName - label" entries, and a "with parm(s)"
 * row whose content switches with the selected method (see ParameterPanel).
 */
private JPanel makePanel(String sDefPID) throws IOException {
    // Static labels for the left column of the two rows.
    JTextArea supportsMethodsTextArea = new JTextArea(" defines method");
    supportsMethodsTextArea.setLineWrap(false);
    supportsMethodsTextArea.setEditable(false);
    supportsMethodsTextArea.setBackground(Administrator.BACKGROUND_COLOR);
    JTextArea methodParametersTextArea = new JTextArea(" with parm(s)");
    methodParametersTextArea.setLineWrap(false);
    methodParametersTextArea.setEditable(false);
    methodParametersTextArea.setBackground(Administrator.BACKGROUND_COLOR);
    JComponent[] left =
            new JComponent[] {supportsMethodsTextArea,
            methodParametersTextArea};
    //
    // Methods
    //
    // Dropdown entries are "name" or "name - label" when a label exists.
    List<MethodDefinition> methodDefs = Util.getMethodDefinitions(sDefPID);
    String[] methodSelections = new String[methodDefs.size()];
    for (int i = 0; i < methodDefs.size(); i++) {
        MethodDefinition def = (MethodDefinition) methodDefs.get(i);
        StringBuffer buf = new StringBuffer();
        buf.append(def.getName());
        if (def.getLabel() != null) {
            buf.append(" - ");
            buf.append(def.getLabel());
        }
        methodSelections[i] = buf.toString();
    }
    final JComboBox<String> methodComboBox =
            new JComboBox<String>(methodSelections);
    Administrator.constrainHeight(methodComboBox);
    //
    // Parameters... ParameterPanel handles the switching and displaying
    //
    final ParameterPanel parameterPanel = new ParameterPanel(methodDefs);
    methodComboBox.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
            // Recover the method name from the "name - label" combo entry
            // and flip the parameter card to it.
            // NOTE(review): assumes method names/labels never contain " - ".
            String[] parts =
                    ((String) methodComboBox.getSelectedItem())
                            .split(" - ");
            parameterPanel.show(parts[0]);
            parameterPanel.revalidate();
        }
    });
    JComponent[] right = new JComponent[] {methodComboBox, parameterPanel};
    // Lay out the label/content pairs as rows.
    GridBagLayout gb = new GridBagLayout();
    JPanel panel = new JPanel(gb);
    panel.setBorder(BorderFactory.createEmptyBorder(4, 0, 0, 0));
    Util.addRows(left, right, gb, panel, true, false);
    return panel;
}
/**
 * A card-layout panel showing the parameters of one method at a time.
 * One card per method is built up front; {@link #show(String)} flips to the
 * card for the named method.
 */
class ParameterPanel
        extends JPanel {

    private static final long serialVersionUID = 1L;

    private final CardLayout m_cardLayout;

    public ParameterPanel(List<MethodDefinition> methodDefs) {
        m_cardLayout = new CardLayout();
        setLayout(m_cardLayout);
        // Card name == method name, so show() can address cards directly.
        for (MethodDefinition def : methodDefs) {
            add(makePane(def), def.getName());
        }
    }

    /** Display the parameter card for the given method. */
    public void show(String methodName) {
        m_cardLayout.show(this, methodName);
    }

    /**
     * Build the card for one method: either a "no parameters." notice or a
     * tabbed pane with one tab per parameter.
     */
    private JComponent makePane(MethodDefinition def) {
        if (def.parameterDefinitions().isEmpty()) {
            JTextArea noParams = new JTextArea("no parameters.");
            noParams.setLineWrap(false);
            noParams.setEditable(false);
            noParams.setBackground(Administrator.BACKGROUND_COLOR);
            // Nest in WEST/NORTH panels so the text hugs the top-left
            // instead of stretching to fill the card.
            JPanel pane = new JPanel(new BorderLayout());
            JPanel leftPane = new JPanel(new BorderLayout());
            leftPane.add(noParams, BorderLayout.NORTH);
            pane.add(leftPane, BorderLayout.WEST);
            return pane;
        }
        JTabbedPane pane = new JTabbedPane();
        for (int i = 0; i < def.parameterDefinitions().size(); i++) {
            // Cast retained: parameterDefinitions() may be a raw List.
            ParameterDefinition parmDef =
                    (ParameterDefinition) def.parameterDefinitions().get(i);
            pane.add(parmDef.getName(), makeDescPane(parmDef));
        }
        return pane;
    }

    /**
     * Build the description pane for one parameter: required/optional,
     * default value, label, and the list of valid values (when present).
     */
    private JPanel makeDescPane(ParameterDefinition parmDef) {
        // StringBuilder: single-threaded UI code, no need for the
        // synchronized StringBuffer.
        StringBuilder buf = new StringBuilder();
        if (parmDef.isRequired()) {
            buf.append("Required.");
        } else {
            buf.append("Optional. ");
            if (parmDef.getDefaultValue() != null
                    && parmDef.getDefaultValue().length() > 0) {
                buf.append("Defaults to ").append(parmDef.getDefaultValue())
                        .append(".");
            }
        }
        if (parmDef.getLabel() != null) {
            buf.append(" ").append(parmDef.getLabel());
        }
        if (parmDef.validValues().size() > 0) {
            buf.append(" Valid values: ");
            for (int k = 0; k < parmDef.validValues().size(); k++) {
                if (k > 0) {
                    buf.append(", ");
                }
                // Cast retained: validValues() may be a raw List.
                buf.append((String) parmDef.validValues().get(k));
            }
        }
        JTextArea desc = new JTextArea(buf.toString());
        desc.setLineWrap(true);
        desc.setEditable(false);
        desc.setWrapStyleWord(true);
        desc.setBackground(Administrator.BACKGROUND_COLOR);
        JPanel pane = new JPanel(new BorderLayout());
        pane.add(desc, BorderLayout.NORTH);
        return pane;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.datatypes;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.core.datastore.ColumnType;
import org.apache.carbondata.core.datastore.row.ComplexColumnInfo;
import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.util.ByteUtil;
import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.processing.loading.complexobjects.ArrayObject;
import org.apache.carbondata.processing.loading.converter.BadRecordLogHolder;
import org.apache.carbondata.processing.util.CarbonBadRecordUtil;
/**
 * Array DataType stateless object used in data loading.
 * <p>
 * Serializes/deserializes ARRAY complex-column values as a length-prefixed
 * stream: an int element count followed by the child type's encoding of each
 * element. The exact byte layout written here is consumed by
 * {@link #parseComplexValue} and {@link #getColumnarDataForComplexType}, so
 * the three methods must stay in sync.
 */
public class ArrayDataType implements GenericDataType<Object> {

  /**
   * child columns
   * (single child: the element type of this array; may itself be complex)
   */
  private GenericDataType children;

  /**
   * name of the column
   */
  private String name;

  /**
   * column unique id
   */
  private String columnId;

  /**
   * parent column name
   */
  private String parentName;

  /**
   * output array index
   * (index of this column's output list in the columnar result)
   */
  private int outputArrayIndex;

  /**
   * True if this is for dictionary column
   */
  private boolean isDictionary;

  /**
   * current data counter
   * (number of rows processed so far by getColumnarDataForComplexType)
   */
  private int dataCounter;

  /* flat complex datatype length, including the children */
  private int depth;

  // Private copy constructor used only by deepCopy(); note columnId,
  // parentName and isDictionary are intentionally not copied here.
  private ArrayDataType(int outputArrayIndex, int dataCounter, GenericDataType children,
      String name) {
    this.outputArrayIndex = outputArrayIndex;
    this.dataCounter = dataCounter;
    this.children = children;
    this.name = name;
  }

  /**
   * constructor
   * @param name column name
   * @param parentName name of the parent complex column (empty/null for top level)
   * @param columnId column unique id
   */
  public ArrayDataType(String name, String parentName, String columnId) {
    this.name = name;
    this.parentName = parentName;
    this.columnId = columnId;
  }

  /**
   * constructor
   * @param name column name
   * @param parentName name of the parent complex column
   * @param columnId column unique id
   * @param isDictionary true if this column is dictionary encoded
   */
  public ArrayDataType(String name, String parentName, String columnId,
      Boolean isDictionary) {
    this.name = name;
    this.parentName = parentName;
    this.columnId = columnId;
    this.isDictionary = isDictionary;
  }

  /*
   * to add child dimensions: becomes the direct child if its declared parent
   * is this column, otherwise it is pushed further down the hierarchy.
   */
  @Override
  public void addChildren(GenericDataType children) {
    if (this.getName().equals(children.getParentName())) {
      this.children = children;
    } else {
      this.children.addChildren(children);
    }
  }

  /*
   * return column name
   */
  @Override
  public String getName() {
    return name;
  }

  /*
   * return column unique id
   * (interface method name says "names" but this returns the single id)
   */
  @Override
  public String getColumnNames() {
    return columnId;
  }

  /*
   * return parent column name
   */
  @Override
  public String getParentName() {
    return parentName;
  }

  /*
   * collects all primitive type descendant columns into the given list
   */
  @Override
  public void getAllPrimitiveChildren(List<GenericDataType> primitiveChild) {
    if (children instanceof PrimitiveDataType) {
      primitiveChild.add(children);
    } else {
      children.getAllPrimitiveChildren(primitiveChild);
    }
  }

  /*
   * set surrogate index — no-op: an array column itself carries no surrogate
   * key; only primitive children do.
   */
  @Override
  public void setSurrogateIndex(int surrIndex) {

  }

  @Override
  public boolean getIsColumnDictionary() {
    return isDictionary;
  }

  // Writes this array value as: int elementCount, then the child encoding of
  // each element. A null input is written as a single null child element.
  // An array holding exactly one empty string with a complex child type is
  // treated as an empty/bad record and short-circuits.
  @Override
  public void writeByteArray(Object input, DataOutputStream dataOutputStream,
      BadRecordLogHolder logHolder, Boolean isWithoutConverter, boolean isEmptyBadRecord)
      throws IOException {
    if (input == null) {
      dataOutputStream.writeInt(1);
      children.writeByteArray(null, dataOutputStream, logHolder, isWithoutConverter,
          isEmptyBadRecord);
    } else {
      Object[] data = ((ArrayObject) input).getData();
      if (data.length == 1 && data[0] != null
          && data[0].equals("") && !(children instanceof PrimitiveDataType)) {
        // If child complex column is empty, no need to iterate. Fill empty byte array and return.
        CarbonBadRecordUtil.updateEmptyValue(dataOutputStream, isEmptyBadRecord, logHolder,
            parentName, DataTypeUtil.valueOf("array"));
        return;
      } else {
        dataOutputStream.writeInt(data.length);
      }
      for (Object eachInput : data) {
        children.writeByteArray(eachInput, dataOutputStream, logHolder, isWithoutConverter,
            isEmptyBadRecord);
      }
    }
  }

  // Re-encodes a serialized array value from the input buffer to the output
  // stream: copies the element count, then (for dictionary primitive
  // children) a fixed key size, then each element via the child type.
  @Override
  public void parseComplexValue(ByteBuffer byteArrayInput, DataOutputStream dataOutputStream)
      throws IOException, KeyGenException {
    int dataLength = byteArrayInput.getInt();
    dataOutputStream.writeInt(dataLength);
    if (children instanceof PrimitiveDataType) {
      if (children.getIsColumnDictionary()) {
        dataOutputStream.writeInt(ByteUtil.dateBytesSize());
      }
    }
    for (int i = 0; i < dataLength; i++) {
      children.parseComplexValue(byteArrayInput, dataOutputStream);
    }
  }

  /*
   * get children column count (this column + all descendants)
   */
  @Override
  public int getColsCount() {
    return children.getColsCount() + 1;
  }

  /*
   * set array index for this column and cascade index + 1 to the child
   */
  @Override
  public void setOutputArrayIndex(int outputArrayIndex) {
    this.outputArrayIndex = outputArrayIndex;
    children.setOutputArrayIndex(outputArrayIndex + 1);
  }

  /*
   * get current max array index across this column and its children
   */
  @Override
  public int getMaxOutputArrayIndex() {
    int currentMax = outputArrayIndex;
    int childMax = children.getMaxOutputArrayIndex();
    if (childMax > currentMax) {
      currentMax = childMax;
    }
    return currentMax;
  }

  /*
   * split byte array and return metadata and primitive column data.
   * Appends an 8-byte header [elementCount, childDataCounter] to this
   * column's output list, then delegates each element to the child.
   */
  @Override
  public void getColumnarDataForComplexType(List<ArrayList<byte[]>> columnsArray,
      ByteBuffer inputArray) {
    ByteBuffer b = ByteBuffer.allocate(8);
    int dataLength = inputArray.getInt();
    b.putInt(dataLength);
    if (dataLength == 0) {
      b.putInt(0);
    } else {
      b.putInt(children.getDataCounter());
    }
    columnsArray.get(this.outputArrayIndex).add(b.array());
    if (children instanceof PrimitiveDataType) {
      PrimitiveDataType child = ((PrimitiveDataType) children);
      if (child.getIsColumnDictionary()) {
        // Consume the fixed key size written by parseComplexValue.
        child.setKeySize(inputArray.getInt());
      }
    }
    for (int i = 0; i < dataLength; i++) {
      children.getColumnarDataForComplexType(columnsArray, inputArray);
    }
    this.dataCounter++;
  }

  /*
   * return data counter
   */
  @Override
  public int getDataCounter() {
    return this.dataCounter;
  }

  @Override
  public GenericDataType<Object> deepCopy() {
    return new ArrayDataType(this.outputArrayIndex, this.dataCounter, this.children.deepCopy(),
        this.name);
  }

  @Override
  public void getComplexColumnInfo(List<ComplexColumnInfo> columnInfoList) {
    columnInfoList.add(
        new ComplexColumnInfo(ColumnType.COMPLEX_ARRAY, DataTypeUtil.valueOf("array"),
            name, false));
    children.getComplexColumnInfo(columnInfoList);
  }

  // Depth = number of flattened columns in this complex type (self included).
  @Override
  public int getDepth() {
    if (depth == 0) {
      // calculate only one time
      List<ComplexColumnInfo> complexColumnInfoList = new ArrayList<>();
      getComplexColumnInfo(complexColumnInfoList);
      depth = complexColumnInfoList.size();
    }
    return depth;
  }
}
| |
// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.common.options;
import static com.google.devtools.common.options.OptionsParserImpl.findConverter;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import java.lang.reflect.Field;
import java.text.BreakIterator;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * A renderer for usage messages. For now this is very simple.
 */
class OptionsUsage {

  private static final Splitter NEWLINE_SPLITTER = Splitter.on('\n');
  private static final Joiner COMMA_JOINER = Joiner.on(",");

  /**
   * Given an options class, render the usage string into the usage,
   * which is passed in as an argument. Options are rendered in name order
   * at LONG verbosity.
   */
  static void getUsage(Class<? extends OptionsBase> optionsClass, StringBuilder usage) {
    List<Field> optionFields =
        Lists.newArrayList(OptionsParser.getAllAnnotatedFields(optionsClass));
    Collections.sort(optionFields, BY_NAME);
    for (Field optionField : optionFields) {
      getUsage(optionField, usage, OptionsParser.HelpVerbosity.LONG);
    }
  }

  /**
   * Paragraph-fill the specified input text, indenting lines to 'indent' and
   * wrapping lines at 'width'. Returns the formatted result.
   */
  static String paragraphFill(String in, int indent, int width) {
    String indentString = Strings.repeat(" ", indent);
    StringBuilder out = new StringBuilder();
    String sep = "";
    for (String paragraph : NEWLINE_SPLITTER.split(in)) {
      // A fresh BreakIterator per paragraph: the instances are stateful.
      BreakIterator boundary = BreakIterator.getLineInstance(); // (factory)
      boundary.setText(paragraph);
      out.append(sep).append(indentString);
      int cursor = indent;
      for (int start = boundary.first(), end = boundary.next();
           end != BreakIterator.DONE;
           start = end, end = boundary.next()) {
        String word =
            paragraph.substring(start, end); // (may include trailing space)
        // Wrap before a word that would overflow the target width.
        if (word.length() + cursor > width) {
          out.append('\n').append(indentString);
          cursor = indent;
        }
        out.append(word);
        cursor += word.length();
      }
      sep = "\n";
    }
    return out.toString();
  }

  /**
   * Append the usage message for a single option-field message to 'usage'.
   * SHORT verbosity emits just the flag name, MEDIUM adds type/default
   * information, LONG additionally renders help text and expansions.
   */
  static void getUsage(Field optionField, StringBuilder usage,
                       OptionsParser.HelpVerbosity helpVerbosity) {
    String flagName = getFlagName(optionField);
    String typeDescription = getTypeDescription(optionField);
    Option annotation = optionField.getAnnotation(Option.class);
    usage.append("  --").append(flagName);
    if (helpVerbosity == OptionsParser.HelpVerbosity.SHORT) { // just the name
      usage.append('\n');
      return;
    }
    if (annotation.abbrev() != '\0') {
      usage.append(" [-").append(annotation.abbrev()).append(']');
    }
    if (!typeDescription.isEmpty()) {
      usage.append(" (").append(typeDescription).append("; ");
      if (annotation.allowMultiple()) {
        usage.append("may be used multiple times");
      } else {
        // Don't call the annotation directly (we must allow overrides to certain defaults)
        String defaultValueString = OptionsParserImpl.getDefaultOptionString(optionField);
        if (OptionsParserImpl.isSpecialNullDefault(defaultValueString, optionField)) {
          usage.append("default: see description");
        } else {
          usage.append("default: \"").append(defaultValueString).append("\"");
        }
      }
      usage.append(")");
    }
    usage.append("\n");
    if (helpVerbosity == OptionsParser.HelpVerbosity.MEDIUM) { // just the name and type.
      return;
    }
    if (!annotation.help().isEmpty()) {
      usage.append(paragraphFill(annotation.help(), 4, 80)); // (indent, width)
      usage.append('\n');
    }
    if (annotation.expansion().length > 0) {
      StringBuilder expandsMsg = new StringBuilder("Expands to: ");
      for (String exp : annotation.expansion()) {
        expandsMsg.append(exp).append(" ");
      }
      usage.append(paragraphFill(expandsMsg.toString(), 4, 80)); // (indent, width)
      usage.append('\n');
    }
  }

  /**
   * Returns the available completion for the given option field. The completions are the exact
   * command line option (with the prepending '--') that one should pass. It is suitable for
   * completion script to use. If the option expect an argument, the kind of argument is given
   * after the equals. If the kind is a enum, the various enum values are given inside an accolade
   * in a comma separated list. For other special kind, the type is given as a name (e.g.,
   * <code>label</code>, <code>float</code>, <code>path</code>...). Example outputs of this
   * function are for, respectively, a tristate flag <code>tristate_flag</code>, a enum
   * flag <code>enum_flag</code> which can take <code>value1</code>, <code>value2</code> and
   * <code>value3</code>, a path fragment flag <code>path_flag</code>, a string flag
   * <code>string_flag</code> and a void flag <code>void_flag</code>:
   * <pre>
   *   --tristate_flag={auto,yes,no}
   *   --notristate_flag
   *   --enum_flag={value1,value2,value3}
   *   --path_flag=path
   *   --string_flag=
   *   --void_flag
   * </pre>
   *
   * @param field The field to return completion for
   * @param builder the string builder to store the completion values
   */
  static void getCompletion(Field field, StringBuilder builder) {
    // Return the list of possible completions for this option
    String flagName = field.getAnnotation(Option.class).name();
    Class<?> fieldType = field.getType();
    builder.append("--").append(flagName);
    if (fieldType.equals(boolean.class)) {
      builder.append("\n");
      builder.append("--no").append(flagName).append("\n");
    } else if (fieldType.equals(TriState.class)) {
      builder.append("={auto,yes,no}\n");
      builder.append("--no").append(flagName).append("\n");
    } else if (fieldType.isEnum()) {
      // NOTE(review): toLowerCase() is locale-sensitive (Turkish-i issue);
      // consider toLowerCase(Locale.ROOT) — TODO confirm before changing.
      builder.append("={")
          .append(COMMA_JOINER.join(fieldType.getEnumConstants()).toLowerCase()).append("}\n");
    } else if (fieldType.getSimpleName().equals("Label")) {
      // String comparison so we don't introduce a dependency to com.google.devtools.build.lib.
      builder.append("=label\n");
    } else if (fieldType.getSimpleName().equals("PathFragment")) {
      builder.append("=path\n");
    } else if (Void.class.isAssignableFrom(fieldType)) {
      builder.append("\n");
    } else {
      // TODO(bazel-team): add more types. Maybe even move the completion type
      // to the @Option annotation?
      builder.append("=\n");
    }
  }

  /** Orders option fields alphabetically by field name. */
  private static final Comparator<Field> BY_NAME = new Comparator<Field>() {
    @Override
    public int compare(Field left, Field right) {
      return left.getName().compareTo(right.getName());
    }
  };

  /**
   * An ordering relation for option-field fields that first groups together
   * options of the same category, then sorts by name within the category.
   */
  static final Comparator<Field> BY_CATEGORY = new Comparator<Field>() {
    @Override
    public int compare(Field left, Field right) {
      int r = left.getAnnotation(Option.class).category().compareTo(
          right.getAnnotation(Option.class).category());
      return r == 0 ? BY_NAME.compare(left, right) : r;
    }
  };

  /** Returns the converter's human-readable type description for a field. */
  private static String getTypeDescription(Field optionsField) {
    return findConverter(optionsField).getTypeDescription();
  }

  /** Boolean flags are rendered with an optional "no" prefix: "[no]name". */
  static String getFlagName(Field field) {
    String name = field.getAnnotation(Option.class).name();
    return OptionsParserImpl.isBooleanField(field) ? "[no]" + name : name;
  }
}
| |
package org.thunder.actionbarpulltorefresh;
import org.thunder.mpdcontrol.R;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.widget.ProgressBar;
/**
 * Created by castorflex on 11/10/13.
 *
 * A ProgressBar whose indeterminate drawable is a {@link SmoothProgressDrawable},
 * configurable through the {@code spb_*} styled attributes or programmatically
 * via the setSmoothProgressDrawable* setters.
 */
public class SmoothProgressBar extends ProgressBar {

    // Values of the spb_interpolator styled attribute.
    private static final int INTERPOLATOR_ACCELERATE = 0;
    private static final int INTERPOLATOR_LINEAR = 1;
    private static final int INTERPOLATOR_ACCELERATEDECELERATE = 2;
    private static final int INTERPOLATOR_DECELERATE = 3;

    public SmoothProgressBar(Context context) {
        this(context, null);
    }

    public SmoothProgressBar(Context context, AttributeSet attrs) {
        this(context, attrs, R.attr.spbStyle);
    }

    public SmoothProgressBar(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        Resources res = context.getResources();
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.SmoothProgressBar, defStyle, 0);
        final int color = a.getColor(R.styleable.SmoothProgressBar_spb_color, res.getColor(R.color.spb_default_color));
        final int sectionsCount = a.getInteger(R.styleable.SmoothProgressBar_spb_sections_count, res.getInteger(R.integer.spb_default_sections_count));
        final int separatorLength = a.getDimensionPixelSize(R.styleable.SmoothProgressBar_spb_stroke_separator_length, res.getDimensionPixelSize(R.dimen.spb_default_stroke_separator_length));
        final float strokeWidth = a.getDimension(R.styleable.SmoothProgressBar_spb_stroke_width, res.getDimension(R.dimen.spb_default_stroke_width));
        final float speed = a.getFloat(R.styleable.SmoothProgressBar_spb_speed, Float.parseFloat(res.getString(R.string.spb_default_speed)));
        final int iInterpolator = a.getInteger(R.styleable.SmoothProgressBar_spb_interpolator, res.getInteger(R.integer.spb_default_interpolator));
        final boolean reversed = a.getBoolean(R.styleable.SmoothProgressBar_spb_reversed, res.getBoolean(R.bool.spb_default_reversed));
        final boolean mirrorMode = a.getBoolean(R.styleable.SmoothProgressBar_spb_mirror_mode, res.getBoolean(R.bool.spb_default_mirror_mode));
        final int colorsId = a.getResourceId(R.styleable.SmoothProgressBar_spb_colors, 0);
        a.recycle();

        // Interpolator: unknown attribute values fall back to accelerate.
        Interpolator interpolator = interpolatorFor(iInterpolator);
        if (interpolator == null) {
            interpolator = new AccelerateInterpolator();
        }

        // Colors: an explicit color array resource wins over the single color.
        int[] colors = null;
        if (colorsId != 0) {
            colors = res.getIntArray(colorsId);
        }

        SmoothProgressDrawable.Builder builder = new SmoothProgressDrawable.Builder(context)
                .speed(speed)
                .interpolator(interpolator)
                .sectionsCount(sectionsCount)
                .separatorLength(separatorLength)
                .strokeWidth(strokeWidth)
                .reversed(reversed)
                .mirrorMode(mirrorMode);

        if (colors != null && colors.length > 0) {
            builder.colors(colors);
        } else {
            builder.color(color);
        }

        setIndeterminateDrawable(builder.build());
    }

    /**
     * Maps a spb_interpolator attribute value to an Interpolator instance,
     * or returns null when the value is not one of the known constants.
     * Shared by the constructor and {@link #applyStyle(int)} so the mapping
     * cannot drift between the two.
     */
    private static Interpolator interpolatorFor(int iInterpolator) {
        switch (iInterpolator) {
            case INTERPOLATOR_ACCELERATEDECELERATE:
                return new AccelerateDecelerateInterpolator();
            case INTERPOLATOR_DECELERATE:
                return new DecelerateInterpolator();
            case INTERPOLATOR_LINEAR:
                return new LinearInterpolator();
            case INTERPOLATOR_ACCELERATE:
                return new AccelerateInterpolator();
            default:
                return null;
        }
    }

    /**
     * Applies the spb_* attributes of the given style resource to the current
     * indeterminate drawable. Only attributes present in the style are applied.
     */
    public void applyStyle(int styleResId) {
        TypedArray a = getContext().obtainStyledAttributes(null, R.styleable.SmoothProgressBar, 0, styleResId);

        if (a.hasValue(R.styleable.SmoothProgressBar_spb_color)) {
            setSmoothProgressDrawableColor(a.getColor(R.styleable.SmoothProgressBar_spb_color, 0));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_colors)) {
            int colorsId = a.getResourceId(R.styleable.SmoothProgressBar_spb_colors, 0);
            if (colorsId != 0) {
                int[] colors = getResources().getIntArray(colorsId);
                if (colors != null && colors.length > 0) {
                    setSmoothProgressDrawableColors(colors);
                }
            }
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_sections_count)) {
            setSmoothProgressDrawableSectionsCount(a.getInteger(R.styleable.SmoothProgressBar_spb_sections_count, 0));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_stroke_separator_length)) {
            setSmoothProgressDrawableSeparatorLength(a.getDimensionPixelSize(R.styleable.SmoothProgressBar_spb_stroke_separator_length, 0));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_stroke_width)) {
            setSmoothProgressDrawableStrokeWidth(a.getDimension(R.styleable.SmoothProgressBar_spb_stroke_width, 0));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_speed)) {
            setSmoothProgressDrawableSpeed(a.getFloat(R.styleable.SmoothProgressBar_spb_speed, 0));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_reversed)) {
            setSmoothProgressDrawableReversed(a.getBoolean(R.styleable.SmoothProgressBar_spb_reversed, false));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_mirror_mode)) {
            setSmoothProgressDrawableMirrorMode(a.getBoolean(R.styleable.SmoothProgressBar_spb_mirror_mode, false));
        }
        if (a.hasValue(R.styleable.SmoothProgressBar_spb_interpolator)) {
            // Unlike the constructor, an unknown value means "leave unchanged".
            Interpolator interpolator =
                    interpolatorFor(a.getInteger(R.styleable.SmoothProgressBar_spb_interpolator, -1));
            if (interpolator != null) {
                setInterpolator(interpolator);
            }
        }
        a.recycle();
    }

    /**
     * Returns the indeterminate drawable as a SmoothProgressDrawable,
     * failing loudly if a different drawable has been installed.
     */
    private SmoothProgressDrawable checkIndeterminateDrawable() {
        Drawable ret = getIndeterminateDrawable();
        // instanceof is false for null, so no separate null check is needed.
        if (!(ret instanceof SmoothProgressDrawable)) {
            throw new RuntimeException("The drawable is not a SmoothProgressDrawable");
        }
        return (SmoothProgressDrawable) ret;
    }

    @Override
    public void setInterpolator(Interpolator interpolator) {
        super.setInterpolator(interpolator);
        Drawable ret = getIndeterminateDrawable();
        // Keep the drawable's interpolator in sync when one is installed.
        if (ret instanceof SmoothProgressDrawable) {
            ((SmoothProgressDrawable) ret).setInterpolator(interpolator);
        }
    }

    public void setSmoothProgressDrawableInterpolator(Interpolator interpolator) {
        checkIndeterminateDrawable().setInterpolator(interpolator);
    }

    public void setSmoothProgressDrawableColors(int[] colors) {
        checkIndeterminateDrawable().setColors(colors);
    }

    public void setSmoothProgressDrawableColor(int color) {
        checkIndeterminateDrawable().setColor(color);
    }

    public void setSmoothProgressDrawableSpeed(float speed) {
        checkIndeterminateDrawable().setSpeed(speed);
    }

    public void setSmoothProgressDrawableSectionsCount(int sectionsCount) {
        checkIndeterminateDrawable().setSectionsCount(sectionsCount);
    }

    public void setSmoothProgressDrawableSeparatorLength(int separatorLength) {
        checkIndeterminateDrawable().setSeparatorLength(separatorLength);
    }

    public void setSmoothProgressDrawableStrokeWidth(float strokeWidth) {
        checkIndeterminateDrawable().setStrokeWidth(strokeWidth);
    }

    public void setSmoothProgressDrawableReversed(boolean reversed) {
        checkIndeterminateDrawable().setReversed(reversed);
    }

    public void setSmoothProgressDrawableMirrorMode(boolean mirrorMode) {
        checkIndeterminateDrawable().setMirrorMode(mirrorMode);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.importer;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Properties;
import java.util.Set;
import javax.annotation.Nonnull;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.io.Files;
import org.apache.felix.inventory.Format;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.AsyncIndexUpdate;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.IndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateCallback;
import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
import org.apache.jackrabbit.oak.plugins.index.inventory.IndexDefinitionPrinter;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexLookup;
import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
import org.apache.jackrabbit.oak.plugins.memory.PropertyValues;
import org.apache.jackrabbit.oak.query.NodeStateNodeTypeInfoProvider;
import org.apache.jackrabbit.oak.query.QueryEngineSettings;
import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo;
import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider;
import org.apache.jackrabbit.oak.query.ast.SelectorImpl;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.Editor;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.collect.ImmutableSet.of;
import static java.util.Arrays.asList;
import static org.apache.jackrabbit.JcrConstants.NT_BASE;
import static org.apache.jackrabbit.oak.InitialContent.INITIAL_CONTENT;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.ASYNC_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_COUNT;
import static org.apache.jackrabbit.oak.plugins.index.IndexUtils.createIndexDefinition;
import static org.apache.jackrabbit.oak.plugins.index.importer.AsyncIndexerLock.NOOP_LOCK;
import static org.apache.jackrabbit.oak.plugins.index.importer.IndexDefinitionUpdater.INDEX_DEFINITIONS_JSON;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
public class IndexImporterTest {
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder(new File("target"));
private NodeStore store = new MemoryNodeStore();
private IndexEditorProvider provider = new PropertyIndexEditorProvider();
@Test(expected = IllegalArgumentException.class)
public void importIndex_NoMeta() throws Exception {
    // Constructing the importer against a directory with no IndexerInfo
    // metadata must fail fast. The instance is never used afterwards, so
    // do not bind it to a local variable.
    new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
}
@Test(expected = NullPointerException.class)
public void lostCheckpoint() throws Exception {
    // Valid metadata but a checkpoint the store does not know about:
    // construction must fail. The instance is never used afterwards, so
    // do not bind it to a local variable.
    IndexerInfo info = new IndexerInfo(temporaryFolder.getRoot(), "unknown-checkpoint");
    info.save();
    new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
}
@Test
public void switchLanes() throws Exception {
    // Seed two indexes: idx-a is synchronous, idx-b runs on the "async" lane.
    NodeBuilder rootBuilder = store.getRoot().builder();
    rootBuilder.child("idx-a").setProperty("type", "property");
    rootBuilder.child("idx-b").setProperty("type", "property");
    rootBuilder.child("idx-b").setProperty("async", "async");
    store.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    createIndexDirs("/idx-a", "/idx-b");

    IndexImporter importer =
            new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
    importer.switchLanes();

    // Both indexes must now be parked on their temporary import lanes.
    NodeState idxA = NodeStateUtils.getNode(store.getRoot(), "/idx-a");
    assertEquals(AsyncLaneSwitcher.getTempLaneName(IndexImporter.ASYNC_LANE_SYNC),
            idxA.getString(ASYNC_PROPERTY_NAME));

    NodeState idxB = NodeStateUtils.getNode(store.getRoot(), "/idx-b");
    assertEquals(AsyncLaneSwitcher.getTempLaneName("async"),
            idxB.getString(ASYNC_PROPERTY_NAME));
}
@Test(expected = NullPointerException.class)
public void importData_NoProvider() throws Exception {
    // One property index exists, but no IndexImporterProvider is registered
    // for its type, so importIndexData() must blow up.
    NodeBuilder rootBuilder = store.getRoot().builder();
    rootBuilder.child("idx-a").setProperty("type", "property");
    store.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    createIndexDirs("/idx-a");

    IndexImporter importer =
            new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
    importer.switchLanes();
    importer.importIndexData();
}
@Test
public void importData_CallbackInvoked() throws Exception {
    NodeBuilder builder = store.getRoot().builder();
    builder.child("idx-a").setProperty("type", "property");
    builder.child("idx-a").setProperty("foo", "bar");
    store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    createIndexDirs("/idx-a");

    IndexImporter importer =
            new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
    // Named importerProvider (not "provider") so it does not shadow the
    // IndexEditorProvider field of the same name; matches the naming used
    // in importData_IncrementalUpdate.
    IndexImporterProvider importerProvider = new IndexImporterProvider() {
        @Override
        public void importIndex(NodeState root, NodeBuilder defn, File indexDir) {
            // The callback must see the index definition and its directory.
            assertEquals("bar", defn.getString("foo"));
            assertEquals("idx-a", indexDir.getName());
            defn.setProperty("imported", true);
        }

        @Override
        public String getType() {
            return "property";
        }
    };
    importer.addImporterProvider(importerProvider);

    importer.switchLanes();
    importer.importIndexData();

    // The mutation made inside the callback must have been persisted.
    assertTrue(store.getRoot().getChildNode("idx-a").getBoolean("imported"));
}
@Test
public void importData_UpdatedIndexDefinition() throws Exception {
    NodeBuilder builder = store.getRoot().builder();
    builder.child("idx-a").setProperty("type", "property");
    builder.child("idx-a").setProperty("foo", "bar");
    store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    createIndexDirs("/idx-a");

    //We remove foo property here
    builder = store.getRoot().builder();
    builder.child("idx-a").removeProperty("foo");
    store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);

    IndexImporter importer =
            new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
    // Named importerProvider (not "provider") so it does not shadow the
    // IndexEditorProvider field of the same name; matches the naming used
    // in importData_IncrementalUpdate.
    IndexImporterProvider importerProvider = new IndexImporterProvider() {
        @Override
        public void importIndex(NodeState root, NodeBuilder defn, File indexDir) {
            //Foo property should be set by virtue of import from json
            assertEquals("bar", defn.getString("foo"));
        }

        @Override
        public String getType() {
            return "property";
        }
    };
    importer.addImporterProvider(importerProvider);

    importer.switchLanes();
    importer.importIndexData();
}
@Test
public void importData_IncrementalUpdate() throws Exception{
NodeBuilder builder = store.getRoot().builder();
createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME),
"fooIndex", true, false, ImmutableSet.of("foo"), null)
.setProperty(ASYNC_PROPERTY_NAME, "async");
builder.child("a").setProperty("foo", "abc");
builder.child("b").setProperty("foo", "abc");
store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
new AsyncIndexUpdate("async", store, provider).run();
String checkpoint = createIndexDirs("/oak:index/fooIndex");
builder = store.getRoot().builder();
builder.child("c").setProperty("foo", "abc");
builder.child("d").setProperty("foo", "abc");
store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
new AsyncIndexUpdate("async", store, provider).run();
FilterImpl f = createFilter(store.getRoot(), NT_BASE);
PropertyIndexLookup lookup = new PropertyIndexLookup(store.getRoot());
assertEquals(of("a", "b", "c", "d"), find(lookup, "foo", "abc", f));
IndexImporterProvider importerProvider = new IndexImporterProvider() {
@Override
public void importIndex(NodeState root, NodeBuilder defn, File indexDir) {
assertEquals("fooIndex", indexDir.getName());
assertEquals(2, defn.getProperty(REINDEX_COUNT).getValue(Type.LONG).longValue());
defn.getChildNode(IndexConstants.INDEX_CONTENT_NODE_NAME).remove();
NodeState cpState = store.retrieve(checkpoint);
NodeState indexData = NodeStateUtils.getNode(cpState, "/oak:index/fooIndex/:index");
defn.setChildNode(IndexConstants.INDEX_CONTENT_NODE_NAME, indexData);
}
@Override
public String getType() {
return "property";
}
};
builder = store.getRoot().builder();
builder.child("e").setProperty("foo", "abc");
store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
IndexImporter importer = new IndexImporter(store, temporaryFolder.getRoot(), provider, NOOP_LOCK);
importer.addImporterProvider(importerProvider);
importer.addImporterProvider(importerProvider);
importer.importIndex();
NodeState idx = store.getRoot().getChildNode("oak:index").getChildNode("fooIndex");
assertEquals("async", idx.getString("async"));
lookup = new PropertyIndexLookup(store.getRoot());
//It would not pickup /e as thats not yet indexed as part of last checkpoint
assertEquals(of("a", "b", "c", "d"), find(lookup, "foo", "abc", f));
assertNull(store.retrieve(checkpoint));
}
    // End-to-end import of a brand-new index: the definition comes from the
    // dumped json, the index data from the importer provider, and the diff
    // between the checkpoint and the current head is replayed afterwards.
    @Test
    public void importIndex_newIndex() throws Exception{
        NodeBuilder builder = store.getRoot().builder();
        builder.child("oak:index");
        builder.child("a").setProperty("foo", "abc");
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        // Definition of an index that does NOT yet exist in the repository
        String json = "{\"/oak:index/fooIndex\": {\n" +
                " \"reindexCount\": 1,\n" +
                " \"reindex\": false,\n" +
                " \"type\": \"property\",\n" +
                " \"async\" : \"async\",\n" +
                " \"propertyNames\": [\"foo\"],\n" +
                " \"jcr:primaryType\": \"oak:QueryIndexDefinition\"\n" +
                " }\n" +
                "}";
        File indexFolder = temporaryFolder.getRoot();
        //Create checkpoint file
        String checkpoint = store.checkpoint(1000000);
        IndexerInfo info = new IndexerInfo(indexFolder, checkpoint);
        info.save();
        //Create index definitions json
        Files.write(json, new File(indexFolder, INDEX_DEFINITIONS_JSON), UTF_8);
        createIndexFolder(indexFolder, "/oak:index/fooIndex");
        //Prepare importer
        // Provider supplies the prebuilt :index content for fooIndex
        IndexImporterProvider importerProvider = new IndexImporterProvider() {
            @Override
            public void importIndex(NodeState root, NodeBuilder defn, File indexDir)
                    throws CommitFailedException {
                NodeState fooIndex = getFooIndexNodeState();
                defn.setChildNode(IndexConstants.INDEX_CONTENT_NODE_NAME,
                        fooIndex.getChildNode(":index"));
            }
            @Override
            public String getType() {
                return "property";
            }
        };
        IndexImporter importer = new IndexImporter(store, indexFolder, provider, NOOP_LOCK);
        importer.addImporterProvider(importerProvider);
        //Add some more indexable data
        // /c and /d are committed and async-indexed AFTER the checkpoint; the
        // import must replay them so they remain queryable afterwards
        builder = store.getRoot().builder();
        builder.child("c").setProperty("foo", "abc");
        builder.child("d").setProperty("foo", "abc");
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        new AsyncIndexUpdate("async", store, provider).run();
        //Now perform import
        importer.importIndex();
        FilterImpl f = createFilter(store.getRoot(), NT_BASE);
        PropertyIndexLookup lookup = new PropertyIndexLookup(store.getRoot());
        // /a via imported data; /c and /d via the replayed diff
        assertEquals(of("a", "c", "d"), find(lookup, "foo", "abc", f));
        NodeState idx = store.getRoot().getChildNode("oak:index").getChildNode("fooIndex");
        // Import bumps reindexCount from the dumped value of 1
        assertEquals(2, idx.getLong("reindexCount"));
    }
private NodeState getFooIndexNodeState() throws CommitFailedException {
NodeState root = INITIAL_CONTENT;
// Add index definition
NodeBuilder builder = root.builder();
NodeBuilder index = createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME), "fooIndex",
true, false, ImmutableSet.of("foo"), null);
builder.child("a").setProperty("foo", "abc");
NodeState after = builder.getNodeState();
EditorHook hook = new EditorHook(
new IndexUpdateProvider(new PropertyIndexEditorProvider()));
NodeState indexed = hook.processCommit(EMPTY_NODE, after, CommitInfo.EMPTY);
return indexed.getChildNode("oak:index").getChildNode("fooIndex");
}
@Test
public void laneName() throws Exception{
NodeBuilder builder = EMPTY_NODE.builder();
builder.setProperty(IndexConstants.ASYNC_PROPERTY_NAME, "async");
assertEquals("async", IndexImporter.getAsyncLaneName("foo", builder.getNodeState()));
builder = EMPTY_NODE.builder();
builder.setProperty(IndexConstants.ASYNC_PROPERTY_NAME, asList("async", "nrt"), Type.STRINGS);
assertEquals("async", IndexImporter.getAsyncLaneName("foo", builder.getNodeState()));
builder = EMPTY_NODE.builder();
builder.setProperty(IndexConstants.ASYNC_PROPERTY_NAME, asList("async", "nrt"), Type.STRINGS);
AsyncLaneSwitcher.switchLane(builder, "tmp-async");
assertEquals("async", IndexImporter.getAsyncLaneName("foo", builder.getNodeState()));
builder = EMPTY_NODE.builder();
builder.setProperty(IndexConstants.ASYNC_PROPERTY_NAME, asList("async", "nrt"), Type.STRINGS);
AsyncLaneSwitcher.switchLane(builder, "tmp-async");
assertEquals("async", IndexImporter.getAsyncLaneName("foo", builder.getNodeState()));
}
    // A failure during import must always release the cluster lock on the
    // async lane; and a later failure in unlock() itself must not mask the
    // original import exception.
    @Test
    public void laneUnlockedInCaseOfFailure() throws Exception{
        NodeBuilder builder = store.getRoot().builder();
        createIndexDefinition(builder.child(INDEX_DEFINITIONS_NAME),
                "fooIndex", true, false, ImmutableSet.of("foo"), null)
                .setProperty(ASYNC_PROPERTY_NAME, "async");
        builder.child("a").setProperty("foo", "abc");
        builder.child("b").setProperty("foo", "abc");
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        new AsyncIndexUpdate("async", store, provider).run();
        String checkpoint = createIndexDirs("/oak:index/fooIndex");
        builder = store.getRoot().builder();
        builder.child("c").setProperty("foo", "abc");
        builder.child("d").setProperty("foo", "abc");
        store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        new AsyncIndexUpdate("async", store, provider).run();
        // No-op importer; the failure is injected via the editor provider below
        IndexImporterProvider importerProvider = new IndexImporterProvider() {
            @Override
            public void importIndex(NodeState root, NodeBuilder defn, File indexDir) {
            }
            @Override
            public String getType() {
                return "property";
            }
        };
        final String exceptionMessage = "TEST MESSAGE";
        ClusterNodeStoreLock lock = new ClusterNodeStoreLock(store);
        // NOTE: reassigns the test's `provider` field so the catch-up indexing
        // performed by importIndex() fails deterministically
        provider = new PropertyIndexEditorProvider() {
            @Override
            public Editor getIndexEditor(@Nonnull String type, @Nonnull NodeBuilder definition,
                    @Nonnull NodeState root, @Nonnull IndexUpdateCallback callback) {
                throw new RuntimeException(exceptionMessage);
            }
        };
        IndexImporter importer = new IndexImporter(store, temporaryFolder.getRoot(), provider, lock);
        importer.addImporterProvider(importerProvider);
        try {
            importer.importIndex();
            fail();
        } catch (RuntimeException ignore) {
        }
        // Lock must have been released despite the failure
        assertFalse(lock.isLocked("async"));
        // Second scenario: unlock() itself throws — the ORIGINAL import
        // failure must still be the exception surfaced to the caller
        AsyncIndexerLock lock2 = new AsyncIndexerLock() {
            @Override
            public LockToken lock(String asyncIndexerLane) throws CommitFailedException {
                return mock(LockToken.class);
            }
            @Override
            public void unlock(LockToken token) throws CommitFailedException {
                throw new IllegalStateException("Exception in unlock");
            }
        };
        IndexImporter importer2 = new IndexImporter(store, temporaryFolder.getRoot(), provider, lock2);
        importer2.addImporterProvider(importerProvider);
        try {
            importer2.importIndex();
            fail();
        } catch (RuntimeException ignore) {
            // Original failure preserved, not replaced by the unlock failure
            assertEquals(exceptionMessage, ignore.getMessage());
        }
    }
private static FilterImpl createFilter(NodeState root, String nodeTypeName) {
NodeTypeInfoProvider nodeTypes = new NodeStateNodeTypeInfoProvider(root);
NodeTypeInfo type = nodeTypes.getNodeTypeInfo(nodeTypeName);
SelectorImpl selector = new SelectorImpl(type, nodeTypeName);
return new FilterImpl(selector, "SELECT * FROM [" + nodeTypeName + "]",
new QueryEngineSettings());
}
private static Set<String> find(PropertyIndexLookup lookup, String name,
String value, Filter filter) {
return Sets.newHashSet(lookup.query(filter, name, value == null ? null
: PropertyValues.newString(value)));
}
private String createIndexDirs(String... indexPaths) throws IOException, CommitFailedException {
String checkpoint = store.checkpoint(1000000);
IndexerInfo info = new IndexerInfo(temporaryFolder.getRoot(), checkpoint);
info.save();
for (String indexPath : indexPaths){
createIndexFolder(temporaryFolder.getRoot(), indexPath);
}
dumpIndexDefinitions(indexPaths);
return checkpoint;
}
private void createIndexFolder(File indexFolder, String indexPath) throws IOException {
String dirName = PathUtils.getName(indexPath);
File indexDir = new File(indexFolder, dirName);
File indexMeta = new File(indexDir, IndexerInfo.INDEX_METADATA_FILE_NAME);
Properties p = new Properties();
p.setProperty(IndexerInfo.PROP_INDEX_PATH, indexPath);
indexDir.mkdir();
PropUtils.writeTo(p, indexMeta, "index info");
}
private void dumpIndexDefinitions(String... indexPaths) throws IOException, CommitFailedException {
IndexDefinitionPrinter printer = new IndexDefinitionPrinter(store, () -> asList(indexPaths));
printer.setFilter("{\"properties\":[\"*\", \"-:childOrder\"],\"nodes\":[\"*\", \"-:index-definition\"]}");
File file = new File(temporaryFolder.getRoot(), INDEX_DEFINITIONS_JSON);
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
printer.print(pw, Format.JSON, false);
Files.write(sw.toString(), file, UTF_8);
}
}
| |
package crazypants.enderio.loot;
import crazypants.enderio.EnderIO;
import crazypants.enderio.ModObject;
import crazypants.enderio.config.Config;
import crazypants.enderio.fluid.Buckets;
import crazypants.enderio.item.darksteel.DarkSteelItems;
import crazypants.enderio.material.Alloy;
import crazypants.util.CapturedMob;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.storage.loot.LootEntry;
import net.minecraft.world.storage.loot.LootEntryItem;
import net.minecraft.world.storage.loot.LootPool;
import net.minecraft.world.storage.loot.LootTable;
import net.minecraft.world.storage.loot.LootTableList;
import net.minecraft.world.storage.loot.RandomValueRange;
import net.minecraft.world.storage.loot.conditions.LootCondition;
import net.minecraft.world.storage.loot.conditions.RandomChance;
import net.minecraft.world.storage.loot.functions.LootFunction;
import net.minecraft.world.storage.loot.functions.SetCount;
import net.minecraft.world.storage.loot.functions.SetDamage;
import net.minecraft.world.storage.loot.functions.SetMetadata;
import net.minecraft.world.storage.loot.functions.SetNBT;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.LootTableLoadEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import static crazypants.enderio.ModObject.itemAlloy;
import static crazypants.enderio.ModObject.itemBasicCapacitor;
import static crazypants.enderio.ModObject.itemConduitProbe;
import static crazypants.enderio.ModObject.itemTravelStaff;
/**
 * Injects EnderIO loot (alloys, dark steel gear, capacitors, soul vials, ...)
 * into the vanilla chest loot tables. One pool is appended per table; each
 * entry gates its own appearance with a {@link RandomChance} condition.
 * (A commented-out manual-test snippet that filled a chest from a loot table
 * used to live here; see VCS history if it is needed again.)
 */
public class LootManager {

  /** Shared empty-conditions array: a function/condition list that always applies. */
  private static final LootCondition[] NO_CONDITIONS = new LootCondition[0];

  private static final LootManager INSTANCE = new LootManager();

  /** Registers the singleton on the Forge event bus so loot tables are augmented on load. */
  public static void register() {
    MinecraftForge.EVENT_BUS.register(INSTANCE);
  }

  private LootManager() {
  }

  /**
   * Appends an EnderIO pool (1-3 rolls, no bonus rolls) to each vanilla chest
   * loot table of interest.
   */
  @SubscribeEvent
  public void onLootTableLoad(LootTableLoadEvent evt) {
    LootTable table = evt.getTable();
    LootPool lp = new LootPool(new LootEntry[0], NO_CONDITIONS, new RandomValueRange(1, 3), new RandomValueRange(0, 0), EnderIO.MOD_NAME);
    if (evt.getName().equals(LootTableList.CHESTS_SIMPLE_DUNGEON)) {
      if (Config.lootDarkSteel) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.DARK_STEEL.ordinal(), 1, 3, 0.25F));
      }
      if (Config.lootItemConduitProbe) {
        lp.addEntry(createLootEntry(itemConduitProbe.getItem(), 0.10F));
      }
      if (Config.lootQuartz) {
        lp.addEntry(createLootEntry(Items.QUARTZ, 3, 16, 0.25F));
      }
      if (Config.lootNetherWart) {
        lp.addEntry(createLootEntry(Items.NETHER_WART, 1, 4, 0.20F));
      }
      if (Config.lootEnderPearl) {
        lp.addEntry(createLootEntry(Items.ENDER_PEARL, 1, 2, 0.30F));
      }
      if (Config.lootTheEnder) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelSword, 0.1F));
      }
      if (Config.lootDarkSteelBoots) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelBoots, 0.1F));
      }
      // Three independent capacitor chances (each entry needs a unique name)
      lp.addEntry(createLootCapacitor(0.15F));
      lp.addEntry(createLootCapacitor(0.15F));
      lp.addEntry(createLootCapacitor(0.15F));
    } else if (evt.getName().equals(LootTableList.CHESTS_ABANDONED_MINESHAFT)) {
      if (Config.lootDarkSteel) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.DARK_STEEL.ordinal(), 1, 3, 0.05F));
      }
      if (Config.lootEnderPearl) {
        lp.addEntry(createLootEntry(Items.ENDER_PEARL, 1, 2, 0.10F));
      }
      if (Config.lootTheEnder) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelSword, 0.2F));
      }
      lp.addEntry(createLootCapacitor(0.15F));
      lp.addEntry(createLootCapacitor(0.05F));
    } else if (evt.getName().equals(LootTableList.CHESTS_NETHER_BRIDGE)) {
      if (Config.lootDarkSteelBoots) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelBoots, 0.1F));
      }
      lp.addEntry(createLootCapacitor(0.15F));
    } else if (evt.getName().equals(LootTableList.CHESTS_IGLOO_CHEST)) {
      // Soul vial pre-filled with a polar bear, if that entity can be captured
      final CapturedMob polarBear = CapturedMob.create("PolarBear", null);
      if (polarBear != null) {
        lp.addEntry(
            new LootEntryItem(ModObject.itemSoulVessel.getItem(), 1, 1, new LootFunction[] { setCount(1, 1), new SetNBT(NO_CONDITIONS, polarBear.toNbt(null)) },
                new LootCondition[] { new RandomChance(.2F) }, "PolarBearSoulVial"));
      }
      lp.addEntry(createLootEntry(ModObject.itemSoulVessel.getItem(), 1, 3, 0.5F));
      lp.addEntry(createLootCapacitor(0.05F));
    } else if (evt.getName().equals(LootTableList.CHESTS_JUNGLE_TEMPLE_DISPENSER)) {
      ItemStack bucket = Buckets.itemBucketFireWater.copy();
      lp.addEntry(new LootEntryItem(bucket.getItem(), 1, 1, new LootFunction[] { setCount(1, 1), setMetadata(bucket.getMetadata()), setNBT(bucket) },
          new LootCondition[] { new RandomChance(.05F) }, bucket.getItem().getRegistryName().toString() + ":" + bucket.getMetadata()));
    } else if (evt.getName().equals(LootTableList.CHESTS_VILLAGE_BLACKSMITH)) {
      if (Config.lootElectricSteel) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.ELECTRICAL_STEEL.ordinal(), 2, 6, 0.20F));
      }
      if (Config.lootRedstoneAlloy) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.REDSTONE_ALLOY.ordinal(), 3, 6, 0.35F));
      }
      if (Config.lootDarkSteel) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.DARK_STEEL.ordinal(), 3, 6, 0.35F));
      }
      if (Config.lootPhasedIron) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.PULSATING_IRON.ordinal(), 1, 2, 0.3F));
      }
      if (Config.lootPhasedGold) {
        lp.addEntry(createLootEntry(itemAlloy.getItem(), Alloy.VIBRANT_ALLOY.ordinal(), 1, 2, 0.2F));
      }
      if (Config.lootTheEnder) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelSword, 1, 1, 0.25F));
      }
      if (Config.lootDarkSteelBoots) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelBoots, 1, 1, 0.25F));
      }
      lp.addEntry(createLootCapacitor(0.1F));
    } else if (evt.getName().equals(LootTableList.CHESTS_DESERT_PYRAMID)) {
      if (Config.lootTheEnder) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelSword, 0.2F));
      }
      if (Config.lootTravelStaff) {
        lp.addEntry(createLootEntry(itemTravelStaff.getItem(), 0.1F));
      }
      // FIX: was createLootCapacitor(25) — a chance of 25 makes RandomChance
      // always pass; 0.25F matches the intent and the sibling tables.
      lp.addEntry(createLootCapacitor(0.25F));
    } else if (evt.getName().equals(LootTableList.CHESTS_JUNGLE_TEMPLE)) {
      if (Config.lootTheEnder) {
        lp.addEntry(createDarkSteelLootEntry(DarkSteelItems.itemDarkSteelSword, 1, 1, 0.25F));
      }
      if (Config.lootTravelStaff) {
        lp.addEntry(createLootEntry(itemTravelStaff.getItem(), 1, 1, 0.1F));
      }
      lp.addEntry(createLootCapacitor(0.25F));
      lp.addEntry(createLootCapacitor(0.25F));
    }
    table.addPool(lp);
  }

  private LootEntry createLootEntry(Item item, float chance) {
    return createLootEntry(item, 1, 1, chance);
  }

  private LootEntry createLootEntry(Item item, int minSize, int maxSize, float chance) {
    return createLootEntry(item, 0, minSize, maxSize, chance);
  }

  /*
   * All loot entries are given the same weight, the generation probabilities depend on the RandomChance condition.
   */
  private LootEntry createLootEntry(Item item, int ordinal, int minStackSize, int maxStackSize, float chance) {
    LootCondition[] chanceCond = new LootCondition[] { new RandomChance(chance) };
    if (item.isDamageable()) {
      // Damageable items: randomize damage and charge instead of metadata
      return new LootEntryItem(item, 1, 1, new LootFunction[] { setCount(minStackSize, maxStackSize), setDamage(item, ordinal), setEnergy() }, chanceCond,
          item.getRegistryName().toString() + ":" + ordinal);
    } else {
      return new LootEntryItem(item, 1, 1, new LootFunction[] { setCount(minStackSize, maxStackSize), setMetadata(ordinal) }, chanceCond,
          item.getRegistryName().toString() + ":" + ordinal);
    }
  }

  private LootEntry createDarkSteelLootEntry(Item item, float chance) {
    return createDarkSteelLootEntry(item, 1, 1, chance);
  }

  private LootEntry createDarkSteelLootEntry(Item item, int minSize, int maxSize, float chance) {
    return createDarkSteelLootEntry(item, 0, minSize, maxSize, chance);
  }

  /** Like {@link #createLootEntry} but also rolls random dark steel upgrades. */
  private LootEntry createDarkSteelLootEntry(Item item, int ordinal, int minStackSize, int maxStackSize, float chance) {
    LootCondition[] chanceCond = new LootCondition[] { new RandomChance(chance) };
    return new LootEntryItem(item, 1, 1, new LootFunction[] { setCount(minStackSize, maxStackSize), setDamage(item, ordinal), setUpgrades(), setEnergy() },
        chanceCond, item.getRegistryName().toString() + ":" + ordinal);
  }

  int capCount = 0; // Each loot entry in a pool must have a unique name

  private LootEntry createLootCapacitor(float chance) {
    capCount++;
    return new LootEntryItem(itemBasicCapacitor.getItem(), 1, 1, new LootFunction[] { ls, setMetadata(3) }, new LootCondition[] { new RandomChance(chance) },
        itemBasicCapacitor.getItem().getRegistryName().toString() + capCount);
  }

  private SetCount setCount(int min, int max) {
    // FIX: was new RandomValueRange(min, min), which ignored the max parameter
    // and pinned every stack to its minimum size.
    return new SetCount(NO_CONDITIONS, new RandomValueRange(min, max));
  }

  /** Fixed damage when {@code damage > 0}; otherwise random in [1, maxDamage]. */
  private SetDamage setDamage(Item item, int damage) {
    return new SetDamage(NO_CONDITIONS, new RandomValueRange(damage > 0 ? damage : 1, damage > 0 ? damage : item.getMaxDamage()));
  }

  private SetMetadata setMetadata(int meta) {
    return new SetMetadata(NO_CONDITIONS, new RandomValueRange(meta));
  }

  private SetRandomEnergy setEnergy() {
    return new SetRandomEnergy(NO_CONDITIONS);
  }

  private SetRandomDarkUpgrade setUpgrades() {
    return new SetRandomDarkUpgrade(NO_CONDITIONS);
  }

  private SetNBT setNBT(ItemStack stack) {
    return new SetNBT(NO_CONDITIONS, stack.hasTagCompound() ? stack.getTagCompound() : new NBTTagCompound());
  }

  private static final LootSelector ls = new LootSelector(NO_CONDITIONS);

}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.codepipeline.model;
import java.io.Serializable;
/**
 * <p>
 * Represents information about failure details.
 * </p>
 */
public class FailureDetails implements Serializable, Cloneable {

    /** The type of the failure. */
    private String type;

    /** The message about the failure. */
    private String message;

    /** The external ID of the run of the action that failed. */
    private String externalExecutionId;

    /**
     * Sets the type of the failure.
     *
     * @param type
     *        The type of the failure.
     * @see FailureType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the type of the failure.
     *
     * @return The type of the failure.
     * @see FailureType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Sets the type of the failure.
     *
     * @param type
     *        The type of the failure.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see FailureType
     */
    public FailureDetails withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Sets the type of the failure from the enum value.
     *
     * @param type
     *        The type of the failure.
     * @see FailureType
     */
    public void setType(FailureType type) {
        this.type = type.toString();
    }

    /**
     * Sets the type of the failure from the enum value.
     *
     * @param type
     *        The type of the failure.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see FailureType
     */
    public FailureDetails withType(FailureType type) {
        setType(type);
        return this;
    }

    /**
     * Sets the message about the failure.
     *
     * @param message
     *        The message about the failure.
     */
    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Returns the message about the failure.
     *
     * @return The message about the failure.
     */
    public String getMessage() {
        return this.message;
    }

    /**
     * Sets the message about the failure.
     *
     * @param message
     *        The message about the failure.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public FailureDetails withMessage(String message) {
        setMessage(message);
        return this;
    }

    /**
     * Sets the external ID of the run of the action that failed.
     *
     * @param externalExecutionId
     *        The external ID of the run of the action that failed.
     */
    public void setExternalExecutionId(String externalExecutionId) {
        this.externalExecutionId = externalExecutionId;
    }

    /**
     * Returns the external ID of the run of the action that failed.
     *
     * @return The external ID of the run of the action that failed.
     */
    public String getExternalExecutionId() {
        return this.externalExecutionId;
    }

    /**
     * Sets the external ID of the run of the action that failed.
     *
     * @param externalExecutionId
     *        The external ID of the run of the action that failed.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public FailureDetails withExternalExecutionId(String externalExecutionId) {
        setExternalExecutionId(externalExecutionId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getType() != null)
            sb.append("Type: " + getType() + ",");
        if (getMessage() != null)
            sb.append("Message: " + getMessage() + ",");
        if (getExternalExecutionId() != null)
            sb.append("ExternalExecutionId: " + getExternalExecutionId());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof FailureDetails))
            return false;
        FailureDetails other = (FailureDetails) obj;
        return safeEquals(getType(), other.getType())
                && safeEquals(getMessage(), other.getMessage())
                && safeEquals(getExternalExecutionId(), other.getExternalExecutionId());
    }

    @Override
    public int hashCode() {
        // Classic 31-based combination; null contributes 0, matching equals.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + safeHash(getType());
        hashCode = prime * hashCode + safeHash(getMessage());
        hashCode = prime * hashCode + safeHash(getExternalExecutionId());
        return hashCode;
    }

    /** Null-tolerant equality: two nulls are equal; a null never equals a non-null. */
    private static boolean safeEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /** Null-tolerant hash: null hashes to 0. */
    private static int safeHash(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public FailureDetails clone() {
        try {
            return (FailureDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (c) 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.chromeos.lowlatencystylusdemo.gpu;
import android.opengl.GLES20;
import android.util.Log;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.List;
/**
* A wrapper class to manage all the opengl buffers needed to draw with both line-based and bitmap-
* based brushes. Handles both committed and predicted points
*/
public class DrawPoints {
// Convenience constants
public static final int FLOAT_SIZE = Float.SIZE / Byte.SIZE;
public static final int INT_SIZE = Float.SIZE / Byte.SIZE;
public static final int NUM_TEXTURE_COORDINATES_PER_SQUARE = 8;
public static final int TEXTURE_COORDINATE_SIZE = FLOAT_SIZE * NUM_TEXTURE_COORDINATES_PER_SQUARE;
public static final int NUM_COLOR_VALUES_PER_SQUARE = 4 * 4; //rgba * 4 vertices
public static final int TEXTURE_COLOR_SIZE = FLOAT_SIZE * NUM_COLOR_VALUES_PER_SQUARE;
public static final int NUM_INDICES_PER_SQUARE = 6;
public static final int SQUARE_INDICES_SIZE = INT_SIZE * NUM_INDICES_PER_SQUARE;
public static final int DEFAULT_BUFFER_SIZE = 1024;
private int mPointCount;
// Keep track of previous vertex for line drawing operations
Vertex mPreviousVertex = null;
// List of predicted vertices
public final List<DrawPoint> mPredictedDrawPoints;
public final List<Vertex> mPredictedVertices;
Vertex mPreviousPredictionVertex = null;
// Buffer to hold Vertex's of drawn points. Will not contain predicted points
public AutoGrowByteBuffer mVertexBuffer;
// Reference to the buffer used by GL to draw the vertices
public final int mGLVertexBufferHandle;
// Buffer to hold data for texture squares
public AutoGrowByteBuffer mSquareBuffer;
// Reference to the buffer for texture squares
public final int mGLSquareBufferHandle;
// Buffer to hold data for texture coordinates
public AutoGrowByteBuffer mTextureCoordinateBuffer;
// Reference to the buffer for texture coordinates
public final int mGLTextureCoordinateBufferHandle;
// Buffer to hold color data for texture squares
public AutoGrowByteBuffer mTextureColorBuffer;
// Reference to the buffer for texture squares
public final int mGLTextureColorBufferHandle;
// Buffer to hold indices for texture squares
public AutoGrowByteBuffer mSquareIndexBuffer;
// Reference to the buffer for texture square indices
public final int mGLSquareIndexBufferHandle;
    // Allocates the CPU-side grow-able buffers and generates one GL buffer
    // object handle per buffer via glGenBuffers.
    // NOTE(review): glGenBuffers requires a current GL context — presumably
    // this is constructed on the GL thread; confirm at the call site.
    public DrawPoints() {
        // List of predicted points
        mPredictedDrawPoints = new ArrayList<>();
        mPredictedVertices = new ArrayList<>();
        // Vertex buffer for line shaders
        mVertexBuffer = new AutoGrowByteBuffer(DEFAULT_BUFFER_SIZE * Vertex.TOTAL_SIZE);
        IntBuffer tempVertexBuffer = IntBuffer.allocate(1);
        GLES20.glGenBuffers(1, tempVertexBuffer);
        mGLVertexBufferHandle = tempVertexBuffer.get();
        // Square buffer for bitmap shaders
        mSquareBuffer = new AutoGrowByteBuffer(DEFAULT_BUFFER_SIZE * Square.TOTAL_SIZE);
        IntBuffer tempSquareBuffer = IntBuffer.allocate(1);
        GLES20.glGenBuffers(1, tempSquareBuffer);
        mGLSquareBufferHandle = tempSquareBuffer.get();
        // Texture coordinate buffer for bitmap shaders
        mTextureCoordinateBuffer = new AutoGrowByteBuffer(DEFAULT_BUFFER_SIZE * TEXTURE_COORDINATE_SIZE);
        IntBuffer tempTextureCoordinateBuffer = IntBuffer.allocate(1);
        GLES20.glGenBuffers(1, tempTextureCoordinateBuffer);
        mGLTextureCoordinateBufferHandle = tempTextureCoordinateBuffer.get();
        // Texture color buffer for bitmap shaders
        mTextureColorBuffer = new AutoGrowByteBuffer(DEFAULT_BUFFER_SIZE * TEXTURE_COLOR_SIZE);
        IntBuffer tempTextureColorBuffer = IntBuffer.allocate(1);
        GLES20.glGenBuffers(1, tempTextureColorBuffer);
        mGLTextureColorBufferHandle = tempTextureColorBuffer.get();
        // Square indices buffer for bitmap shaders
        mSquareIndexBuffer = new AutoGrowByteBuffer(DEFAULT_BUFFER_SIZE * SQUARE_INDICES_SIZE);
        IntBuffer tempSquareIndexBuffer = IntBuffer.allocate(1);
        GLES20.glGenBuffers(1, tempSquareIndexBuffer);
        mGLSquareIndexBufferHandle = tempSquareIndexBuffer.get();
    }
    // Draw point count
    /** Returns the number of committed (non-predicted) points added so far. */
    public int count() {
        return mPointCount;
    }
    /** Overwrites the committed point count (e.g. after trimming the buffers). */
    public void count(int newCount) {
        mPointCount = newCount;
    }
    // Clear current drawing and predictions
    // Don't erase or re-allocate memory, just move the position pointers
    public void clear() {
        clearPrediction();
        // Rewind every buffer; old bytes are simply overwritten on next use
        mVertexBuffer.position(0);
        mSquareBuffer.position(0);
        mTextureCoordinateBuffer.position(0);
        mTextureColorBuffer.position(0);
        mSquareIndexBuffer.position(0);
        mPointCount = 0;
    }
    // Clear prediction lists
    // Note: intentionally does NOT reset mPreviousPredictionVertex (endStroke does)
    public void clearPrediction() {
        mPredictedDrawPoints.clear();
        mPredictedVertices.clear();
    }
// Line shaders need to keep track of last drawn point. Set last point to null to prevent
// starting the next line from beginning from the last point if the pen has been lifted
/**
 * Marks the end of a stroke. Line shaders keep track of the last drawn
 * point; nulling it out prevents the next line from continuing from the
 * previous point once the pen has been lifted.
 */
public void endStroke() {
    mPreviousVertex = null;
    mPreviousPredictionVertex = null;
}
/**
* Add a point to the drawing list. For line brushes that use GL_LINES, two points must be
* added to the draw buffer (start and finish) as well as color info.
* For bitmap brushes, vertex, texture coordinates, color, and array indices need to be
* generated.
* @param drawPoint The float point to add to the drawing list
*/
/**
 * Adds a point to the drawing list. For line brushes that use GL_LINES, two
 * vertices must be added to the draw buffer (segment start and end) along
 * with color info. For bitmap brushes, vertex, texture coordinate, color,
 * and index data are generated.
 *
 * @param drawPoint the point to add to the drawing list
 */
public void addDrawPoint(DrawPoint drawPoint) {
    // Bitmap shaders only need the single point.
    addSquare(drawPoint);
    // Line shaders need a (start, end) pair per segment.
    final Vertex vertex = new Vertex(drawPoint);
    if (mPreviousVertex != null) {
        // Middle or end of a gesture: segment runs from the last point here.
        addVertex(mPreviousVertex);
    } else {
        // First point of a gesture: emit the same point twice to start a line.
        addVertex(vertex);
    }
    addVertex(vertex);
    mPreviousVertex = vertex;
    mPointCount++;
}
// Add a predicted point to the prediction lists
/**
 * Adds a predicted point to the prediction lists. Predictions are kept
 * separate from committed points so they can be discarded later.
 *
 * @param drawPoint predicted point to queue
 */
public void addPredictionDrawPoint(DrawPoint drawPoint) {
    // Predicted draw points feed the bitmap shaders.
    mPredictedDrawPoints.add(drawPoint);
    // Predicted vertices feed the line shaders.
    mPredictedVertices.add(new Vertex(drawPoint));
}
// Add a Vertex to the line shader buffer
/**
 * Appends a vertex (packed position + color floats) to the line shader buffer.
 *
 * @param vertex vertex whose packed data is copied into the buffer
 */
public void addVertex(Vertex vertex) {
    mVertexBuffer.put(vertex.data);
}
// Add a list of predicted vertices for the line shader
/**
 * Adds a list of predicted vertices for the line shader, connecting them as
 * consecutive segments starting from the last committed point.
 *
 * @param vertices predicted vertices to append for this frame
 */
public void addPredictedVerticesForDraw(Iterable<Vertex> vertices) {
    // If there is no line currently being drawn, do not add a prediction
    if (null != mPreviousVertex) {
        // Predictions can contain several points, if this is the first point of a prediction,
        // start at the last drawn point
        if (null == mPreviousPredictionVertex) {
            mPreviousPredictionVertex = mPreviousVertex;
        }
        for (Vertex v : vertices) {
            // For drawn lines, add vertices in groups of 2 (beginning and end)
            addVertex(mPreviousPredictionVertex);
            addVertex(v);
            mPreviousPredictionVertex = v;
            mPointCount++;
        }
    }
    // Reset unconditionally so the next prediction batch starts again from
    // the last committed point rather than the previous prediction.
    mPreviousPredictionVertex = null;
}
// Add a list of predicted points to draw to the bitmap shader buffers
/**
 * Adds a list of predicted points to the bitmap shader buffers.
 *
 * @param drawPoints predicted points to queue for the bitmap shaders
 */
public void addPredictedDrawPointsForDraw(Iterable<DrawPoint> drawPoints) {
    for (final DrawPoint predicted : drawPoints) {
        // Each predicted point becomes one textured square.
        addSquare(predicted);
        mPointCount++;
    }
}
// Add vertex, index, color, and texture coord info to the bitmap buffers
/**
 * Adds vertex, texture coordinate, color, and index info for one point to
 * the bitmap shader buffers (one textured square per point).
 *
 * @param drawPoint point to render as a square
 */
public void addSquare(DrawPoint drawPoint) {
    Square s = new Square(drawPoint);
    mSquareBuffer.put(s.data);
    addTextureCoordinates();
    addTextureColor(drawPoint);
    addSquareIndices();
}
// Add a new texture coordinate value
// Currently this is invariable for every square (sample the whole texture)
/**
 * Adds a texture coordinate quad for one square. Currently invariant for
 * every square: the full texture (0..1 on both axes) is sampled, one
 * (u, v) pair per corner, in the same corner order as Square's vertices.
 */
private void addTextureCoordinates() {
    // Sample the whole texture. (Renamed from snake_case to follow Java
    // local-variable naming conventions.)
    final float[] textureCoords = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f,
    };
    mTextureCoordinateBuffer.put(textureCoords);
}
// Add the draw point's color info to the texture color buffer
/**
 * Adds the draw point's RGB color info to the texture color buffer.
 *
 * @param drawPoint source of the red/green/blue components
 */
private void addTextureColor(DrawPoint drawPoint) {
    addTextureColor(drawPoint.red, drawPoint.green, drawPoint.blue);
}
/**
 * Adds an opaque RGBA color entry once per corner of the square.
 * Per-vertex colors are where gradients etc. could be introduced later.
 *
 * @param r red component, 0.0 - 1.0
 * @param g green component, 0.0 - 1.0
 * @param b blue component, 0.0 - 1.0
 */
private void addTextureColor(float r, float g, float b) {
    final float[] rgba = { r, g, b, 1.0f };
    // One identical color entry for each of the square's 4 vertices.
    for (int corner = 0; corner < 4; corner++) {
        mTextureColorBuffer.put(rgba);
    }
}
// Add indexes for bitmap shaders. This is 2 triangles / 4 corners that make up a square
// With corners numbered  3 - 2  the index order is: 0, 1, 2, 2, 3, 0
//                        0 - 1
/**
 * Adds the 6 indices (two triangles) describing one square, offset by the
 * number of squares already emitted. With the square's corners numbered
 * 0..3, the triangles are (0, 1, 2) and (2, 3, 0).
 */
private void addSquareIndices() {
    final int start = mPointCount * 4; // 4 corners to define a square
    // Two triangles that make up a square, starting at the current count of vertices
    int[] indices = {
            start, start + 1, start + 2,
            start + 2, start + 3, start
    };
    mSquareIndexBuffer.put(indices);
}
// A vertex is a 2D-point followed by float RGB color values, range: 0.0 - 1.0
/**
 * A vertex is a 2D point followed by float RGB color values (range 0.0 - 1.0),
 * packed as {x, y, r, g, b} for the line shaders.
 */
public static class Vertex {
    public static final int POSITION_DIM = 2;
    public static final int COLOR_DIM = 3;
    public static final int TOTAL_DIM = POSITION_DIM + COLOR_DIM;
    public static final int POSITION_SIZE = POSITION_DIM * FLOAT_SIZE;
    public static final int COLOR_SIZE = COLOR_DIM * FLOAT_SIZE;
    public static final int TOTAL_SIZE = POSITION_SIZE + COLOR_SIZE;
    public static final int POSITION_OFFSET = 0;
    public static final int COLOR_OFFSET = POSITION_OFFSET + POSITION_SIZE;

    // Packed vertex data: {x, y, r, g, b}.
    public float[] data = new float[TOTAL_DIM];

    /**
     * Creates a vertex at (x, y) with the default black color.
     */
    public Vertex(float x, float y) {
        data[0] = x;
        data[1] = y;
        // Default to black; reuse setColor instead of duplicating the offsets.
        setColor(.0f, .0f, .0f);
    }

    /**
     * Creates a vertex at (x, y) with the given RGB color.
     */
    public Vertex(float x, float y, float r, float g, float b) {
        this(x, y);
        setColor(r, g, b);
    }

    /**
     * Creates a vertex from a draw point's position and color.
     * Delegates to the (x, y, r, g, b) constructor instead of duplicating the
     * packing logic (the previous inline copy carried a stale
     * "default to black" comment while actually storing the point's color).
     */
    public Vertex(DrawPoint drawPoint) {
        this(drawPoint.point.x, drawPoint.point.y,
                drawPoint.red, drawPoint.green, drawPoint.blue);
    }

    /**
     * Sets the RGB color components stored after the position.
     */
    public void setColor(float r, float g, float b) {
        data[POSITION_DIM] = r;
        data[POSITION_DIM + 1] = g;
        data[POSITION_DIM + 2] = b;
    }
}
// A square is SQUARE_SIZE px box defined by 2 triangles centered on the given 2D-point
// To be drawn by the bitmap shader, each point that needs a square drawn around it requires
// 4 vertices (8 coordinates), 8 texture coordinates, 3 rgb colour values, and 6 indices.
// This class stores only the vertices
/**
 * A square is a SQUARE_SIZE_PX box, defined by 2 triangles, centered on a
 * given 2D point. For the bitmap shader, each point that needs a square drawn
 * around it requires 4 vertices (8 coordinates), 8 texture coordinates,
 * per-vertex colors, and 6 indices. This class stores only the vertex
 * positions.
 */
public static class Square {
    public static final float SQUARE_SIZE_PX = 100f;
    public static final float DISTANCE_FROM_CENTER = SQUARE_SIZE_PX / 2.0f;
    public static final int POSITION_DIM = 8;
    public static final int TOTAL_DIM = POSITION_DIM;
    public static final int POSITION_SIZE = POSITION_DIM * FLOAT_SIZE;
    public static final int TOTAL_SIZE = POSITION_SIZE;

    // Packed corner positions: {x0,y0, x1,y1, x2,y2, x3,y3}.
    public float[] data = new float[TOTAL_DIM];

    /**
     * Builds the four corners of a square centered on (x, y), ordered
     * (min,min), (max,min), (max,max), (min,max).
     */
    public Square(float x, float y) {
        final float minX = x - DISTANCE_FROM_CENTER;
        final float maxX = x + DISTANCE_FROM_CENTER;
        final float minY = y - DISTANCE_FROM_CENTER;
        final float maxY = y + DISTANCE_FROM_CENTER;
        data[0] = minX;
        data[1] = minY;
        data[2] = maxX;
        data[3] = minY;
        data[4] = maxX;
        data[5] = maxY;
        data[6] = minX;
        data[7] = maxY;
    }

    /**
     * Creates a Square centered on a Vertex's position.
     */
    public Square(Vertex v) {
        this(v.data[0], v.data[1]);
    }

    /**
     * Creates a Square centered on a DrawPoint's position.
     */
    public Square(DrawPoint drawPoint) {
        this(drawPoint.point.x, drawPoint.point.y);
    }
}
}
| |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.testscenario.client;
import java.util.List;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HasVerticalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Widget;
import org.drools.workbench.models.testscenarios.shared.ExecutionTrace;
import org.drools.workbench.models.testscenarios.shared.Scenario;
import org.drools.workbench.models.testscenarios.shared.VerifyFact;
import org.drools.workbench.models.testscenarios.shared.VerifyField;
import org.drools.workbench.screens.testscenario.client.resources.i18n.TestScenarioConstants;
import org.drools.workbench.screens.testscenario.client.resources.images.TestScenarioAltedImages;
import org.drools.workbench.screens.testscenario.client.resources.images.TestScenarioImages;
import org.drools.workbench.screens.testscenario.client.utils.ScenarioUtils;
import org.gwtbootstrap3.client.ui.Button;
import org.gwtbootstrap3.client.ui.ListBox;
import org.gwtbootstrap3.client.ui.constants.IconType;
import org.kie.soup.project.datamodel.oracle.MethodInfo;
import org.kie.soup.project.datamodel.oracle.ModelField;
import org.kie.workbench.common.widgets.client.datamodel.AsyncPackageDataModelOracle;
import org.kie.workbench.common.widgets.client.resources.CommonImages;
import org.uberfire.client.callbacks.Callback;
import org.uberfire.ext.widgets.common.client.common.ClickableLabel;
import org.uberfire.ext.widgets.common.client.common.SmallLabel;
import org.uberfire.ext.widgets.common.client.common.popups.FormStylePopup;
import org.uberfire.ext.widgets.common.client.common.popups.footers.ModalFooterOKCancelButtons;
/**
 * Widget that renders a single "verify fact" expectation of a test scenario:
 * a clickable header naming the fact (clicking opens a popup to add another
 * field expectation) above a table with one row per expected field value
 * (operator selector, expected-value editor, delete button and, when results
 * are shown, pass/fail indicators).
 */
public class VerifyFactWidget extends Composite {

    // 2x1 grid: row 0 holds the header, row 1 the field-expectations table.
    private Grid outer;
    // Whether run results (pass/fail images, actual values) should be rendered.
    private boolean showResults;
    // Fact type being verified; resolved from the scenario's variable types for
    // named facts, or taken from the fact name for anonymous facts.
    private String type;
    private AsyncPackageDataModelOracle oracle;
    private Scenario scenario;
    private ExecutionTrace executionTrace;

    public VerifyFactWidget(final VerifyFact vf,
                            final Scenario sc,
                            final AsyncPackageDataModelOracle oracle,
                            final ExecutionTrace executionTrace,
                            final boolean showResults) {
        outer = new Grid(2,
                         1);
        outer.getCellFormatter().setStyleName(0,
                                              0,
                                              "modeller-fact-TypeHeader"); //NON-NLS
        outer.getCellFormatter().setAlignment(0,
                                              0,
                                              HasHorizontalAlignment.ALIGN_CENTER,
                                              HasVerticalAlignment.ALIGN_MIDDLE);
        outer.setStyleName("modeller-fact-pattern-Widget"); //NON-NLS
        this.oracle = oracle;
        this.scenario = sc;
        this.executionTrace = executionTrace;
        HorizontalPanel ab = new HorizontalPanel();
        ClickableLabel label = null;
        // Clicking the header opens a popup listing the fact's fields and its
        // no-arg, non-void methods; choosing one adds a new field expectation.
        final ClickHandler handler = new ClickHandler() {
            public void onClick(ClickEvent w) {
                final ListBox fieldsListBox = new ListBox();
                // Asynchronously populate the list box from the data model oracle.
                VerifyFactWidget.this.oracle.getFieldCompletions(type,
                                                                 new Callback<ModelField[]>() {
                                                                     @Override
                                                                     public void callback(final ModelField[] fields) {
                                                                         // Add fields
                                                                         for (int i = 0; i < fields.length; i++) {
                                                                             fieldsListBox.addItem(fields[i].getName());
                                                                         }
                                                                         // Add methods (only parameterless ones with a return value)
                                                                         oracle.getMethodInfos(type,
                                                                                               new Callback<List<MethodInfo>>() {
                                                                                                   @Override
                                                                                                   public void callback(List<MethodInfo> result) {
                                                                                                       for (MethodInfo info : result) {
                                                                                                           if (info.getParams().isEmpty() && !"void".equals(info.getReturnClassType())) {
                                                                                                               fieldsListBox.addItem(info.getName());
                                                                                                           }
                                                                                                       }
                                                                                                   }
                                                                                               });
                                                                     }
                                                                 });
                final FormStylePopup pop = new FormStylePopup(TestScenarioAltedImages.INSTANCE.RuleAsset(),
                                                              TestScenarioConstants.INSTANCE.ChooseAFieldToAdd());
                pop.addRow(fieldsListBox);
                // OK adds a new (empty, "==") expectation for the chosen field
                // and re-renders the table; Cancel simply closes the popup.
                pop.add(new ModalFooterOKCancelButtons(new Command() {
                    @Override
                    public void execute() {
                        String f = fieldsListBox.getItemText(fieldsListBox.getSelectedIndex());
                        vf.getFieldValues().add(new VerifyField(f,
                                                                "",
                                                                "=="));
                        FlexTable data = render(vf);
                        outer.setWidget(1,
                                        0,
                                        data);
                        pop.hide();
                    }
                }, new Command() {
                    @Override
                    public void execute() {
                        pop.hide();
                    }
                }
                ));
                pop.show();
            }
        };
        // Named facts resolve their type from the scenario's variable bindings;
        // anonymous facts use the name itself as the type.
        if (!vf.anonymous) {
            type = (String) sc.getVariableTypes().get(vf.getName());
            label = new ClickableLabel(TestScenarioConstants.INSTANCE.scenarioFactTypeHasValues(type,
                                                                                               vf.getName()),
                                       handler);
        } else {
            type = vf.getName();
            label = new ClickableLabel(TestScenarioConstants.INSTANCE.AFactOfType0HasValues(vf.getName()),
                                       handler);
        }
        ab.add(label);
        this.showResults = showResults;
        outer.setWidget(0,
                        0,
                        ab);
        initWidget(outer);
        FlexTable data = render(vf);
        outer.setWidget(1,
                        0,
                        data);
    }

    /**
     * Builds the field-expectations table: one row per VerifyField with the
     * field name, an ==/!= operator selector, an expected-value editor, a
     * delete button and, when results are shown, pass/fail markers.
     *
     * @param vf the verify-fact whose field expectations are rendered
     * @return the populated table widget
     */
    private FlexTable render(final VerifyFact vf) {
        FlexTable data = new FlexTable();
        for (int i = 0; i < vf.getFieldValues().size(); i++) {
            final VerifyField fld = (VerifyField) vf.getFieldValues().get(i);
            data.setWidget(i,
                           1,
                           new SmallLabel(fld.getFieldName() + ":"));
            data.getFlexCellFormatter().setHorizontalAlignment(i,
                                                               1,
                                                               HasHorizontalAlignment.ALIGN_RIGHT);
            // Operator selector: equals / does-not-equal, kept in sync with the model.
            final ListBox opr = new ListBox();
            opr.addItem(TestScenarioConstants.INSTANCE.equalsScenario(),
                        "==");
            opr.addItem(TestScenarioConstants.INSTANCE.doesNotEqualScenario(),
                        "!=");
            if (fld.getOperator().equals("==")) {
                opr.setSelectedIndex(0);
            } else {
                opr.setSelectedIndex(1);
            }
            opr.addChangeHandler(new ChangeHandler() {
                public void onChange(ChangeEvent event) {
                    fld.setOperator(opr.getValue(opr.getSelectedIndex()));
                }
            });
            data.setWidget(i,
                           2,
                           opr);
            // Editor for the expected value; writes straight back to the model.
            Widget cellEditor = new VerifyFieldConstraintEditor(type,
                                                                newValue -> fld.setExpected(newValue),
                                                                fld,
                                                                oracle,
                                                                this.scenario,
                                                                this.executionTrace);
            data.setWidget(i,
                           3,
                           cellEditor);
            // Delete button removes this expectation (after confirmation) and re-renders.
            Button deleteButton = new Button();
            deleteButton.setIcon(IconType.TRASH);
            deleteButton.setTitle(TestScenarioConstants.INSTANCE.RemoveThisFieldExpectation());
            deleteButton.addClickHandler(clickEvent -> {
                if (Window.confirm(TestScenarioConstants.INSTANCE.AreYouSureYouWantToRemoveThisFieldExpectation(
                        fld.getFieldName()))) {
                    vf.getFieldValues().remove(fld);
                    FlexTable renderedTableAfterDelete = render(vf);
                    outer.setWidget(1,
                                    0,
                                    renderedTableAfterDelete);
                }
            });
            data.setWidget(i,
                           4,
                           deleteButton);
            // When results are available, show a warning icon plus the actual
            // value on failure, or a "test passed" icon on success.
            if (showResults && fld.getSuccessResult() != null) {
                if (!fld.getSuccessResult().booleanValue()) {
                    data.setWidget(i,
                                   0,
                                   new Image(CommonImages.INSTANCE.warning()));
                    data.setWidget(i,
                                   5,
                                   new HTML(TestScenarioConstants.INSTANCE.ActualResult(fld.getActualResult())));
                    data.getCellFormatter().addStyleName(i,
                                                         5,
                                                         "testErrorValue"); //NON-NLS
                } else {
                    data.setWidget(i,
                                   0,
                                   new Image(TestScenarioImages.INSTANCE.testPassed()));
                }
            }
        }
        ScenarioUtils.addBottomAndRightPaddingToTableCells(data);
        return data;
    }
}
| |
package org.hcjf.io.net.broadcast;
import org.hcjf.bson.BsonDecoder;
import org.hcjf.bson.BsonDocument;
import org.hcjf.bson.BsonEncoder;
import org.hcjf.log.Log;
import org.hcjf.properties.SystemProperties;
import org.hcjf.service.Service;
import org.hcjf.utils.Introspection;

import java.io.Closeable;
import java.io.IOException;
import java.net.*;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.Instant;
import java.time.LocalTime;
import java.time.temporal.ChronoField;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
/**
* This service provides capabilities to register broadcast task in order to notify
* for all the net this task.
* @author javaito
*/
public class BroadcastService extends Service<BroadcastConsumer> {

    private static final String IMPLEMENTATION_FIELD_NAME = "implementation";
    private static final String IP_PROTOCOL_VERSION_4 = "4";
    private static final String IP_PROTOCOL_VERSION_6 = "6";

    private static final BroadcastService instance;
    private static final UUID instanceId;

    // Broadcast interfaces indexed by interface id (net interface name + port).
    private Map<String, BroadcastInterface> interfaces;
    private BroadcastSender sender;
    // Receivers indexed by the id of the interface they listen on.
    private Map<String, BroadcastReceiver> receivers;
    // Consumers indexed by task name.
    private Map<String, BroadcastConsumer> consumers;
    private boolean shuttingDown;
    private MessageDigest messageDigest;

    static {
        instance = new BroadcastService(SystemProperties.get(SystemProperties.Net.Broadcast.SERVICE_NAME));
        instanceId = UUID.randomUUID();
    }

    private BroadcastService(String serviceName) {
        super(serviceName, 2);
    }

    /**
     * Returns the singleton instance of the service.
     * @return Service instance.
     */
    public static BroadcastService getInstance() {
        return instance;
    }

    /**
     * This method initializes all the necessary components of the service.
     */
    @Override
    protected void init() {
        this.interfaces = new HashMap<>();
        // Consumers are registered from arbitrary threads while the sender and
        // receiver threads iterate/read the map, so it must be concurrent.
        this.consumers = new ConcurrentHashMap<>();
        this.sender = new BroadcastSender();
        this.receivers = new HashMap<>();
        try {
            this.messageDigest = MessageDigest.getInstance(SystemProperties.get(SystemProperties.Net.Broadcast.SIGNATURE_ALGORITHM));
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalArgumentException("Broadcast signature algorithm not found", e);
        }
        fork(sender);
    }

    /**
     * Verifies if the service is in the shutting-down process.
     * @return True if the service is shutting down, false otherwise.
     */
    public boolean isShuttingDown() {
        return shuttingDown;
    }

    @Override
    protected void shutdown(ShutdownStage stage) {
        this.shuttingDown = true;
        // Wake the sender so it leaves its wait and sends the shutdown message.
        synchronized (this.sender) {
            this.sender.notify();
        }
    }

    /**
     * Registers a consumer into the service.
     * @param consumer Object with the logic to consume the service.
     */
    @Override
    public void registerConsumer(BroadcastConsumer consumer) {
        this.consumers.put(consumer.getTaskName(), consumer);
        // Wake the sender so the new consumer's ping goes out immediately.
        synchronized (this.sender) {
            this.sender.notify();
        }
    }

    /**
     * Unregisters a consumer of the service.
     * @param consumer Consumer to unregister.
     */
    @Override
    public void unregisterConsumer(BroadcastConsumer consumer) {
        // The map is keyed by task name; the previous code removed by consumer
        // instance, which never matched any key and silently did nothing.
        this.consumers.remove(consumer.getTaskName());
    }

    /**
     * Returns the broadcast interface; this interface contains the datagram socket
     * to send and receive the task messages between all the hosts of the net.
     * The interface (and its receiver thread) is created lazily on first use.
     * @param consumer Consumer with the information to create the interface.
     * @return Broadcast interface instance, or null if no matching net interface
     * address was found.
     * @throws SocketException If the datagram socket can't be created.
     */
    private synchronized BroadcastInterface getBroadcastInterface(BroadcastConsumer consumer) throws SocketException {
        Integer port = consumer.getPort();
        String interfaceId = BroadcastInterface.createInterfaceId(consumer.getNetInterfaceName(), port);
        BroadcastInterface result = interfaces.get(interfaceId);
        if(result == null) {
            //Obtain all the net interfaces of the operating system.
            Enumeration<NetworkInterface> interfacesEnumeration = NetworkInterface.getNetworkInterfaces();
            boolean done = false;
            //Check for each interface if it is the same interface named into the consumer.
            while(interfacesEnumeration.hasMoreElements()){
                NetworkInterface networkInterface = interfacesEnumeration.nextElement();
                if(networkInterface.getName().equals(consumer.getNetInterfaceName())){
                    //Find the first interface address with the same protocol version as the consumer.
                    for(InterfaceAddress interfaceAddress : networkInterface.getInterfaceAddresses()){
                        switch(consumer.getIpVersion()) {
                            case IP_PROTOCOL_VERSION_4: {
                                done = interfaceAddress.getAddress() instanceof Inet4Address;
                                break;
                            }
                            case IP_PROTOCOL_VERSION_6: {
                                done = interfaceAddress.getAddress() instanceof Inet6Address;
                                break;
                            }
                            default: {
                                throw new IllegalArgumentException("Invalid ip version: " + consumer.getIpVersion());
                            }
                        }
                        if(done) {
                            //Creates a broadcast interface using the interface address
                            result = new BroadcastInterface(
                                    consumer.getNetInterfaceName(), port,
                                    interfaceAddress.getAddress(), interfaceAddress.getBroadcast());
                            interfaces.put(result.getId(), result);
                            //Creates a new instance of the receiver.
                            BroadcastReceiver broadcastReceiver = new BroadcastReceiver(result);
                            receivers.put(result.getId(), broadcastReceiver);
                            //Create a new thread for the interface receiver.
                            fork(broadcastReceiver);
                            break;
                        }
                    }
                }
                if(done) {
                    break;
                }
            }
        }
        return result;
    }

    /**
     * This method creates a signature using the name of the task, the private key
     * of the consumer and the hour/minute of the instant.
     * Synchronized because {@link MessageDigest} instances are not thread-safe
     * and this method is called concurrently from the sender and receiver threads.
     * @param name Name of the task.
     * @param privateKey Private key of the consumer.
     * @param time Hour and minute of the instant.
     * @return Signature created.
     */
    private synchronized String sign(String name, String privateKey, String time) {
        String signature = name + privateKey + time;
        // NOTE(review): building a String directly from raw digest bytes depends
        // on the platform charset and can be lossy; all peers must run the same
        // build for signatures to compare equal. Changing this would break
        // compatibility with already-deployed instances, so it is kept as-is.
        return new String(messageDigest.digest(signature.getBytes()));
    }

    /**
     * Serializes the broadcast message, transforming it into a byte array with bson format.
     * @param message Message to serialize.
     * @return Serialized message.
     */
    private byte[] encode(BroadcastMessage message) {
        BsonDocument document = new BsonDocument();
        Map<String, Introspection.Getter> getters = Introspection.getGetters(message.getClass());
        getters.forEach((K, V) -> {
            try {
                document.put(K, V.get(message));
            } catch (Exception e) {}
        });
        return BsonEncoder.encode(document);
    }

    /**
     * Creates a broadcast message instance from a bson document stored into the byte array.
     * @param body Byte array with bson format.
     * @return Decoded message instance.
     */
    private BroadcastMessage decode(byte[] body) {
        BroadcastMessage message;
        try {
            BsonDocument document = BsonDecoder.decode(body);
            message = (BroadcastMessage)
                    Class.forName(document.get(IMPLEMENTATION_FIELD_NAME).getAsString()).getConstructor().newInstance();
            Map<String, Introspection.Setter> setters = Introspection.getSetters(message.getClass());
            setters.forEach((K, V) -> {
                try {
                    V.set(message, document.get(K).getValue());
                } catch (Exception e) {}
            });
        } catch (Exception e) {
            // Preserve the cause instead of throwing a bare RuntimeException.
            throw new RuntimeException("Unable to decode broadcast message", e);
        }
        return message;
    }

    /**
     * This runnable contains the logic to send a ping message periodically,
     * and to send the shutdown message when the kill signal starts the shutdown
     * process of the system instance.
     */
    private class BroadcastSender implements Runnable {

        @Override
        public void run() {
            Instant instant;
            LocalTime localTime;
            String hour;
            String minute;
            PingMessage pingMessage;
            BroadcastInterface broadcastInterface;
            //Main loop
            while(!instance.isShuttingDown()) {
                //Consumers loop
                for(BroadcastConsumer consumer : consumers.values()) {
                    try {
                        instant = Instant.now();
                        localTime = LocalTime.now();
                        hour = Integer.toString(localTime.get(ChronoField.HOUR_OF_DAY));
                        minute = Integer.toString(localTime.get(ChronoField.MINUTE_OF_HOUR));
                        broadcastInterface = getBroadcastInterface(consumer);
                        pingMessage = new PingMessage();
                        pingMessage.setTaskName(consumer.getTaskName());
                        pingMessage.setHost(broadcastInterface.getLocalAddress().getHostName());
                        pingMessage.setPort(broadcastInterface.getPort());
                        pingMessage.setInstanceId(instanceId);
                        pingMessage.setTimestamp(instant.toEpochMilli());
                        pingMessage.setSignature(instance.sign(consumer.getTaskName(), consumer.getPrivateKey(), hour + minute));
                        pingMessage.setCustomParameters(consumer.getPingParameters());
                        byte[] body = instance.encode(pingMessage);
                        DatagramPacket packet;
                        //Ping the configured port and the 9 ports above it, so
                        //peers bound to a nearby port in the range still hear it.
                        for (int i = broadcastInterface.getPort(); i < broadcastInterface.getPort() + 10; i++) {
                            packet = new DatagramPacket(body, body.length,
                                    broadcastInterface.getBroadcastAddress(), i);
                            broadcastInterface.getBroadcastSocket().send(packet);
                        }
                    } catch (Exception ex) {
                        Log.w(SystemProperties.get(SystemProperties.Net.Broadcast.LOG_TAG), ex.getMessage());
                    }
                }
                //Sleep until the next ping cycle, or until a register/shutdown notifies.
                synchronized (this) {
                    try {
                        this.wait(SystemProperties.getLong(SystemProperties.Net.Broadcast.SENDER_DELAY));
                    } catch (InterruptedException ex) {
                    }
                }
            }
            //Shutting down: notify all the peers and close the interfaces.
            ShutdownMessage shutdownMessage;
            for(BroadcastConsumer consumer : consumers.values()) {
                try {
                    instant = Instant.now();
                    localTime = LocalTime.now();
                    hour = Integer.toString(localTime.get(ChronoField.HOUR_OF_DAY));
                    minute = Integer.toString(localTime.get(ChronoField.MINUTE_OF_HOUR));
                    broadcastInterface = getBroadcastInterface(consumer);
                    shutdownMessage = new ShutdownMessage();
                    shutdownMessage.setTaskName(consumer.getTaskName());
                    shutdownMessage.setHost(broadcastInterface.getLocalAddress().getHostName());
                    shutdownMessage.setPort(broadcastInterface.getPort());
                    shutdownMessage.setInstanceId(instanceId);
                    shutdownMessage.setTimestamp(instant.toEpochMilli());
                    shutdownMessage.setSignature(instance.sign(consumer.getTaskName(), consumer.getPrivateKey(), hour + minute));
                    shutdownMessage.setCustomParameters(consumer.getPingParameters());
                    byte[] body = instance.encode(shutdownMessage);
                    DatagramPacket packet = new DatagramPacket(body, body.length,
                            broadcastInterface.getBroadcastAddress(), broadcastInterface.getPort());
                    broadcastInterface.getBroadcastSocket().send(packet);
                    broadcastInterface.close();
                } catch (Exception ex) {
                    Log.w(SystemProperties.get(SystemProperties.Net.Broadcast.LOG_TAG),
                            "Broadcast shutdown sender error", ex);
                }
            }
        }
    }

    /**
     * This runnable listens on the broadcast interface all the time in order to
     * receive the broadcast messages sent by other hosts of the net.
     */
    private class BroadcastReceiver implements Runnable {

        private final BroadcastInterface broadcastInterface;

        public BroadcastReceiver(BroadcastInterface broadcastInterface) {
            this.broadcastInterface = broadcastInterface;
        }

        @Override
        public void run() {
            Instant instant;
            LocalTime localTime;
            String hour;
            String minute;
            byte[] buffer;
            DatagramPacket inputPacket;
            BroadcastMessage broadcastMessage;
            //Main loop
            while(!instance.isShuttingDown()) {
                try {
                    buffer = new byte[SystemProperties.getInteger(SystemProperties.Net.Broadcast.RECEIVER_BUFFER_SIZE)];
                    inputPacket = new DatagramPacket(buffer, buffer.length);
                    try {
                        broadcastInterface.getBroadcastSocket().receive(inputPacket);
                    } catch (IOException ex) {
                        continue;
                    }
                    broadcastMessage = instance.decode(inputPacket.getData());
                    BroadcastConsumer consumer = consumers.get(broadcastMessage.getTaskName());
                    if(consumer == null) {
                        //Message for a task this instance does not consume; previously
                        //this caused a NullPointerException logged as a generic error.
                        continue;
                    }
                    if(broadcastMessage instanceof PingMessage) {
                        //Answer pings from other instances with a pong.
                        if(!broadcastMessage.getInstanceId().equals(instanceId)) {
                            consumer.onPing((PingMessage) broadcastMessage);
                            instant = Instant.now();
                            localTime = LocalTime.now();
                            hour = Integer.toString(localTime.get(ChronoField.HOUR_OF_DAY));
                            minute = Integer.toString(localTime.get(ChronoField.MINUTE_OF_HOUR));
                            PongMessage pongMessage = new PongMessage();
                            pongMessage.setTaskName(consumer.getTaskName());
                            pongMessage.setHost(broadcastInterface.getLocalAddress().getHostName());
                            pongMessage.setPort(broadcastInterface.getPort());
                            pongMessage.setInstanceId(instanceId);
                            pongMessage.setTimestamp(instant.toEpochMilli());
                            pongMessage.setSignature(instance.sign(consumer.getTaskName(), consumer.getPrivateKey(), hour + minute));
                            pongMessage.setCustomParameters(consumer.getPingParameters());
                            byte[] body = instance.encode(pongMessage);
                            DatagramPacket packet = new DatagramPacket(body, body.length,
                                    InetAddress.getByName(broadcastMessage.getHost()), broadcastInterface.getPort());
                            broadcastInterface.getBroadcastSocket().send(packet);
                        }
                    } else if(broadcastMessage instanceof PongMessage) {
                        consumer.onPong((PongMessage) broadcastMessage);
                    } else if(broadcastMessage instanceof ShutdownMessage) {
                        if(!broadcastMessage.getHost().equals(broadcastInterface.getLocalAddress().getHostName())) {
                            consumer.onShutdown((ShutdownMessage) broadcastMessage);
                        }
                    }
                } catch (Exception ex) {
                    Log.w(SystemProperties.get(SystemProperties.Net.Broadcast.LOG_TAG),
                            "Broadcast receiver error", broadcastInterface.getName(), ex);
                }
            }
        }
    }

    /**
     * This broadcast interface is created for each combination of net interface name and port,
     * and contains all the information to create the socket to send and receive the broadcast messages.
     */
    private static class BroadcastInterface implements Closeable {

        private final String id;
        private final String name;
        private final Integer port;
        private final InetAddress localAddress;
        private final InetAddress broadcastAddress;
        private final DatagramSocket broadcastSocket;

        public BroadcastInterface(String name, Integer port,
                                  InetAddress localAddress, InetAddress broadcastAddress) throws SocketException {
            this.id = createInterfaceId(name, port);
            this.name = name;
            this.port = port;
            this.localAddress = localAddress;
            this.broadcastAddress = broadcastAddress;
            this.broadcastSocket = new DatagramSocket(port);
        }

        /**
         * Closes the broadcast socket.
         * @throws IOException Never thrown; declared by the Closeable contract.
         */
        @Override
        public void close() throws IOException {
            broadcastSocket.close();
        }

        /**
         * Returns the id of the broadcast interface.
         * @return Interface id.
         */
        public String getId() {
            return id;
        }

        /**
         * Returns the name of the interface.
         * @return Name of the interface.
         */
        public String getName() {
            return name;
        }

        /**
         * Returns the port of the interface.
         * @return Port of the interface.
         */
        public Integer getPort() {
            return port;
        }

        /**
         * Returns the local address of the interface.
         * @return Local address.
         */
        public InetAddress getLocalAddress() {
            return localAddress;
        }

        /**
         * Returns the broadcast address of the interface.
         * @return Broadcast address.
         */
        public InetAddress getBroadcastAddress() {
            return broadcastAddress;
        }

        /**
         * Returns the datagram socket of the broadcast interface.
         * @return Datagram socket.
         */
        public DatagramSocket getBroadcastSocket() {
            return broadcastSocket;
        }

        /**
         * Utility method to create the interface id based on the name of the interface and the port.
         * @param name Name of the interface.
         * @param port Port of the interface.
         * @return Generated id.
         */
        public static String createInterfaceId(String name, Integer port) {
            return name + port.toString();
        }
    }

    /**
     * Base class for all the broadcast messages.
     */
    private static abstract class BroadcastMessage {

        private String taskName;
        private String host;
        private Integer port;
        private Long timestamp;
        private String signature;
        private UUID instanceId;
        private Map<String,Object> customParameters;

        /**
         * Returns the task name.
         * @return Task name.
         */
        public String getTaskName() {
            return taskName;
        }

        /**
         * Sets the task name.
         * @param taskName Task name.
         */
        public void setTaskName(String taskName) {
            this.taskName = taskName;
        }

        /**
         * Returns the host name.
         * @return Host name.
         */
        public String getHost() {
            return host;
        }

        /**
         * Sets the host name.
         * @param host Host name.
         */
        public void setHost(String host) {
            this.host = host;
        }

        /**
         * Returns the port number.
         * @return Port number.
         */
        public Integer getPort() {
            return port;
        }

        /**
         * Sets the port number.
         * @param port Port number.
         */
        public void setPort(Integer port) {
            this.port = port;
        }

        /**
         * Returns the creation timestamp.
         * @return Creation timestamp.
         */
        public Long getTimestamp() {
            return timestamp;
        }

        /**
         * Sets the creation timestamp.
         * @param timestamp Creation timestamp.
         */
        public void setTimestamp(Long timestamp) {
            this.timestamp = timestamp;
        }

        /**
         * Returns the package signature.
         * @return Package signature.
         */
        public String getSignature() {
            return signature;
        }

        /**
         * Sets the package signature.
         * @param signature Package signature.
         */
        public void setSignature(String signature) {
            this.signature = signature;
        }

        /**
         * Returns the id of the instance that created the message.
         * @return Instance id.
         */
        public UUID getInstanceId() {
            return instanceId;
        }

        /**
         * Sets the id of the instance that created the message.
         * @param instanceId Instance id.
         */
        public void setInstanceId(UUID instanceId) {
            this.instanceId = instanceId;
        }

        /**
         * Returns the custom parameters of the message.
         * @return Custom parameters.
         */
        public Map<String, Object> getCustomParameters() {
            return customParameters;
        }

        /**
         * Sets the custom parameters of the message.
         * @param customParameters Custom parameters.
         */
        public void setCustomParameters(Map<String, Object> customParameters) {
            this.customParameters = customParameters;
        }

        /**
         * Returns the concrete message class name, serialized so the receiver
         * can re-instantiate the right message type.
         * @return Implementation class name.
         */
        public String getImplementation() {
            return getClass().getName();
        }
    }

    /**
     * Message that is sent in order to publish the instance to all the net.
     */
    public static class PingMessage extends BroadcastMessage {}

    /**
     * Message that is sent as a response to a Ping Message.
     */
    public static class PongMessage extends BroadcastMessage {}

    /**
     * Message that is sent to notify all the net that the instance is shutting down.
     */
    public static class ShutdownMessage extends BroadcastMessage {}
}
| |
/*
* The MIT License
*
* Copyright (c) 2018, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jenkins.slaves;
import hudson.EnvVars;
import hudson.Launcher;
import hudson.MarkupText;
import hudson.console.ConsoleAnnotationDescriptor;
import hudson.console.ConsoleAnnotator;
import hudson.console.ConsoleNote;
import hudson.model.AbstractBuild;
import hudson.model.BuildListener;
import hudson.model.Computer;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.model.Label;
import hudson.model.Slave;
import hudson.model.TaskListener;
import hudson.model.labels.LabelAtom;
import hudson.node_monitors.AbstractAsyncNodeMonitorDescriptor;
import hudson.node_monitors.AbstractNodeMonitorDescriptor;
import hudson.node_monitors.NodeMonitor;
import hudson.slaves.ComputerLauncher;
import hudson.tasks.BatchFile;
import hudson.tasks.Shell;
import jenkins.security.MasterToSlaveCallable;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.SimpleCommandLauncher;
import org.jvnet.hudson.test.TestBuilder;
import org.jvnet.hudson.test.TestExtension;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.util.Collection;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Tests for old Remoting agent versions
*/
public class OldRemotingAgentTest {
@Rule
public JenkinsRule j = new JenkinsRuleWithOldAgent();
@Rule
public TemporaryFolder tmpDir = new TemporaryFolder();
private File agentJar;
@Before
public void extractAgent() throws Exception {
agentJar = new File(tmpDir.getRoot(), "old-agent.jar");
FileUtils.copyURLToFile(OldRemotingAgentTest.class.getResource("/old-remoting/remoting-minimal-supported.jar"), agentJar);
}
@Test
@Issue("JENKINS-48761")
public void shouldBeAbleToConnectAgentWithMinimalSupportedVersion() throws Exception {
Label agentLabel = new LabelAtom("old-agent");
Slave agent = j.createOnlineSlave(agentLabel);
boolean isUnix = agent.getComputer().isUnix();
assertThat("Received wrong agent version. A minimal supported version is expected",
agent.getComputer().getSlaveVersion(),
equalTo(RemotingVersionInfo.getMinimumSupportedVersion().toString()));
// Just ensure we are able to run something on the agent
FreeStyleProject project = j.createFreeStyleProject("foo");
project.setAssignedLabel(agentLabel);
project.getBuildersList().add(isUnix ? new Shell("echo Hello") : new BatchFile("echo 'hello'"));
j.buildAndAssertSuccess(project);
// Run agent monitors
NodeMonitorAssert.assertMonitors(NodeMonitor.getAll(), agent.getComputer());
}
@Issue("JENKINS-55257")
@Test
public void remoteConsoleNote() throws Exception {
Slave agent = j.createOnlineSlave();
FreeStyleProject project = j.createFreeStyleProject();
project.setAssignedLabel(agent.getSelfLabel());
project.getBuildersList().add(new TestBuilder() {
@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException {
build.getWorkspace().act(new RemoteConsoleNotePrinter(listener));
return true;
}
});
FreeStyleBuild b = j.buildAndAssertSuccess(project);
StringWriter sw = new StringWriter();
// The note will not actually work by default; we just want to ensure that the attempt is ignored without breaking the build.
// But for purposes of testing, check that the note really made it into the log.
boolean insecureOriginal = ConsoleNote.INSECURE;
ConsoleNote.INSECURE = true;
try {
b.getLogText().writeHtmlTo(0, sw);
} finally {
ConsoleNote.INSECURE = insecureOriginal;
}
assertThat(sw.toString(), containsString("@@@ANNOTATED@@@"));
}
private static final class RemoteConsoleNotePrinter extends MasterToSlaveCallable<Void, IOException> {
private final TaskListener listener;
RemoteConsoleNotePrinter(TaskListener listener) {
this.listener = listener;
}
@Override
public Void call() throws IOException {
listener.annotate(new RemoteConsoleNote());
listener.getLogger().println();
return null;
}
}
public static final class RemoteConsoleNote extends ConsoleNote<Object> {
@Override
public ConsoleAnnotator<Object> annotate(Object context, MarkupText text, int charPos) {
text.addMarkup(charPos, "@@@ANNOTATED@@@");
return null;
}
@TestExtension("remoteConsoleNote")
public static final class DescriptorImpl extends ConsoleAnnotationDescriptor {}
}
//TODO: move the logic to JTH
private class JenkinsRuleWithOldAgent extends JenkinsRule {
@Override
public ComputerLauncher createComputerLauncher(EnvVars env) throws URISyntaxException, IOException {
// EnvVars are ignored, simple Command Launcher does not offer this API in public
int sz = this.jenkins.getNodes().size();
return new SimpleCommandLauncher(String.format("\"%s/bin/java\" %s -jar \"%s\"",
System.getProperty("java.home"),
SLAVE_DEBUG_PORT > 0 ? " -Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=" + (SLAVE_DEBUG_PORT + sz) : "",
agentJar.getAbsolutePath()));
}
}
private static class NodeMonitorAssert extends NodeMonitor {
static void assertMonitors(Collection<NodeMonitor> toCheck, Computer c) throws AssertionError {
for (NodeMonitor monitor : toCheck) {
assertMonitor(monitor, c);
}
}
static void assertMonitor(NodeMonitor monitor, Computer c) throws AssertionError {
AbstractNodeMonitorDescriptor<?> descriptor = monitor.getDescriptor();
final Method monitorMethod;
try {
monitorMethod = AbstractAsyncNodeMonitorDescriptor.class.getDeclaredMethod("monitor", Computer.class);
} catch (NoSuchMethodException ex) {
//TODO: make the API visible for testing?
throw new AssertionError("Cannot invoke monitor " + monitor + ", no monitor(Computer.class) method in the Descriptor. It will be ignored. ", ex);
}
try {
monitorMethod.setAccessible(true);
Object res = monitorMethod.invoke(descriptor, c);
System.out.println("Successfully executed monitor " + monitor);
} catch (Exception ex) {
throw new AssertionError("Failed to run monitor " + monitor + " for computer " + c, ex);
} finally {
monitorMethod.setAccessible(false);
}
}
}
}
| |
/*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
*
* Copyright (c) Bug 2016.
*
*
*/
package cn.scujcc.bug.bitcoinplatformandroid.view;
import android.R;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.widget.LinearLayout;
/**
 * Internal child of {@code SlidingTabLayout} that draws the selection indicator,
 * the bottom border, and the vertical dividers between tab titles.
 */
class SlidingTabStrip extends LinearLayout {

    private static final int DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS = 2;
    private static final byte DEFAULT_BOTTOM_BORDER_COLOR_ALPHA = 0x26;
    private static final int SELECTED_INDICATOR_THICKNESS_DIPS = 2;
    private static final int DEFAULT_SELECTED_INDICATOR_COLOR = 0xFF009688;
    private static final int DEFAULT_DIVIDER_THICKNESS_DIPS = 1;
    private static final byte DEFAULT_DIVIDER_COLOR_ALPHA = 0x20;
    private static final float DEFAULT_DIVIDER_HEIGHT = 0.5f;

    private final int mBottomBorderThickness;
    private final Paint mBottomBorderPaint;
    private final int mSelectedIndicatorThickness;
    private final Paint mSelectedIndicatorPaint;
    private final int mDefaultBottomBorderColor;
    private final Paint mDividerPaint;
    private final float mDividerHeight;
    private final SimpleTabColorizer mDefaultTabColorizer;

    private int mSelectedPosition;
    private float mSelectionOffset;
    private SlidingTabLayout.TabColorizer mCustomTabColorizer;

    SlidingTabStrip(Context context) {
        this(context, null);
    }

    SlidingTabStrip(Context context, AttributeSet attrs) {
        super(context, attrs);
        // ViewGroups skip onDraw() by default; we draw decorations ourselves.
        setWillNotDraw(false);

        final float density = getResources().getDisplayMetrics().density;

        // Derive default decoration colors from the theme's foreground color.
        TypedValue typedValue = new TypedValue();
        context.getTheme().resolveAttribute(R.attr.colorForeground, typedValue, true);
        final int foreground = typedValue.data;
        mDefaultBottomBorderColor = setColorAlpha(foreground, DEFAULT_BOTTOM_BORDER_COLOR_ALPHA);

        mDefaultTabColorizer = new SimpleTabColorizer();
        mDefaultTabColorizer.setIndicatorColors(DEFAULT_SELECTED_INDICATOR_COLOR);
        mDefaultTabColorizer.setDividerColors(setColorAlpha(foreground, DEFAULT_DIVIDER_COLOR_ALPHA));

        mBottomBorderThickness = (int) (DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS * density);
        mBottomBorderPaint = new Paint();
        mBottomBorderPaint.setColor(mDefaultBottomBorderColor);

        mSelectedIndicatorThickness = (int) (SELECTED_INDICATOR_THICKNESS_DIPS * density);
        mSelectedIndicatorPaint = new Paint();

        mDividerHeight = DEFAULT_DIVIDER_HEIGHT;
        mDividerPaint = new Paint();
        mDividerPaint.setStrokeWidth((int) (DEFAULT_DIVIDER_THICKNESS_DIPS * density));
    }

    /**
     * Returns {@code color} with its alpha channel replaced by {@code alpha}.
     */
    private static int setColorAlpha(int color, byte alpha) {
        return Color.argb(alpha, Color.red(color), Color.green(color), Color.blue(color));
    }

    /**
     * Linearly blends {@code color1} and {@code color2}.
     *
     * @param ratio blend factor: 1.0 yields {@code color1}, 0.5 an even mix,
     *              0.0 yields {@code color2}.
     */
    private static int blendColors(int color1, int color2, float ratio) {
        final float inverseRatio = 1f - ratio;
        float r = (Color.red(color1) * ratio) + (Color.red(color2) * inverseRatio);
        float g = (Color.green(color1) * ratio) + (Color.green(color2) * inverseRatio);
        float b = (Color.blue(color1) * ratio) + (Color.blue(color2) * inverseRatio);
        return Color.rgb((int) r, (int) g, (int) b);
    }

    void setCustomTabColorizer(SlidingTabLayout.TabColorizer customTabColorizer) {
        mCustomTabColorizer = customTabColorizer;
        invalidate();
    }

    void setSelectedIndicatorColors(int... colors) {
        // Explicit colors take precedence: drop any custom colorizer.
        mCustomTabColorizer = null;
        mDefaultTabColorizer.setIndicatorColors(colors);
        invalidate();
    }

    void setDividerColors(int... colors) {
        // Explicit colors take precedence: drop any custom colorizer.
        mCustomTabColorizer = null;
        mDefaultTabColorizer.setDividerColors(colors);
        invalidate();
    }

    void onViewPagerPageChanged(int position, float positionOffset) {
        mSelectedPosition = position;
        mSelectionOffset = positionOffset;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        final int height = getHeight();
        final int tabCount = getChildCount();
        // Divider height is a fraction of the strip height, clamped to [0, 1].
        final int dividerHeightPx = (int) (Math.min(Math.max(0f, mDividerHeight), 1f) * height);
        final SlidingTabLayout.TabColorizer colorizer =
                (mCustomTabColorizer != null) ? mCustomTabColorizer : mDefaultTabColorizer;

        // 1) Thick colored underline below the current selection.
        if (tabCount > 0) {
            final View selected = getChildAt(mSelectedPosition);
            int indicatorLeft = selected.getLeft();
            int indicatorRight = selected.getRight();
            int indicatorColor = colorizer.getIndicatorColor(mSelectedPosition);

            final boolean midSwipe = mSelectionOffset > 0f && mSelectedPosition < (getChildCount() - 1);
            if (midSwipe) {
                final int nextColor = colorizer.getIndicatorColor(mSelectedPosition + 1);
                if (indicatorColor != nextColor) {
                    indicatorColor = blendColors(nextColor, indicatorColor, mSelectionOffset);
                }
                // Interpolate the indicator bounds between the two adjacent tabs.
                final View next = getChildAt(mSelectedPosition + 1);
                indicatorLeft = (int) (mSelectionOffset * next.getLeft()
                        + (1.0f - mSelectionOffset) * indicatorLeft);
                indicatorRight = (int) (mSelectionOffset * next.getRight()
                        + (1.0f - mSelectionOffset) * indicatorRight);
            }

            mSelectedIndicatorPaint.setColor(indicatorColor);
            canvas.drawRect(indicatorLeft, height - mSelectedIndicatorThickness,
                    indicatorRight, height, mSelectedIndicatorPaint);
        }

        // 2) Thin underline along the entire bottom edge.
        canvas.drawRect(0, height - mBottomBorderThickness, getWidth(), height, mBottomBorderPaint);

        // 3) Vertical separators between adjacent tab titles.
        final int separatorTop = (height - dividerHeightPx) / 2;
        for (int i = 0; i < tabCount - 1; i++) {
            final View child = getChildAt(i);
            mDividerPaint.setColor(colorizer.getDividerColor(i));
            canvas.drawLine(child.getRight(), separatorTop, child.getRight(),
                    separatorTop + dividerHeightPx, mDividerPaint);
        }
    }

    /** Default colorizer that cycles through fixed indicator and divider color arrays. */
    private static class SimpleTabColorizer implements SlidingTabLayout.TabColorizer {
        private int[] mIndicatorColors;
        private int[] mDividerColors;

        @Override
        public final int getIndicatorColor(int position) {
            return mIndicatorColors[position % mIndicatorColors.length];
        }

        @Override
        public final int getDividerColor(int position) {
            return mDividerColors[position % mDividerColors.length];
        }

        void setIndicatorColors(int... colors) {
            mIndicatorColors = colors;
        }

        void setDividerColors(int... colors) {
            mDividerColors = colors;
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cognitoidentity;
import com.amazonaws.services.cognitoidentity.model.*;
import com.amazonaws.annotation.ThreadSafe;
/**
* Interface for accessing Amazon Cognito Identity asynchronously. Each
* asynchronous method will return a Java Future object representing the
* asynchronous operation; overloads which accept an {@code AsyncHandler} can be
* used to receive notification when an asynchronous operation completes.
* <p>
* <fullname>Amazon Cognito</fullname>
* <p>
* Amazon Cognito is a web service that delivers scoped temporary credentials to
* mobile devices and other untrusted environments. Amazon Cognito uniquely
* identifies a device and supplies the user with a consistent identity over the
* lifetime of an application.
* </p>
* <p>
* Using Amazon Cognito, you can enable authentication with one or more
* third-party identity providers (Facebook, Google, or Login with Amazon), and
* you can also choose to support unauthenticated access from your app. Cognito
* delivers a unique identifier for each user and acts as an OpenID token
* provider trusted by AWS Security Token Service (STS) to access temporary,
* limited-privilege AWS credentials.
* </p>
* <p>
* To provide end-user credentials, first make an unsigned call to <a>GetId</a>.
* If the end user is authenticated with one of the supported identity
* providers, set the <code>Logins</code> map with the identity provider token.
* <code>GetId</code> returns a unique identifier for the user.
* </p>
* <p>
* Next, make an unsigned call to <a>GetCredentialsForIdentity</a>. This call
* expects the same <code>Logins</code> map as the <code>GetId</code> call, as
* well as the <code>IdentityID</code> originally returned by <code>GetId</code>
* . Assuming your identity pool has been configured via the
* <a>SetIdentityPoolRoles</a> operation, <code>GetCredentialsForIdentity</code>
* will return AWS credentials for your use. If your pool has not been
* configured with <code>SetIdentityPoolRoles</code>, or if you want to follow
* legacy flow, make an unsigned call to <a>GetOpenIdToken</a>, which returns
* the OpenID token necessary to call STS and retrieve AWS credentials. This
* call expects the same <code>Logins</code> map as the <code>GetId</code> call,
* as well as the <code>IdentityID</code> originally returned by
* <code>GetId</code>. The token returned by <code>GetOpenIdToken</code> can be
* passed to the STS operation <a href=
* "http://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html"
* >AssumeRoleWithWebIdentity</a> to retrieve AWS credentials.
* </p>
* <p>
* If you want to use Amazon Cognito in an Android, iOS, or Unity application,
* you will probably want to make API calls via the AWS Mobile SDK. To learn
* more, see the <a href="http://docs.aws.amazon.com/mobile/index.html">AWS
* Mobile SDK Developer Guide</a>.
* </p>
*/
@ThreadSafe
public class AmazonCognitoIdentityAsyncClient extends
AmazonCognitoIdentityClient implements AmazonCognitoIdentityAsync {
// Default async pool size; matches the SDK's default maximum number of concurrent connections.
private static final int DEFAULT_THREAD_POOL_SIZE = 50;
// Executor on which all asynchronous requests are run; set once by the constructors.
private final java.util.concurrent.ExecutorService executorService;
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity. A credentials provider chain will be used that searches
 * for credentials in this order:
 * <ul>
 * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
 * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
 * <li>Credential profiles file at the default location (~/.aws/credentials)
 * shared by all AWS SDKs and the AWS CLI</li>
 * <li>Instance profile credentials delivered through the Amazon EC2
 * metadata service</li>
 * </ul>
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * 50 threads (to match the default maximum number of concurrent connections
 * to the service).
 *
 * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AmazonCognitoIdentityAsyncClient() {
// Delegate to the credentials-provider constructor, which supplies default config and executor.
this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain());
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity. A credentials provider chain will be used that searches
 * for credentials in this order:
 * <ul>
 * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
 * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
 * <li>Credential profiles file at the default location (~/.aws/credentials)
 * shared by all AWS SDKs and the AWS CLI</li>
 * <li>Instance profile credentials delivered through the Amazon EC2
 * metadata service</li>
 * </ul>
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * a number of threads equal to the maximum number of concurrent connections
 * configured via {@code ClientConfiguration.getMaxConnections()}.
 *
 * @param clientConfiguration
 *        The client configuration options controlling how this client
 *        connects to Amazon Cognito Identity (ex: proxy settings, retry
 *        counts, etc).
 *
 * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.ClientConfiguration clientConfiguration) {
// Pool is sized to the configured max connections so threads never outnumber usable connections.
this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain(),
clientConfiguration, java.util.concurrent.Executors
.newFixedThreadPool(clientConfiguration
.getMaxConnections()));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity using the specified AWS account credentials.
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * 50 threads (to match the default maximum number of concurrent connections
 * to the service).
 *
 * @param awsCredentials
 *        The AWS credentials (access key ID and secret key) to use when
 *        authenticating with AWS services.
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentials awsCredentials) {
// Delegate with a default fixed-size pool of DEFAULT_THREAD_POOL_SIZE threads.
this(awsCredentials, java.util.concurrent.Executors
.newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity using the specified AWS account credentials and executor
 * service. Default client settings will be used.
 *
 * @param awsCredentials
 *        The AWS credentials (access key ID and secret key) to use when
 *        authenticating with AWS services.
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentials awsCredentials,
java.util.concurrent.ExecutorService executorService) {
// configFactory.getConfig() supplies the SDK's default client configuration.
this(awsCredentials, configFactory.getConfig(), executorService);
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity using the specified AWS account credentials, executor
 * service, and client configuration options.
 *
 * @param awsCredentials
 *        The AWS credentials (access key ID and secret key) to use when
 *        authenticating with AWS services.
 * @param clientConfiguration
 *        Client configuration options (ex: max retry limit, proxy settings,
 *        etc).
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentials awsCredentials,
com.amazonaws.ClientConfiguration clientConfiguration,
java.util.concurrent.ExecutorService executorService) {
// Synchronous plumbing lives in the superclass; this class only adds the async executor.
super(awsCredentials, clientConfiguration);
this.executorService = executorService;
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity using the specified AWS account credentials provider.
 * Default client settings will be used.
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * 50 threads (to match the default maximum number of concurrent connections
 * to the service).
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider) {
// Delegate with a default fixed-size pool of DEFAULT_THREAD_POOL_SIZE threads.
this(awsCredentialsProvider, java.util.concurrent.Executors
.newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity using the provided AWS account credentials provider and
 * client configuration options.
 * <p>
 * Asynchronous methods are delegated to a fixed-size thread pool containing
 * a number of threads equal to the maximum number of concurrent connections
 * configured via {@code ClientConfiguration.getMaxConnections()}.
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @param clientConfiguration
 *        Client configuration options (ex: max retry limit, proxy settings,
 *        etc).
 *
 * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
 * @see java.util.concurrent.Executors#newFixedThreadPool(int)
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
com.amazonaws.ClientConfiguration clientConfiguration) {
// Pool is sized to the configured max connections so threads never outnumber usable connections.
this(awsCredentialsProvider, clientConfiguration,
java.util.concurrent.Executors
.newFixedThreadPool(clientConfiguration
.getMaxConnections()));
}
/**
 * Constructs a new asynchronous client to invoke service methods on Amazon
 * Cognito Identity using the specified AWS account credentials provider and
 * executor service. Default client settings will be used.
 *
 * @param awsCredentialsProvider
 *        The AWS credentials provider which will provide credentials to
 *        authenticate requests with AWS services.
 * @param executorService
 *        The executor service by which all asynchronous requests will be
 *        executed.
 */
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
java.util.concurrent.ExecutorService executorService) {
// configFactory.getConfig() supplies the SDK's default client configuration.
this(awsCredentialsProvider, configFactory.getConfig(), executorService);
}
/**
* Constructs a new asynchronous client to invoke service methods on Amazon
* Cognito Identity using the specified AWS account credentials provider,
* executor service, and client configuration options.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @param clientConfiguration
* Client configuration options (ex: max retry limit, proxy settings,
* etc).
* @param executorService
* The executor service by which all asynchronous requests will be
* executed.
*/
public AmazonCognitoIdentityAsyncClient(
com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
com.amazonaws.ClientConfiguration clientConfiguration,
java.util.concurrent.ExecutorService executorService) {
super(awsCredentialsProvider, clientConfiguration);
this.executorService = executorService;
}
/**
* Returns the executor service used by this client to execute async
* requests.
*
* @return The executor service used by this client to execute async
* requests.
*/
public java.util.concurrent.ExecutorService getExecutorService() {
return executorService;
}
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<CreateIdentityPoolResult> createIdentityPoolAsync(
            CreateIdentityPoolRequest request) {
        return createIdentityPoolAsync(request, null);
    }
    // Runs the blocking createIdentityPool call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<CreateIdentityPoolResult> createIdentityPoolAsync(
            final CreateIdentityPoolRequest request,
            final com.amazonaws.handlers.AsyncHandler<CreateIdentityPoolRequest, CreateIdentityPoolResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<CreateIdentityPoolResult>() {
                    @Override
                    public CreateIdentityPoolResult call() throws Exception {
                        CreateIdentityPoolResult result;
                        try {
                            result = createIdentityPool(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            // Rethrow so the Future also reports the failure.
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<DeleteIdentitiesResult> deleteIdentitiesAsync(
            DeleteIdentitiesRequest request) {
        return deleteIdentitiesAsync(request, null);
    }
    // Runs the blocking deleteIdentities call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<DeleteIdentitiesResult> deleteIdentitiesAsync(
            final DeleteIdentitiesRequest request,
            final com.amazonaws.handlers.AsyncHandler<DeleteIdentitiesRequest, DeleteIdentitiesResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<DeleteIdentitiesResult>() {
                    @Override
                    public DeleteIdentitiesResult call() throws Exception {
                        DeleteIdentitiesResult result;
                        try {
                            result = deleteIdentities(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<DeleteIdentityPoolResult> deleteIdentityPoolAsync(
            DeleteIdentityPoolRequest request) {
        return deleteIdentityPoolAsync(request, null);
    }
    // Runs the blocking deleteIdentityPool call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<DeleteIdentityPoolResult> deleteIdentityPoolAsync(
            final DeleteIdentityPoolRequest request,
            final com.amazonaws.handlers.AsyncHandler<DeleteIdentityPoolRequest, DeleteIdentityPoolResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<DeleteIdentityPoolResult>() {
                    @Override
                    public DeleteIdentityPoolResult call() throws Exception {
                        DeleteIdentityPoolResult result;
                        try {
                            result = deleteIdentityPool(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<DescribeIdentityResult> describeIdentityAsync(
            DescribeIdentityRequest request) {
        return describeIdentityAsync(request, null);
    }
    // Runs the blocking describeIdentity call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<DescribeIdentityResult> describeIdentityAsync(
            final DescribeIdentityRequest request,
            final com.amazonaws.handlers.AsyncHandler<DescribeIdentityRequest, DescribeIdentityResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<DescribeIdentityResult>() {
                    @Override
                    public DescribeIdentityResult call() throws Exception {
                        DescribeIdentityResult result;
                        try {
                            result = describeIdentity(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<DescribeIdentityPoolResult> describeIdentityPoolAsync(
            DescribeIdentityPoolRequest request) {
        return describeIdentityPoolAsync(request, null);
    }
    // Runs the blocking describeIdentityPool call on the executor, notifying
    // the optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<DescribeIdentityPoolResult> describeIdentityPoolAsync(
            final DescribeIdentityPoolRequest request,
            final com.amazonaws.handlers.AsyncHandler<DescribeIdentityPoolRequest, DescribeIdentityPoolResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<DescribeIdentityPoolResult>() {
                    @Override
                    public DescribeIdentityPoolResult call() throws Exception {
                        DescribeIdentityPoolResult result;
                        try {
                            result = describeIdentityPool(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<GetCredentialsForIdentityResult> getCredentialsForIdentityAsync(
            GetCredentialsForIdentityRequest request) {
        return getCredentialsForIdentityAsync(request, null);
    }
    // Runs the blocking getCredentialsForIdentity call on the executor,
    // notifying the optional handler before completing the Future.
    @Override
    public java.util.concurrent.Future<GetCredentialsForIdentityResult> getCredentialsForIdentityAsync(
            final GetCredentialsForIdentityRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetCredentialsForIdentityRequest, GetCredentialsForIdentityResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<GetCredentialsForIdentityResult>() {
                    @Override
                    public GetCredentialsForIdentityResult call()
                            throws Exception {
                        GetCredentialsForIdentityResult result;
                        try {
                            result = getCredentialsForIdentity(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<GetIdResult> getIdAsync(
            GetIdRequest request) {
        return getIdAsync(request, null);
    }
    // Runs the blocking getId call on the executor, notifying the optional
    // handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<GetIdResult> getIdAsync(
            final GetIdRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetIdRequest, GetIdResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<GetIdResult>() {
                    @Override
                    public GetIdResult call() throws Exception {
                        GetIdResult result;
                        try {
                            result = getId(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<GetIdentityPoolRolesResult> getIdentityPoolRolesAsync(
            GetIdentityPoolRolesRequest request) {
        return getIdentityPoolRolesAsync(request, null);
    }
    // Runs the blocking getIdentityPoolRoles call on the executor, notifying
    // the optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<GetIdentityPoolRolesResult> getIdentityPoolRolesAsync(
            final GetIdentityPoolRolesRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetIdentityPoolRolesRequest, GetIdentityPoolRolesResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<GetIdentityPoolRolesResult>() {
                    @Override
                    public GetIdentityPoolRolesResult call() throws Exception {
                        GetIdentityPoolRolesResult result;
                        try {
                            result = getIdentityPoolRoles(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<GetOpenIdTokenResult> getOpenIdTokenAsync(
            GetOpenIdTokenRequest request) {
        return getOpenIdTokenAsync(request, null);
    }
    // Runs the blocking getOpenIdToken call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<GetOpenIdTokenResult> getOpenIdTokenAsync(
            final GetOpenIdTokenRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetOpenIdTokenRequest, GetOpenIdTokenResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<GetOpenIdTokenResult>() {
                    @Override
                    public GetOpenIdTokenResult call() throws Exception {
                        GetOpenIdTokenResult result;
                        try {
                            result = getOpenIdToken(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<GetOpenIdTokenForDeveloperIdentityResult> getOpenIdTokenForDeveloperIdentityAsync(
            GetOpenIdTokenForDeveloperIdentityRequest request) {
        return getOpenIdTokenForDeveloperIdentityAsync(request, null);
    }
    // Runs the blocking getOpenIdTokenForDeveloperIdentity call on the
    // executor, notifying the optional handler before completing the Future.
    @Override
    public java.util.concurrent.Future<GetOpenIdTokenForDeveloperIdentityResult> getOpenIdTokenForDeveloperIdentityAsync(
            final GetOpenIdTokenForDeveloperIdentityRequest request,
            final com.amazonaws.handlers.AsyncHandler<GetOpenIdTokenForDeveloperIdentityRequest, GetOpenIdTokenForDeveloperIdentityResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<GetOpenIdTokenForDeveloperIdentityResult>() {
                    @Override
                    public GetOpenIdTokenForDeveloperIdentityResult call()
                            throws Exception {
                        GetOpenIdTokenForDeveloperIdentityResult result;
                        try {
                            result = getOpenIdTokenForDeveloperIdentity(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<ListIdentitiesResult> listIdentitiesAsync(
            ListIdentitiesRequest request) {
        return listIdentitiesAsync(request, null);
    }
    // Runs the blocking listIdentities call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<ListIdentitiesResult> listIdentitiesAsync(
            final ListIdentitiesRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListIdentitiesRequest, ListIdentitiesResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<ListIdentitiesResult>() {
                    @Override
                    public ListIdentitiesResult call() throws Exception {
                        ListIdentitiesResult result;
                        try {
                            result = listIdentities(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<ListIdentityPoolsResult> listIdentityPoolsAsync(
            ListIdentityPoolsRequest request) {
        return listIdentityPoolsAsync(request, null);
    }
    // Runs the blocking listIdentityPools call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<ListIdentityPoolsResult> listIdentityPoolsAsync(
            final ListIdentityPoolsRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListIdentityPoolsRequest, ListIdentityPoolsResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<ListIdentityPoolsResult>() {
                    @Override
                    public ListIdentityPoolsResult call() throws Exception {
                        ListIdentityPoolsResult result;
                        try {
                            result = listIdentityPools(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<LookupDeveloperIdentityResult> lookupDeveloperIdentityAsync(
            LookupDeveloperIdentityRequest request) {
        return lookupDeveloperIdentityAsync(request, null);
    }
    // Runs the blocking lookupDeveloperIdentity call on the executor, notifying
    // the optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<LookupDeveloperIdentityResult> lookupDeveloperIdentityAsync(
            final LookupDeveloperIdentityRequest request,
            final com.amazonaws.handlers.AsyncHandler<LookupDeveloperIdentityRequest, LookupDeveloperIdentityResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<LookupDeveloperIdentityResult>() {
                    @Override
                    public LookupDeveloperIdentityResult call()
                            throws Exception {
                        LookupDeveloperIdentityResult result;
                        try {
                            result = lookupDeveloperIdentity(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<MergeDeveloperIdentitiesResult> mergeDeveloperIdentitiesAsync(
            MergeDeveloperIdentitiesRequest request) {
        return mergeDeveloperIdentitiesAsync(request, null);
    }
    // Runs the blocking mergeDeveloperIdentities call on the executor,
    // notifying the optional handler before completing the Future.
    @Override
    public java.util.concurrent.Future<MergeDeveloperIdentitiesResult> mergeDeveloperIdentitiesAsync(
            final MergeDeveloperIdentitiesRequest request,
            final com.amazonaws.handlers.AsyncHandler<MergeDeveloperIdentitiesRequest, MergeDeveloperIdentitiesResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<MergeDeveloperIdentitiesResult>() {
                    @Override
                    public MergeDeveloperIdentitiesResult call()
                            throws Exception {
                        MergeDeveloperIdentitiesResult result;
                        try {
                            result = mergeDeveloperIdentities(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<SetIdentityPoolRolesResult> setIdentityPoolRolesAsync(
            SetIdentityPoolRolesRequest request) {
        return setIdentityPoolRolesAsync(request, null);
    }
    // Runs the blocking setIdentityPoolRoles call on the executor, notifying
    // the optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<SetIdentityPoolRolesResult> setIdentityPoolRolesAsync(
            final SetIdentityPoolRolesRequest request,
            final com.amazonaws.handlers.AsyncHandler<SetIdentityPoolRolesRequest, SetIdentityPoolRolesResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<SetIdentityPoolRolesResult>() {
                    @Override
                    public SetIdentityPoolRolesResult call() throws Exception {
                        SetIdentityPoolRolesResult result;
                        try {
                            result = setIdentityPoolRoles(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<UnlinkDeveloperIdentityResult> unlinkDeveloperIdentityAsync(
            UnlinkDeveloperIdentityRequest request) {
        return unlinkDeveloperIdentityAsync(request, null);
    }
    // Runs the blocking unlinkDeveloperIdentity call on the executor, notifying
    // the optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<UnlinkDeveloperIdentityResult> unlinkDeveloperIdentityAsync(
            final UnlinkDeveloperIdentityRequest request,
            final com.amazonaws.handlers.AsyncHandler<UnlinkDeveloperIdentityRequest, UnlinkDeveloperIdentityResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<UnlinkDeveloperIdentityResult>() {
                    @Override
                    public UnlinkDeveloperIdentityResult call()
                            throws Exception {
                        UnlinkDeveloperIdentityResult result;
                        try {
                            result = unlinkDeveloperIdentity(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<UnlinkIdentityResult> unlinkIdentityAsync(
            UnlinkIdentityRequest request) {
        return unlinkIdentityAsync(request, null);
    }
    // Runs the blocking unlinkIdentity call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<UnlinkIdentityResult> unlinkIdentityAsync(
            final UnlinkIdentityRequest request,
            final com.amazonaws.handlers.AsyncHandler<UnlinkIdentityRequest, UnlinkIdentityResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<UnlinkIdentityResult>() {
                    @Override
                    public UnlinkIdentityResult call() throws Exception {
                        UnlinkIdentityResult result;
                        try {
                            result = unlinkIdentity(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    // Fire-and-forget variant: delegates with a null AsyncHandler.
    @Override
    public java.util.concurrent.Future<UpdateIdentityPoolResult> updateIdentityPoolAsync(
            UpdateIdentityPoolRequest request) {
        return updateIdentityPoolAsync(request, null);
    }
    // Runs the blocking updateIdentityPool call on the executor, notifying the
    // optional handler of success or failure before completing the Future.
    @Override
    public java.util.concurrent.Future<UpdateIdentityPoolResult> updateIdentityPoolAsync(
            final UpdateIdentityPoolRequest request,
            final com.amazonaws.handlers.AsyncHandler<UpdateIdentityPoolRequest, UpdateIdentityPoolResult> asyncHandler) {
        return executorService
                .submit(new java.util.concurrent.Callable<UpdateIdentityPoolResult>() {
                    @Override
                    public UpdateIdentityPoolResult call() throws Exception {
                        UpdateIdentityPoolResult result;
                        try {
                            result = updateIdentityPool(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }
                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }
    /**
     * Shuts down the client, releasing all managed resources. This includes
     * forcibly terminating all pending asynchronous service calls. Clients who
     * wish to give pending asynchronous service calls time to complete should
     * call {@code getExecutorService().shutdown()} followed by
     * {@code getExecutorService().awaitTermination()} prior to calling this
     * method.
     */
    @Override
    public void shutdown() {
        super.shutdown();
        // shutdownNow() abruptly cancels queued and in-flight tasks; see the
        // javadoc above for the graceful-drain alternative.
        executorService.shutdownNow();
    }
}
| |
package org.kivy.android;
import java.net.Socket;
import java.net.InetSocketAddress;
import android.os.SystemClock;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import android.app.*;
import android.content.*;
import android.view.*;
import android.view.ViewGroup;
import android.view.SurfaceView;
import android.app.Activity;
import android.content.Intent;
import android.util.Log;
import android.widget.Toast;
import android.os.Bundle;
import android.os.PowerManager;
import android.graphics.PixelFormat;
import android.view.SurfaceHolder;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.pm.ApplicationInfo;
import android.content.Intent;
import android.widget.ImageView;
import java.io.InputStream;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.widget.AbsoluteLayout;
import android.view.ViewGroup.LayoutParams;
import android.webkit.WebViewClient;
import android.webkit.WebView;
import org.kivy.android.PythonUtil;
import org.kivy.android.WebViewLoader;
import org.renpy.android.ResourceManager;
import org.renpy.android.AssetExtract;
public class PythonActivity extends Activity {
// This activity is modified from a mixture of the SDLActivity and
// PythonActivity in the SDL2 bootstrap, but removing all the SDL2
// specifics.
private static final String TAG = "PythonActivity";
public static PythonActivity mActivity = null;
/** If shared libraries (e.g. SDL or the native application) could not be loaded. */
public static boolean mBrokenLibraries;
protected static ViewGroup mLayout;
protected static WebView mWebView;
protected static Thread mPythonThread;
private ResourceManager resourceManager = null;
private Bundle mMetaData = null;
private PowerManager.WakeLock mWakeLock = null;
public static void initialize() {
// The static nature of the singleton and Android quirkyness force us to initialize everything here
// Otherwise, when exiting the app and returning to it, these variables *keep* their pre exit values
mWebView = null;
mLayout = null;
mBrokenLibraries = false;
}
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.v(TAG, "My oncreate running");
resourceManager = new ResourceManager(this);
Log.v(TAG, "Ready to unpack");
unpackData("private", getFilesDir());
this.mActivity = this;
Log.v("Python", "Device: " + android.os.Build.DEVICE);
Log.v("Python", "Model: " + android.os.Build.MODEL);
super.onCreate(savedInstanceState);
PythonActivity.initialize();
// Load shared libraries
String errorMsgBrokenLib = "";
try {
loadLibraries();
} catch(UnsatisfiedLinkError e) {
System.err.println(e.getMessage());
mBrokenLibraries = true;
errorMsgBrokenLib = e.getMessage();
} catch(Exception e) {
System.err.println(e.getMessage());
mBrokenLibraries = true;
errorMsgBrokenLib = e.getMessage();
}
if (mBrokenLibraries)
{
AlertDialog.Builder dlgAlert = new AlertDialog.Builder(this);
dlgAlert.setMessage("An error occurred while trying to load the application libraries. Please try again and/or reinstall."
+ System.getProperty("line.separator")
+ System.getProperty("line.separator")
+ "Error: " + errorMsgBrokenLib);
dlgAlert.setTitle("Python Error");
dlgAlert.setPositiveButton("Exit",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,int id) {
// if this button is clicked, close current activity
PythonActivity.mActivity.finish();
}
});
dlgAlert.setCancelable(false);
dlgAlert.create().show();
return;
}
// Set up the webview
mWebView = new WebView(this);
mWebView.getSettings().setJavaScriptEnabled(true);
mWebView.getSettings().setDomStorageEnabled(true);
mWebView.loadUrl("file:///" + mActivity.getFilesDir().getAbsolutePath() + "/_load.html");
mWebView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
mWebView.setWebViewClient(new WebViewClient() {
@Override
public boolean shouldOverrideUrlLoading(WebView view, String url) {
view.loadUrl(url);
return false;
}
});
mLayout = new AbsoluteLayout(this);
mLayout.addView(mWebView);
setContentView(mLayout);
String mFilesDirectory = mActivity.getFilesDir().getAbsolutePath();
Log.v(TAG, "Setting env vars for start.c and Python to use");
PythonActivity.nativeSetEnv("ANDROID_PRIVATE", mFilesDirectory);
PythonActivity.nativeSetEnv("ANDROID_ARGUMENT", mFilesDirectory);
PythonActivity.nativeSetEnv("ANDROID_APP_PATH", mFilesDirectory);
PythonActivity.nativeSetEnv("ANDROID_ENTRYPOINT", "main.pyo");
PythonActivity.nativeSetEnv("PYTHONHOME", mFilesDirectory);
PythonActivity.nativeSetEnv("PYTHONPATH", mFilesDirectory + ":" + mFilesDirectory + "/lib");
try {
Log.v(TAG, "Access to our meta-data...");
this.mMetaData = this.mActivity.getPackageManager().getApplicationInfo(
this.mActivity.getPackageName(), PackageManager.GET_META_DATA).metaData;
PowerManager pm = (PowerManager) this.mActivity.getSystemService(Context.POWER_SERVICE);
if ( this.mMetaData.getInt("wakelock") == 1 ) {
this.mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "Screen On");
}
} catch (PackageManager.NameNotFoundException e) {
}
final Thread pythonThread = new Thread(new PythonMain(), "PythonThread");
PythonActivity.mPythonThread = pythonThread;
pythonThread.start();
final Thread wvThread = new Thread(new WebViewLoaderMain(), "WvThread");
wvThread.start();
}
@Override
public void onDestroy() {
Log.i("Destroy", "end of app");
super.onDestroy();
// make sure all child threads (python_thread) are stopped
android.os.Process.killProcess(android.os.Process.myPid());
}
public void loadLibraries() {
PythonUtil.loadLibraries(getFilesDir());
}
public void recursiveDelete(File f) {
if (f.isDirectory()) {
for (File r : f.listFiles()) {
recursiveDelete(r);
}
}
f.delete();
}
/**
* Show an error using a toast. (Only makes sense from non-UI
* threads.)
*/
public void toastError(final String msg) {
final Activity thisActivity = this;
runOnUiThread(new Runnable () {
public void run() {
Toast.makeText(thisActivity, msg, Toast.LENGTH_LONG).show();
}
});
// Wait to show the error.
synchronized (this) {
try {
this.wait(1000);
} catch (InterruptedException e) {
}
}
}
public void unpackData(final String resource, File target) {
Log.v(TAG, "UNPACKING!!! " + resource + " " + target.getName());
// The version of data in memory and on disk.
String data_version = resourceManager.getString(resource + "_version");
String disk_version = null;
Log.v(TAG, "Data version is " + data_version);
// If no version, no unpacking is necessary.
if (data_version == null) {
return;
}
// Check the current disk version, if any.
String filesDir = target.getAbsolutePath();
String disk_version_fn = filesDir + "/" + resource + ".version";
try {
byte buf[] = new byte[64];
InputStream is = new FileInputStream(disk_version_fn);
int len = is.read(buf);
disk_version = new String(buf, 0, len);
is.close();
} catch (Exception e) {
disk_version = "";
}
// If the disk data is out of date, extract it and write the
// version file.
// if (! data_version.equals(disk_version)) {
if (! data_version.equals(disk_version)) {
Log.v(TAG, "Extracting " + resource + " assets.");
recursiveDelete(target);
target.mkdirs();
AssetExtract ae = new AssetExtract(this);
if (!ae.extractTar(resource + ".mp3", target.getAbsolutePath())) {
toastError("Could not extract " + resource + " data.");
}
try {
// Write .nomedia.
new File(target, ".nomedia").createNewFile();
// Write version file.
FileOutputStream os = new FileOutputStream(disk_version_fn);
os.write(data_version.getBytes());
os.close();
} catch (Exception e) {
Log.w("python", e);
}
}
}
public static void loadUrl(String url) {
class LoadUrl implements Runnable {
private String mUrl;
public LoadUrl(String url) {
mUrl = url;
}
public void run() {
mWebView.loadUrl(mUrl);
}
}
Log.i(TAG, "Opening URL: " + url);
mActivity.runOnUiThread(new LoadUrl(url));
}
public static ViewGroup getLayout() {
return mLayout;
}
long lastBackClick = SystemClock.elapsedRealtime();
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
// Check if the key event was the Back button and if there's history
if ((keyCode == KeyEvent.KEYCODE_BACK) && mWebView.canGoBack()) {
mWebView.goBack();
return true;
}
// If it wasn't the Back key or there's no web page history, bubble up to the default
// system behavior (probably exit the activity)
if (SystemClock.elapsedRealtime() - lastBackClick > 2000){
lastBackClick = SystemClock.elapsedRealtime();
Toast.makeText(this, "Click again to close the app",
Toast.LENGTH_LONG).show();
return true;
}
lastBackClick = SystemClock.elapsedRealtime();
return super.onKeyDown(keyCode, event);
}
//----------------------------------------------------------------------------
// Listener interface for onNewIntent
//
public interface NewIntentListener {
void onNewIntent(Intent intent);
}
private List<NewIntentListener> newIntentListeners = null;
public void registerNewIntentListener(NewIntentListener listener) {
if ( this.newIntentListeners == null )
this.newIntentListeners = Collections.synchronizedList(new ArrayList<NewIntentListener>());
this.newIntentListeners.add(listener);
}
public void unregisterNewIntentListener(NewIntentListener listener) {
if ( this.newIntentListeners == null )
return;
this.newIntentListeners.remove(listener);
}
@Override
protected void onNewIntent(Intent intent) {
if ( this.newIntentListeners == null )
return;
this.onResume();
synchronized ( this.newIntentListeners ) {
Iterator<NewIntentListener> iterator = this.newIntentListeners.iterator();
while ( iterator.hasNext() ) {
(iterator.next()).onNewIntent(intent);
}
}
}
//----------------------------------------------------------------------------
// Listener interface for onActivityResult
//
public interface ActivityResultListener {
void onActivityResult(int requestCode, int resultCode, Intent data);
}
private List<ActivityResultListener> activityResultListeners = null;
public void registerActivityResultListener(ActivityResultListener listener) {
if ( this.activityResultListeners == null )
this.activityResultListeners = Collections.synchronizedList(new ArrayList<ActivityResultListener>());
this.activityResultListeners.add(listener);
}
public void unregisterActivityResultListener(ActivityResultListener listener) {
if ( this.activityResultListeners == null )
return;
this.activityResultListeners.remove(listener);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
if ( this.activityResultListeners == null )
return;
this.onResume();
synchronized ( this.activityResultListeners ) {
Iterator<ActivityResultListener> iterator = this.activityResultListeners.iterator();
while ( iterator.hasNext() )
(iterator.next()).onActivityResult(requestCode, resultCode, intent);
}
}
public static void start_service(String serviceTitle, String serviceDescription,
String pythonServiceArgument) {
Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class);
String argument = PythonActivity.mActivity.getFilesDir().getAbsolutePath();
String filesDirectory = argument;
serviceIntent.putExtra("androidPrivate", argument);
serviceIntent.putExtra("androidArgument", argument);
serviceIntent.putExtra("serviceEntrypoint", "service/main.pyo");
serviceIntent.putExtra("pythonHome", argument);
serviceIntent.putExtra("pythonPath", argument + ":" + filesDirectory + "/lib");
serviceIntent.putExtra("serviceTitle", serviceTitle);
serviceIntent.putExtra("serviceDescription", serviceDescription);
serviceIntent.putExtra("pythonServiceArgument", pythonServiceArgument);
PythonActivity.mActivity.startService(serviceIntent);
}
public static void stop_service() {
Intent serviceIntent = new Intent(PythonActivity.mActivity, PythonService.class);
PythonActivity.mActivity.stopService(serviceIntent);
}
public static native void nativeSetEnv(String j_name, String j_value);
public static native int nativeInit(Object arguments);
}
/** Background task that boots the Python interpreter via the native bridge. */
class PythonMain implements Runnable {
    @Override
    public void run() {
        // No command-line arguments are forwarded to the interpreter.
        final String[] noArgs = new String[0];
        PythonActivity.nativeInit(noArgs);
    }
}
/** Background task that delegates to {@link WebViewLoader#testConnection}. */
class WebViewLoaderMain implements Runnable {
    @Override
    public void run() {
        // Runs off the UI thread; WebViewLoader.testConnection() may block.
        WebViewLoader.testConnection();
    }
}
| |
/* -----------------------------------------------------------------------------
* Rule_cmdMulInt.java
* -----------------------------------------------------------------------------
*
* Producer : com.parse2.aparse.Parser 2.3
* Produced : Fri Apr 12 10:40:21 MUT 2013
*
* -----------------------------------------------------------------------------
*/
package com.litecoding.smali2java.parser.cmd.asmd.mul;
import java.util.ArrayList;
import com.litecoding.smali2java.builder.Visitor;
import com.litecoding.smali2java.parser.ParserContext;
import com.litecoding.smali2java.parser.Rule;
import com.litecoding.smali2java.parser.Terminal_StringValue;
import com.litecoding.smali2java.parser.smali.Rule_codeRegister;
import com.litecoding.smali2java.parser.smali.Rule_codeRegisterVDst;
import com.litecoding.smali2java.parser.smali.Rule_commentSequence;
import com.litecoding.smali2java.parser.smali.Rule_listSeparator;
import com.litecoding.smali2java.parser.smali.Rule_optPadding;
import com.litecoding.smali2java.parser.smali.Rule_padding;
import com.litecoding.smali2java.parser.text.Rule_CRLF;
final public class Rule_cmdMulInt extends Rule
{
  // Private: AST nodes for the "cmdMulInt" grammar rule are built only from
  // within this generated class; spelling is the matched text, rules the
  // child nodes.
  private Rule_cmdMulInt(String spelling, ArrayList<Rule> rules)
  {
    super(spelling, rules);
  }
  /** Visitor dispatch: forwards this node to {@code visitor.visit(this)}. */
  public Object accept(Visitor visitor)
  {
    return visitor.visit(this);
  }
public static Rule_cmdMulInt parse(ParserContext context)
{
context.push("cmdMulInt");
boolean parsed = true;
int s0 = context.index;
ArrayList<Rule> e0 = new ArrayList<Rule>();
Rule rule;
parsed = false;
if (!parsed)
{
{
ArrayList<Rule> e1 = new ArrayList<Rule>();
int s1 = context.index;
parsed = true;
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_optPadding.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Terminal_StringValue.parse(context, "mul-int");
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_padding.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_codeRegisterVDst.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_listSeparator.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_codeRegister.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_listSeparator.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_codeRegister.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_optPadding.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
{
boolean f1 = true;
@SuppressWarnings("unused")
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
int g1 = context.index;
parsed = false;
if (!parsed)
{
{
ArrayList<Rule> e2 = new ArrayList<Rule>();
int s2 = context.index;
parsed = true;
if (parsed)
{
boolean f2 = true;
int c2 = 0;
for (int i2 = 0; i2 < 1 && f2; i2++)
{
rule = Rule_padding.parse(context);
if ((f2 = rule != null))
{
e2.add(rule);
c2++;
}
}
parsed = c2 == 1;
}
if (parsed)
{
boolean f2 = true;
int c2 = 0;
for (int i2 = 0; i2 < 1 && f2; i2++)
{
rule = Rule_commentSequence.parse(context);
if ((f2 = rule != null))
{
e2.add(rule);
c2++;
}
}
parsed = c2 == 1;
}
if (parsed)
e1.addAll(e2);
else
context.index = s2;
}
}
f1 = context.index > g1;
if (parsed) c1++;
}
parsed = true;
}
if (parsed)
{
boolean f1 = true;
int c1 = 0;
for (int i1 = 0; i1 < 1 && f1; i1++)
{
rule = Rule_CRLF.parse(context);
if ((f1 = rule != null))
{
e1.add(rule);
c1++;
}
}
parsed = c1 == 1;
}
if (parsed)
e0.addAll(e1);
else
context.index = s1;
}
}
rule = null;
if (parsed)
rule = new Rule_cmdMulInt(context.text.substring(s0, context.index), e0);
else
context.index = s0;
context.pop("cmdMulInt", parsed);
return (Rule_cmdMulInt)rule;
}
}
/* -----------------------------------------------------------------------------
* eof
* -----------------------------------------------------------------------------
*/
| |
/*
* Copyright 2014 Roy Clarkson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.royclarkson.springagram;
import android.app.ActionBar;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.os.Bundle;
import android.support.v4.widget.DrawerLayout;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.royclarkson.springagram.model.ApiResource;
import com.royclarkson.springagram.model.GalleryResource;
import com.royclarkson.springagram.model.ItemResource;
import java.util.List;
/**
* @author Roy Clarkson
*/
public class MainActivity extends Activity
        implements NavigationDrawerFragment.NavigationDrawerCallbacks,
        HomeFragment.HomeFragmentListener,
        PhotoListFragment.PhotoListFragmentListener,
        PhotoDetailFragment.PhotoDetailFragmentListener,
        PhotoAddFragment.PhotoAddFragmentListener,
        PhotoAddToGalleryFragment.PhotoAddToGalleryFragmentListener,
        GalleryListFragment.GalleryListFragmentListener,
        GalleryAddFragment.GalleryAddFragmentListener,
        GalleryPhotoListFragment.GalleryPhotoListFragmentListener {

    private NavigationDrawerFragment navigationDrawerFragment;

    // Root API resource; set once the HomeFragment download completes. May be
    // null until then, so link lookups below guard against it where needed.
    private ApiResource apiResource;

    // Most recently downloaded photo list (shared by the plain photo list and
    // the per-gallery photo list callbacks).
    private List<ItemResource> photos;

    // Most recently downloaded gallery list.
    private List<GalleryResource> galleries;

    /**
     * Used to store the last screen title. For use in {@link #restoreActionBar()}.
     */
    private CharSequence title;

    // Index of the selected navigation drawer item; drives the options menu
    // chosen in onCreateOptionsMenu().
    private int menuPosition = 0;


    //***************************************
    // Activity methods
    //***************************************

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        navigationDrawerFragment = (NavigationDrawerFragment)
                getFragmentManager().findFragmentById(R.id.navigation_drawer);
        title = getTitle();

        // Set up the drawer.
        navigationDrawerFragment.setUp(
                R.id.navigation_drawer,
                (DrawerLayout) findViewById(R.id.drawer_layout));
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (!navigationDrawerFragment.isDrawerOpen()) {
            // Only show items in the action bar relevant to this screen
            // if the drawer is not showing. Otherwise, let the drawer
            // decide what to show in the action bar.
            switch (this.menuPosition) {
                case 1:
                    getMenuInflater().inflate(R.menu.photos_list_main, menu);
                    break;
                case 2:
                    getMenuInflater().inflate(R.menu.gallery_list_main, menu);
                    break;
                case 0:
                default:
                    getMenuInflater().inflate(R.menu.main, menu);
                    break;
            }
            restoreActionBar();
            return true;
        }
        return super.onCreateOptionsMenu(menu);
    }

    /**
     * Restores the standard action bar (title showing, no navigation mode)
     * after the navigation drawer closes.
     */
    public void restoreActionBar() {
        ActionBar actionBar = getActionBar();
        if (actionBar == null) {
            // No action bar on this theme/device; nothing to restore.
            return;
        }
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setTitle(title);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_add_photo) {
            showPhotoAddFragment();
            return true;
        } else if (id == R.id.action_refresh_photo_list) {
            FragmentManager fragmentManager = getFragmentManager();
            // Fixed misleading name: this is the photo list, not the gallery list.
            PhotoListFragment photoListFragment =
                    (PhotoListFragment) fragmentManager.findFragmentByTag(PhotoListFragment.TAG);
            photoListFragment.fetchPhotoList();
            return true;
        } else if (id == R.id.action_add_gallery) {
            showGalleryAddFragment();
            return true;
        } else if (id == R.id.action_refresh_gallery_list) {
            FragmentManager fragmentManager = getFragmentManager();
            GalleryListFragment galleryListFragment =
                    (GalleryListFragment) fragmentManager.findFragmentByTag(GalleryListFragment.TAG);
            galleryListFragment.fetchGalleryList();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // Pushes the "add photo" form onto the back stack, posting to the API's
    // items collection.
    private void showPhotoAddFragment() {
        FragmentManager fragmentManager = getFragmentManager();
        String url = this.apiResource.getLink(ApiResource.REL_ITEMS).getHref();
        PhotoAddFragment photoAddFragment = PhotoAddFragment.newInstance(url);
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .add(R.id.container, photoAddFragment)
                .addToBackStack(null);
        transaction.commit();
    }

    // Pushes the "add gallery" form onto the back stack, posting to the API's
    // galleries collection.
    private void showGalleryAddFragment() {
        FragmentManager fragmentManager = getFragmentManager();
        String url = this.apiResource.getLink(ApiResource.REL_GALLERIES).getHref();
        GalleryAddFragment galleryAddFragment = GalleryAddFragment.newInstance(url);
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .add(R.id.container, galleryAddFragment)
                .addToBackStack(null);
        transaction.commit();
    }


    //***************************************
    // NavigationDrawerCallbacks methods
    //***************************************

    @Override
    public void onNavigationDrawerItemSelected(int position) {
        this.menuPosition = position;
        String url = null;
        Fragment fragment = null;
        String tag = null;
        switch (position) {
            case 0:
                url = getString(R.string.base_uri);
                fragment = HomeFragment.newInstance(url);
                tag = HomeFragment.TAG;
                break;
            case 1:
                // apiResource may not have loaded yet; the fragment tolerates a null url.
                if (this.apiResource != null) {
                    url = this.apiResource.getLink(ApiResource.REL_ITEMS).getHref();
                }
                fragment = PhotoListFragment.newInstance(url);
                tag = PhotoListFragment.TAG;
                break;
            case 2:
                if (this.apiResource != null) {
                    url = this.apiResource.getLink(ApiResource.REL_GALLERIES).getHref();
                }
                fragment = GalleryListFragment.newInstance(url);
                tag = GalleryListFragment.TAG;
                break;
        }
        // update the main content by replacing fragments
        if (fragment != null) {
            FragmentManager fragmentManager = getFragmentManager();
            fragmentManager.popBackStack();
            fragmentManager.beginTransaction()
                    .replace(R.id.container, fragment, tag)
                    .commit();
        }
    }


    //***************************************
    // HomeFragmentListener methods
    //***************************************

    @Override
    public void onResourceDownloadComplete(ApiResource apiResource) {
        this.apiResource = apiResource;
    }

    // NOTE(review): presumably declared by one or more of the listener
    // interfaces; confirm and add @Override if so.
    public void onNetworkError(String message) {
        Toast toast = Toast.makeText(this, message, Toast.LENGTH_LONG);
        toast.setGravity(Gravity.CENTER, 0, 0);
        toast.show();
    }


    //***************************************
    // PhotoListFragmentListener methods
    //***************************************

    @Override
    public void onDownloadPhotosComplete(List<ItemResource> photos) {
        this.photos = photos;
    }

    @Override
    public void onPhotoSelected(int position) {
        PhotoDetailFragment photoDetailFragment = PhotoDetailFragment.newInstance(position);
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .add(R.id.container, photoDetailFragment)
                .addToBackStack(null);
        transaction.commit();
    }

    @Override
    public ItemResource getPhotoByPosition(int position) {
        return this.photos.get(position);
    }

    @Override
    public void onDeletePhotoByPosition(int position) {
        this.photos.remove(position);
    }

    @Override
    public void onPhotoAddToGallerySelected(int position) {
        ItemResource item = this.photos.get(position);
        String itemGalleryUrl = item.getLink(ItemResource.REL_GALLERY).getHref();
        PhotoAddToGalleryFragment photoAddToGalleryFragment = PhotoAddToGalleryFragment.newInstance(itemGalleryUrl);
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .add(R.id.container, photoAddToGalleryFragment)
                .addToBackStack(null);
        transaction.commit();
    }


    //***************************************
    // PhotoDetailFragmentListener methods
    //***************************************

    // @Override
    // public PhotoResource getPhotoByPosition(int position) {
    //     return this.photos.get(position);
    // }


    //***************************************
    // PhotoAddFragmentListener methods
    //***************************************
    // (fixed: this header previously duplicated "PhotoDetailFragmentListener")

    @Override
    public void onPhotoAddComplete() {
        FragmentManager fragmentManager = getFragmentManager();
        fragmentManager.popBackStack();
        PhotoListFragment photoListFragment =
                (PhotoListFragment) fragmentManager.findFragmentByTag(PhotoListFragment.TAG);
        photoListFragment.fetchPhotoList();
    }


    //***************************************
    // PhotoAddToGalleryFragmentListener methods
    //***************************************

    @Override
    public List<GalleryResource> getGalleryList() {
        return this.galleries;
    }

    @Override
    public void onPhotoAddToGalleryComplete() {
        getFragmentManager().popBackStack();
    }


    //***************************************
    // GalleryListFragmentListener methods
    //***************************************

    @Override
    public void onDownloadGalleriesComplete(List<GalleryResource> galleries) {
        this.galleries = galleries;
    }

    @Override
    public void onGallerySelected(int position) {
        GalleryResource gallery = this.galleries.get(position);
        String url = gallery.getLink(GalleryResource.REL_ITEMS).getHref();
        GalleryPhotoListFragment galleryPhotoListFragment = GalleryPhotoListFragment.newInstance(url);
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .add(R.id.container, galleryPhotoListFragment)
                .addToBackStack(null);
        transaction.commit();
    }

    @Override
    public GalleryResource getGalleryByPosition(int position) {
        return this.galleries.get(position);
    }

    @Override
    public void onDeleteGalleryByPosition(int position) {
        this.galleries.remove(position);
    }


    //***************************************
    // GalleryAddFragmentListener methods
    //***************************************

    // NOTE(review): siblings carry @Override; confirm this is declared in
    // GalleryAddFragmentListener and annotate accordingly.
    public void onGalleryAddComplete() {
        FragmentManager fragmentManager = getFragmentManager();
        fragmentManager.popBackStack();
        GalleryListFragment galleryListFragment =
                (GalleryListFragment) fragmentManager.findFragmentByTag(GalleryListFragment.TAG);
        galleryListFragment.fetchGalleryList();
    }


    //***************************************
    // GalleryPhotoListFragmentListener methods
    //***************************************

    @Override
    public void onDownloadGalleryPhotosComplete(List<ItemResource> photos) {
        this.photos = photos;
    }

    @Override
    public void onGalleryPhotoSelected(int position) {
        PhotoDetailFragment photoDetailFragment = PhotoDetailFragment.newInstance(position);
        FragmentManager fragmentManager = getFragmentManager();
        FragmentTransaction transaction = fragmentManager.beginTransaction()
                .add(R.id.container, photoDetailFragment)
                .addToBackStack(null);
        transaction.commit();
    }

    @Override
    public ItemResource getGalleryPhotoByPosition(int position) {
        return this.photos.get(position);
    }

    @Override
    public void onRemovePhotoFromGalleryByPosition(int position) {
        this.photos.remove(position);
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.formatter.blocks;
import com.intellij.formatting.*;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.LeafPsiElement;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.GroovyLanguage;
import org.jetbrains.plugins.groovy.formatter.AlignmentProvider;
import org.jetbrains.plugins.groovy.formatter.FormattingContext;
import org.jetbrains.plugins.groovy.formatter.processors.GroovyIndentProcessor;
import org.jetbrains.plugins.groovy.formatter.processors.GroovyWrappingProcessor;
import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes;
import org.jetbrains.plugins.groovy.lang.lexer.TokenSets;
import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
import org.jetbrains.plugins.groovy.lang.parser.GroovyParserDefinition;
import org.jetbrains.plugins.groovy.lang.psi.GrQualifiedReference;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
import org.jetbrains.plugins.groovy.lang.psi.GroovyFileBase;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.GrListOrMap;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.GrThrowsClause;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrLabeledStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariableDeclaration;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrArgumentLabel;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrArgumentList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrNamedArgument;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrCodeBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.clauses.GrTraditionalForClause;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.literals.GrLiteral;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.literals.GrString;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.path.GrMethodCallExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameterList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrExtendsClause;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinitionBody;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.util.GrStringUtil;
import org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
/**
* Utility class to generate myBlock hierarchy
*
* @author ilyas
*/
public class GroovyBlockGenerator {
private static final TokenSet NESTED = TokenSet.create(
GroovyElementTypes.REFERENCE_EXPRESSION,
GroovyElementTypes.PATH_INDEX_PROPERTY,
GroovyElementTypes.PATH_METHOD_CALL,
GroovyElementTypes.PATH_PROPERTY_REFERENCE
);
private static final Logger LOG = Logger.getInstance(GroovyBlockGenerator.class);
private final GroovyBlock myBlock;
private final ASTNode myNode;
private final AlignmentProvider myAlignmentProvider;
private final GroovyWrappingProcessor myWrappingProcessor;
private final FormattingContext myContext;
/**
 * Creates a generator producing sub-blocks for the given formatting block.
 * Caches the block's node, context, alignment provider and a wrapping
 * processor for reuse by the generation methods.
 */
public GroovyBlockGenerator(GroovyBlock block) {
  this.myBlock = block;
  this.myContext = block.getContext();
  this.myNode = block.getNode();
  this.myAlignmentProvider = this.myContext.getAlignmentProvider();
  this.myWrappingProcessor = new GroovyWrappingProcessor(block);
}
/**
 * Returns the visible children of a closure node with the surrounding
 * '{' and '}' (when present) stripped off, i.e. just the body.
 */
static List<ASTNode> getClosureBodyVisibleChildren(final ASTNode node) {
  final List<ASTNode> body = visibleChildren(node);
  if (!body.isEmpty() && body.get(0).getElementType() == GroovyTokenTypes.mLCURLY) {
    body.remove(0);
  }
  final int size = body.size();
  if (size > 0 && body.get(size - 1).getElementType() == GroovyTokenTypes.mRCURLY) {
    body.remove(size - 1);
  }
  return body;
}
/**
 * Builds the list of child formatting blocks for {@code myNode}, dispatching
 * on the node's PSI/element type: binary expressions, multiline strings,
 * GStrings/regexes, chained calls, list-like clauses, closures, code blocks,
 * methods, and alignment-only cases fall through to a generic per-child list.
 */
public List<Block> generateSubBlocks() {
  //For binary expressions
  PsiElement blockPsi = myNode.getPsi();
  IElementType elementType = myNode.getElementType();
  if (blockPsi instanceof GrBinaryExpression && !(blockPsi.getParent() instanceof GrBinaryExpression)) {
    return generateForBinaryExpr();
  }

  //For multiline strings
  if ((elementType == GroovyTokenTypes.mSTRING_LITERAL || elementType == GroovyTokenTypes.mGSTRING_LITERAL) && myBlock.getTextRange().equals(myNode.getTextRange())) {
    String text = myNode.getText();
    if (text.length() > 6) {
      // BUGFIX: the original combined the two """-checks with non-short-circuit
      // '&' instead of '&&'. startsWith/endsWith is the equivalent idiomatic
      // form (the length() > 6 guard makes prefix and suffix disjoint).
      if (text.startsWith("'''") && text.endsWith("'''") ||
          text.startsWith("\"\"\"") && text.endsWith("\"\"\"")) {
        return generateForMultiLineString();
      }
    }
  }

  //for gstrings
  if (elementType == GroovyElementTypes.GSTRING || elementType == GroovyElementTypes.REGEX || elementType ==
      GroovyTokenTypes.mREGEX_LITERAL || elementType ==
      GroovyTokenTypes.mDOLLAR_SLASH_REGEX_LITERAL) {
    // Plain GStrings format their injections with a forbid-wrapping context.
    final FormattingContext context =
      myNode.getPsi() instanceof GrString && ((GrString)myNode.getPsi()).isPlainString() ? myContext.createContext(true) : myContext;
    final ArrayList<Block> subBlocks = new ArrayList<>();
    ASTNode[] children = getGroovyChildren(myNode);
    for (ASTNode childNode : children) {
      if (childNode.getTextRange().getLength() > 0) {
        subBlocks.add(new GroovyBlock(childNode, getIndent(childNode), Wrap.createWrap(WrapType.NONE, false), context));
      }
    }
    return subBlocks;
  }

  // chained properties, calls, indexing, etc — only from the topmost node of the chain
  if (NESTED.contains(elementType) && blockPsi.getParent() != null && !NESTED.contains(blockPsi.getParent().getNode().getElementType())) {
    final List<Block> subBlocks = new ArrayList<>();
    AlignmentProvider.Aligner dotsAligner = myContext.getSettings().ALIGN_MULTILINE_CHAINED_METHODS ? myAlignmentProvider.createAligner(false) : null;
    final Wrap wrap = myWrappingProcessor.getChainedMethodCallWrap();
    addNestedChildren(myNode.getPsi(), subBlocks, dotsAligner, true, wrap);
    return subBlocks;
  }

  // Map literals: align labels with labels and values with values (no early return;
  // block generation falls through to the generic cases below).
  if (blockPsi instanceof GrListOrMap && ((GrListOrMap)blockPsi).isMap() && myContext.getGroovySettings().ALIGN_NAMED_ARGS_IN_MAP) {
    AlignmentProvider.Aligner labels = myAlignmentProvider.createAligner(false);
    AlignmentProvider.Aligner exprs = myAlignmentProvider.createAligner(true);
    GrNamedArgument[] namedArgs = ((GrListOrMap)blockPsi).getNamedArguments();
    for (GrNamedArgument arg : namedArgs) {
      GrArgumentLabel label = arg.getLabel();
      if (label != null) labels.append(label);
      PsiElement colon = arg.getColon();
      if (colon == null) colon = arg.getExpression();
      if (colon != null) exprs.append(colon);
    }
  }

  // For Parameter lists
  if (isListLikeClause(blockPsi)) {
    final ArrayList<Block> subBlocks = new ArrayList<>();
    List<ASTNode> astNodes = visibleChildren(myNode);
    if (mustAlign(blockPsi, astNodes)) {
      final AlignmentProvider.Aligner aligner = myAlignmentProvider.createAligner(false);
      for (ASTNode node : astNodes) {
        if (!isKeyword(node)) aligner.append(node.getPsi());
      }
    }
    for (ASTNode childNode : astNodes) {
      subBlocks.add(new GroovyBlock(childNode, getIndent(childNode), getChildWrap(childNode), myContext));
    }
    return subBlocks;
  }

  boolean classLevel = blockPsi instanceof GrTypeDefinitionBody;

  // Closures with an explicit '->' and parameters: '{', a ClosureBodyBlock
  // (params + body), '}'.
  if (blockPsi instanceof GrClosableBlock &&
      ((GrClosableBlock)blockPsi).getArrow() != null &&
      ((GrClosableBlock)blockPsi).getParameters().length > 0 &&
      !getClosureBodyVisibleChildren(myNode).isEmpty()) {
    GrClosableBlock closableBlock = (GrClosableBlock)blockPsi;
    ArrayList<Block> blocks = new ArrayList<>();

    PsiElement lbrace = closableBlock.getLBrace();
    if (lbrace != null) {
      ASTNode node = lbrace.getNode();
      blocks.add(new GroovyBlock(node, getIndent(node), Wrap.createWrap(WrapType.NONE, false), myContext));
    }

    /* {
      Indent indent = GroovyIndentProcessor.getChildIndent(myBlock, parameterListNode);
      GroovyBlock block = new GroovyBlock(parameterListNode, indent, myWrap, mySettings, myGroovySettings, myAlignmentProvider);
      blocks.add(block);
    }
    {
      PsiElement arrow = closableBlock.getArrow();
      ASTNode node = arrow.getNode();
      Indent indent = GroovyIndentProcessor.getChildIndent(myBlock, node);
      GroovyBlock block = new GroovyBlock(node, indent, myWrap, mySettings, myGroovySettings, myAlignmentProvider);
      blocks.add(block);
    }*/
    {
      Indent indent = Indent.getNormalIndent();
      ASTNode parameterListNode = closableBlock.getParameterList().getNode();
      ClosureBodyBlock bodyBlock = new ClosureBodyBlock(parameterListNode, indent, Wrap.createWrap(WrapType.NONE, false), myContext);
      blocks.add(bodyBlock);
    }

    PsiElement rbrace = closableBlock.getRBrace();
    if (rbrace != null) {
      ASTNode node = rbrace.getNode();
      blocks.add(new GroovyBlock(node, getIndent(node), Wrap.createWrap(WrapType.NONE, false), myContext));
    }

    return blocks;
  }

  if (blockPsi instanceof GrCodeBlock || blockPsi instanceof GroovyFile || classLevel) {
    return generateSubBlockForCodeBlocks(classLevel, visibleChildren(myNode), myContext.getGroovySettings().INDENT_LABEL_BLOCKS);
  }

  // Methods: drop the parentheses tokens and wrap the parameter list in a
  // dedicated ParameterListBlock.
  if (blockPsi instanceof GrMethod) {
    final ArrayList<Block> subBlocks = new ArrayList<>();
    for (ASTNode childNode : getGroovyChildren(myNode)) {
      if (childNode.getElementType() == GroovyTokenTypes.mLPAREN) continue;
      if (childNode.getElementType() == GroovyTokenTypes.mRPAREN) continue;
      if (childNode.getElementType() == GroovyElementTypes.PARAMETERS_LIST) {
        subBlocks.add(new ParameterListBlock(((GrMethod)blockPsi), Indent.getNoneIndent(), Wrap.createWrap(WrapType.NONE, false), myContext));
      }
      else if (canBeCorrectBlock(childNode)) {
        subBlocks.add(new GroovyBlock(childNode, getIndent(childNode), getChildWrap(childNode), myContext));
      }
    }
    return subBlocks;
  }
  // The remaining branches only register alignments, then fall through to the
  // generic per-child block generation at the bottom.
  else if (blockPsi instanceof GrTraditionalForClause) {
    if (myContext.getSettings().ALIGN_MULTILINE_FOR) {
      final GrTraditionalForClause clause = (GrTraditionalForClause)blockPsi;
      final AlignmentProvider.Aligner parenthesesAligner = myAlignmentProvider.createAligner(false);
      parenthesesAligner.append(clause.getInitialization());
      parenthesesAligner.append(clause.getCondition());
      parenthesesAligner.append(clause.getUpdate());
    }
  }
  else if (blockPsi instanceof GrBinaryExpression) {
    if (myContext.getSettings().ALIGN_MULTILINE_BINARY_OPERATION) {
      final GrBinaryExpression binary = (GrBinaryExpression)blockPsi;
      final GrExpression left = binary.getLeftOperand();
      final GrExpression right = binary.getRightOperand();
      if (left != null && right != null) {
        myAlignmentProvider.addPair(left, right, false);
      }
    }
  }
  else if (blockPsi instanceof GrAssignmentExpression) {
    if (myContext.getSettings().ALIGN_MULTILINE_ASSIGNMENT) {
      final GrAssignmentExpression assignment = (GrAssignmentExpression)blockPsi;
      final GrExpression lValue = assignment.getLValue();
      final GrExpression rValue = assignment.getRValue();
      if (lValue != null && rValue != null) {
        myAlignmentProvider.addPair(lValue, rValue, false);
      }
    }
  }
  else if (blockPsi instanceof GrConditionalExpression) {
    if (myContext.getSettings().ALIGN_MULTILINE_TERNARY_OPERATION) {
      final GrConditionalExpression conditional = (GrConditionalExpression)blockPsi;
      final AlignmentProvider.Aligner exprAligner = myAlignmentProvider.createAligner(false);
      exprAligner.append(conditional.getCondition());
      // Elvis reuses the condition as the "then" branch, so don't align it twice.
      if (!(conditional instanceof GrElvisExpression)) {
        exprAligner.append(conditional.getThenBranch());
      }
      exprAligner.append(conditional.getElseBranch());

      ASTNode question = conditional.getNode().findChildByType(GroovyTokenTypes.mQUESTION);
      ASTNode colon = conditional.getNode().findChildByType(GroovyTokenTypes.mCOLON);
      if (question != null && colon != null) {
        AlignmentProvider.Aligner questionColonAligner = myAlignmentProvider.createAligner(false);
        questionColonAligner.append(question.getPsi());
        questionColonAligner.append(colon.getPsi());
      }
    }
  }

  // For other cases
  final ArrayList<Block> subBlocks = new ArrayList<>();
  for (ASTNode childNode : visibleChildren(myNode)) {
    subBlocks.add(new GroovyBlock(childNode, getIndent(childNode), getChildWrap(childNode), myContext));
  }
  return subBlocks;
}
/** Delegates wrap selection for a child node to the wrapping processor. */
private Wrap getChildWrap(ASTNode childNode) {
  final Wrap childWrap = myWrappingProcessor.getChildWrap(childNode);
  return childWrap;
}
@NotNull
// Generates sub-blocks for a code block or class body. When label blocks are
// indented and this node is a code block, labeled statements and everything up
// to the next label (or closing brace) are grouped into a single GrLabelBlock;
// otherwise each visible child becomes its own GroovyBlock. In both paths the
// children are first fed to calculateAlignments().
public List<Block> generateSubBlockForCodeBlocks(boolean classLevel, final List<ASTNode> children, boolean indentLabelBlocks) {
final ArrayList<Block> subBlocks = new ArrayList<>();
if (indentLabelBlocks && isCodeBlock()) {
// Labeled statements were parsed as parents of their section; flatten so the
// section members appear as siblings of the label.
List<ASTNode> flattenChildren = flattenChildren(children);
calculateAlignments(flattenChildren, classLevel);
for (int i = 0; i < flattenChildren.size(); i++) {
ASTNode childNode = flattenChildren.get(i);
if (childNode.getElementType() == GroovyElementTypes.LABELED_STATEMENT) {
int start = i;
// Advance i to the end of this label's section: the next label or '}'.
do {
i++;
}
while (i < flattenChildren.size() &&
flattenChildren.get(i).getElementType() != GroovyElementTypes.LABELED_STATEMENT &&
flattenChildren.get(i).getElementType() != GroovyTokenTypes.mRCURLY);
subBlocks.add(new GrLabelBlock(childNode, flattenChildren.subList(start + 1, i), classLevel, getIndent(childNode), getChildWrap(childNode), myContext));
// Step back one: the outer loop's i++ must re-examine the element that
// terminated the section.
i--;
}
else {
subBlocks.add(new GroovyBlock(childNode, getIndent(childNode), getChildWrap(childNode), myContext));
}
}
}
else {
calculateAlignments(children, classLevel);
for (ASTNode childNode : children) {
subBlocks.add(new GroovyBlock(childNode, getIndent(childNode), getChildWrap(childNode), myContext));
}
}
return subBlocks;
}
/** True when this node is a statement-holding block (or the file itself). */
private boolean isCodeBlock() {
  final IElementType type = myNode.getElementType();
  if (type == GroovyElementTypes.OPEN_BLOCK) return true;
  if (type == GroovyElementTypes.CLOSABLE_BLOCK) return true;
  if (type == GroovyElementTypes.CONSTRUCTOR_BODY) return true;
  return type == GroovyParserDefinition.GROOVY_FILE;
}
/**
 * Flattens labeled statements so their visible children appear as siblings
 * of the label in the returned list (depth-first, original order preserved).
 */
private static List<ASTNode> flattenChildren(List<ASTNode> children) {
  final ArrayList<ASTNode> flattened = ContainerUtil.newArrayList();
  for (ASTNode node : children) {
    processNodeFlattening(flattened, node);
  }
  return flattened;
}
/** Appends {@code child}; labeled statements additionally recurse into their children. */
private static void processNodeFlattening(ArrayList<ASTNode> result, ASTNode child) {
  result.add(child);
  if (child.getElementType() != GroovyElementTypes.LABELED_STATEMENT) {
    return;
  }
  for (ASTNode nested : visibleChildren(child)) {
    processNodeFlattening(result, nested);
  }
}
/** Computes the indent for a child of {@code myBlock} via the indent processor. */
private Indent getIndent(ASTNode childNode) {
  final GroovyIndentProcessor indentProcessor = new GroovyIndentProcessor();
  return indentProcessor.getChildIndent(myBlock, childNode);
}
// Scans the children and registers column alignments for two kinds of groups:
// Spock data tables (rows following a labeled statement, joined by '|'/'||')
// and consecutive variable/field declarations. A group is flushed via
// alignGroup() whenever a statement that cannot extend it is encountered.
// 'spock' records which kind the current group is.
private void calculateAlignments(List<ASTNode> children, boolean classLevel) {
List<GrStatement> currentGroup = null;
boolean spock = true;
for (ASTNode child : children) {
PsiElement psi = child.getPsi();
// A label starts a (potential) new Spock table; flush whatever came before.
if (psi instanceof GrLabeledStatement) {
alignGroup(currentGroup, spock, classLevel);
currentGroup = ContainerUtil.newArrayList();
spock = true;
}
// '|'-joined rows extend an open Spock table group.
else if (currentGroup != null && spock && isTablePart(psi)) {
currentGroup.add((GrStatement)psi);
}
else if (psi instanceof GrVariableDeclaration) {
GrVariable[] variables = ((GrVariableDeclaration)psi).getVariables();
if (variables.length > 0) {
// Start a fresh declaration group unless this declaration continues a
// field group (class level, group open, no blank-line break, not spock).
if (!classLevel || currentGroup == null || fieldGroupEnded(psi) || spock) {
alignGroup(currentGroup, spock, classLevel);
currentGroup = ContainerUtil.newArrayList();
spock = false;
}
currentGroup.add((GrStatement)psi);
}
}
else {
// Skippable elements (comments etc.) neither join nor break the group.
if (shouldSkip(classLevel, psi)) continue;
alignGroup(currentGroup, spock, classLevel);
currentGroup = null;
}
}
// Flush the trailing group, if any.
if (currentGroup != null) {
alignGroup(currentGroup, spock, classLevel);
}
}
// True when this element should be ignored while building alignment groups in
// calculateAlignments() — it neither extends nor terminates the open group.
private boolean shouldSkip(boolean classLevel, PsiElement psi) {
// Comments are skipped, except a class-level comment that starts on its own
// line after a blank-line break — that one ends the field group.
if (psi instanceof PsiComment) {
PsiElement prev = psi.getPrevSibling();
if (prev != null) {
if (!classLevel || !PsiUtil.isNewLine(prev) || !fieldGroupEnded(psi)) {
return true;
}
}
}
if (psi.getParent() instanceof GrLabeledStatement) {
if (psi instanceof GrLiteral && GrStringUtil.isStringLiteral((GrLiteral)psi) //skip string comments at the beginning of spock table
|| !(psi instanceof GrStatement)) {
return true;
}
}
return false;
}
/**
 * Flushes an accumulated statement group: Spock groups go through the table
 * aligner, declaration groups through the variable aligner. A null group
 * (nothing accumulated) is a no-op.
 */
private void alignGroup(@Nullable List<GrStatement> group, boolean spock, boolean classLevel) {
  if (group != null) {
    if (spock) {
      alignSpockTable(group);
    }
    else {
      alignVariableDeclarations(group, classLevel);
    }
  }
}
// Aligns a run of variable declarations: all variable names share one column;
// at class level (with ALIGN_GROUP_FIELD_DECLARATIONS) the type elements and
// the '=' of each declaration's last variable are aligned too.
// Precondition: every statement in 'group' is a GrVariableDeclaration with at
// least one variable (guaranteed by calculateAlignments()).
private void alignVariableDeclarations(List<GrStatement> group, boolean classLevel) {
AlignmentProvider.Aligner typeElement = myAlignmentProvider.createAligner(true);
AlignmentProvider.Aligner varName = myAlignmentProvider.createAligner(true);
AlignmentProvider.Aligner eq = myAlignmentProvider.createAligner(true);
for (GrStatement statement : group) {
GrVariableDeclaration varDeclaration = (GrVariableDeclaration) statement;
GrVariable[] variables = varDeclaration.getVariables();
for (GrVariable variable : variables) {
varName.append(variable.getNameIdentifierGroovy());
}
if (classLevel && myContext.getSettings().ALIGN_GROUP_FIELD_DECLARATIONS) {
typeElement.append(varDeclaration.getTypeElementGroovy());
// Only the last variable's initializer '=' participates in alignment.
ASTNode current_eq = variables[variables.length - 1].getNode().findChildByType(GroovyTokenTypes.mASSIGN);
if (current_eq != null) {
eq.append(current_eq.getPsi());
}
}
}
}
// Right-aligns the '|'/'||' separators of a Spock data table column by column.
// The first row defines the column aligners; subsequent rows are appended to
// them. 'embedded' distinguishes a table whose header row is itself a
// '|'-expression (group starts with a table row) from one where the header is
// the labeled statement at index 0 (real rows start at index 1).
private void alignSpockTable(List<GrStatement> group) {
if (group.size() < 2) {
return;
}
GrStatement inner = group.get(0);
boolean embedded = inner != null && isTablePart(inner);
GrStatement first = embedded ? inner : group.get(1);
List<AlignmentProvider.Aligner> alignments = ContainerUtil
.map2List(getSpockTable(first), leaf -> myAlignmentProvider.createAligner(leaf, true, Alignment.Anchor.RIGHT));
int second = embedded ? 1 : 2;
for (int i = second; i < group.size(); i++) {
List<LeafPsiElement> table = getSpockTable(group.get(i));
// Ragged rows: align only as many separators as both rows share.
for (int j = 0; j < Math.min(table.size(), alignments.size()); j++) {
alignments.get(j).append(table.get(j));
}
}
}
/**
 * Returns whether the field group preceding {@code psi} is considered finished:
 * either group alignment is disabled entirely, or the preceding sibling contains at
 * least KEEP_BLANK_LINES_IN_DECLARATIONS line breaks.
 */
private boolean fieldGroupEnded(PsiElement psi) {
  if (!myContext.getSettings().ALIGN_GROUP_FIELD_DECLARATIONS) {
    return true;
  }
  PsiElement prevSibling = psi.getPrevSibling();
  if (prevSibling == null) {
    return false;
  }
  int lineBreaks = StringUtil.countChars(prevSibling.getText(), '\n');
  return lineBreaks >= myContext.getSettings().KEEP_BLANK_LINES_IN_DECLARATIONS;
}
/**
 * Collects the column-separator tokens ('|'/'||') of one Spock table row, walking the
 * left-leaning binary-expression chain and returning the separators in source order.
 */
private static List<LeafPsiElement> getSpockTable(GrStatement statement) {
  LinkedList<LeafPsiElement> separators = new LinkedList<>();
  for (PsiElement current = statement; isTablePart(current); ) {
    GrBinaryExpression row = (GrBinaryExpression)current;
    separators.addFirst((LeafPsiElement)row.getOperationToken());
    current = row.getLeftOperand();
  }
  return separators;
}
/**
 * Returns whether {@code psi} is a Spock table row part, i.e. a binary expression
 * whose operator is '|' or '||'.
 */
private static boolean isTablePart(PsiElement psi) {
  if (!(psi instanceof GrBinaryExpression)) {
    return false;
  }
  IElementType op = ((GrBinaryExpression)psi).getOperationTokenType();
  return op == GroovyTokenTypes.mBOR || op == GroovyTokenTypes.mLOR;
}
/**
 * Returns the children of {@code node} that contain visible (non-whitespace) text
 * and can therefore become formatting blocks.
 */
public static List<ASTNode> visibleChildren(ASTNode node) {
  List<ASTNode> result = new ArrayList<>();
  for (ASTNode child : getGroovyChildren(node)) {
    if (!canBeCorrectBlock(child)) {
      continue; // whitespace-only node, invisible to the formatter
    }
    result.add(child);
  }
  return result;
}
/**
 * Decides whether the children of {@code blockPsi} should share one alignment,
 * driven by the corresponding "align multiline ..." code style settings.
 */
private boolean mustAlign(PsiElement blockPsi, List<ASTNode> children) {
  // We don't want to align single call argument if it's a closure. The reason is that it looks better to have call like
  //
  // foo({
  //   println 'xxx'
  // })
  //
  // than
  //
  // foo({
  //        println 'xxx'
  //      })
  if (blockPsi instanceof GrArgumentList && myContext.getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS) {
    // Exactly '(', closure-or-list, ')' — the single closure/list argument case described above.
    return !(children.size() == 3 &&
    children.get(0).getElementType() == GroovyTokenTypes.mLPAREN &&
    (children.get(1).getElementType() == GroovyElementTypes.CLOSABLE_BLOCK || children.get(1).getElementType() ==
    GroovyElementTypes.LIST_OR_MAP) &&
    children.get(2).getElementType() == GroovyTokenTypes.mRPAREN);
  }
  if (blockPsi instanceof GrAssignmentExpression && ((GrAssignmentExpression)blockPsi).getRValue() instanceof GrAssignmentExpression) {
    return myContext.getSettings().ALIGN_MULTILINE_ASSIGNMENT;
  }
  return blockPsi instanceof GrParameterList && myContext.getSettings().ALIGN_MULTILINE_PARAMETERS ||
  blockPsi instanceof GrExtendsClause && myContext.getSettings().ALIGN_MULTILINE_EXTENDS_LIST ||
  blockPsi instanceof GrThrowsClause && myContext.getSettings().ALIGN_MULTILINE_THROWS_LIST ||
  blockPsi instanceof GrListOrMap && myContext.getGroovySettings().ALIGN_MULTILINE_LIST_OR_MAP;
}
/**
 * Returns whether {@code blockPsi} is a comma-separated, list-like clause
 * (parameters, arguments, assignment, extends/throws list, or list/map literal).
 */
private static boolean isListLikeClause(PsiElement blockPsi) {
  if (blockPsi instanceof GrParameterList) return true;
  if (blockPsi instanceof GrArgumentList) return true;
  if (blockPsi instanceof GrAssignmentExpression) return true;
  if (blockPsi instanceof GrExtendsClause) return true;
  if (blockPsi instanceof GrThrowsClause) return true;
  return blockPsi instanceof GrListOrMap;
}
/**
 * Returns whether {@code node} is a keyword or keyword-like brace. A ')' that closes
 * an argument list directly after ',' is deliberately excluded.
 */
private static boolean isKeyword(ASTNode node) {
  if (node == null) {
    return false;
  }
  IElementType type = node.getElementType();
  if (TokenSets.KEYWORDS.contains(type)) {
    return true;
  }
  if (!TokenSets.BRACES.contains(type)) {
    return false;
  }
  boolean closingParenAfterComma =
    PlatformPatterns.psiElement().withText(")").withParent(GrArgumentList.class).afterLeaf(",").accepts(node.getPsi());
  return !closingParenAfterComma;
}
/**
 * Splits a multiline string node into three sub-blocks — opening delimiter, raw
 * content, closing delimiter — so the content keeps its absolute indentation.
 *
 * NOTE(review): the hard-coded 3 assumes a triple-quote delimiter (""" or ''');
 * confirm this path is only reached for such literals.
 */
private List<Block> generateForMultiLineString() {
  final ArrayList<Block> subBlocks = new ArrayList<>();
  final int start = myNode.getTextRange().getStartOffset();
  final int end = myNode.getTextRange().getEndOffset();
  subBlocks.add(new GroovyBlockWithRange(myNode, Indent.getNoneIndent(), new TextRange(start, start + 3), Wrap.createWrap(WrapType.NONE, false), myContext));
  subBlocks.add(new GroovyBlockWithRange(myNode, Indent.getAbsoluteNoneIndent(), new TextRange(start + 3, end - 3), Wrap.createWrap(WrapType.NONE, false), myContext));
  subBlocks.add(new GroovyBlockWithRange(myNode, Indent.getAbsoluteNoneIndent(), new TextRange(end - 3, end), Wrap.createWrap(WrapType.NONE, false), myContext));
  return subBlocks;
}
/**
 * @param node tree node
 * @return {@code true} when the node has visible (non-whitespace) text and can
 *         therefore become a formatting block
 */
private static boolean canBeCorrectBlock(final ASTNode node) {
  String trimmed = node.getText().trim();
  return !trimmed.isEmpty();
}
/**
 * Returns the children of {@code node}. For an {@link OuterLanguageElement}
 * (template/injected context) the Groovy PSI of the containing file is consulted and
 * the nodes covered by this element's text range are collected instead.
 */
private static ASTNode[] getGroovyChildren(final ASTNode node) {
  PsiElement psi = node.getPsi();
  if (psi instanceof OuterLanguageElement) {
    TextRange range = node.getTextRange();
    ArrayList<ASTNode> childList = new ArrayList<>();
    PsiFile groovyFile = psi.getContainingFile().getViewProvider().getPsi(GroovyLanguage.INSTANCE);
    if (groovyFile instanceof GroovyFileBase) {
      addChildNodes(groovyFile, childList, range);
    }
    // Zero-length array idiom: simpler and at least as fast as pre-sizing on modern JVMs.
    return childList.toArray(new ASTNode[0]);
  }
  return node.getChildren(null);
}
/**
 * Recursively collects the top-most AST nodes that are fully covered by
 * {@code range} into {@code childNodes}; elements crossing the range boundary are
 * descended into instead of being added.
 */
private static void addChildNodes(PsiElement elem, ArrayList<ASTNode> childNodes, TextRange range) {
  ASTNode node = elem.getNode();
  if (range.contains(elem.getTextRange()) && node != null) {
    childNodes.add(node);
    return;
  }
  for (PsiElement child : elem.getChildren()) {
    addChildNodes(child, childNodes, range);
  }
}
/**
 * Generates formatting blocks for a binary expression, flattening nested binary
 * operands into one list.
 *
 * @return the child blocks for this binary expression node
 */
private List<Block> generateForBinaryExpr() {
  final ArrayList<Block> subBlocks = new ArrayList<>();
  // One shared aligner for all operands when multi-line binary alignment is enabled.
  AlignmentProvider.Aligner
    alignment = myContext.getSettings().ALIGN_MULTILINE_BINARY_OPERATION ? myAlignmentProvider.createAligner(false) : null;
  GrBinaryExpression binary = (GrBinaryExpression)myNode.getPsi();
  LOG.assertTrue(binary != null);
  addBinaryChildrenRecursively(binary, subBlocks, Indent.getContinuationWithoutFirstIndent(), alignment);
  return subBlocks;
}
/**
 * Adds blocks for all children of the given binary expression to {@code list},
 * recursing into operands that are themselves binary expressions so the whole chain
 * is flattened at the same indent level.
 *
 * @param elem    element expected to be a GrBinaryExpression; anything else is ignored
 * @param list    output list of formatting blocks
 * @param indent  indent for the blocks created at this level
 * @param aligner shared aligner for operands (the operator token is excluded), or null
 */
private void addBinaryChildrenRecursively(PsiElement elem, List<Block> list, Indent indent, @Nullable AlignmentProvider.Aligner aligner) {
  if (elem == null) return;
  // For binary expressions
  if ((elem instanceof GrBinaryExpression)) {
    GrBinaryExpression myExpr = ((GrBinaryExpression) elem);
    if (myExpr.getLeftOperand() instanceof GrBinaryExpression) {
      addBinaryChildrenRecursively(myExpr.getLeftOperand(), list, Indent.getContinuationWithoutFirstIndent(), aligner);
    }
    PsiElement op = ((GrBinaryExpression)elem).getOperationToken();
    for (ASTNode childNode : visibleChildren(elem.getNode())) {
      PsiElement psi = childNode.getPsi();
      // Nested binary operands were/will be handled by the recursive calls above/below.
      if (!(psi instanceof GrBinaryExpression)) {
        // Align operands only; the operator token is not appended to the aligner.
        if (op != psi && aligner != null) {
          aligner.append(psi);
        }
        list.add(new GroovyBlock(childNode, indent, getChildWrap(childNode), myContext));
      }
    }
    if (myExpr.getRightOperand() instanceof GrBinaryExpression) {
      addBinaryChildrenRecursively(myExpr.getRightOperand(), list, Indent.getContinuationWithoutFirstIndent(), aligner
      );
    }
  }
}
/**
 * Builds blocks for a nested call-chain element. For a method call whose invoked
 * expression is a qualified reference, the qualifier part is processed as a nested
 * prefix and the name plus arguments become a single MethodCallWithoutQualifierBlock;
 * otherwise all children are processed uniformly.
 */
private void addNestedChildren(final PsiElement elem,
                               List<Block> list,
                               @Nullable AlignmentProvider.Aligner aligner,
                               final boolean topLevel,
                               Wrap wrap) {
  final List<ASTNode> children = visibleChildren(elem.getNode());
  if (elem instanceof GrMethodCallExpression) {
    GrExpression invokedExpression = ((GrMethodCallExpression)elem).getInvokedExpression();
    if (invokedExpression instanceof GrQualifiedReference) {
      final PsiElement nameElement = ((GrQualifiedReference)invokedExpression).getReferenceNameElement();
      if (nameElement != null) {
        List<ASTNode> grandChildren = visibleChildren(invokedExpression.getNode());
        // Locate the reference name among the qualified reference's children.
        int i = 0;
        while (i < grandChildren.size() && nameElement != grandChildren.get(i).getPsi()) i++;
        if (i > 0) {
          // Everything before the name (qualifier and dot) forms the nested prefix.
          processNestedChildrenPrefix(list, aligner, false, grandChildren, i, wrap);
        }
        if (i < grandChildren.size()) {
          LOG.assertTrue(nameElement == grandChildren.get(i).getPsi());
          list.add(new MethodCallWithoutQualifierBlock(nameElement, wrap, topLevel, children, elem, myContext));
        }
        return;
      }
    }
  }
  processNestedChildrenPrefix(list, aligner, topLevel, children, children.size(), wrap);
}
/**
 * Returns whether the given dot token directly follows (ignoring whitespace) a
 * method call whose trailing closure spans multiple lines.
 */
private static boolean isAfterMultiLineClosure(ASTNode dot) {
  PsiElement prev = PsiUtil.skipWhitespaces(dot.getPsi().getPrevSibling(), false);
  if (!(prev instanceof GrMethodCall)) {
    return false;
  }
  PsiElement last = prev.getLastChild();
  return last instanceof GrClosableBlock && last.getText().contains("\n");
}
/**
 * Emits blocks for the prefix of {@code children}: the first child either recurses
 * as a nested call chain (NESTED element types) or becomes a plain
 * continuation-indented block; the remaining children up to {@code limit} are
 * delegated to {@link #addNestedChildrenSuffix}.
 */
private void processNestedChildrenPrefix(List<Block> list,
                                         @Nullable AlignmentProvider.Aligner aligner,
                                         boolean topLevel,
                                         List<ASTNode> children,
                                         int limit,
                                         Wrap wrap) {
  // NOTE(review): children is assumed to be non-empty — get(0) would throw otherwise.
  ASTNode fst = children.get(0);
  LOG.assertTrue(limit > 0);
  if (NESTED.contains(fst.getElementType())) {
    addNestedChildren(fst.getPsi(), list, aligner, false, wrap);
  }
  else {
    Indent indent = Indent.getContinuationWithoutFirstIndent();
    list.add(new GroovyBlock(fst, indent, getChildWrap(fst), myContext));
  }
  addNestedChildrenSuffix(list, aligner, topLevel, children, limit);
}
/**
 * Emits blocks for children[1..limit): nested chain parts, identifiers and dots get
 * continuation indent (except a dot right after a multi-line closure), everything
 * else gets none; dot tokens are also fed to {@code aligner} when present.
 */
void addNestedChildrenSuffix(List<Block> list,
                             @Nullable AlignmentProvider.Aligner aligner,
                             boolean topLevel,
                             List<ASTNode> children,
                             int limit) {
  for (int i = 1; i < limit; i++) {
    ASTNode childNode = children.get(i);
    if (canBeCorrectBlock(childNode)) {
      IElementType type = childNode.getElementType();
      // A dot that follows a multi-line closure deliberately stays unindented.
      Indent indent = topLevel || NESTED.contains(type) || type == GroovyTokenTypes.mIDENT || TokenSets.DOTS.contains(type) && !isAfterMultiLineClosure(
        childNode) ?
                      Indent.getContinuationWithoutFirstIndent() :
                      Indent.getNoneIndent();
      if (aligner != null && TokenSets.DOTS.contains(type)) {
        aligner.append(childNode.getPsi());
      }
      list.add(new GroovyBlock(childNode, indent, getChildWrap(childNode), myContext));
    }
  }
}
}
| |
/*
* Copyright (c) 2017, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.scmt.desk;
import static com.salesforce.scmt.utils.DeskJsonMapUtil.deskCaseToSalesforceJsonMap;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.desk.java.apiclient.model.ApiResponse;
import com.desk.java.apiclient.model.Case;
import com.desk.java.apiclient.model.CaseStatus;
import com.desk.java.apiclient.model.SortDirection;
import com.desk.java.apiclient.service.CaseService;
import com.salesforce.scmt.rabbitmq.RabbitConfiguration;
import com.salesforce.scmt.utils.DeskUtil;
import com.salesforce.scmt.utils.JsonUtil;
import com.salesforce.scmt.utils.RabbitUtil;
import com.salesforce.scmt.utils.SalesforceConstants;
import com.salesforce.scmt.utils.SalesforceConstants.CaseFields;
import com.salesforce.scmt.utils.SalesforceConstants.DeskMigrationFields;
import com.salesforce.scmt.utils.Utils;
import com.sforce.async.OperationEnum;
import retrofit.Response;
/**
 * Migrates Desk.com cases to Salesforce via the bulk API, queueing cases that have
 * attachments for a separate feed-migration pass over RabbitMQ.
 */
public class DeskCaseMigration<D extends Serializable> extends DeskBase<D>
{
    // Desk API docs report the page size limit as 500, but the real maximum is 100.
    private static final int DESK_PAGE_SIZE_CASE = 100;

    // Ids of cases with active attachments, drained in API_MAX_SIZE batches to the feed queue.
    private List<Long> attachmentIdList = new ArrayList<>();

    public DeskCaseMigration(DeskUtil du, Map<String, String> config)
    {
        super(du, config);
    }

    /**
     * Fetches one page of cases from Desk, either by id (full run) or by updated-at
     * timestamp (delta run), and wraps the HTTP response.
     *
     * BUG FIX: the original dereferenced {@code resp} after an IOException left it
     * null; a failed call now returns an unsuccessful response instead of throwing NPE.
     */
    @Override
    @SuppressWarnings("unchecked")
    protected DeskBaseResponse<ApiResponse<D>> callDesk(DeskUtil du)
    {
        // get a service
        CaseService service = du.getDeskClient().cases();
        DeskBaseResponse<ApiResponse<D>> d = new DeskBaseResponse<>();
        try
        {
            Response<ApiResponse<Case>> resp;
            if (!delta)
            {
                resp = service
                    .searchCasesById(lastRecordId, DESK_PAGE_SIZE_CASE, page, "id", SortDirection.ASC, null, null)
                    .execute();
            }
            else
            {
                resp = service.searchCasesByUpdatedDate(updatedAt, DESK_PAGE_SIZE_CASE, page, "updated_at",
                    SortDirection.ASC, null, null).execute();
            }
            d.errorCode = resp.code();
            d.setIsSuccess(resp.isSuccess());
            d.body = (ApiResponse<D>) resp.body();
            d.setHeaders(resp.headers());
            d.setMessage(resp.message());
        }
        catch (IOException e)
        {
            e.printStackTrace();
            d.setIsSuccess(false);
            d.setMessage(e.getMessage());
        }
        return d;
    }

    /** @return the Desk id of the given case. */
    @Override
    protected long getId(D d)
    {
        return ((Case) d).getId();
    }

    /** @return the case's updated-at time in epoch seconds. */
    @Override
    protected long getUpdatedAt(D d)
    {
        return (long) (((Case) d).getUpdatedAt().getTime() / 1000);
    }

    /**
     * Collects ids of cases that carry attachments and, once API_MAX_SIZE have
     * accumulated, publishes them to the feed-migration queue.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected Map<String, Object> objectSpecificProcessing(Map<String, Object> clientSettings,
        DeskBaseResponse<ApiResponse<D>> dResp) throws Exception
    {
        // build list of case id's that need a separate attachment/feed pass
        for (Case c : ((ApiResponse<Case>) dResp.body).getEntriesAsList())
        {
            if (c.getActiveAttachmentsCount() > 0)
            {
                attachmentIdList.add(c.getId());
            }
        }
        Utils.log(String.format("Request Count: [%d], Case List Size: [%d], Attachment Id List Size: [%d]",
            requestCount, recList.size(), attachmentIdList.size()));
        // flush a full batch of attachment case ids to the queue
        if (attachmentIdList.size() >= SalesforceConstants.API_MAX_SIZE)
        {
            clientSettings.put("case_ids_with_attachments",
                attachmentIdList.subList(0, SalesforceConstants.API_MAX_SIZE));
            RabbitUtil.publishToQueue(RabbitConfiguration.QUEUE_DESK_FEED_MIGRATION,
                RabbitConfiguration.EXCHANGE_FORMULA1, JsonUtil.toJson(clientSettings).getBytes());
            // clear the items sent to the queue
            attachmentIdList.subList(0, SalesforceConstants.API_MAX_SIZE).clear();
        }
        return clientSettings;
    }

    /** Creates the Salesforce bulk upsert job for Case records keyed by DeskId. */
    @Override
    protected String createJob(DeskUtil du) throws Exception
    {
        return du.getSalesforceService().createBulkJob(SalesforceConstants.OBJ_CASE, CaseFields.DeskId,
            OperationEnum.upsert);
    }

    /**
     * Marks the job as a case migration, records the next start id, and re-publishes
     * the configuration so the next worker continues from where this page ended.
     */
    @Override
    protected Map<String, String> objectSpecificBulkProcessing(Map<String, String> config) throws Exception
    {
        config.put("migrateCases", "true");
        // resume after the last record processed on this page
        config.put("start_id", String.valueOf(getId(recList.get(recList.size() - 1)) + 1));
        RabbitUtil.publishToQueue(RabbitConfiguration.QUEUE_DESK_DATA_MIGRATION, RabbitConfiguration.EXCHANGE_TRACTOR,
            JsonUtil.toJson(config).getBytes());
        return config;
    }

    /**
     * Drains any remaining attachment case ids to the feed queue in API_MAX_SIZE
     * batches, then marks the migration complete.
     */
    @Override
    protected void objectSpecificBulkComplete(DeskUtil du) throws Exception
    {
        // use the shared logger for consistency with the rest of the pipeline
        Utils.log("AttachmentIdListSize: " + attachmentIdList.size());
        while (!attachmentIdList.isEmpty())
        {
            // batch size is capped at API_MAX_SIZE
            int iMax = (attachmentIdList.size() > SalesforceConstants.API_MAX_SIZE ? SalesforceConstants.API_MAX_SIZE
                : attachmentIdList.size());
            Map<String, Object> clientSettings = du.getDeskService().getClientSettings();
            clientSettings.put("case_ids_with_attachments", attachmentIdList.subList(0, iMax));
            RabbitUtil.publishToQueue(RabbitConfiguration.QUEUE_DESK_FEED_MIGRATION,
                RabbitConfiguration.EXCHANGE_FORMULA1, JsonUtil.toJson(clientSettings).getBytes());
            // clear the items sent to the queue
            attachmentIdList.subList(0, iMax).clear();
        }
        du.updateMigrationStatus(DeskMigrationFields.StatusComplete, "Cases", dr);
    }

    /**
     * Opens a fresh bulk job for the next chunk, closes the current one, and logs the
     * upper created/updated timestamp boundary reached so far.
     */
    @Override
    protected void objectSpecificBulkCleanup(DeskUtil du) throws Exception
    {
        this.jobId = du.getSalesforceService().createBulkJob(SalesforceConstants.OBJ_CASE, CaseFields.DeskId,
            OperationEnum.upsert);
        Case record = (Case) recList.get(recList.size() - 1);
        // epoch seconds of the newest record handled (updated-at on delta runs)
        int lastTimestamp = (int) ((delta ? record.getUpdatedAt().getTime() : record.getCreatedAt().getTime()) / 1000);
        du.getSalesforceService().closeBulkJob(jobId, du.getDeskService().getMigrationId());
        dr.addError(String.format("Migrated all records created/updated before: [%d]", lastTimestamp));
    }

    /**
     * Converts one Desk case to its Salesforce JSON map; conversion failures are
     * recorded on the migration report instead of aborting the batch.
     */
    @Override
    protected List<Map<String, Object>> deskObjectToSalesforceObject(DeskUtil du, D d) throws Exception
    {
        ArrayList<Map<String, Object>> a = new ArrayList<>();
        try
        {
            a.add(deskCaseToSalesforceJsonMap(du, (Case) d, config));
        }
        catch (Exception e)
        {
            dr.incrementErrorCount(1);
            dr.addError(e.toString());
        }
        return a;
    }

    /** Deleted Desk cases are not migrated. */
    @Override
    protected boolean skipObject(D d)
    {
        return ((Case) d).getStatus() == CaseStatus.DELETED;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.xml.ws.spi;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.security.PrivilegedAction;
import java.util.Properties;
import org.apache.geronimo.osgi.locator.ProviderLocator;
/**
 * This code is designed to implement the pluggability
 * feature and is designed to both compile and run on JDK version 1.1 and
 * later. The code also runs both as part of an unbundled jar file and
 * when bundled as part of the JDK.
 * <p/>
 * This class is duplicated for each subpackage so keep it in sync.
 * It is package private and therefore is not exposed as part of the JAXWS
 * API.
 */
class FactoryFinder {
    /**
     * Set to true for debugging.
     */
    private static final boolean debug = false;

    private static void debugPrintln(String msg) {
        if (debug) {
            System.err.println("Factory Finder:" + msg);
        }
    }

    /**
     * Figure out which ClassLoader to use. For JDK 1.2 and later use
     * the context ClassLoader.
     *
     * @return the <code>ClassLoader</code>
     * @throws ConfigurationError if this class is unable to work with the
     *                            host JDK
     */
    private static ClassLoader findClassLoader()
        throws ConfigurationError {
        // REVIEW This doPriv block may be unnecessary because this method is private and
        // the caller already has a doPriv. I added the doPriv in case someone changes the
        // visibility of this method to non-private.
        ClassLoader cl = (ClassLoader)
            doPrivileged(new PrivilegedAction() {
                public Object run() {
                    Method m = null;
                    try {
                        // Reflection keeps this compilable against pre-1.2 class libraries.
                        m = Thread.class.getMethod("getContextClassLoader", (Class[]) null);
                    } catch (NoSuchMethodException e) {
                        // Assume that we are running JDK 1.1, use the current ClassLoader
                        debugPrintln("assuming JDK 1.1");
                        return FactoryFinder.class.getClassLoader();
                    }
                    try {
                        return (ClassLoader) m.invoke(Thread.currentThread(), (Object[]) null);
                    } catch (IllegalAccessException e) {
                        // assert(false)
                        throw new ConfigurationError("Unexpected IllegalAccessException",
                                                     e);
                    } catch (InvocationTargetException e) {
                        // assert(e.getTargetException() instanceof SecurityException)
                        throw new ConfigurationError("Unexpected InvocationTargetException",
                                                     e);
                    }
                }
            });
        return cl;
    }

    /**
     * Create an instance of a class using the specified
     * <code>ClassLoader</code>, or if that fails from the
     * <code>ClassLoader</code> that loaded this class.
     *
     * @param className   the name of the class to instantiate
     * @param classLoader a <code>ClassLoader</code> to load the class from
     * @return a new <code>Object</code> that is an instance of the class of
     *         the given name from the given class loader
     * @throws ConfigurationError if the class could not be found or
     *                            instantiated
     */
    private static Object newInstance(String className,
                                      ClassLoader classLoader)
        throws ConfigurationError {
        final ClassLoader iClassLoader = classLoader;
        final String iClassName = className;
        // REVIEW This doPriv block may be unnecessary because this method is private and
        // the caller already has a doPriv. I added the doPriv in case someone changes the
        // visibility of this method to non-private.
        Object obj =
            doPrivileged(new PrivilegedAction() {
                public Object run() {
                    try {
                        return ProviderLocator.loadClass(iClassName, FactoryFinder.class, iClassLoader).newInstance();
                    } catch (ClassNotFoundException x) {
                        throw new ConfigurationError(
                            "Provider " + iClassName + " not found", x);
                    } catch (Exception x) {
                        throw new ConfigurationError(
                            "Provider " + iClassName + " could not be instantiated: " + x,
                            x);
                    }
                }
            });
        return obj;
    }

    /**
     * Finds the implementation Class object in the specified order. Main
     * entry point.
     *
     * @param factoryId         Name of the factory to find, same as
     *                          a property name
     * @param fallbackClassName Implementation class name, if nothing else
     *                          is found. Use null to mean no fallback.
     * @return Class object of factory, never null
     * @throws FactoryFinder.ConfigurationError
     *          Package private so this code can be shared.
     */
    static Object find(String factoryId, String fallbackClassName)
        throws ConfigurationError {
        final String iFactoryId = factoryId;
        final String iFallbackClassName = fallbackClassName;
        Object obj =
            doPrivileged(new PrivilegedAction() {
                public Object run() {
                    debugPrintln("debug is on");
                    ClassLoader classLoader = findClassLoader();
                    // Use the system property first
                    try {
                        String systemProp =
                            System.getProperty(iFactoryId);
                        if (systemProp != null) {
                            debugPrintln("found system property " + systemProp);
                            return newInstance(systemProp, classLoader);
                        }
                    } catch (SecurityException se) {
                        // best effort: fall through to the other lookup mechanisms
                    }
                    // try to read from $java.home/lib/jaxws.properties
                    try {
                        String javah = System.getProperty("java.home");
                        String configFile = javah + File.separator +
                            "lib" + File.separator + "jaxws.properties";
                        File f = new File(configFile);
                        if (f.exists()) {
                            Properties props = new Properties();
                            // BUG FIX: close the stream — Properties.load does not close it,
                            // and the original leaked a file handle per lookup.
                            FileInputStream fis = new FileInputStream(f);
                            try {
                                props.load(fis);
                            } finally {
                                fis.close();
                            }
                            String factoryClassName = props.getProperty(iFactoryId);
                            debugPrintln("found java.home property " + factoryClassName);
                            return newInstance(factoryClassName, classLoader);
                        }
                    } catch (Exception ex) {
                        if (debug) ex.printStackTrace();
                    }
                    String serviceId = "META-INF/services/" + iFactoryId;
                    // try to find services in CLASSPATH
                    try {
                        InputStream is = null;
                        if (classLoader == null) {
                            is = ClassLoader.getSystemResourceAsStream(serviceId);
                        } else {
                            is = classLoader.getResourceAsStream(serviceId);
                        }
                        if (is != null) {
                            debugPrintln("found " + serviceId);
                            // Read the service provider name in UTF-8 as specified in
                            // the jar spec. Unfortunately this fails in Microsoft
                            // VJ++, which does not implement the UTF-8
                            // encoding. Theoretically, we should simply let it fail in
                            // that case, since the JVM is obviously broken if it
                            // doesn't support such a basic standard. But since there
                            // are still some users attempting to use VJ++ for
                            // development, we have dropped in a fallback which makes a
                            // second attempt using the platform's default encoding. In
                            // VJ++ this is apparently ASCII, which is a subset of
                            // UTF-8... and since the strings we'll be reading here are
                            // also primarily limited to the 7-bit ASCII range (at
                            // least, in English versions), this should work well
                            // enough to keep us on the air until we're ready to
                            // officially decommit from VJ++. [Edited comment from
                            // jkesselm]
                            BufferedReader rd;
                            try {
                                rd = new BufferedReader(new InputStreamReader(is, "UTF-8"));
                            } catch (java.io.UnsupportedEncodingException e) {
                                rd = new BufferedReader(new InputStreamReader(is));
                            }
                            // BUG FIX: close the reader even when readLine throws;
                            // the original leaked the stream on that path.
                            String factoryClassName;
                            try {
                                factoryClassName = rd.readLine();
                            } finally {
                                rd.close();
                            }
                            if (factoryClassName != null &&
                                !"".equals(factoryClassName)) {
                                debugPrintln("loaded from services: " + factoryClassName);
                                return newInstance(factoryClassName, classLoader);
                            }
                        }
                    } catch (Exception ex) {
                        if (debug) ex.printStackTrace();
                    }
                    if (iFallbackClassName == null) {
                        throw new ConfigurationError(
                            "Provider for " + iFactoryId + " cannot be found", null);
                    }
                    debugPrintln("loaded from fallback value: " + iFallbackClassName);
                    return newInstance(iFallbackClassName, classLoader);
                }
            });
        return obj;
    }

    /** Runs the action under AccessController only when a SecurityManager is installed. */
    private static Object doPrivileged(PrivilegedAction action) {
        SecurityManager sm = System.getSecurityManager();
        if (sm == null) {
            return (action.run());
        } else {
            return java.security.AccessController.doPrivileged(action);
        }
    }

    static class ConfigurationError extends Error {
        // fixme: should this be refactored to use the jdk1.4 exception
        // wrapping?
        private Exception exception;

        /**
         * Construct a new instance with the specified detail string and
         * exception.
         *
         * @param msg the Message for this error
         * @param x   an Exception that caused this failure, or null
         */
        ConfigurationError(String msg, Exception x) {
            super(msg);
            this.exception = x;
        }

        Exception getException() {
            return exception;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.util;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.memory.CarbonUnsafe;
/**
* Util class for byte comparision
*/
public final class ByteUtil {
// Widths, in bytes, of the primitive encodings used throughout the byte utilities.
public static final int SIZEOF_BYTE = 1;
public static final int SIZEOF_SHORT = 2;
// 3-byte packed integer — presumably for CarbonData's compact encodings; TODO confirm.
public static final int SIZEOF_SHORT_INT = 3;
public static final int SIZEOF_INT = 4;
public static final int SIZEOF_LONG = 8;
public static final int SIZEOF_DOUBLE = 8;
// Canonical UTF-8 charset name, for byte<->String conversions.
public static final String UTF8_CSN = StandardCharsets.UTF_8.name();
// Utility class: not instantiable.
private ByteUtil() {
}
/**
 * Lexicographically compares two whole byte arrays, delegating to the unsafe
 * comparer over each array's full range.
 *
 * @param buffer1 left operand
 * @param buffer2 right operand
 * @return 0 if equal, negative if buffer1 sorts first, positive otherwise
 */
public static int compare(byte[] buffer1, byte[] buffer2) {
  // Identical references are trivially equal.
  if (buffer1 == buffer2) {
    return 0;
  }
  return ByteUtil.UnsafeComparer.INSTANCE
      .compareTo(buffer1, 0, buffer1.length, buffer2, 0, buffer2.length);
}
/**
 * Narrows a long[] to an int[] element-wise (plain cast: values outside the int
 * range are truncated, exactly as the original did).
 *
 * @param longArray source values
 * @return a new array with each element cast to int
 */
public static int[] convertToIntArray(long[] longArray) {
  final int length = longArray.length;
  int[] result = new int[length];
  for (int index = 0; index < length; index++) {
    result[index] = (int) longArray[index];
  }
  return result;
}
/**
 * Formats a byte count as a human-readable string in Byte, KB or MB units
 * (integer division, same rounding as before).
 *
 * @param sizeInbyte size in bytes
 * @return readable representation, e.g. "512 Byte", "3 KB", "7 MB"
 */
public static String convertByteToReadable(long sizeInbyte) {
  long factor = CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR;
  if (sizeInbyte < factor) {
    return sizeInbyte + " Byte";
  }
  if (sizeInbyte < factor * factor) {
    return sizeInbyte / factor + " KB";
  }
  return sizeInbyte / factor / factor + " MB";
}
/**
* Unsafe comparator
*/
public enum UnsafeComparer {
/**
* instance.
*/
INSTANCE;
/**
 * Returns true if x1 is less than x2, when both values are treated as
 * unsigned 64-bit integers.
 */
static boolean lessThanUnsigned(long x1, long x2) {
  // XOR-ing with Long.MIN_VALUE flips the sign bit, mapping unsigned order onto signed order.
  return (x1 ^ Long.MIN_VALUE) < (x2 ^ Long.MIN_VALUE);
}
/**
 * Lexicographically compare two arrays.
 *
 * @param buffer1 left operand
 * @param buffer2 right operand
 * @param offset1 Where to start comparing in the left buffer
 * @param offset2 Where to start comparing in the right buffer
 * @param length1 How much to compare from the left buffer
 * @param length2 How much to compare from the right buffer
 * @return 0 if equal, < 0 if left is less than right, etc.
 */
public int compareTo(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2,
    int length2) {
  // Short circuit equal case
  if (buffer1 == buffer2 && offset1 == offset2 && length1 == length2) {
    return 0;
  }
  int minLength = Math.min(length1, length2);
  int minWords = minLength / SIZEOF_LONG;
  // Absolute unsafe addresses of the first byte of each compared range.
  int offset1Adj = offset1 + CarbonUnsafe.BYTE_ARRAY_OFFSET;
  int offset2Adj = offset2 + CarbonUnsafe.BYTE_ARRAY_OFFSET;
  /*
   * Compare 8 bytes at a time. Benchmarking shows comparing 8 bytes
   * at a time is no slower than comparing 4 bytes at a time even on
   * 32-bit. On the other hand, it is substantially faster on 64-bit.
   */
  for (int i = 0; i < minWords * SIZEOF_LONG; i += SIZEOF_LONG) {
    long lw = CarbonUnsafe.getUnsafe().getLong(buffer1, offset1Adj + (long) i);
    long rw = CarbonUnsafe.getUnsafe().getLong(buffer2, offset2Adj + (long) i);
    long diff = lw ^ rw;
    if (diff != 0) {
      // On big-endian, word order equals byte order, so unsigned word compare suffices.
      if (!CarbonUnsafe.ISLITTLEENDIAN) {
        return lessThanUnsigned(lw, rw) ? -1 : 1;
      }
      // Little-endian: binary-search for the lowest set byte of the XOR diff,
      // which is the first differing byte in memory order.
      int n = 0;
      int y;
      int x = (int) diff;
      if (x == 0) {
        x = (int) (diff >>> 32);
        n = 32;
      }
      y = x << 16;
      if (y == 0) {
        n += 16;
      } else {
        x = y;
      }
      y = x << 8;
      if (y == 0) {
        n += 8;
      }
      // Compare the single differing byte, unsigned.
      return (int) (((lw >>> n) & 0xFFL) - ((rw >>> n) & 0xFFL));
    }
  }
  // The epilogue to cover the last (minLength % 8) elements.
  for (int i = minWords * SIZEOF_LONG; i < minLength; i++) {
    int a = (buffer1[offset1 + i] & 0xff);
    int b = (buffer2[offset2 + i] & 0xff);
    if (a != b) {
      return a - b;
    }
  }
  // Equal prefix: the shorter range sorts first.
  return length1 - length2;
}
/**
 * Lexicographically compares two whole arrays by delegating to the length-aware
 * overload over their common prefix length.
 */
public int compareTo(byte[] buffer1, byte[] buffer2) {
  // Identical references are trivially equal.
  if (buffer1 == buffer2) {
    return 0;
  }
  int len1 = buffer1.length;
  int len2 = buffer2.length;
  return compareTo(buffer1, buffer2, len1, len2, Math.min(len1, len2));
}
/**
 * Lexicographically compares the first {@code minLength} bytes of two arrays
 * starting at index 0, falling back to the length difference on an equal prefix.
 * Word-at-a-time comparison is only attempted for prefixes longer than 7 bytes.
 */
public int compareTo(byte[] buffer1, byte[] buffer2, int len1, int len2, int minLength) {
  int minWords = 0;
  /*
   * Compare 8 bytes at a time. Benchmarking shows comparing 8 bytes
   * at a time is no slower than comparing 4 bytes at a time even on
   * 32-bit. On the other hand, it is substantially faster on 64-bit.
   */
  if (minLength > 7) {
    minWords = minLength / SIZEOF_LONG;
    for (int i = 0; i < minWords * SIZEOF_LONG; i += SIZEOF_LONG) {
      long lw =
          CarbonUnsafe.getUnsafe().getLong(buffer1, CarbonUnsafe.BYTE_ARRAY_OFFSET + (long) i);
      long rw =
          CarbonUnsafe.getUnsafe().getLong(buffer2, CarbonUnsafe.BYTE_ARRAY_OFFSET + (long) i);
      long diff = lw ^ rw;
      if (diff != 0) {
        // On big-endian, word order equals byte order, so unsigned word compare suffices.
        if (!CarbonUnsafe.ISLITTLEENDIAN) {
          return lessThanUnsigned(lw, rw) ? -1 : 1;
        }
        // Little-endian: binary-search for the lowest set byte of the XOR diff,
        // which is the first differing byte in memory order.
        int k = 0;
        int y;
        int x = (int) diff;
        if (x == 0) {
          x = (int) (diff >>> 32);
          k = 32;
        }
        y = x << 16;
        if (y == 0) {
          k += 16;
        } else {
          x = y;
        }
        y = x << 8;
        if (y == 0) {
          k += 8;
        }
        // Compare the single differing byte, unsigned.
        return (int) (((lw >>> k) & 0xFFL) - ((rw >>> k) & 0xFFL));
      }
    }
  }
  // The epilogue to cover the last (minLength % 8) elements.
  for (int i = minWords * SIZEOF_LONG; i < minLength; i++) {
    int a = (buffer1[i] & 0xff);
    int b = (buffer2[i] & 0xff);
    if (a != b) {
      return a - b;
    }
  }
  // Equal prefix: the shorter array sorts first.
  return len1 - len2;
}
/**
 * Whole-array equality check using 8-byte word reads where possible.
 * Lengths must match exactly; content is compared word-by-word and then
 * byte-by-byte for the trailing (length % 8) bytes.
 *
 * @param buffer1 left array
 * @param buffer2 right array
 * @return true if both arrays have identical length and content
 */
public boolean equals(byte[] buffer1, byte[] buffer2) {
  if (buffer1.length != buffer2.length) {
    return false;
  }
  int len = buffer1.length / 8;
  // currentOffset starts at the JVM array-header size and advances past
  // each compared chunk.
  long currentOffset = CarbonUnsafe.BYTE_ARRAY_OFFSET;
  for (int i = 0; i < len; i++) {
    long lw = CarbonUnsafe.getUnsafe().getLong(buffer1, currentOffset);
    long rw = CarbonUnsafe.getUnsafe().getLong(buffer2, currentOffset);
    if (lw != rw) {
      return false;
    }
    currentOffset += 8;
  }
  // Trailing bytes that did not fill a whole 8-byte word.
  len = buffer1.length % 8;
  if (len > 0) {
    for (int i = 0; i < len; i += 1) {
      long lw = CarbonUnsafe.getUnsafe().getByte(buffer1, currentOffset);
      long rw = CarbonUnsafe.getUnsafe().getByte(buffer2, currentOffset);
      if (lw != rw) {
        return false;
      }
      currentOffset += 1;
    }
  }
  return true;
}
/**
 * Range equality check using 8-byte word reads where possible.
 *
 * @param buffer1 left array
 * @param offset1 start of the range in buffer1
 * @param length1 number of bytes to compare from buffer1
 * @param buffer2 right array
 * @param offset2 start of the range in buffer2
 * @param length2 number of bytes to compare from buffer2
 * @return true if both ranges have the same length and identical content
 */
public boolean equals(byte[] buffer1, int offset1, int length1, byte[] buffer2, int offset2,
    int length2) {
  if (length1 != length2) {
    return false;
  }
  int len = length1 / 8;
  // currentOffset starts at the JVM array-header size and advances past
  // each compared chunk; the element offset is added per array.
  long currentOffset = CarbonUnsafe.BYTE_ARRAY_OFFSET;
  for (int i = 0; i < len; i++) {
    long lw = CarbonUnsafe.getUnsafe().getLong(buffer1, currentOffset + offset1);
    long rw = CarbonUnsafe.getUnsafe().getLong(buffer2, currentOffset + offset2);
    if (lw != rw) {
      return false;
    }
    currentOffset += 8;
  }
  // Tail bytes beyond the last full word. This must be derived from the
  // requested range length (length1), NOT from buffer1.length: the previous
  // code used buffer1.length % 8, which compared bytes outside the requested
  // range (and potentially past the end of buffer2) whenever
  // length1 != buffer1.length.
  len = length1 % 8;
  if (len > 0) {
    for (int i = 0; i < len; i += 1) {
      long lw = CarbonUnsafe.getUnsafe().getByte(buffer1, currentOffset + offset1);
      long rw = CarbonUnsafe.getUnsafe().getByte(buffer2, currentOffset + offset2);
      if (lw != rw) {
        return false;
      }
      currentOffset += 1;
    }
  }
  return true;
}
/**
 * Comparing the 2 byte buffers. This is used in case of data load sorting step.
 * Note: this consumes the remaining bytes of both buffers (their positions
 * advance to their limits).
 *
 * @param byteBuffer1 left buffer
 * @param byteBuffer2 right buffer
 * @return negative, zero or positive per lexicographic unsigned byte order
 */
public int compareTo(ByteBuffer byteBuffer1, ByteBuffer byteBuffer2) {
  // Identical references are trivially equal.
  if (byteBuffer1 == byteBuffer2) {
    return 0;
  }
  byte[] left = new byte[byteBuffer1.remaining()];
  byte[] right = new byte[byteBuffer2.remaining()];
  byteBuffer1.get(left);
  byteBuffer2.get(right);
  return compareTo(left, right);
}
}
/**
 * String => byte[] using UTF-8 encoding.
 *
 * @param s string to encode; must not be null
 * @return the UTF-8 encoded bytes
 */
public static byte[] toBytes(String s) {
  // The Charset overload never throws UnsupportedEncodingException, so the
  // dead catch block of the old charset-name overload is gone.
  // NOTE(review): assumes UTF8_CSN named UTF-8 — true for this codebase's
  // constant, but confirm.
  return s.getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * byte[] => String, decoding as UTF-8.
 *
 * @param b source bytes; may be null
 * @param off start offset within b
 * @param len number of bytes to decode
 * @return the decoded string, null when b is null, "" when len is 0
 */
public static String toString(final byte[] b, int off, int len) {
  if (b == null) {
    return null;
  }
  if (len == 0) {
    return "";
  }
  // The Charset overload never throws UnsupportedEncodingException; this
  // also removes the old catch block whose message ("UTF8 encoding is not
  // supported") was swapped with the one in toBytes(String).
  return new String(b, off, len, java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * boolean => byte[]: true encodes as a single -1 byte, false as a single 0.
 *
 * @param b value to encode
 * @return one-byte encoding of b
 */
public static byte[] toBytes(final boolean b) {
  byte encoded = b ? (byte) -1 : (byte) 0;
  return new byte[] { encoded };
}
/**
 * byte[] => boolean: any non-zero byte decodes as true.
 *
 * @param b one-byte encoded value
 * @return the decoded boolean
 * @throws IllegalArgumentException when b is not exactly one byte long
 */
public static boolean toBoolean(final byte[] b) {
  if (b.length != 1) {
    throw new IllegalArgumentException("Array has wrong size: " + b.length);
  }
  return b[0] != 0;
}
/**
 * Single byte => boolean: any non-zero byte decodes as true.
 */
public static boolean toBoolean(final byte b) {
  return b != 0;
}
/**
 * short => byte[2], big-endian with the sign bit flipped so that unsigned
 * byte-wise comparison of the result matches signed comparison of the input.
 *
 * @param val value to encode
 * @return two-byte order-preserving encoding of val
 */
public static byte[] toBytes(short val) {
  // XOR with MIN_VALUE flips the sign bit; mask to the low 16 bits so the
  // shifts below operate on the short's bit pattern only.
  int flipped = (val ^ Short.MIN_VALUE) & 0xFFFF;
  return new byte[] { (byte) (flipped >> 8), (byte) flipped };
}
/**
 * byte[] => short: decodes the order-preserving two-byte big-endian
 * encoding produced by toBytes(short), un-flipping the sign bit at the end.
 *
 * @param bytes source array
 * @param offset position of the first encoded byte
 * @param length must equal SIZEOF_SHORT
 * @return the decoded short value
 * @throws IllegalArgumentException on a wrong length or an out-of-range offset
 */
public static short toShort(byte[] bytes, int offset, final int length) {
  if (length != SIZEOF_SHORT || offset + length > bytes.length) {
    throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_SHORT);
  }
  short n = 0;
  if (CarbonUnsafe.getUnsafe() != null) {
    // Fast path: single 2-byte read, byte-swapped on little-endian hosts so
    // the result matches the big-endian layout written by toBytes(short).
    if (CarbonUnsafe.ISLITTLEENDIAN) {
      n = Short.reverseBytes(
          CarbonUnsafe.getUnsafe().getShort(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET));
    } else {
      n = CarbonUnsafe.getUnsafe().getShort(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET);
    }
  } else {
    // Portable fallback: assemble the big-endian value byte by byte.
    n ^= bytes[offset] & 0xFF;
    n <<= 8;
    n ^= bytes[offset + 1] & 0xFF;
  }
  // Undo the sign-bit flip applied by toBytes(short).
  return (short)(n ^ Short.MIN_VALUE);
}
/**
 * int => byte[4], big-endian with the sign bit flipped so that unsigned
 * byte-wise comparison of the result matches signed comparison of the input.
 *
 * @param val value to encode
 * @return four-byte order-preserving encoding of val
 */
public static byte[] toBytes(int val) {
  // XOR with MIN_VALUE flips the sign bit.
  int v = val ^ Integer.MIN_VALUE;
  return new byte[] {
      (byte) (v >>> 24), (byte) (v >>> 16), (byte) (v >>> 8), (byte) v };
}
/**
 * int => byte[3], big-endian, keeping only the low 24 bits.
 * Supported range is [-8388608, 8388607]; note that 2^23 == 8388608
 * (the previous comment incorrectly stated Math.pow(2, 24) == 8388608).
 */
public static byte[] to3Bytes(int val) {
  // Exact integer bounds of a signed 24-bit value; avoids the old
  // double-valued Math.pow comparison.
  assert val <= 8388607 && val >= -8388608;
  return new byte[]{ (byte)(val >> 16), (byte)(val >> 8), (byte)val };
}
/**
 * Convert 3 big-endian bytes starting at offset into a sign-extended int
 * (inverse of to3Bytes).
 */
public static int valueOf3Bytes(byte[] val, int offset) {
  assert val.length >= offset + 3;
  // The byte-to-int promotion of val[offset] sign-extends automatically,
  // which reproduces both branches of the original implementation
  // (the negative branch's (b & 0xFFFF) << 16 and the positive branch's
  // (b & 0xFF) << 16) in a single expression.
  return (val[offset] << 16)
      | ((val[offset + 1] & 0xFF) << 8)
      | (val[offset + 2] & 0xFF);
}
/**
 * byte[] => int: decodes the order-preserving four-byte big-endian encoding
 * produced by toBytes(int), un-flipping the sign bit at the end.
 *
 * @param bytes source array
 * @param offset position of the first encoded byte
 * @param length must equal SIZEOF_INT
 * @return the decoded int value
 * @throws IllegalArgumentException on a wrong length or an out-of-range offset
 */
public static int toInt(byte[] bytes, int offset, final int length) {
  if (length != SIZEOF_INT || offset + length > bytes.length) {
    throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_INT);
  }
  int n = 0;
  if (CarbonUnsafe.getUnsafe() != null) {
    // Fast path: single 4-byte read, byte-swapped on little-endian hosts so
    // the result matches the big-endian layout written by toBytes(int).
    if (CarbonUnsafe.ISLITTLEENDIAN) {
      n = Integer.reverseBytes(
          CarbonUnsafe.getUnsafe().getInt(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET));
    } else {
      n = CarbonUnsafe.getUnsafe().getInt(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET);
    }
  } else {
    // Portable fallback: assemble the big-endian value byte by byte.
    for (int i = offset; i < (offset + length); i++) {
      n <<= 8;
      n ^= bytes[i] & 0xFF;
    }
  }
  // Undo the sign-bit flip applied by toBytes(int).
  return n ^ Integer.MIN_VALUE;
}
/**
 * Read a plain (no sign-bit flip) big-endian int from bytes at offset.
 */
public static int toInt(byte[] bytes, int offset) {
  int result = 0;
  for (int i = 0; i < 4; i++) {
    result = (result << 8) | (bytes[offset + i] & 0xff);
  }
  return result;
}
/**
 * Read a plain big-endian 2-byte unsigned value from bytes at offset.
 * Returns an int in [0, 65535] despite the name.
 */
public static int toShort(byte[] bytes, int offset) {
  return ((bytes[offset] & 0xff) << 8) | (bytes[offset + 1] & 0xff);
}
/**
 * Write value into data at offset as 4 big-endian bytes (no sign-bit flip).
 */
public static void setInt(byte[] data, int offset, int value) {
  // Store least-significant byte last-index-first, shifting down each step.
  for (int i = 3; i >= 0; i--) {
    data[offset + i] = (byte) value;
    value >>>= 8;
  }
}
/**
 * Write the low 16 bits of value into data at offset as 2 big-endian bytes.
 */
public static void setShort(byte[] data, int offset, int value) {
  data[offset + 1] = (byte) value;
  data[offset] = (byte) (value >> 8);
}
/**
 * long => byte[8], big-endian with the sign bit flipped so that unsigned
 * byte-wise comparison of the result matches signed comparison of the input.
 *
 * @param val value to encode
 * @return eight-byte order-preserving encoding of val
 */
public static byte[] toBytes(long val) {
  // XOR with MIN_VALUE flips the sign bit.
  long v = val ^ Long.MIN_VALUE;
  byte[] out = new byte[8];
  for (int i = 0, shift = 56; i < 8; i++, shift -= 8) {
    out[i] = (byte) (v >>> shift);
  }
  return out;
}
/**
 * double => byte[8]: encodes the IEEE-754 bit pattern via toBytes(long).
 * NOTE(review): toBytes(long) only flips the sign bit, so the byte-wise
 * order of encoded doubles is not the numeric order for negative values —
 * confirm callers rely only on round-tripping, not on sort order.
 */
public static byte[] toBytes(double val) {
  return toBytes(Double.doubleToLongBits(val));
}
/**
 * byte[] => double: inverse of toBytes(double); decodes via toLong and
 * reinterprets the bits as IEEE-754.
 */
public static double toDouble(byte[] value, int offset, int length) {
  return Double.longBitsToDouble(toLong(value, offset, length));
}
/**
 * byte[] => long: decodes the order-preserving eight-byte big-endian
 * encoding produced by toBytes(long), un-flipping the sign bit at the end.
 *
 * @param bytes source array
 * @param offset position of the first encoded byte
 * @param length must equal SIZEOF_LONG
 * @return the decoded long value
 * @throws IllegalArgumentException on a wrong length or an out-of-range offset
 */
public static long toLong(byte[] bytes, int offset, final int length) {
  if (length != SIZEOF_LONG || offset + length > bytes.length) {
    throw explainWrongLengthOrOffset(bytes, offset, length, SIZEOF_LONG);
  }
  long l = 0;
  if (CarbonUnsafe.getUnsafe() != null) {
    // Fast path: single 8-byte read, byte-swapped on little-endian hosts so
    // the result matches the big-endian layout written by toBytes(long).
    if (CarbonUnsafe.ISLITTLEENDIAN) {
      l = Long.reverseBytes(
          CarbonUnsafe.getUnsafe().getLong(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET));
    } else {
      l = CarbonUnsafe.getUnsafe().getLong(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET);
    }
  } else {
    // Portable fallback: assemble the big-endian value byte by byte.
    for (int i = offset; i < offset + length; i++) {
      l <<= 8;
      l ^= bytes[i] & 0xFF;
    }
  }
  // Undo the sign-bit flip applied by toBytes(long).
  return l ^ Long.MIN_VALUE;
}
/**
 * Build the IllegalArgumentException describing a bad length or offset
 * passed to one of the fixed-width decoders (toShort/toInt/toLong).
 */
private static IllegalArgumentException explainWrongLengthOrOffset(final byte[] bytes,
    final int offset, final int length, final int expectedLength) {
  final String reason = (length != expectedLength)
      ? "Wrong length: " + length + ", expected " + expectedLength
      : "offset (" + offset + ") + length (" + length + ") exceed the"
          + " capacity of the array: " + bytes.length;
  return new IllegalArgumentException(reason);
}
/**
 * Put an int value out to the specified byte array position, big-endian.
 * NOTE: unlike toBytes(int), this writes the raw two's-complement value
 * with no sign-bit flip.
 *
 * @param bytes the byte array
 * @param offset position in the array
 * @param val int to write out
 * @return incremented offset (offset + SIZEOF_INT)
 * @throws IllegalArgumentException if the byte array given doesn't have
 * enough room at the offset specified.
 */
public static int putInt(byte[] bytes, int offset, int val) {
  if (bytes.length - offset < SIZEOF_INT) {
    throw new IllegalArgumentException(
        "Not enough room to put an int at" + " offset " + offset + " in a " + bytes.length
            + " byte array");
  }
  if (CarbonUnsafe.getUnsafe() != null) {
    // Fast path: single 4-byte store, byte-swapped first on little-endian
    // hosts so the in-memory layout is big-endian.
    if (CarbonUnsafe.ISLITTLEENDIAN) {
      val = Integer.reverseBytes(val);
    }
    CarbonUnsafe.getUnsafe().putInt(bytes, offset + CarbonUnsafe.BYTE_ARRAY_OFFSET, val);
    return offset + ByteUtil.SIZEOF_INT;
  } else {
    // Portable fallback: store the 4 bytes most-significant first.
    for (int i = offset + 3; i > offset; i--) {
      bytes[i] = (byte) val;
      val >>>= 8;
    }
    bytes[offset] = (byte) val;
    return offset + SIZEOF_INT;
  }
}
/**
 * Put bytes at the specified byte array position.
 *
 * @param tgtBytes the byte array
 * @param tgtOffset position in the array
 * @param srcBytes array to write out
 * @param srcOffset source offset
 * @param srcLength source length
 * @return incremented offset
 */
public static int putBytes(byte[] tgtBytes, int tgtOffset, byte[] srcBytes, int srcOffset,
    int srcLength) {
  int nextOffset = tgtOffset + srcLength;
  System.arraycopy(srcBytes, srcOffset, tgtBytes, tgtOffset, srcLength);
  return nextOffset;
}
/**
 * Flatten the input byte[][] into a single contiguous byte[] and return it.
 */
public static byte[] flatten(byte[][] input) {
  // First pass: total size of the result.
  int total = 0;
  for (byte[] part : input) {
    total += part.length;
  }
  // Second pass: copy each part into place.
  byte[] result = new byte[total];
  int written = 0;
  for (byte[] part : input) {
    System.arraycopy(part, 0, result, written, part.length);
    written += part.length;
  }
  return result;
}
}
| |
/*
* Copyright 2013 Bryce Cronkite-Ratcliff
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stamp.util.tree;
import java.lang.UnsupportedOperationException;
import java.util.ArrayList;
import java.util.Iterator;
/**
* Tree Nodes each contain a chronologically ordered
* list of their children, a reference to their parent
* and a data field of the type of the node. The
* parameterized types of the Tree and Node ought to
* agree. This implementation is mildly intrusive,
* so you could probably make do without keeping references
* to the tree around, although it might be useful on
* occasion.
*
* @author brycecr
*/
public class Node<T> {
/* List of Children */
protected ArrayList<Node<T>> children = null;
/* Reference to parent. Can be circular or null */
protected Node<T> parent = null;
/* The data stored by this node */
protected T data = null;
/**
* Initialize everything to null
*/
public Node() {
}
/**
* Initialize node with data id
*/
public Node(T id) {
data = id;
}
/**
* Initialize node with data id
* and a pre-made ArrayList of children.
* Doesn't do any type checking off the bat,
* so be careful.
*/
public Node(T id, ArrayList<Node<T>> initchildren) {
children = initchildren;
data = id;
}
/**
* Initialize node with data id and
* initial parent object initparent.
*/
public Node(T id, Node<T> initparent) {
parent = initparent;
data = id;
}
/**
* Initialize data to id, parent to init parent,
* and children to initchildren. Check your types well.
*/
public Node(T id, Node<T> initparent, ArrayList<Node<T>> initchildren) {
parent = initparent;
children = initchildren;
data = id;
}
/**
* @return parent node of this node. May be null.
*/
public Node<T> getParent() {
return parent;
}
/**
* @param newParent to set as parent of this node
*/
protected void setParent(Node<T> newParent) {
parent = newParent;
}
/**
* Creates a new node with data replacement
* removes toReplace from the children of this
* node, inserts the new node in its place,
* and adds toReplace to the children of the new node.
*/
public void replaceChild(Node<T> toReplace, T replacement) {
Node<T> newNode = new Node<T>(replacement, this);
if (children == null) {
return;
}
int ind = children.indexOf(toReplace);
if (ind == -1) {
// did not find toReplace in the children of this
return; // TODO throw exception?
}
// insert replacement in exact place of toReplace
children.set(ind, newNode);
// add toReplace to the children of newNode
newNode.addChild(toReplace);
toReplace.setParent(newNode);
}
/**
* @return ArrayList of children of this node
*/
public ArrayList<Node<T>> getChildren() {
return children;
}
/**
* Adds newchild to the end of the children
* List. Creates now ArrayList if necessary.
*
* @param newchild the node to insert
* @return the node just inserted
*/
public Node<T> addChild(Node<T> newchild) {
if (children == null) {
children = new ArrayList<Node<T>>();
}
children.add(newchild);
return newchild;
}
/**
* Creates new node with data newData and appends
* this to the end of the list of children for this
* node. Creates the list of children if necessary.
*
* @param newData to initialize new node with
* @return the new node created and inserted
*/
public Node<T> addChild(T newData) {
Node<T> newChild = new Node<T>(newData, this);
return addChild(newChild);
}
/**
* Set the null parent of this to newparent.
* @throws UnsupportedOperationException if current parent is not null
*/
public void addParent(Node<T> newparent) throws UnsupportedOperationException {
if (parent == null) {
parent = newparent;
} else {
throw new UnsupportedOperationException("Tree: Adding new "
+ "parent to Node with non-null parent.");
}
}
/**
* Returns the data object of this node
*/
public T getData() {
return data;
}
/**
* @return truth of whether this node has any children
*/
public boolean hasChildren() {
return !(children == null || children.size() == 0);
}
/**
* @return new NodeIterator over the children of this node
*/
public NodeIterator iterator() {
return new NodeIterator();
}
/**
* Just a shell on an arraylist iterator through the
* ArrayList of children
*/
class NodeIterator implements Iterator<Node<T>> {
Iterator<Node<T>> itr = null;
/**
* Initialize iterator. Your children better
* not be null or this will throw a NullPointerException
*/
public NodeIterator() {
itr = children.iterator();
}
/**
* @return if more children to iterate through
*/
public boolean hasNext() {
return itr.hasNext();
}
/**
* @return next child of this node
*/
public Node<T> next() {
return itr.next();
}
/**
* Remove the current child.
*/
public void remove() {
itr.remove();
}
}
}
| |
package org.commcare.backend.session.test;
import org.commcare.modern.session.SessionWrapper;
import org.commcare.modern.util.Pair;
import org.commcare.session.RemoteQuerySessionManager;
import org.commcare.suite.model.Action;
import org.commcare.suite.model.EntityDatum;
import org.commcare.test.utilities.CaseTestUtils;
import org.commcare.test.utilities.MockApp;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.ExternalDataInstance;
import org.javarosa.test_utils.ExprEvalUtils;
import org.javarosa.xpath.XPathMissingInstanceException;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.expr.FunctionUtils;
import org.javarosa.xpath.parser.XPathSyntaxException;
import org.commcare.session.SessionFrame;
import org.junit.Test;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Vector;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
/**
* This is a super basic test just to make sure the test infrastructure is working correctly
* and to act as an example of how to build template app tests.
*
* Created by ctsims on 8/14/2015.
*/
public class SessionStackTests {

    @Test
    public void testDoubleManagementAndOverlappingStack() throws Exception {
        MockApp mockApp = new MockApp("/complex_stack/");
        SessionWrapper session = mockApp.getSession();

        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0");

        assertEquals(SessionFrame.STATE_DATUM_COMPUTED, session.getNeededData());
        session.setComputedDatum();

        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());
        EntityDatum entityDatum = (EntityDatum)session.getNeededDatum();
        assertEquals("case_id", entityDatum.getDataId());

        Vector<Action> actions = session.getDetail(entityDatum.getShortDetail()).getCustomActions(session.getEvaluationContext());
        if (actions == null || actions.isEmpty()) {
            fail("Detail screen stack action was missing from app!");
        }
        Action dblManagement = actions.firstElement();
        session.executeStackOperations(dblManagement.getStackOperations(), session.getEvaluationContext());
        if (session.getNeededData() != null) {
            fail("After executing stack frame steps, session should be redirected");
        }
        assertEquals("http://commcarehq.org/test/placeholder_destination", session.getForm());

        EvaluationContext ec = session.getEvaluationContext();
        CaseTestUtils.xpathEvalAndCompare(ec, "count(instance('session')/session/data/calculated_data)", 1);
        CaseTestUtils.xpathEvalAndCompare(ec, "instance('session')/session/data/calculated_data", "new");
    }

    @Test
    public void testViewNav() throws Exception {
        MockApp mockApp = new MockApp("/complex_stack/");
        SessionWrapper session = mockApp.getSession();

        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m3-f0");

        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());
        assertEquals("case_id_to_send", session.getNeededDatum().getDataId());
        assertFalse("Session incorrectly determined a view command", session.isViewCommand(session.getCommand()));

        session.setDatum("case_id_to_send", "case_one");
        session.finishExecuteAndPop(session.getEvaluationContext());

        assertEquals("m2", session.getCommand());
        CaseTestUtils.xpathEvalAndCompare(session.getEvaluationContext(),
                "instance('session')/session/data/case_id", "case_one");
        CaseTestUtils.xpathEvalAndCompare(session.getEvaluationContext(),
                "count(instance('session')/session/data/case_id_to_send)", "0");
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
    }

    @Test
    public void testViewNonNav() throws Exception {
        MockApp mockApp = new MockApp("/complex_stack/");
        SessionWrapper session = mockApp.getSession();

        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m4-f0");

        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());
        assertEquals("case_id_to_view", session.getNeededDatum().getDataId());
        assertTrue("Session incorrectly tagged a view command", session.isViewCommand(session.getCommand()));
    }

    @Test
    public void testOutOfOrderStack() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();

        // Select a form that has 3 datum requirements to enter (in order from suite.xml: case_id,
        // case_id_new_visit_0, usercase_id)
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0");
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0-f3");

        // Set 2 of the 3 needed datums, but not in order (1st and 3rd)
        session.setDatum("case_id", "case_id_value");
        session.setDatum("usercase_id", "usercase_id_value");

        // Session should now need the case_id_new_visit_0, which is a computed datum
        assertEquals(SessionFrame.STATE_DATUM_COMPUTED, session.getNeededData());

        // The key of the needed datum should be "case_id_new_visit_0"
        assertEquals("case_id_new_visit_0", session.getNeededDatum().getDataId());

        // Add the needed datum to the stack and confirm that the session is now ready to proceed
        session.setDatum("case_id_new_visit_0", "visit_id_value");
        assertEquals(null, session.getNeededData());
    }

    @Test
    public void testOutOfOrderStackComplex() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();

        // Select a form that has 3 datum requirements to enter (in order from suite.xml: case_id,
        // case_id_new_visit_0, usercase_id)
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0");
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0-f3");

        // Set 2 of the 3 needed datums, so that the datum that is actually still needed (case_id)
        // is NOT a computed value, but the "last" needed datum is a computed value
        session.setDatum("case_id_new_visit_0", "visit_id_value");
        session.setDatum("usercase_id", "usercase_id_value");

        // Session should now see that it needs a normal datum val (NOT a computed val)
        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());

        // The key of the needed datum should be "case_id"
        assertEquals("case_id", session.getNeededDatum().getDataId());

        // Add the needed datum to the stack and confirm that the session is now ready to proceed
        session.setDatum("case_id", "case_id_value");
        assertEquals(null, session.getNeededData());
    }

    @Test
    public void testUnnecessaryDataOnStack() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();

        // Select a form that has 3 datum requirements to enter (in order from suite.xml: case_id,
        // case_id_new_visit_0, usercase_id)
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0");
        assertEquals(SessionFrame.STATE_COMMAND_ID, session.getNeededData());
        session.setCommand("m0-f3");

        // Put a bunch of random data on the stack such that there are more datums on the stack
        // than the total number of needed datums for this session (which is 3)
        session.setDatum("random_id_1", "random_val_1");
        session.setDatum("random_id_2", "random_val_2");
        session.setDatum("random_id_3", "random_val_3");
        session.setDatum("random_id_4", "random_val_4");

        // Now go through and check that the session effectively ignores the rubbish on the stack
        // and still sees itself as needing each of the datums defined for this form, in the correct
        // order
        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());
        assertEquals("case_id", session.getNeededDatum().getDataId());
        session.setDatum("case_id", "case_id_value");

        assertEquals(SessionFrame.STATE_DATUM_COMPUTED, session.getNeededData());
        assertEquals("case_id_new_visit_0", session.getNeededDatum().getDataId());
        session.setDatum("case_id_new_visit_0", "visit_id_value");

        assertEquals(SessionFrame.STATE_DATUM_COMPUTED, session.getNeededData());
        assertEquals("usercase_id", session.getNeededDatum().getDataId());
        session.setDatum("usercase_id", "usercase_id_value");

        assertEquals(null, session.getNeededData());
    }

    /**
     * Test that instance stored on the session stack (from remote query
     * results), that isn't supposed to adhere to the casedb xml template,
     * doesn't in fact adhere to it
     */
    @Test
    public void testNonCaseInstanceOnStack() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();
        session.setCommand("patient-noncase-search");
        // NOTE: expected value goes first in assertEquals (was reversed before).
        assertEquals(SessionFrame.STATE_QUERY_REQUEST, session.getNeededData());
        ExternalDataInstance dataInstance =
                buildRemoteExternalDataInstance(this.getClass(), session,
                        "/session-tests-template/patient_query_result.xml");
        session.setQueryDatum(dataInstance);
        ExprEvalUtils.testEval("instance('patients')/patients/patient[@id = '321']/name",
                session.getEvaluationContext(),
                "calbert");
    }

    /**
     * Test that instances stored on the session stack (from remote query
     * results), adheres to the casedb xml template as expected from the query
     * having the template="case" attribute Also ensure the instance is
     * correctly popped off with the associated frame step
     */
    @Test
    public void testCaseInstancesOnStack() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();
        session.setCommand("patient-case-search");
        // NOTE: expected value goes first in assertEquals (was reversed before).
        assertEquals(SessionFrame.STATE_QUERY_REQUEST, session.getNeededData());
        ExternalDataInstance dataInstance =
                buildRemoteExternalDataInstance(this.getClass(), session,
                        "/session-tests-template/patient_query_result.xml");
        session.setQueryDatum(dataInstance);
        ExprEvalUtils.testEval("instance('patients')/patients/case[@id = '123']/name",
                session.getEvaluationContext(),
                "bolivar");
        // demonstrate that paths that aren't 'casedb/case/...' fail
        ExprEvalUtils.testEval("instance('patients')/patients/patient[@id = '321']/name",
                session.getEvaluationContext(),
                new XPathTypeMismatchException());

        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());
        assertEquals("case_id", session.getNeededDatum().getDataId());
        session.setDatum("case_id", "case_id_value");

        session.stepBack();
        ExprEvalUtils.testEval("instance('patients')/patients/case[@id = '123']/name",
                session.getEvaluationContext(),
                "bolivar");

        session.stepBack();
        assertInstanceMissing(session, "instance('patients')/patients/case/bolivar");

        session.setQueryDatum(dataInstance);
        ExprEvalUtils.testEval("instance('patients')/patients/case[@id = '123']/name",
                session.getEvaluationContext(),
                "bolivar");
        session.finishExecuteAndPop(session.getEvaluationContext());
        assertInstanceMissing(session, "instance('patients')/patients/case/bolivar");
        ExprEvalUtils.testEval("instance('session')/session/data/case_id",
                session.getEvaluationContext(),
                "bolivar");
    }

    @Test
    public void testStackNavParsing() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();
        session.setCommand("smart-link-search");
        // NOTE: expected value goes first in assertEquals (was reversed before).
        assertEquals(SessionFrame.STATE_QUERY_REQUEST, session.getNeededData());
    }

    @Test
    public void testStackQueryParsing() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();
        session.setCommand("eof-nav-registry");
        // NOTE: expected value goes first in assertEquals (was reversed before).
        assertEquals(SessionFrame.STATE_QUERY_REQUEST, session.getNeededData());
    }

    @Test
    public void testActionParsing() throws Exception {
        MockApp mApp = new MockApp("/complex_stack/");
        SessionWrapper session = mApp.getSession();
        session.setCommand("test-actions");
        assertEquals(SessionFrame.STATE_DATUM_VAL, session.getNeededData());
        EntityDatum entityDatum = (EntityDatum)session.getNeededDatum();
        assertEquals("case_id", entityDatum.getDataId());
        EvaluationContext ec = session.getEvaluationContext();
        Vector<Action> actions = session.getDetail(entityDatum.getShortDetail()).getCustomActions(ec);
        // Only 2 of the 3 actions should be returned, because 1 has a relevant condition of false()
        assertEquals(2, actions.size());
        Action actionToInspect = actions.get(1);
        assertTrue(actionToInspect.hasActionBarIcon());
        assertEquals("Jump to Menu 2 Form 1, with icon", actionToInspect.getDisplay().getText().evaluate(ec));
        assertEquals(1, actionToInspect.getStackOperations().size());
        assertTrue(actionToInspect.getAutoLaunchExpr()==null);
        assertTrue(FunctionUtils.toString(actions.get(0).getAutoLaunchExpr().eval(ec)).contentEquals("true"));
    }

    /**
     * Assert that evaluating xpath against the session context fails with
     * XPathMissingInstanceException (i.e. the instance has been popped).
     */
    private static void assertInstanceMissing(SessionWrapper session, String xpath)
            throws XPathSyntaxException {
        try {
            ExprEvalUtils.xpathEval(session.getEvaluationContext(), xpath);
            fail("instance('patients') should not be available");
        } catch (XPathMissingInstanceException e) {
            // expected
        }
    }

    /**
     * Make sure that stepping backwards before doing anything else doesn't crash
     */
    @Test
    public void testStepBackAtBase() throws Exception {
        MockApp mockApp = new MockApp("/session-tests-template/");
        SessionWrapper session = mockApp.getSession();
        session.stepBack();
    }

    /**
     * Build an ExternalDataInstance from a classpath XML resource, failing
     * the test when parsing produces an error instead of an instance.
     */
    static ExternalDataInstance buildRemoteExternalDataInstance(Class cls,
                                                                SessionWrapper sessionWrapper,
                                                                String resourcePath) {
        RemoteQuerySessionManager remoteQuerySessionManager =
                RemoteQuerySessionManager.buildQuerySessionManager(sessionWrapper,
                        sessionWrapper.getEvaluationContext(), new ArrayList<>());
        InputStream is = cls.getResourceAsStream(resourcePath);
        Pair<ExternalDataInstance, String> instanceOrError =
                remoteQuerySessionManager.buildExternalDataInstance(is);
        assertNotNull(instanceOrError.first);
        return instanceOrError.first;
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.test;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.bookkeeper.client.AsyncCallback.AddCallback;
import org.apache.bookkeeper.client.AsyncCallback.CloseCallback;
import org.apache.bookkeeper.client.AsyncCallback.ReadCallback;
import org.apache.bookkeeper.client.AsyncCallback.ReadLastConfirmedCallback;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BKException.BKIllegalOpException;
import org.apache.bookkeeper.client.BookKeeper.DigestType;
import org.apache.bookkeeper.client.LedgerEntry;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.bookkeeper.streaming.LedgerInputStream;
import org.apache.bookkeeper.streaming.LedgerOutputStream;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This test tests read and write, synchronous and asynchronous, strings and
* integers for a BookKeeper client. The test deployment uses a ZooKeeper server
* and three BookKeepers.
*
*/
public class BookieReadWriteTest extends BookKeeperClusterTestCase
implements AddCallback, ReadCallback, ReadLastConfirmedCallback {
// Depending on the taste, select the amount of logging
// by uncommenting one of the two lines below
// private static final Logger LOG = Logger.getRootLogger();
private static final Logger LOG = LoggerFactory.getLogger(BookieReadWriteTest.class);

// Password shared by the ledgers created in these tests.
byte[] ledgerPassword = "aaa".getBytes();
// Ledger handles reused across test methods.
LedgerHandle lh, lh2;
long ledgerId;

// test related variables
int numEntriesToWrite = 200;
int maxInt = 2147483647;
Random rng; // Random Number Generator
ArrayList<byte[]> entries; // generated entries
ArrayList<Integer> entriesSize; // sizes of the generated entries
private final DigestType digestType;
/**
 * Builds the test against a 3-bookie cluster, using CRC32 digests and the
 * hierarchical ledger manager on both the server and client configurations.
 */
public BookieReadWriteTest() {
    super(3);
    this.digestType = DigestType.CRC32;
    // Both sides must agree on the ledger manager implementation.
    final String factoryClass = "org.apache.bookkeeper.meta.HierarchicalLedgerManagerFactory";
    baseConf.setLedgerManagerFactoryClassName(factoryClass);
    baseClientConf.setLedgerManagerFactoryClassName(factoryClass);
}
/**
 * Monitor object shared between the test thread and BookKeeper callbacks.
 * The test thread waits on this instance; callbacks update the fields below
 * and then call {@code notify()} while holding the monitor.
 */
class SyncObj {
    long lastConfirmed;   // last-add-confirmed id delivered by the LAC callback
    volatile int counter; // number of completed add operations
    boolean value;        // set true once a read has completed
    // First non-OK result code observed by any callback (OK until a failure).
    AtomicInteger rc = new AtomicInteger(BKException.Code.OK);
    Enumeration<LedgerEntry> ls = null; // entries delivered by the read callback

    public SyncObj() {
        counter = 0;
        lastConfirmed = LedgerHandle.INVALID_ENTRY_ID;
        value = false;
    }

    // Records the first failure; later codes never overwrite an earlier error.
    void setReturnCode(int rc) {
        this.rc.compareAndSet(BKException.Code.OK, rc);
    }

    int getReturnCode() {
        return rc.get();
    }

    void setLedgerEntries(Enumeration<LedgerEntry> ls) {
        this.ls = ls;
    }

    Enumeration<LedgerEntry> getLedgerEntries() {
        return ls;
    }
}
/**
 * Opening a ledger id that was never created must fail with a BKException.
 */
@Test
public void testOpenException() throws IOException, InterruptedException {
    try {
        lh = bkc.openLedger(0, digestType, ledgerPassword);
        fail("Haven't thrown exception");
    } catch (BKException expected) {
        LOG.warn("Successfully thrown and caught exception:", expected);
    }
}
/**
 * Tests the streaming api for reading and writing: a string is written through
 * {@link LedgerOutputStream} (whole-buffer and byte-at-a-time) and read back
 * through {@link LedgerInputStream}, verifying round-trip equality.
 *
 * @throws IOException on stream failure
 */
@Test
public void testStreamingClients() throws IOException, BKException, InterruptedException {
    lh = bkc.createLedger(digestType, ledgerPassword);
    // write a string so that we cna
    // create a buffer of a single bytes
    // and check for corner cases
    String toWrite = "we need to check for this string to match " + "and for the record mahadev is the best";
    LedgerOutputStream lout = new LedgerOutputStream(lh, 1);
    byte[] b = toWrite.getBytes();
    lout.write(b);
    lout.close();
    long lId = lh.getId();
    lh.close();
    // check for sanity
    lh = bkc.openLedger(lId, digestType, ledgerPassword);
    LedgerInputStream lin = new LedgerInputStream(lh, 1);
    byte[] bread = new byte[b.length];
    readFully(lin, bread);
    String newString = new String(bread);
    assertTrue("these two should same", toWrite.equals(newString));
    lin.close();
    lh.close();
    // create another ledger to write one byte at a time
    lh = bkc.createLedger(digestType, ledgerPassword);
    lout = new LedgerOutputStream(lh);
    for (int i = 0; i < b.length; i++) {
        lout.write(b[i]);
    }
    lout.close();
    lId = lh.getId();
    lh.close();
    lh = bkc.openLedger(lId, digestType, ledgerPassword);
    lin = new LedgerInputStream(lh);
    bread = new byte[b.length];
    readFully(lin, bread);
    newString = new String(bread);
    assertTrue("these two should be same ", toWrite.equals(newString));
    lin.close();
    lh.close();
}

// Reads exactly dest.length bytes from the stream. The previous inline loops
// passed the full buffer length on every read() call (risking an out-of-bounds
// copy once offset > 0) and would spin forever if read() returned -1 at EOF.
private static void readFully(LedgerInputStream lin, byte[] dest) throws IOException {
    int read = 0;
    while (read < dest.length) {
        int n = lin.read(dest, read, dest.length - read);
        if (n < 0) {
            fail("Unexpected end of stream after " + read + " bytes");
        }
        read += n;
    }
}
/**
 * Writes {@code numEntries} random 4-byte integer entries asynchronously,
 * then re-opens the ledger and reads them back asynchronously, verifying
 * both payload equality and entry size.
 *
 * @param numEntries number of entries to write and read back
 * @throws IOException declared for signature compatibility; failures surface via fail()
 */
private void testReadWriteAsyncSingleClient(int numEntries) throws IOException {
    // Make the parameter effective: it was previously ignored and the field
    // default was always used, so callers could not vary the entry count.
    this.numEntriesToWrite = numEntries;
    SyncObj sync = new SyncObj();
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        for (int i = 0; i < numEntriesToWrite; i++) {
            ByteBuffer entry = ByteBuffer.allocate(4);
            entry.putInt(rng.nextInt(maxInt));
            entry.position(0);
            entries.add(entry.array());
            entriesSize.add(entry.array().length);
            lh.asyncAddEntry(entry.array(), this, sync);
        }
        // wait for all entries to be acknowledged by addComplete()
        synchronized (sync) {
            while (sync.counter < numEntriesToWrite) {
                LOG.debug("Entries counter = " + sync.counter);
                sync.wait();
            }
            assertEquals("Error adding", BKException.Code.OK, sync.getReturnCode());
        }
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
        // *** WRITING PART COMPLETE // READ PART BEGINS ***
        // open ledger
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        LOG.debug("Number of entries written: " + (lh.getLastAddConfirmed() + 1));
        assertTrue("Verifying number of entries written", lh.getLastAddConfirmed() == (numEntriesToWrite - 1));
        // read entries; readComplete() stores them on the sync object
        lh.asyncReadEntries(0, numEntriesToWrite - 1, this, sync);
        synchronized (sync) {
            while (!sync.value) {
                sync.wait();
            }
            assertEquals("Error reading", BKException.Code.OK, sync.getReturnCode());
        }
        LOG.debug("*** READ COMPLETE ***");
        // Verify each returned entry against what was written, in order.
        int i = 0;
        Enumeration<LedgerEntry> ls = sync.getLedgerEntries();
        while (ls.hasMoreElements()) {
            ByteBuffer origbb = ByteBuffer.wrap(entries.get(i));
            Integer origEntry = origbb.getInt();
            byte[] entry = ls.nextElement().getEntry();
            ByteBuffer result = ByteBuffer.wrap(entry);
            LOG.debug("Length of result: " + result.capacity());
            LOG.debug("Original entry: " + origEntry);
            Integer retrEntry = result.getInt();
            LOG.debug("Retrieved entry: " + retrEntry);
            assertTrue("Checking entry " + i + " for equality", origEntry.equals(retrEntry));
            assertTrue("Checking entry " + i + " for size", entry.length == entriesSize.get(i));
            i++;
        }
        assertTrue("Checking number of read entries", i == numEntriesToWrite);
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
// Smoke test: asynchronous write and read back of 200 entries.
@Test
public void testReadWriteAsyncSingleClient200() throws IOException {
    testReadWriteAsyncSingleClient(200);
}
/**
 * Check that the add api with offset and length work correctly.
 * First try varying the offset. Then the length with a fixed non-zero
 * offset. Also verifies that invalid (offset, length) combinations are
 * rejected synchronously with {@link ArrayIndexOutOfBoundsException}.
 */
@Test
public void testReadWriteRangeAsyncSingleClient() throws IOException {
    SyncObj sync = new SyncObj();
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        byte[] bytes = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'};
        // Four valid (offset, length) combinations; expected payloads noted inline.
        lh.asyncAddEntry(bytes, 0, bytes.length, this, sync);
        lh.asyncAddEntry(bytes, 0, 4, this, sync); // abcd
        lh.asyncAddEntry(bytes, 3, 4, this, sync); // defg
        lh.asyncAddEntry(bytes, 3, (bytes.length - 3), this, sync); // defghi
        int numEntries = 4;
        // wait for all entries to be acknowledged
        synchronized (sync) {
            while (sync.counter < numEntries) {
                LOG.debug("Entries counter = " + sync.counter);
                sync.wait();
            }
            assertEquals("Error adding", BKException.Code.OK, sync.getReturnCode());
        }
        // Invalid combinations must fail fast, before any entry is queued.
        try {
            lh.asyncAddEntry(bytes, -1, bytes.length, this, sync);
            fail("Shouldn't be able to use negative offset");
        } catch (ArrayIndexOutOfBoundsException aiob) {
            // expected
        }
        try {
            lh.asyncAddEntry(bytes, 0, bytes.length + 1, this, sync);
            fail("Shouldn't be able to use that much length");
        } catch (ArrayIndexOutOfBoundsException aiob) {
            // expected
        }
        try {
            lh.asyncAddEntry(bytes, -1, bytes.length + 2, this, sync);
            fail("Shouldn't be able to use negative offset "
                 + "with that much length");
        } catch (ArrayIndexOutOfBoundsException aiob) {
            // expected
        }
        try {
            lh.asyncAddEntry(bytes, 4, -3, this, sync);
            fail("Shouldn't be able to use negative length");
        } catch (ArrayIndexOutOfBoundsException aiob) {
            // expected
        }
        try {
            lh.asyncAddEntry(bytes, -4, -3, this, sync);
            fail("Shouldn't be able to use negative offset & length");
        } catch (ArrayIndexOutOfBoundsException aiob) {
            // expected
        }
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
        // *** WRITING PART COMPLETE // READ PART BEGINS ***
        // open ledger
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        LOG.debug("Number of entries written: " + (lh.getLastAddConfirmed() + 1));
        assertTrue("Verifying number of entries written",
                   lh.getLastAddConfirmed() == (numEntries - 1));
        // read entries
        lh.asyncReadEntries(0, numEntries - 1, this, sync);
        synchronized (sync) {
            while (!sync.value) {
                sync.wait();
            }
            assertEquals("Error reading", BKException.Code.OK, sync.getReturnCode());
        }
        LOG.debug("*** READ COMPLETE ***");
        // at this point, Enumeration<LedgerEntry> ls is filled with the returned
        // values; check each against the expected slice of the source array
        int i = 0;
        Enumeration<LedgerEntry> ls = sync.getLedgerEntries();
        while (ls.hasMoreElements()) {
            byte[] expected = null;
            byte[] entry = ls.nextElement().getEntry();
            switch (i) {
                case 0:
                    expected = Arrays.copyOfRange(bytes, 0, bytes.length);
                    break;
                case 1:
                    expected = Arrays.copyOfRange(bytes, 0, 4);
                    break;
                case 2:
                    expected = Arrays.copyOfRange(bytes, 3, 3 + 4);
                    break;
                case 3:
                    expected = Arrays.copyOfRange(bytes, 3, 3 + (bytes.length - 3));
                    break;
            }
            assertNotNull("There are more checks than writes", expected);
            String message = "Checking entry " + i + " for equality ["
                             + new String(entry, "UTF-8") + ","
                             + new String(expected, "UTF-8") + "]";
            assertTrue(message, Arrays.equals(entry, expected));
            i++;
        }
        assertTrue("Checking number of read entries", i == numEntries);
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Read callback that bumps the shared counter by a fixed throttle amount on
 * each completed read, waking up any thread waiting on the sync object.
 */
class ThrottleTestCallback implements ReadCallback {
    int throttle;

    ThrottleTestCallback(int threshold) {
        this.throttle = threshold;
    }

    @Override
    public void readComplete(int rc, LedgerHandle lh, Enumeration<LedgerEntry> seq, Object ctx) {
        SyncObj syncObj = (SyncObj) ctx;
        syncObj.setLedgerEntries(seq);
        syncObj.setReturnCode(rc);
        synchronized (syncObj) {
            syncObj.counter += throttle;
            syncObj.notify();
        }
        LOG.info("Current counter: " + syncObj.counter);
    }
}
/**
 * Writes UTF-8 encoded random-integer strings asynchronously, then reads
 * them back with the synchronous read API and verifies round-trip equality.
 */
@Test
public void testSyncReadAsyncWriteStringsSingleClient() throws IOException {
    SyncObj sync = new SyncObj();
    LOG.info("TEST READ WRITE STRINGS MIXED SINGLE CLIENT");
    String charset = "utf-8"; // explicit charset for both encode and decode
    LOG.debug("Default charset: " + Charset.defaultCharset());
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        for (int i = 0; i < numEntriesToWrite; i++) {
            int randomInt = rng.nextInt(maxInt);
            byte[] entry = Integer.toString(randomInt).getBytes(charset);
            entries.add(entry);
            lh.asyncAddEntry(entry, this, sync);
        }
        // wait for all entries to be acknowledged
        synchronized (sync) {
            while (sync.counter < numEntriesToWrite) {
                LOG.debug("Entries counter = " + sync.counter);
                sync.wait();
            }
            assertEquals("Error adding", BKException.Code.OK, sync.getReturnCode());
        }
        LOG.debug("*** ASYNC WRITE COMPLETE ***");
        // close ledger
        lh.close();
        // *** WRITING PART COMPLETED // READ PART BEGINS ***
        // open ledger
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        LOG.debug("Number of entries written: " + (lh.getLastAddConfirmed() + 1));
        assertTrue("Verifying number of entries written", lh.getLastAddConfirmed() == (numEntriesToWrite - 1));
        // read entries synchronously
        Enumeration<LedgerEntry> ls = lh.readEntries(0, numEntriesToWrite - 1);
        LOG.debug("*** SYNC READ COMPLETE ***");
        // at this point, Enumeration<LedgerEntry> ls is filled with the returned
        // values
        int i = 0;
        while (ls.hasMoreElements()) {
            byte[] origEntryBytes = entries.get(i++);
            byte[] retrEntryBytes = ls.nextElement().getEntry();
            LOG.debug("Original byte entry size: " + origEntryBytes.length);
            LOG.debug("Saved byte entry size: " + retrEntryBytes.length);
            String origEntry = new String(origEntryBytes, charset);
            String retrEntry = new String(retrEntryBytes, charset);
            LOG.debug("Original entry: " + origEntry);
            LOG.debug("Retrieved entry: " + retrEntry);
            assertTrue("Checking entry " + i + " for equality", origEntry.equals(retrEntry));
        }
        assertTrue("Checking number of read entries", i == numEntriesToWrite);
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Writes random 4-byte integer entries with the synchronous add API and
 * reads them back synchronously, verifying payload equality.
 */
@Test
public void testReadWriteSyncSingleClient() throws IOException {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        for (int i = 0; i < numEntriesToWrite; i++) {
            ByteBuffer entry = ByteBuffer.allocate(4);
            entry.putInt(rng.nextInt(maxInt));
            entry.position(0);
            entries.add(entry.array());
            lh.addEntry(entry.array());
        }
        lh.close();
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        // NOTE(review): getLastAddConfirmed() is the last entry id, i.e.
        // count - 1, so this log message understates the count by one.
        LOG.debug("Number of entries written: " + lh.getLastAddConfirmed());
        assertTrue("Verifying number of entries written", lh.getLastAddConfirmed() == (numEntriesToWrite - 1));
        Enumeration<LedgerEntry> ls = lh.readEntries(0, numEntriesToWrite - 1);
        int i = 0;
        while (ls.hasMoreElements()) {
            ByteBuffer origbb = ByteBuffer.wrap(entries.get(i++));
            Integer origEntry = origbb.getInt();
            ByteBuffer result = ByteBuffer.wrap(ls.nextElement().getEntry());
            LOG.debug("Length of result: " + result.capacity());
            LOG.debug("Original entry: " + origEntry);
            Integer retrEntry = result.getInt();
            LOG.debug("Retrieved entry: " + retrEntry);
            assertTrue("Checking entry " + i + " for equality", origEntry.equals(retrEntry));
        }
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Writes {@code numEntriesToWrite} zero-length entries followed by one
 * non-empty entry, then reads back only the empty entries and verifies each
 * has zero bytes.
 */
@Test
public void testReadWriteZero() throws IOException {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1")
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        final CountDownLatch completeLatch = new CountDownLatch(numEntriesToWrite);
        final AtomicInteger rc = new AtomicInteger(BKException.Code.OK);
        for (int i = 0; i < numEntriesToWrite; i++) {
            lh.asyncAddEntry(new byte[0], new AddCallback() {
                public void addComplete(int rccb, LedgerHandle lh, long entryId, Object ctx) {
                    // keep only the first failure code
                    rc.compareAndSet(BKException.Code.OK, rccb);
                    completeLatch.countDown();
                }
            }, null);
        }
        completeLatch.await();
        if (rc.get() != BKException.Code.OK) {
            throw BKException.create(rc.get());
        }
        /*
         * Write a non-zero entry
         */
        ByteBuffer entry = ByteBuffer.allocate(4);
        entry.putInt(rng.nextInt(maxInt));
        entry.position(0);
        entries.add(entry.array());
        lh.addEntry(entry.array());
        lh.close();
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        LOG.debug("Number of entries written: " + lh.getLastAddConfirmed());
        assertTrue("Verifying number of entries written", lh.getLastAddConfirmed() == numEntriesToWrite);
        // Read back only the empty entries (the trailing non-empty one is
        // entry id numEntriesToWrite and is deliberately excluded).
        Enumeration<LedgerEntry> ls = lh.readEntries(0, numEntriesToWrite - 1);
        int i = 0;
        while (ls.hasMoreElements()) {
            ByteBuffer result = ByteBuffer.wrap(ls.nextElement().getEntry());
            LOG.debug("Length of result: " + result.capacity());
            assertTrue("Checking if entry " + i + " has zero bytes", result.capacity() == 0);
            // Bug fix: the index was never incremented, so every assertion
            // message referred to entry 0 and the count was never verified.
            i++;
        }
        assertTrue("Checking number of read entries", i == numEntriesToWrite);
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Writes zero-length entries concurrently to two ledgers, then reads both
 * back and verifies every entry is empty and the counts match.
 */
@Test
public void testMultiLedger() throws IOException {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        lh2 = bkc.createLedger(digestType, ledgerPassword);
        long ledgerId = lh.getId();
        long ledgerId2 = lh2.getId();
        final CountDownLatch completeLatch = new CountDownLatch(numEntriesToWrite * 2);
        final AtomicInteger rc = new AtomicInteger(BKException.Code.OK);
        // bkc.initMessageDigest("SHA1");
        LOG.info("Ledger ID 1: " + lh.getId() + ", Ledger ID 2: " + lh2.getId());
        for (int i = 0; i < numEntriesToWrite; i++) {
            lh.asyncAddEntry(new byte[0], new AddCallback() {
                public void addComplete(int rc2, LedgerHandle lh, long entryId, Object ctx) {
                    rc.compareAndSet(BKException.Code.OK, rc2);
                    completeLatch.countDown();
                }
            }, null);
            lh2.asyncAddEntry(new byte[0], new AddCallback() {
                public void addComplete(int rc2, LedgerHandle lh, long entryId, Object ctx) {
                    rc.compareAndSet(BKException.Code.OK, rc2);
                    completeLatch.countDown();
                }
            }, null);
        }
        completeLatch.await();
        if (rc.get() != BKException.Code.OK) {
            throw BKException.create(rc.get());
        }
        lh.close();
        lh2.close();
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        lh2 = bkc.openLedger(ledgerId2, digestType, ledgerPassword);
        LOG.debug("Number of entries written: " + lh.getLastAddConfirmed() + ", " + lh2.getLastAddConfirmed());
        assertTrue("Verifying number of entries written lh (" + lh.getLastAddConfirmed() + ")", lh
                .getLastAddConfirmed() == (numEntriesToWrite - 1));
        assertTrue("Verifying number of entries written lh2 (" + lh2.getLastAddConfirmed() + ")", lh2
                .getLastAddConfirmed() == (numEntriesToWrite - 1));
        Enumeration<LedgerEntry> ls = lh.readEntries(0, numEntriesToWrite - 1);
        int i = 0;
        while (ls.hasMoreElements()) {
            ByteBuffer result = ByteBuffer.wrap(ls.nextElement().getEntry());
            LOG.debug("Length of result: " + result.capacity());
            assertTrue("Checking if entry " + i + " has zero bytes", result.capacity() == 0);
            // Bug fix: the index was never incremented in either read loop,
            // so the per-entry messages were wrong and counts went unchecked.
            i++;
        }
        assertTrue("Checking number of read entries", i == numEntriesToWrite);
        lh.close();
        ls = lh2.readEntries(0, numEntriesToWrite - 1);
        i = 0;
        while (ls.hasMoreElements()) {
            ByteBuffer result = ByteBuffer.wrap(ls.nextElement().getEntry());
            LOG.debug("Length of result: " + result.capacity());
            assertTrue("Checking if entry " + i + " has zero bytes", result.capacity() == 0);
            i++;
        }
        assertTrue("Checking number of read entries", i == numEntriesToWrite);
        lh2.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Verifies the ledger length accounting: after writing numEntriesToWrite
 * 4-byte entries, getLength() must equal numEntriesToWrite * 4, both before
 * closing and after re-opening the ledger.
 */
@Test
public void testReadWriteAsyncLength() throws IOException {
    SyncObj sync = new SyncObj();
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        for (int i = 0; i < numEntriesToWrite; i++) {
            ByteBuffer entry = ByteBuffer.allocate(4);
            entry.putInt(rng.nextInt(maxInt));
            entry.position(0);
            entries.add(entry.array());
            entriesSize.add(entry.array().length);
            lh.asyncAddEntry(entry.array(), this, sync);
        }
        // wait for all entries to be acknowledged
        synchronized (sync) {
            while (sync.counter < numEntriesToWrite) {
                LOG.debug("Entries counter = " + sync.counter);
                sync.wait();
            }
            assertEquals("Error adding", BKException.Code.OK, sync.getReturnCode());
        }
        // each entry is exactly 4 bytes
        long length = numEntriesToWrite * 4;
        assertTrue("Ledger length before closing: " + lh.getLength(), lh.getLength() == length);
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
        // *** WRITING PART COMPLETE // READ PART BEGINS ***
        // open ledger; length must survive the close/open cycle
        lh = bkc.openLedger(ledgerId, digestType, ledgerPassword);
        assertTrue("Ledger length after opening: " + lh.getLength(), lh.getLength() == length);
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Writes {@code numToWrite} random 4-byte entries to the given ledger: the
 * first numToWrite - 1 asynchronously, the final one synchronously, so that
 * on return every entry is acknowledged.
 *
 * @param lh ledger handle to write to
 * @param numToWrite total number of entries to write (must be >= 1)
 * @return the ledger's last-add-confirmed id after the synchronous write
 * @throws Exception if any asynchronous add failed, or on interruption
 */
private long writeNEntriesLastWriteSync(LedgerHandle lh, int numToWrite) throws Exception {
    final CountDownLatch completeLatch = new CountDownLatch(numToWrite - 1);
    final AtomicInteger rc = new AtomicInteger(BKException.Code.OK);
    // (removed a dead ByteBuffer.allocate(4) that was immediately reassigned)
    for (int i = 0; i < numToWrite - 1; i++) {
        ByteBuffer entry = ByteBuffer.allocate(4);
        entry.putInt(rng.nextInt(maxInt));
        entry.position(0);
        entries.add(entry.array());
        entriesSize.add(entry.array().length);
        lh.asyncAddEntry(entry.array(), new AddCallback() {
            public void addComplete(int rccb, LedgerHandle lh, long entryId, Object ctx) {
                // keep only the first failure code
                rc.compareAndSet(BKException.Code.OK, rccb);
                completeLatch.countDown();
            }
        }, null);
    }
    completeLatch.await();
    if (rc.get() != BKException.Code.OK) {
        throw BKException.create(rc.get());
    }
    // Final synchronous add guarantees all prior entries are confirmed.
    ByteBuffer entry = ByteBuffer.allocate(4);
    entry.putInt(rng.nextInt(maxInt));
    entry.position(0);
    entries.add(entry.array());
    entriesSize.add(entry.array().length);
    lh.addEntry(entry.array());
    return lh.getLastAddConfirmed();
}
/**
 * Verifies reading from a still-open ledger via a no-recovery handle: the
 * reader may only read up to LAC - 1, must not be able to write, and its
 * close must not change the writer's metadata. Also exercises the
 * asynchronous read-last-confirmed path.
 */
@Test
public void testReadFromOpenLedger() throws Exception {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        long lac = writeNEntriesLastWriteSync(lh, numEntriesToWrite);
        LedgerHandle lhOpen = bkc.openLedgerNoRecovery(ledgerId, digestType, ledgerPassword);
        // no recovery opened ledger 's last confirmed entry id is less than written
        // and it just can read until (i-1)
        long toRead = lac - 1;
        Enumeration<LedgerEntry> readEntry = lhOpen.readEntries(toRead, toRead);
        assertTrue("Enumeration of ledger entries has no element", readEntry.hasMoreElements());
        LedgerEntry e = readEntry.nextElement();
        assertEquals(toRead, e.getEntryId());
        assertArrayEquals(entries.get((int) toRead), e.getEntry());
        // should not written to a read only ledger
        try {
            ByteBuffer entry = ByteBuffer.allocate(4);
            entry.putInt(rng.nextInt(maxInt));
            entry.position(0);
            lhOpen.addEntry(entry.array());
            fail("Should have thrown an exception here");
        } catch (BKException.BKIllegalOpException bkioe) {
            // this is the correct response
        } catch (Exception ex) {
            LOG.error("Unexpected exception", ex);
            fail("Unexpected exception");
        }
        // close read only ledger should not change metadata
        lhOpen.close();
        lac = writeNEntriesLastWriteSync(lh, numEntriesToWrite);
        assertEquals("Last confirmed add: ", lac, (numEntriesToWrite * 2) - 1);
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
        /*
         * Asynchronous call to read last confirmed entry
         */
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        writeNEntriesLastWriteSync(lh, numEntriesToWrite);
        SyncObj sync = new SyncObj();
        lh.asyncReadLastConfirmed(this, sync);
        // Wait for for last confirmed
        synchronized (sync) {
            while (sync.lastConfirmed == -1) {
                LOG.debug("Counter = " + sync.lastConfirmed);
                sync.wait();
            }
            assertEquals("Error reading", BKException.Code.OK, sync.getReturnCode());
        }
        // LAC visible to readers trails the true LAC by one entry.
        assertEquals("Last confirmed add", sync.lastConfirmed, (numEntriesToWrite - 2));
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Same as testReadFromOpenLedger but the no-recovery handle is opened once
 * before any writes: subsequent writes through the write handle must become
 * visible to the reader (up to LAC - 1), and the reader must stay read-only.
 */
@Test
public void testReadFromOpenLedgerOpenOnce() throws Exception {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        LedgerHandle lhOpen = bkc.openLedgerNoRecovery(ledgerId, digestType, ledgerPassword);
        writeNEntriesLastWriteSync(lh, numEntriesToWrite / 2);
        ByteBuffer entry = ByteBuffer.allocate(4);
        entry.putInt(rng.nextInt(maxInt));
        entry.position(0);
        // no recovery opened ledger 's last confirmed entry id is
        // less than written
        // and it just can read until (i-1)
        int toRead = numEntriesToWrite / 2 - 2;
        long readLastConfirmed = lhOpen.readLastConfirmed();
        assertTrue(readLastConfirmed != 0);
        Enumeration<LedgerEntry> readEntry = lhOpen.readEntries(toRead, toRead);
        assertTrue("Enumeration of ledger entries has no element", readEntry.hasMoreElements());
        LedgerEntry e = readEntry.nextElement();
        assertEquals(toRead, e.getEntryId());
        assertArrayEquals(entries.get(toRead), e.getEntry());
        // should not written to a read only ledger
        try {
            lhOpen.addEntry(entry.array());
            fail("Should have thrown an exception here");
        } catch (BKException.BKIllegalOpException bkioe) {
            // this is the correct response
        } catch (Exception ex) {
            LOG.error("Unexpected exception", ex);
            fail("Unexpected exception");
        }
        writeNEntriesLastWriteSync(lh, numEntriesToWrite / 2);
        // readLastConfirmed trails the true LAC by one entry.
        long last = lh.readLastConfirmed();
        assertTrue("Last confirmed add: " + last, last == (numEntriesToWrite - 2));
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
        // close read only ledger should not change metadata
        lhOpen.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Verifies the LAC hint semantics at the boundary: with zero or one entry
 * written, a no-recovery reader still sees INVALID_ENTRY_ID; only after the
 * second write does the reader see entry 0 confirmed.
 */
@Test
public void testReadFromOpenLedgerZeroAndOne() throws Exception {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        LedgerHandle lhOpen = bkc.openLedgerNoRecovery(ledgerId, digestType, ledgerPassword);
        /*
         * We haven't written anything, so it should be empty.
         */
        LOG.debug("Checking that it is empty");
        long readLastConfirmed = lhOpen.readLastConfirmed();
        assertTrue("Last confirmed has the wrong value",
                   readLastConfirmed == LedgerHandle.INVALID_ENTRY_ID);
        /*
         * Writing one entry.
         */
        LOG.debug("Going to write one entry");
        ByteBuffer entry = ByteBuffer.allocate(4);
        entry.putInt(rng.nextInt(maxInt));
        entry.position(0);
        entries.add(entry.array());
        entriesSize.add(entry.array().length);
        lh.addEntry(entry.array());
        /*
         * The hint should still indicate that there is no confirmed
         * add.
         */
        LOG.debug("Checking that it is still empty even after writing one entry");
        readLastConfirmed = lhOpen.readLastConfirmed();
        assertTrue(readLastConfirmed == LedgerHandle.INVALID_ENTRY_ID);
        /*
         * Adding one more, and this time we should expect to
         * see one entry.
         */
        entry = ByteBuffer.allocate(4);
        entry.putInt(rng.nextInt(maxInt));
        entry.position(0);
        entries.add(entry.array());
        entriesSize.add(entry.array().length);
        lh.addEntry(entry.array());
        LOG.info("Checking that it has an entry");
        readLastConfirmed = lhOpen.readLastConfirmed();
        assertTrue(readLastConfirmed == 0L);
        // close ledger
        lh.close();
        // close read only ledger should not change metadata
        lhOpen.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Verifies that both asyncAddEntry variants fail with IllegalOpException on a
 * read-only (no-recovery) handle, that the read-only handle can be closed
 * asynchronously, and that closing it does not break the write handle.
 */
@Test
public void testWriteUsingReadOnlyHandle() throws Exception {
    // Create a ledger
    lh = bkc.createLedger(digestType, ledgerPassword);
    ledgerId = lh.getId();
    LOG.info("Ledger ID: " + lh.getId());
    long lac = writeNEntriesLastWriteSync(lh, numEntriesToWrite); // lac unused; kept for parity with siblings
    LedgerHandle lhOpen = bkc.openLedgerNoRecovery(ledgerId, digestType, ledgerPassword);
    // addEntry on ReadOnlyHandle should fail
    CountDownLatch latch = new CountDownLatch(1);
    final int[] rcArray = { 0 };
    lhOpen.asyncAddEntry("".getBytes(), new AddCallback() {
        @Override
        public void addComplete(int rc, LedgerHandle lh, long entryId, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            latch.countDown();
        }
    }, latch);
    latch.await();
    // NOTE(review): rcArray[0] is a BookKeeper return code, but the failure
    // message renders it via KeeperException.Code.get — likely misleading.
    if (rcArray[0] != BKException.Code.IllegalOpException) {
        Assert.fail("Test1 - asyncAddOperation is supposed to be failed, but it got following rc - "
                    + KeeperException.Code.get(rcArray[0]));
    }
    // addEntry on ReadOnlyHandle should fail
    latch = new CountDownLatch(1);
    rcArray[0] = 0;
    lhOpen.asyncAddEntry("".getBytes(), 0, 0, new AddCallback() {
        @Override
        public void addComplete(int rc, LedgerHandle lh, long entryId, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            latch.countDown();
        }
    }, latch);
    latch.await();
    if (rcArray[0] != BKException.Code.IllegalOpException) {
        Assert.fail(
                "Test2 - asyncAddOperation is supposed to fail with IllegalOpException, but it got following rc - "
                + KeeperException.Code.get(rcArray[0]));
    }
    // close readonlyhandle
    latch = new CountDownLatch(1);
    rcArray[0] = 0;
    lhOpen.asyncClose(new CloseCallback() {
        @Override
        public void closeComplete(int rc, LedgerHandle lh, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            latch.countDown();
        }
    }, latch);
    latch.await();
    if (rcArray[0] != KeeperException.Code.OK.intValue()) {
        Assert.fail("Test3 - asyncClose failed because of exception - " + KeeperException.Code.get(rcArray[0]));
    }
    // close of readonlyhandle should not affect the writehandle
    writeNEntriesLastWriteSync(lh, 5);
    lh.close();
}
/**
 * Verifies that every addEntry variant taking an explicit entry id fails with
 * IllegalOpException on a regular (non-advanced) LedgerHandle.
 */
@Test
public void testLedgerHandle() throws Exception {
    // Create a ledger
    lh = bkc.createLedger(digestType, ledgerPassword);
    ledgerId = lh.getId();
    LOG.info("Ledger ID: " + lh.getId());
    long lac = writeNEntriesLastWriteSync(lh, 5);
    // doing addEntry with entryid using regular Ledgerhandle should fail
    CountDownLatch latch = new CountDownLatch(1);
    final int[] rcArray = { 0 };
    lh.asyncAddEntry(lac + 1, "".getBytes(), new AddCallback() {
        @Override
        public void addComplete(int rc, LedgerHandle lh, long entryId, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            latch.countDown();
        }
    }, latch);
    latch.await();
    // NOTE(review): BK return codes are rendered via KeeperException.Code.get
    // in the failure messages below — likely misleading but behavior-neutral.
    if (rcArray[0] != BKException.Code.IllegalOpException) {
        Assert.fail(
                "Test1 - addEntry with EntryID is expected to fail with IllegalOpException, "
                + "but it got following rc - " + KeeperException.Code.get(rcArray[0]));
    }
    // doing addEntry with entryid using regular Ledgerhandle should fail
    latch = new CountDownLatch(1);
    rcArray[0] = 0;
    lh.asyncAddEntry(lac + 1, "".getBytes(), 0, 0, new AddCallback() {
        @Override
        public void addComplete(int rc, LedgerHandle lh, long entryId, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            latch.countDown();
        }
    }, latch);
    latch.await();
    if (rcArray[0] != BKException.Code.IllegalOpException) {
        Assert.fail(
                "Test2 - addEntry with EntryID is expected to fail with IllegalOpException,"
                + "but it got following rc - " + KeeperException.Code.get(rcArray[0]));
    }
    // doing addEntry with entryid using regular Ledgerhandle should fail
    try {
        lh.addEntry(lac + 1, "".getBytes());
        Assert.fail("Test3 - addEntry with EntryID is expected to fail");
    } catch (BKIllegalOpException E) {
    }
    // doing addEntry with entryid using regular Ledgerhandle should fail
    try {
        lh.addEntry(lac + 1, "".getBytes(), 0, 0);
        Assert.fail("Test4 - addEntry with EntryID is expected to fail");
    } catch (BKIllegalOpException E) {
    }
    lh.close();
}
/**
 * Verifies readLastConfirmed and asyncReadLastConfirmed: since the last
 * entry's confirmation piggybacks on the next add, the observable LAC is
 * numEntriesToWrite - 2 while the ledger is still open.
 */
@Test
public void testLastConfirmedAdd() throws Exception {
    try {
        // Create a ledger
        lh = bkc.createLedger(digestType, ledgerPassword);
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        writeNEntriesLastWriteSync(lh, numEntriesToWrite);
        long last = lh.readLastConfirmed();
        assertTrue("Last confirmed add: " + last, last == (numEntriesToWrite - 2));
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
        /*
         * Asynchronous call to read last confirmed entry
         */
        lh = bkc.createLedger(digestType, ledgerPassword);
        // bkc.initMessageDigest("SHA1");
        ledgerId = lh.getId();
        LOG.info("Ledger ID: " + lh.getId());
        writeNEntriesLastWriteSync(lh, numEntriesToWrite);
        SyncObj sync = new SyncObj();
        lh.asyncReadLastConfirmed(this, sync);
        // Wait for for last confirmed
        synchronized (sync) {
            while (sync.lastConfirmed == LedgerHandle.INVALID_ENTRY_ID) {
                LOG.debug("Counter = " + sync.lastConfirmed);
                sync.wait();
            }
            assertEquals("Error reading", BKException.Code.OK, sync.getReturnCode());
        }
        assertTrue("Last confirmed add: " + sync.lastConfirmed, sync.lastConfirmed == (numEntriesToWrite - 2));
        LOG.debug("*** WRITE COMPLETE ***");
        // close ledger
        lh.close();
    } catch (BKException e) {
        LOG.error("Test failed", e);
        fail("Test failed due to BookKeeper exception");
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        LOG.error("Test failed", e);
        fail("Test failed due to interruption");
    }
}
/**
 * Verifies the cached LAC on a read-only handle versus the values returned
 * by asyncReadLastConfirmed, asyncTryReadLastConfirmed and
 * tryReadLastConfirmed once the write handle has been closed.
 */
@Test
public void testReadLastConfirmed() throws Exception {
    // Create a ledger and add entries
    lh = bkc.createLedger(digestType, ledgerPassword);
    // bkc.initMessageDigest("SHA1");
    ledgerId = lh.getId();
    LOG.info("Ledger ID: " + lh.getId());
    long previousLAC = writeNEntriesLastWriteSync(lh, 5);
    // add more entries after opening ReadonlyLedgerHandle
    LedgerHandle lhOpen = bkc.openLedgerNoRecovery(ledgerId, digestType, ledgerPassword);
    long currentLAC = writeNEntriesLastWriteSync(lh, 5);
    // get LAC instance variable of ReadHandle and verify if it is equal to (previousLAC - 1)
    long readLAC = lhOpen.getLastAddConfirmed();
    Assert.assertEquals("Test1 - For ReadHandle LAC", (previousLAC - 1), readLAC);
    // close the write LedgerHandle and sleep for 500 msec to make sure all close watchers are called
    lh.close();
    Thread.sleep(500);
    // now call asyncReadLastConfirmed and verify if it is equal to currentLAC
    CountDownLatch latch = new CountDownLatch(1);
    final int[] rcArray = { 0 };
    final long[] lastConfirmedArray = { 0 };
    lhOpen.asyncReadLastConfirmed(new ReadLastConfirmedCallback() {
        @Override
        public void readLastConfirmedComplete(int rc, long lastConfirmed, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            lastConfirmedArray[0] = lastConfirmed;
            latch.countDown();
        }
    }, latch);
    latch.await();
    Assert.assertEquals("Test3 - asyncReadLastConfirmed response", KeeperException.Code.OK.intValue(), rcArray[0]);
    Assert.assertEquals("Test3 - ReadLAC", currentLAC, lastConfirmedArray[0]);
    // similarly try calling asyncTryReadLastConfirmed and verify if it is equal to currentLAC
    latch = new CountDownLatch(1);
    rcArray[0] = 0;
    lastConfirmedArray[0] = 0;
    lhOpen.asyncTryReadLastConfirmed(new ReadLastConfirmedCallback() {
        @Override
        public void readLastConfirmedComplete(int rc, long lastConfirmed, Object ctx) {
            CountDownLatch latch = (CountDownLatch) ctx;
            rcArray[0] = rc;
            lastConfirmedArray[0] = lastConfirmed;
            latch.countDown();
        }
    }, latch);
    latch.await();
    Assert.assertEquals("Test4 - asyncTryReadLastConfirmed response", KeeperException.Code.OK.intValue(),
                        rcArray[0]);
    Assert.assertEquals("Test4 - ReadLAC", currentLAC, lastConfirmedArray[0]);
    // similarly try calling tryReadLastConfirmed and verify if it is equal to currentLAC
    long tryReadLAC = lhOpen.tryReadLastConfirmed();
    Assert.assertEquals("Test5 - ReadLAC", currentLAC, tryReadLAC);
}
/**
 * Asynchronous add callback: records the result code on the shared
 * {@code SyncObj} passed as the context, bumps its completion counter,
 * and wakes any thread blocked waiting on it.
 */
@Override
public void addComplete(int rc, LedgerHandle lh, long entryId, Object ctx) {
    final SyncObj state = (SyncObj) ctx;
    state.setReturnCode(rc);
    synchronized (state) {
        state.counter = state.counter + 1;
        state.notify();
    }
}
/**
 * Asynchronous read callback: stashes the returned entries and result
 * code on the shared {@code SyncObj}, flags completion, and notifies
 * the waiting test thread.
 */
@Override
public void readComplete(int rc, LedgerHandle lh, Enumeration<LedgerEntry> seq, Object ctx) {
    final SyncObj state = (SyncObj) ctx;
    state.setLedgerEntries(seq);
    state.setReturnCode(rc);
    synchronized (state) {
        state.value = true;
        state.notify();
    }
}
/**
 * Last-add-confirmed callback: publishes the LAC value and result code
 * through the shared {@code SyncObj} and wakes the waiting thread.
 */
@Override
public void readLastConfirmedComplete(int rc, long lastConfirmed, Object ctx) {
    final SyncObj state = (SyncObj) ctx;
    state.setReturnCode(rc);
    synchronized (state) {
        state.lastConfirmed = lastConfirmed;
        state.notify();
    }
}
/** Per-test setup: fresh RNG and empty payload/size lists. */
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    // Seed the random generator from the wall clock so each run writes
    // different payloads.
    final long seed = System.currentTimeMillis();
    rng = new Random(seed);
    // Fresh containers for the entries written in a test and their sizes.
    entries = new ArrayList<byte[]>();
    entriesSize = new ArrayList<Integer>();
}
/**
 * Recursively deletes {@code dir} and everything beneath it.
 *
 * @param dir file or directory to remove
 * @return true if everything was deleted, false on the first failure
 */
protected boolean cleanUpDir(File dir) {
    if (dir.isDirectory()) {
        LOG.info("Cleaning up " + dir.getName());
        String[] children = dir.list();
        // list() returns null on an I/O error or if the directory vanished
        // concurrently; treat that as a cleanup failure instead of letting
        // the for-each below throw a NullPointerException.
        if (children == null) {
            return false;
        }
        for (String child : children) {
            boolean success = cleanUpDir(new File(dir, child));
            if (!success) {
                return false;
            }
        }
    }
    // The directory is now empty (or dir is a plain file) so delete it
    return dir.delete();
}
/**
 * No-op ZooKeeper {@link Watcher} for tests that must supply a watcher
 * instance but do not care about the events delivered to it.
 */
class EmptyWatcher implements Watcher {
    @Override
    public void process(WatchedEvent event) {
        // Intentionally empty: all events are ignored.
    }
}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.ElementsChooser;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.components.ExportableComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.VerticalFlowLayout;
import com.intellij.openapi.util.AsyncResult;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.FieldPanel;
import com.intellij.util.Consumer;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.*;
import java.util.List;
/**
 * Modal dialog that lets the user choose which exportable IDE components to
 * write into a settings archive and, optionally, the target file path.
 * Components that serialize into the same export file are grouped into a
 * single chooser entry ({@link ComponentElementProperties}).
 */
public class ChooseComponentsToExportDialog extends DialogWrapper {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.actions.ChooseComponentsToExportDialog");
  // Checkbox list of component groups offered for export.
  private final ElementsChooser<ComponentElementProperties> myChooser;
  // Text field plus browse button for the target archive path.
  private final FieldPanel myPathPanel;
  @NonNls
  public static final String DEFAULT_PATH = FileUtil.toSystemDependentName(PathManager.getConfigPath()+"/"+"settings.jar");
  private final boolean myShowFilePath;
  private final String myDescription;

  /**
   * @param components       all exportable components to offer
   * @param fileToComponents reverse index: export file -> components stored in it
   * @param showFilePath     whether to show the path field below the buttons
   * @param title            dialog title
   * @param description      text shown above the chooser
   */
  public ChooseComponentsToExportDialog(List<ExportableComponent> components,
                                        Map<File, Set<ExportableComponent>> fileToComponents,
                                        boolean showFilePath, final String title, String description) {
    super(false);
    myDescription = description;
    myShowFilePath = showFilePath;
    // Group components that share an export file into one list element;
    // components with no already-seen sibling get a fresh group.
    Map<ExportableComponent, ComponentElementProperties> componentToContainingListElement = new LinkedHashMap<ExportableComponent, ComponentElementProperties>();
    for (ExportableComponent component : components) {
      if (!addToExistingListElement(component, componentToContainingListElement, fileToComponents)) {
        ComponentElementProperties componentElementProperties = new ComponentElementProperties();
        componentElementProperties.addComponent(component);
        componentToContainingListElement.put(component, componentElementProperties);
      }
    }
    // De-duplicate groups while keeping insertion order.
    final Set<ComponentElementProperties> componentElementProperties = new LinkedHashSet<ComponentElementProperties>(componentToContainingListElement.values());
    myChooser = new ElementsChooser<ComponentElementProperties>(true);
    myChooser.setColorUnmarkedElements(false);
    // Every group starts marked (selected for export).
    for (final ComponentElementProperties componentElementProperty : componentElementProperties) {
      myChooser.addElement(componentElementProperty, true, componentElementProperty);
    }
    // Sort groups alphabetically by their display name.
    myChooser.sort(new Comparator<ComponentElementProperties>() {
      @Override
      public int compare(ComponentElementProperties o1,
                         ComponentElementProperties o2) {
        return o1.toString().compareTo(o2.toString());
      }
    });
    // Browse button: asynchronously pick the target file, then write the
    // chosen path back into the field.
    final ActionListener browseAction = new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        chooseSettingsFile(myPathPanel.getText(), getWindow(), IdeBundle.message("title.export.file.location"), IdeBundle.message("prompt.choose.export.settings.file.path"))
          .doWhenDone(new Consumer<String>() {
            @Override
            public void consume(String path) {
              myPathPanel.setText(FileUtil.toSystemDependentName(path));
            }
          });
      }
    };
    myPathPanel = new FieldPanel(IdeBundle.message("editbox.export.settings.to"), null, browseAction, null);
    // Restore the last-used export path, defaulting to <config>/settings.jar.
    String exportPath = PropertiesComponent.getInstance().getOrInit("export.settings.path", DEFAULT_PATH);
    myPathPanel.setText(exportPath);
    myPathPanel.setChangeListener(new Runnable() {
      @Override
      public void run() {
        updateControls();
      }
    });
    updateControls();
    setTitle(title);
    init();
  }

  /** Enables OK only while the path field holds non-blank text. */
  private void updateControls() {
    setOKActionEnabled(!StringUtil.isEmptyOrSpaces(myPathPanel.getText()));
  }

  /** Adds the Select All / Select None / Invert buttons on the dialog's left side. */
  @NotNull
  @Override
  protected Action[] createLeftSideActions() {
    AbstractAction selectAll = new AbstractAction("Select &All") {
      @Override
      public void actionPerformed(ActionEvent e) {
        myChooser.setAllElementsMarked(true);
      }
    };
    AbstractAction selectNone = new AbstractAction("Select &None") {
      @Override
      public void actionPerformed(ActionEvent e) {
        myChooser.setAllElementsMarked(false);
      }
    };
    AbstractAction invert = new AbstractAction("&Invert") {
      @Override
      public void actionPerformed(ActionEvent e) {
        myChooser.invertSelection();
      }
    };
    return new Action[]{selectAll, selectNone, invert};
  }

  /** Persists the chosen export path before closing the dialog. */
  @Override
  protected void doOKAction() {
    PropertiesComponent.getInstance().setValue("export.settings.path", myPathPanel.getText());
    super.doOKAction();
  }

  /**
   * Tries to attach {@code component} to an already-created group that contains
   * another component serializing into one of the same files.
   *
   * @return true if the component was merged into an existing group
   */
  private static boolean addToExistingListElement(ExportableComponent component,
                                                  Map<ExportableComponent,ComponentElementProperties> componentToContainingListElement,
                                                  Map<File, Set<ExportableComponent>> fileToComponents) {
    final File[] exportFiles = component.getExportFiles();
    // Remembers the export file through which the component was merged;
    // stays null if no existing group matched.
    File file = null;
    for (File exportFile : exportFiles) {
      final Set<ExportableComponent> tiedComponents = fileToComponents.get(exportFile);
      for (final ExportableComponent tiedComponent : tiedComponents) {
        if (tiedComponent == component) continue;
        final ComponentElementProperties elementProperties = componentToContainingListElement.get(tiedComponent);
        if (elementProperties != null && !FileUtil.filesEqual(exportFile, file)) {
          // The assert fires if the component would be merged via two
          // DIFFERENT files, i.e. it serializes itself into more than one
          // already-grouped file — that ambiguity is reported as a bug.
          LOG.assertTrue(file == null, "Component " + component + " serialize itself into " + file + " and " + exportFile);
          // found
          elementProperties.addComponent(component);
          componentToContainingListElement.put(component, elementProperties);
          file = exportFile;
        }
      }
    }
    return file != null;
  }

  /**
   * Shows an async file chooser for the settings archive.  If the user picks a
   * directory, the default archive name is appended to it.  The result is
   * rejected when the chooser is cancelled.
   */
  @NotNull
  public static AsyncResult<String> chooseSettingsFile(String oldPath, Component parent, final String title, final String description) {
    FileChooserDescriptor chooserDescriptor = FileChooserDescriptorFactory.createSingleLocalFileDescriptor();
    chooserDescriptor.setDescription(description);
    chooserDescriptor.setHideIgnored(false);
    chooserDescriptor.setTitle(title);
    VirtualFile initialDir;
    if (oldPath != null) {
      final File oldFile = new File(oldPath);
      initialDir = LocalFileSystem.getInstance().findFileByIoFile(oldFile);
      // Fall back to the parent directory when the old file itself is gone.
      if (initialDir == null && oldFile.getParentFile() != null) {
        initialDir = LocalFileSystem.getInstance().findFileByIoFile(oldFile.getParentFile());
      }
    }
    else {
      initialDir = null;
    }
    final AsyncResult<String> result = new AsyncResult<String>();
    FileChooser.chooseFiles(chooserDescriptor, null, parent, initialDir, new FileChooser.FileChooserConsumer() {
      @Override
      public void consume(List<VirtualFile> files) {
        VirtualFile file = files.get(0);
        if (file.isDirectory()) {
          result.setDone(file.getPath() + '/' + new File(DEFAULT_PATH).getName());
        }
        else {
          result.setDone(file.getPath());
        }
      }
      @Override
      public void cancelled() {
        result.setRejected();
      }
    });
    return result;
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return myPathPanel.getTextField();
  }

  @Override
  protected JComponent createNorthPanel() {
    return new JLabel(myDescription);
  }

  @Override
  protected JComponent createCenterPanel() {
    return myChooser;
  }

  /** Standard buttons, plus the path field above them when enabled. */
  @Override
  protected JComponent createSouthPanel() {
    final JComponent buttons = super.createSouthPanel();
    if (!myShowFilePath) return buttons;
    final JPanel panel = new JPanel(new VerticalFlowLayout());
    panel.add(myPathPanel);
    panel.add(buttons);
    return panel;
  }

  /** @return the union of components from all currently marked groups */
  Set<ExportableComponent> getExportableComponents() {
    final List<ComponentElementProperties> markedElements = myChooser.getMarkedElements();
    final Set<ExportableComponent> components = new HashSet<ExportableComponent>();
    for (ComponentElementProperties elementProperties : markedElements) {
      components.addAll(elementProperties.myComponents);
    }
    return components;
  }

  /**
   * One chooser row: the set of components that share export files.
   * Its display name joins the presentable names of all members.
   */
  private static class ComponentElementProperties implements ElementsChooser.ElementProperties {
    private final Set<ExportableComponent> myComponents = new HashSet<ExportableComponent>();
    private boolean addComponent(ExportableComponent component) {
      return myComponents.add(component);
    }
    @Override
    @Nullable
    public Icon getIcon() {
      return null;
    }
    @Override
    @Nullable
    public Color getColor() {
      return null;
    }
    public String toString() {
      // De-duplicated, order-preserving list of member names, comma-joined.
      Set<String> names = new LinkedHashSet<String>();
      for (final ExportableComponent component : myComponents) {
        names.add(component.getPresentableName());
      }
      return StringUtil.join(names.toArray(new String[names.size()]), ", ");
    }
  }

  /** @return the archive file currently entered in the path field */
  File getExportFile() {
    return new File(myPathPanel.getText());
  }

  @Override
  protected String getDimensionServiceKey() {
    return "#com.intellij.ide.actions.ChooseComponentsToExportDialog";
  }
}
| |
/*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.sandbox.idp.service;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.ldap.core.AuthenticationErrorCallback;
import org.springframework.ldap.core.ContextMapper;
import org.springframework.ldap.core.DirContextAdapter;
import org.springframework.ldap.core.DistinguishedName;
import org.springframework.ldap.core.LdapTemplate;
import org.slc.sli.sandbox.idp.service.UserService.GroupContextMapper;
import org.slc.sli.sandbox.idp.service.UserService.PersonContextMapper;
import org.slc.sli.sandbox.idp.service.UserService.User;
/**
 * Unit tests for {@link UserService}: LDAP authentication against different
 * organizational units and extraction of key=value pairs from the LDAP
 * "description" attribute.
 */
@RunWith(MockitoJUnitRunner.class)
public class UsersTest {
    @Mock
    LdapTemplate ldapTemplate;
    // Service under test; constructor args name the LDAP uid attribute,
    // person object class, group-membership attribute, and group object class.
    @InjectMocks
    UserService userService = new UserService("uid", "person", "memberuid", "posixGroup");

    @Before
    public void setup() {
        // NOTE(review): MockitoJUnitRunner already initializes @Mock fields;
        // this explicit call is redundant but harmless.
        MockitoAnnotations.initMocks(this);
    }

    /**
     * Admin-realm login: when the LDAP entry carries no userType attribute,
     * the returned user's userType is "staff" (per the assertion below).
     */
    @Test
    public void testAuthenticate() throws AuthenticationException {
        DistinguishedName dn = new DistinguishedName("ou=SLIAdmin");
        Mockito.when(
                ldapTemplate.authenticate(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=testuser))"),
                        Mockito.eq("testuser1234"), Mockito.any(AuthenticationErrorCallback.class))).thenReturn(true);
        User mockUser = new User();
        Map<String, String> attributes = new HashMap<String, String>();
        attributes.put("userName", "Test User");
        mockUser.attributes = attributes;
        mockUser.userId = "testuser";
        Mockito.when(
                ldapTemplate.searchForObject(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=testuser))"),
                        Mockito.any(ContextMapper.class))).thenReturn(mockUser);
        List<String> mockGroups = new ArrayList<String>();
        mockGroups.add("TestGroup1");
        mockGroups.add("TestGroup2");
        Mockito.when(
                ldapTemplate.search(Mockito.eq(dn), Mockito.eq("(&(objectclass=posixGroup)(memberuid=testuser))"),
                        Mockito.any(GroupContextMapper.class))).thenReturn(mockGroups);
        UserService.User user = userService.authenticate("SLIAdmin", "testuser", "testuser1234");
        assertEquals("testuser", user.getUserId());
        assertEquals("Test User", user.getAttributes().get("userName"));
        assertEquals("staff", user.getAttributes().get("userType"));
        assertEquals(2, user.getRoles().size());
        assertEquals("TestGroup1", user.getRoles().get(0));
        assertEquals("TestGroup2", user.getRoles().get(1));
    }

    /** Staff-realm login: explicit userType=staff passes through; the group list becomes the roles. */
    @Test
    public void testStaffAuthenticate() throws AuthenticationException {
        DistinguishedName dn = new DistinguishedName("ou=StaffMember");
        List<String> mockGroups = new ArrayList<String>();
        mockGroups.add("Educator");
        Mockito.when(
                ldapTemplate.search(Mockito.eq(dn), Mockito.eq("(&(objectclass=posixGroup)(memberuid=staffuser))"),
                        Mockito.any(GroupContextMapper.class))).thenReturn(mockGroups);
        Mockito.when(
                ldapTemplate.authenticate(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=staffuser))"),
                        Mockito.eq("staffuser1234"), Mockito.any(AuthenticationErrorCallback.class))).thenReturn(true);
        Map<String, String> attributes = new HashMap<String, String>();
        attributes.put("userName", "Staff User");
        attributes.put("userType", "staff");
        User mockUser = new User("staffuser", mockGroups, attributes);
        Mockito.when(
                ldapTemplate.searchForObject(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=staffuser))"),
                        Mockito.any(ContextMapper.class))).thenReturn(mockUser);
        UserService.User user = userService.authenticate("StaffMember", "staffuser", "staffuser1234");
        assertEquals("staffuser", user.getUserId());
        assertEquals("Staff User", user.getAttributes().get("userName"));
        assertEquals("staff", user.getAttributes().get("userType"));
        assertEquals(1, user.getRoles().size());
        assertEquals("Educator", user.getRoles().get(0));
    }

    /**
     * Student-realm login: the returned user id is the employeeNumber
     * attribute, not the LDAP uid (per the first assertion).
     */
    @Test
    public void testStudentAuthenticate() throws AuthenticationException {
        DistinguishedName dn = new DistinguishedName("ou=Students");
        List<String> mockGroups = new ArrayList<String>();
        mockGroups.add("Student");
        Mockito.when(
                ldapTemplate.search(Mockito.eq(dn), Mockito.eq("(&(objectclass=posixGroup)(memberuid=studentuser))"),
                        Mockito.any(GroupContextMapper.class))).thenReturn(mockGroups);
        Mockito.when(
                ldapTemplate.authenticate(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=studentuser))"),
                        Mockito.eq("studentuser1234"), Mockito.any(AuthenticationErrorCallback.class))).thenReturn(true);
        Map<String, String> attributes = new HashMap<String, String>();
        attributes.put("userName", "Student User");
        attributes.put("userType", "student");
        attributes.put("employeeNumber", "1234567890");
        User mockUser = new User("studentuser", mockGroups, attributes);
        Mockito.when(
                ldapTemplate.searchForObject(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=studentuser))"),
                        Mockito.any(ContextMapper.class))).thenReturn(mockUser);
        UserService.User user = userService.authenticate("Students", "studentuser", "studentuser1234");
        assertEquals("1234567890", user.getUserId());
        assertEquals("Student User", user.getAttributes().get("userName"));
        assertEquals("student", user.getAttributes().get("userType"));
        assertEquals(1, user.getRoles().size());
        assertEquals("Student", user.getRoles().get(0));
    }

    /**
     * Sandbox admin login: isAdmin=true in the entry yields userType "admin",
     * and the Tenant attribute is passed through (per the assertions).
     */
    @Test
    public void testSandboxAuthenticate() throws AuthenticationException {
        DistinguishedName dn = new DistinguishedName("ou=SLIAdmin");
        Mockito.when(
                ldapTemplate.authenticate(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=testuser))"),
                        Mockito.eq("testuser1234"), Mockito.any(AuthenticationErrorCallback.class))).thenReturn(true);
        User mockUser = new User();
        Map<String, String> attributes = new HashMap<String, String>();
        attributes.put("userName", "Test User");
        attributes.put("Tenant", "mytenant");
        attributes.put("isAdmin", "true");
        mockUser.attributes = attributes;
        mockUser.userId = "testuser";
        Mockito.when(
                ldapTemplate.searchForObject(Mockito.eq(dn), Mockito.eq("(&(objectclass=person)(uid=testuser))"),
                        Mockito.any(ContextMapper.class))).thenReturn(mockUser);
        List<String> mockGroups = new ArrayList<String>();
        mockGroups.add("TestGroup1");
        mockGroups.add("TestGroup2");
        Mockito.when(
                ldapTemplate.search(Mockito.eq(dn), Mockito.eq("(&(objectclass=posixGroup)(memberuid=testuser))"),
                        Mockito.any(GroupContextMapper.class))).thenReturn(mockGroups);
        UserService.User user = userService.authenticate("SLIAdmin", "testuser", "testuser1234");
        assertEquals("testuser", user.getUserId());
        assertEquals("Test User", user.getAttributes().get("userName"));
        assertEquals("mytenant", user.getAttributes().get("Tenant"));
        assertEquals("admin", user.getAttributes().get("userType"));
        assertEquals(2, user.getRoles().size());
        assertEquals("TestGroup1", user.getRoles().get(0));
        assertEquals("TestGroup2", user.getRoles().get(1));
    }

    /** description parsed with comma separators; cn feeds the userName attribute. */
    @Test
    public void testAttributeExtractionCommas() {
        String desc = "tenant=myTenantId,edOrg=myEdorgId";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("cn")).thenReturn("Full Name");
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals("Full Name", user.getAttributes().get("userName"));
        assertEquals("myTenantId", user.getAttributes().get("tenant"));
        assertEquals("myEdorgId", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(6, user.getAttributes().size());
    }

    /** description parsed with newline separators. */
    @Test
    public void testAttributeExtractionNewlines() {
        String desc = "tenant=myTenantId\nedOrg=myEdorgId\n";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals("myTenantId", user.getAttributes().get("tenant"));
        assertEquals("myEdorgId", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(6, user.getAttributes().size());
    }

    /** description parsed with space separators. */
    @Test
    public void testAttributeExtractionSpaces() {
        String desc = "tenant=myTenantId edOrg=myEdorgId";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals("myTenantId", user.getAttributes().get("tenant"));
        assertEquals("myEdorgId", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(6, user.getAttributes().size());
    }

    /** Comma-separated pairs may contain embedded spaces in the value. */
    @Test
    public void testAttributeExtractionCommasWithSpaces() {
        String desc = "tenant=myTenantId,edOrg=My Edorg Id";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals("myTenantId", user.getAttributes().get("tenant"));
        assertEquals("My Edorg Id", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(6, user.getAttributes().size());
    }

    /** Newline-separated pairs may contain embedded spaces in the value. */
    @Test
    public void testAttributeExtractionNewLinesWithSpaces() {
        String desc = "tenant=myTenantId\nedOrg=My Edorg Id\n";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals("myTenantId", user.getAttributes().get("tenant"));
        assertEquals("My Edorg Id", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(6, user.getAttributes().size());
    }

    /** A pair with an empty value is dropped entirely (map size shrinks to 5). */
    @Test
    public void testAttributeExtractionNewLinesWithBlanks() {
        String desc = "tenant=\nedOrg=My Edorg Id\n";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals(null, user.getAttributes().get("tenant"));
        assertEquals("My Edorg Id", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(5, user.getAttributes().size());
    }

    /** Both pairs blank: neither is stored (map size 4). */
    @Test
    public void testAttributeExtractionNewLinesWithBlanks2() {
        String desc = "tenant=\nedOrg=\n";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals(null, user.getAttributes().get("tenant"));
        assertEquals(null, user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(4, user.getAttributes().size());
    }

    /** Same blank-value behavior with comma separators (trailing comma tolerated). */
    @Test
    public void testAttributeExtractionCommasWithBlanks() {
        String desc = "tenant=,edOrg=My Edorg Id,";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals(null, user.getAttributes().get("tenant"));
        assertEquals("My Edorg Id", user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(5, user.getAttributes().size());
    }

    /** All comma-separated pairs blank: none stored (map size 4). */
    @Test
    public void testAttributeExtractionCommasWithBlanks2() {
        String desc = "tenant=,edOrg=,";
        PersonContextMapper mapper = new PersonContextMapper();
        DirContextAdapter context = Mockito.mock(DirContextAdapter.class);
        Mockito.when(context.getStringAttribute("description")).thenReturn(desc);
        User user = (User) mapper.mapFromContext(context);
        assertEquals(null, user.getAttributes().get("tenant"));
        assertEquals(null, user.getAttributes().get("edOrg"));
        assertEquals(null, user.getAttributes().get("vendor"));
        assertEquals(null, user.getAttributes().get("givenName"));
        assertEquals(null, user.getAttributes().get("sn"));
        assertEquals(4, user.getAttributes().size());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.rcfile;
import com.facebook.presto.block.BlockEncodingManager;
import com.facebook.presto.hadoop.HadoopNative;
import com.facebook.presto.metadata.FunctionRegistry;
import com.facebook.presto.rcfile.binary.BinaryRcFileEncoding;
import com.facebook.presto.rcfile.text.TextRcFileEncoding;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.DecimalType;
import com.facebook.presto.spi.type.Decimals;
import com.facebook.presto.spi.type.MapType;
import com.facebook.presto.spi.type.RowType;
import com.facebook.presto.spi.type.SqlDate;
import com.facebook.presto.spi.type.SqlDecimal;
import com.facebook.presto.spi.type.SqlTimestamp;
import com.facebook.presto.spi.type.SqlVarbinary;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.facebook.presto.spi.type.TypeSignatureParameter;
import com.facebook.presto.spi.type.VarcharType;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.type.TypeRegistry;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.hadoop.compression.lzo.LzoCodec;
import io.airlift.slice.OutputStreamSliceOutput;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.airlift.units.DataSize;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.Serializer;
import org.apache.hadoop.hive.serde2.StructObject;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyArray;
import org.apache.hadoop.hive.serde2.lazy.LazyMap;
import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryArray;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.Lz4Codec;
import org.apache.hadoop.io.compress.SnappyCodec;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.joda.time.DateTimeZone;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.math.BigInteger;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import static com.facebook.presto.rcfile.RcFileDecoderUtils.findFirstSyncPosition;
import static com.facebook.presto.rcfile.RcFileTester.Compression.BZIP2;
import static com.facebook.presto.rcfile.RcFileTester.Compression.LZ4;
import static com.facebook.presto.rcfile.RcFileTester.Compression.NONE;
import static com.facebook.presto.rcfile.RcFileTester.Compression.SNAPPY;
import static com.facebook.presto.rcfile.RcFileTester.Compression.ZLIB;
import static com.facebook.presto.rcfile.RcFileWriter.PRESTO_RCFILE_WRITER_VERSION;
import static com.facebook.presto.rcfile.RcFileWriter.PRESTO_RCFILE_WRITER_VERSION_METADATA_KEY;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.Decimals.rescale;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.IntegerType.INTEGER;
import static com.facebook.presto.spi.type.RealType.REAL;
import static com.facebook.presto.spi.type.SmallintType.SMALLINT;
import static com.facebook.presto.spi.type.StandardTypes.ARRAY;
import static com.facebook.presto.spi.type.StandardTypes.MAP;
import static com.facebook.presto.spi.type.StandardTypes.ROW;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.TinyintType.TINYINT;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.TestingConnectorSession.SESSION;
import static com.google.common.base.Functions.constant;
import static com.google.common.collect.Iterables.transform;
import static com.google.common.collect.Iterators.advance;
import static com.google.common.io.Files.createTempDir;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.airlift.slice.SizeOf.SIZE_OF_INT;
import static io.airlift.slice.SizeOf.SIZE_OF_LONG;
import static io.airlift.units.DataSize.Unit.BYTE;
import static io.airlift.units.DataSize.Unit.KILOBYTE;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.lang.Math.toIntExact;
import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR;
import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
import static org.apache.hadoop.mapred.Reporter.NULL;
import static org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.COMPRESS_CODEC;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
@SuppressWarnings("StaticPseudoFunctionalStyleMethod")
public class RcFileTester
{
// Shared type manager for resolving Presto types in this tester.
private static final TypeManager TYPE_MANAGER = new TypeRegistry();
static {
// associate TYPE_MANAGER with a function registry
// NOTE(review): the FunctionRegistry constructor registers itself with TYPE_MANAGER
// as a side effect; the instance itself is intentionally discarded.
new FunctionRegistry(TYPE_MANAGER, new BlockEncodingManager(TYPE_MANAGER), new FeaturesConfig());
// Fail fast if the native Hadoop compression libraries are unavailable.
HadoopNative.requireHadoopNative();
}
// Non-UTC zone with DST transitions, chosen to surface time-zone handling bugs.
public static final DateTimeZone HIVE_STORAGE_TIME_ZONE = DateTimeZone.forID("America/Bahia_Banderas");
/**
 * RCFile column serialization formats under test: Hive's lazy-binary columnar
 * encoding and its text columnar encoding. Each constant supplies both the
 * Hive serializer (for writing with the old code path) and the matching
 * Presto {@link RcFileEncoding} (for reading/writing with the new code path).
 */
public enum Format
{
BINARY {
@Override
@SuppressWarnings("deprecation")
public Serializer createSerializer()
{
return new LazyBinaryColumnarSerDe();
}
@Override
public RcFileEncoding getVectorEncoding()
{
return new BinaryRcFileEncoding();
}
},
TEXT {
@Override
@SuppressWarnings("deprecation")
public Serializer createSerializer()
{
try {
// ColumnarSerDe requires column metadata at initialization time;
// a single column named "test" matches the schema used throughout this tester.
ColumnarSerDe columnarSerDe = new ColumnarSerDe();
Properties tableProperties = new Properties();
tableProperties.setProperty("columns", "test");
tableProperties.setProperty("columns.types", "string");
columnarSerDe.initialize(new JobConf(false), tableProperties);
return columnarSerDe;
}
catch (SerDeException e) {
throw new RuntimeException(e);
}
}
@Override
public RcFileEncoding getVectorEncoding()
{
// Text encoding is time-zone sensitive; use the fixed test storage zone.
return new TextRcFileEncoding(HIVE_STORAGE_TIME_ZONE);
}
};
// Hive serializer used by the legacy write path.
@SuppressWarnings("deprecation")
public abstract Serializer createSerializer();
// Presto-side encoding equivalent to this Hive format.
public abstract RcFileEncoding getVectorEncoding();
}
/**
 * Compression codecs exercised by the round-trip tests. Each value maps to the
 * fully-qualified Hadoop codec class name, or to empty for uncompressed data.
 */
public enum Compression
{
    BZIP2(BZip2Codec.class.getName()),
    ZLIB(GzipCodec.class.getName()),
    SNAPPY(SnappyCodec.class.getName()),
    LZO(LzoCodec.class.getName()),
    LZ4(Lz4Codec.class.getName()),
    NONE(null);

    // Fully-qualified Hadoop CompressionCodec class name; null means no compression.
    private final String codecClassName;

    Compression(String codecClassName)
    {
        this.codecClassName = codecClassName;
    }

    // Codec class name to configure on the writer, or empty when uncompressed.
    Optional<String> getCodecName()
    {
        return Optional.ofNullable(codecClassName);
    }
}
// When true, each round trip is re-run with values wrapped in a single-field struct.
private boolean structTestsEnabled;
// When true, each round trip is re-run with values wrapped as map values.
private boolean mapTestsEnabled;
// When true, each round trip is re-run with values wrapped in a list.
private boolean listTestsEnabled;
// When true, doubly-nested struct/list wrappings are also exercised.
private boolean complexStructuralTestsEnabled;
// When true, an extra read pass skips value checks until the final batch.
private boolean readLastBatchOnlyEnabled;
// Formats and codecs to test; populated by the static factory methods below.
private Set<Format> formats = ImmutableSet.of();
private Set<Compression> compressions = ImmutableSet.of();
/**
 * Creates a tester configured for a fast smoke run: struct/map/list wrapping
 * enabled, no nested-structural or last-batch-only passes, and SNAPPY only.
 */
public static RcFileTester quickTestRcFileReader()
{
    RcFileTester tester = new RcFileTester();
    tester.structTestsEnabled = true;
    tester.mapTestsEnabled = true;
    tester.listTestsEnabled = true;
    tester.complexStructuralTestsEnabled = false;
    tester.readLastBatchOnlyEnabled = false;
    tester.formats = ImmutableSet.copyOf(Format.values());
    tester.compressions = ImmutableSet.of(SNAPPY);
    return tester;
}
/**
 * Creates a tester configured for an exhaustive run: every wrapping variant,
 * the last-batch-only read pass, all formats, and a codec set chosen to cover
 * the three decompression paths (uncompressed, aircompressor, and Hadoop).
 */
public static RcFileTester fullTestRcFileReader()
{
    RcFileTester tester = new RcFileTester();
    tester.structTestsEnabled = true;
    tester.mapTestsEnabled = true;
    tester.listTestsEnabled = true;
    tester.complexStructuralTestsEnabled = true;
    tester.readLastBatchOnlyEnabled = true;
    tester.formats = ImmutableSet.copyOf(Format.values());
    // NONE = uncompressed, LZ4 = aircompressor, ZLIB/BZIP2 = hadoop codecs.
    // We assume the compression algorithms themselves generally work.
    tester.compressions = ImmutableSet.of(NONE, LZ4, ZLIB, BZIP2);
    return tester;
}
/**
 * Round-trips {@code writeValues} of the given {@code type} through every
 * enabled format/compression combination, in several shapes: bare values,
 * all-null values, and (depending on configuration) values wrapped in
 * structs, maps, lists, and doubly-nested variants.
 *
 * @param type Presto type of the values
 * @param writeValues values to write; must be re-iterable
 * @param skipFormats formats to exclude for this type
 */
public void testRoundTrip(Type type, Iterable<?> writeValues, Format... skipFormats)
throws Exception
{
ImmutableSet<Format> skipFormatsSet = ImmutableSet.copyOf(skipFormats);
// just the values
testRoundTripType(type, writeValues, skipFormatsSet);
// all nulls -- same cardinality as writeValues, every element replaced by null
assertRoundTrip(type, transform(writeValues, constant(null)), skipFormatsSet);
// values wrapped in struct
if (structTestsEnabled) {
testStructRoundTrip(type, writeValues, skipFormatsSet);
}
// values wrapped in a struct wrapped in a struct
if (complexStructuralTestsEnabled) {
Iterable<Object> simpleStructs = transform(insertNullEvery(5, writeValues), RcFileTester::toHiveStruct);
testRoundTripType(
RowType.from(ImmutableList.of(RowType.field("field", createRowType(type)))),
transform(simpleStructs, Collections::singletonList),
skipFormatsSet);
}
// values wrapped in map
if (mapTestsEnabled) {
testMapRoundTrip(type, writeValues, skipFormatsSet);
}
// values wrapped in list
if (listTestsEnabled) {
testListRoundTrip(type, writeValues, skipFormatsSet);
}
// values wrapped in a list wrapped in a list
if (complexStructuralTestsEnabled) {
testListRoundTrip(
createListType(type),
transform(writeValues, RcFileTester::toHiveList),
skipFormatsSet);
}
}
/**
 * Round-trips the values wrapped in a single-field struct, with every fifth
 * entry replaced by null.
 */
private void testStructRoundTrip(Type type, Iterable<?> writeValues, Set<Format> skipFormats)
        throws Exception
{
    Iterable<Object> structValues = transform(insertNullEvery(5, writeValues), RcFileTester::toHiveStruct);
    testRoundTripType(createRowType(type), structValues, skipFormats);
}
/**
 * Round-trips the values as map values, with every fifth entry nulled.
 * Null map keys are not supported, so the first write value is reused as the
 * key for null-valued entries.
 */
private void testMapRoundTrip(Type type, Iterable<?> writeValues, Set<Format> skipFormats)
        throws Exception
{
    Object keyForNulls = Iterables.getFirst(writeValues, null);
    Iterable<Object> mapValues = transform(insertNullEvery(5, writeValues), value -> toHiveMap(keyForNulls, value));
    testRoundTripType(createMapType(type), mapValues, skipFormats);
}
/**
 * Round-trips the values wrapped in a single-element list, with every fifth
 * entry replaced by null.
 */
private void testListRoundTrip(Type type, Iterable<?> writeValues, Set<Format> skipFormats)
        throws Exception
{
    Iterable<Object> listValues = transform(insertNullEvery(5, writeValues), RcFileTester::toHiveList);
    testRoundTripType(createListType(type), listValues, skipFormats);
}
// Asserts the round trip after sprinkling a null into every fifth position.
private void testRoundTripType(Type type, Iterable<?> writeValues, Set<Format> skipFormats)
        throws Exception
{
    Iterable<?> valuesWithNulls = insertNullEvery(5, writeValues);
    assertRoundTrip(type, valuesWithNulls, skipFormats);
}
/**
 * Core round-trip assertion. For every enabled format/compression pair this
 * verifies cross-compatibility in both directions: data written by the old
 * Hive writer is readable by the new Presto reader, and data written by the
 * new Presto writer is readable by both the old Hive reader and the new
 * Presto reader (including file metadata, which must contain the writer
 * version stamp in addition to the caller-supplied entries).
 */
private void assertRoundTrip(Type type, Iterable<?> writeValues, Set<Format> skipFormats)
throws Exception
{
// materialize once so both write passes see identical values
List<?> finalValues = Lists.newArrayList(writeValues);
Set<Format> formats = new LinkedHashSet<>(this.formats);
formats.removeAll(skipFormats);
for (Format format : formats) {
for (Compression compression : compressions) {
// write old, read new
try (TempFile tempFile = new TempFile()) {
writeRcFileColumnOld(tempFile.getFile(), format, compression, type, finalValues.iterator());
assertFileContentsNew(type, tempFile, format, finalValues, false, ImmutableMap.of());
}
// write new, read old and new
try (TempFile tempFile = new TempFile()) {
// random metadata entry verifies user metadata survives the round trip
Map<String, String> metadata = ImmutableMap.of(String.valueOf(ThreadLocalRandom.current().nextLong()), String.valueOf(ThreadLocalRandom.current().nextLong()));
writeRcFileColumnNew(tempFile.getFile(), format, compression, type, finalValues.iterator(), metadata);
assertFileContentsOld(type, tempFile, format, finalValues);
Map<String, String> expectedMetadata = ImmutableMap.<String, String>builder()
.putAll(metadata)
.put(PRESTO_RCFILE_WRITER_VERSION_METADATA_KEY, PRESTO_RCFILE_WRITER_VERSION)
.build();
assertFileContentsNew(type, tempFile, format, finalValues, false, expectedMetadata);
if (readLastBatchOnlyEnabled) {
// extra pass that only materializes the final batch of values
assertFileContentsNew(type, tempFile, format, finalValues, true, expectedMetadata);
}
}
}
}
}
/**
 * Reads the file back with the Presto {@link RcFileReader} and asserts the
 * sync-point index, the file metadata (caller entries plus the mandatory
 * column-count entry), every value, and the total row count.
 *
 * @param readLastBatchOnly when true, batches before the last are skipped
 *        (values only advanced, not compared) to exercise the skip path
 */
private static void assertFileContentsNew(
Type type,
TempFile tempFile,
Format format,
List<?> expectedValues,
boolean readLastBatchOnly,
Map<String, String> metadata)
throws IOException
{
try (RcFileReader recordReader = createRcFileReader(tempFile, type, format.getVectorEncoding())) {
assertIndexOf(recordReader, tempFile.getFile());
assertEquals(recordReader.getMetadata(), ImmutableMap.builder()
.putAll(metadata)
.put("hive.io.rcfile.column.number", "1")
.build());
Iterator<?> iterator = expectedValues.iterator();
int totalCount = 0;
// advance() returns the size of the next batch; the loop ends on a negative value.
// NOTE(review): only the update expression wraps advance() in toIntExact -- confirm
// the return type of advance(); the wrapper appears redundant on the initializer.
for (int batchSize = recordReader.advance(); batchSize >= 0; batchSize = toIntExact(recordReader.advance())) {
totalCount += batchSize;
if (readLastBatchOnly && totalCount == expectedValues.size()) {
// last batch in skip mode: consume the expected values without comparing
assertEquals(advance(iterator, batchSize), batchSize);
}
else {
Block block = recordReader.readBlock(0);
List<Object> data = new ArrayList<>(block.getPositionCount());
for (int position = 0; position < block.getPositionCount(); position++) {
data.add(type.getObjectValue(SESSION, block, position));
}
for (int i = 0; i < batchSize; i++) {
assertTrue(iterator.hasNext());
Object expected = iterator.next();
Object actual = data.get(i);
assertColumnValueEquals(type, actual, expected);
}
}
}
assertFalse(iterator.hasNext());
assertEquals(recordReader.getRowsRead(), totalCount);
}
}
/**
 * Recursively asserts that {@code actual} equals {@code expected} for the
 * given Presto type. Arrays and rows are compared element-by-element; maps
 * are compared as unordered multisets of entries; doubles compare with a
 * tolerance of 0.001 (with NaN matching only NaN).
 */
private static void assertColumnValueEquals(Type type, Object actual, Object expected)
{
if (actual == null) {
assertNull(expected);
return;
}
String baseType = type.getTypeSignature().getBase();
if (ARRAY.equals(baseType)) {
List<?> actualArray = (List<?>) actual;
List<?> expectedArray = (List<?>) expected;
assertEquals(actualArray.size(), expectedArray.size());
Type elementType = type.getTypeParameters().get(0);
for (int i = 0; i < actualArray.size(); i++) {
Object actualElement = actualArray.get(i);
Object expectedElement = expectedArray.get(i);
assertColumnValueEquals(elementType, actualElement, expectedElement);
}
}
else if (MAP.equals(baseType)) {
Map<?, ?> actualMap = (Map<?, ?>) actual;
Map<?, ?> expectedMap = (Map<?, ?>) expected;
assertEquals(actualMap.size(), expectedMap.size());
Type keyType = type.getTypeParameters().get(0);
Type valueType = type.getTypeParameters().get(1);
// Maps are unordered and keys may not be directly comparable (e.g. nested
// structures), so match each actual entry against the remaining expected
// entries by trial comparison, removing each expected entry once matched.
List<Entry<?, ?>> expectedEntries = new ArrayList<>(expectedMap.entrySet());
for (Entry<?, ?> actualEntry : actualMap.entrySet()) {
for (Iterator<Entry<?, ?>> iterator = expectedEntries.iterator(); iterator.hasNext(); ) {
Entry<?, ?> expectedEntry = iterator.next();
try {
assertColumnValueEquals(keyType, actualEntry.getKey(), expectedEntry.getKey());
assertColumnValueEquals(valueType, actualEntry.getValue(), expectedEntry.getValue());
iterator.remove();
}
// intentional: a failed trial comparison just means "not this entry"
catch (AssertionError ignored) {
}
}
}
assertTrue(expectedEntries.isEmpty(), "Unmatched entries " + expectedEntries);
}
else if (ROW.equals(baseType)) {
List<Type> fieldTypes = type.getTypeParameters();
List<?> actualRow = (List<?>) actual;
List<?> expectedRow = (List<?>) expected;
assertEquals(actualRow.size(), fieldTypes.size());
assertEquals(actualRow.size(), expectedRow.size());
for (int fieldId = 0; fieldId < actualRow.size(); fieldId++) {
Type fieldType = fieldTypes.get(fieldId);
Object actualElement = actualRow.get(fieldId);
Object expectedElement = expectedRow.get(fieldId);
assertColumnValueEquals(fieldType, actualElement, expectedElement);
}
}
else if (type.equals(DOUBLE)) {
Double actualDouble = (Double) actual;
Double expectedDouble = (Double) expected;
if (actualDouble.isNaN()) {
assertTrue(expectedDouble.isNaN(), "expected double to be NaN");
}
else {
assertEquals(actualDouble, expectedDouble, 0.001);
}
}
// guard avoids assertEquals overload ambiguity for already-equal values
else if (!Objects.equals(actual, expected)) {
assertEquals(actual, expected);
}
}
// Verifies that the reader-driven sync-point scan finds exactly the same
// positions as a brute-force scan over the raw file bytes.
private static void assertIndexOf(RcFileReader recordReader, File file)
        throws IOException
{
    List<Long> bruteForcePositions = getSyncPositionsBruteForce(recordReader, file);
    List<Long> scannedPositions = getSyncPositionsSimple(recordReader, file);
    assertEquals(bruteForcePositions, scannedPositions);
}
/**
 * Finds every sync-marker position by loading the whole file into memory and
 * scanning for the marker bytes: an int of -1 followed by the reader's
 * 16-byte sync sequence.
 */
private static List<Long> getSyncPositionsBruteForce(RcFileReader recordReader, File file)
{
Slice slice = Slices.allocate((int) file.length());
try (InputStream in = new FileInputStream(file)) {
slice.setBytes(0, in, slice.length());
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
List<Long> syncPositionsBruteForce = new ArrayList<>();
// marker layout: -1 (int) + two longs of sync bytes
Slice sync = Slices.allocate(SIZE_OF_INT + SIZE_OF_LONG + SIZE_OF_LONG);
sync.setInt(0, -1);
sync.setBytes(SIZE_OF_INT, recordReader.getSync());
long syncPosition = 0;
while (syncPosition >= 0) {
// indexOf returns a negative value when the marker is not found
syncPosition = slice.indexOf(sync, (int) syncPosition);
// NOTE(review): the `> 0` check assumes a marker can never start at offset 0
// (the file header precedes all markers); a match at 0 would loop forever.
if (syncPosition > 0) {
syncPositionsBruteForce.add(syncPosition);
syncPosition++;
}
}
return syncPositionsBruteForce;
}
/**
 * Finds every sync-marker position using {@code findFirstSyncPosition}, and
 * at each hit cross-checks the search-window semantics: the marker must be
 * found in windows of length 1, 2, and 10 starting exactly at the marker,
 * and must NOT be found in windows that end just before or start just after it.
 */
private static List<Long> getSyncPositionsSimple(RcFileReader recordReader, File file)
throws IOException
{
List<Long> syncPositions = new ArrayList<>();
Slice sync = recordReader.getSync();
long syncFirst = sync.getLong(0);
long syncSecond = sync.getLong(8);
long syncPosition = 0;
try (RcFileDataSource dataSource = new FileRcFileDataSource(file)) {
while (syncPosition >= 0) {
syncPosition = findFirstSyncPosition(dataSource, syncPosition, file.length() - syncPosition, syncFirst, syncSecond);
if (syncPosition > 0) {
// windows starting at the marker must find it regardless of length
assertEquals(findFirstSyncPosition(dataSource, syncPosition, 1, syncFirst, syncSecond), syncPosition);
assertEquals(findFirstSyncPosition(dataSource, syncPosition, 2, syncFirst, syncSecond), syncPosition);
assertEquals(findFirstSyncPosition(dataSource, syncPosition, 10, syncFirst, syncSecond), syncPosition);
// windows that exclude the marker's start position must miss it
assertEquals(findFirstSyncPosition(dataSource, syncPosition - 1, 1, syncFirst, syncSecond), -1);
assertEquals(findFirstSyncPosition(dataSource, syncPosition - 2, 2, syncFirst, syncSecond), -1);
assertEquals(findFirstSyncPosition(dataSource, syncPosition + 1, 1, syncFirst, syncSecond), -1);
syncPositions.add(syncPosition);
syncPosition++;
}
}
}
return syncPositions;
}
// Opens the temp file as a single-column RCFile reader spanning the whole
// file, using an 8MB buffer, and sanity-checks the column count.
private static RcFileReader createRcFileReader(TempFile tempFile, Type type, RcFileEncoding encoding)
        throws IOException
{
    File file = tempFile.getFile();
    RcFileDataSource dataSource = new FileRcFileDataSource(file);
    RcFileReader reader = new RcFileReader(
            dataSource,
            encoding,
            ImmutableMap.of(0, type),
            new AircompressorCodecFactory(new HadoopCodecFactory(RcFileTester.class.getClassLoader())),
            0,
            file.length(),
            new DataSize(8, MEGABYTE));
    assertEquals(reader.getColumnCount(), 1);
    return reader;
}
/**
 * Writes {@code values} as a single-column RCFile with the Presto writer,
 * validates the written file, and returns the number of bytes written.
 *
 * <p>Fix: the output stream was never closed when the writer constructor or a
 * write failed, leaking the underlying {@link FileOutputStream}; the stream is
 * now managed with try-with-resources.
 *
 * @param metadata user metadata to embed in the file header
 * @return size of the written output in bytes
 */
private static DataSize writeRcFileColumnNew(File outputFile, Format format, Compression compression, Type type, Iterator<?> values, Map<String, String> metadata)
        throws Exception
{
    long outputSize;
    try (OutputStreamSliceOutput output = new OutputStreamSliceOutput(new FileOutputStream(outputFile))) {
        AircompressorCodecFactory codecFactory = new AircompressorCodecFactory(new HadoopCodecFactory(RcFileTester.class.getClassLoader()));
        RcFileWriter writer = new RcFileWriter(
                output,
                ImmutableList.of(type),
                format.getVectorEncoding(),
                compression.getCodecName(),
                codecFactory,
                metadata,
                new DataSize(100, KILOBYTE), // use a smaller size to create more row groups
                new DataSize(200, KILOBYTE),
                true);
        BlockBuilder blockBuilder = type.createBlockBuilder(null, 1024);
        while (values.hasNext()) {
            writeValue(type, blockBuilder, values.next());
        }
        writer.write(new Page(blockBuilder.build()));
        writer.close();
        // re-read and verify the file the writer just produced
        writer.validate(new FileRcFileDataSource(outputFile));
        outputSize = output.size();
    }
    return new DataSize(outputSize, BYTE);
}
/**
 * Appends a single test value to {@code blockBuilder} according to the Presto
 * type: primitives via the type's write methods, decimals as unscaled values,
 * and array/map/row recursively via nested block entries. Nulls append a null
 * position.
 *
 * @throws IllegalArgumentException if the type is not supported
 */
private static void writeValue(Type type, BlockBuilder blockBuilder, Object value)
{
if (value == null) {
blockBuilder.appendNull();
}
else {
if (BOOLEAN.equals(type)) {
type.writeBoolean(blockBuilder, (Boolean) value);
}
else if (TINYINT.equals(type)) {
type.writeLong(blockBuilder, ((Number) value).longValue());
}
else if (SMALLINT.equals(type)) {
type.writeLong(blockBuilder, ((Number) value).longValue());
}
else if (INTEGER.equals(type)) {
type.writeLong(blockBuilder, ((Number) value).longValue());
}
else if (BIGINT.equals(type)) {
type.writeLong(blockBuilder, ((Number) value).longValue());
}
else if (Decimals.isShortDecimal(type)) {
// short decimals store the unscaled value in a long
type.writeLong(blockBuilder, ((SqlDecimal) value).toBigDecimal().unscaledValue().longValue());
}
else if (Decimals.isLongDecimal(type)) {
// long decimals store the unscaled value as an encoded slice
type.writeSlice(blockBuilder, Decimals.encodeUnscaledValue(((SqlDecimal) value).toBigDecimal().unscaledValue()));
}
else if (REAL.equals(type)) {
// REAL stores float bits in the low 32 bits of a long
type.writeLong(blockBuilder, Float.floatToIntBits((Float) value));
}
else if (DOUBLE.equals(type)) {
type.writeDouble(blockBuilder, ((Number) value).doubleValue());
}
else if (VARCHAR.equals(type)) {
type.writeSlice(blockBuilder, Slices.utf8Slice((String) value));
}
else if (VARBINARY.equals(type)) {
type.writeSlice(blockBuilder, Slices.wrappedBuffer(((SqlVarbinary) value).getBytes()));
}
else if (DATE.equals(type)) {
// DATE is stored as days since the epoch
long days = ((SqlDate) value).getDays();
type.writeLong(blockBuilder, days);
}
else if (TIMESTAMP.equals(type)) {
long millis = ((SqlTimestamp) value).getMillisUtc();
type.writeLong(blockBuilder, millis);
}
else {
String baseType = type.getTypeSignature().getBase();
if (ARRAY.equals(baseType)) {
List<?> array = (List<?>) value;
Type elementType = type.getTypeParameters().get(0);
BlockBuilder arrayBlockBuilder = blockBuilder.beginBlockEntry();
for (Object elementValue : array) {
writeValue(elementType, arrayBlockBuilder, elementValue);
}
blockBuilder.closeEntry();
}
else if (MAP.equals(baseType)) {
Map<?, ?> map = (Map<?, ?>) value;
Type keyType = type.getTypeParameters().get(0);
Type valueType = type.getTypeParameters().get(1);
BlockBuilder mapBlockBuilder = blockBuilder.beginBlockEntry();
// map entries are written as alternating key/value positions
for (Entry<?, ?> entry : map.entrySet()) {
writeValue(keyType, mapBlockBuilder, entry.getKey());
writeValue(valueType, mapBlockBuilder, entry.getValue());
}
blockBuilder.closeEntry();
}
else if (ROW.equals(baseType)) {
List<?> array = (List<?>) value;
List<Type> fieldTypes = type.getTypeParameters();
BlockBuilder rowBlockBuilder = blockBuilder.beginBlockEntry();
for (int fieldId = 0; fieldId < fieldTypes.size(); fieldId++) {
Type fieldType = fieldTypes.get(fieldId);
writeValue(fieldType, rowBlockBuilder, array.get(fieldId));
}
blockBuilder.closeEntry();
}
else {
throw new IllegalArgumentException("Unsupported type " + type);
}
}
}
}
/**
 * Reads the file back with the old Hive RCFile input format and asserts every
 * decoded value against {@code expectedValues}. Only column 0 ("test") is
 * projected and read.
 *
 * <p>Fix: the Hadoop {@code RecordReader} was never closed; it is now closed
 * in a finally block so the underlying file handle is released even when an
 * assertion fails.
 */
private static <K extends LongWritable, V extends BytesRefArrayWritable> void assertFileContentsOld(
        Type type,
        TempFile tempFile,
        Format format,
        Iterable<?> expectedValues)
        throws Exception
{
    JobConf configuration = new JobConf(new Configuration(false));
    // project only column 0
    configuration.set(READ_COLUMN_IDS_CONF_STR, "0");
    configuration.setBoolean(READ_ALL_COLUMNS, false);
    Properties schema = new Properties();
    schema.setProperty(META_TABLE_COLUMNS, "test");
    schema.setProperty(META_TABLE_COLUMN_TYPES, getJavaObjectInspector(type).getTypeName());
    @SuppressWarnings("deprecation")
    Deserializer deserializer;
    if (format == Format.BINARY) {
        deserializer = new LazyBinaryColumnarSerDe();
    }
    else {
        deserializer = new ColumnarSerDe();
    }
    deserializer.initialize(configuration, schema);
    configuration.set(SERIALIZATION_LIB, deserializer.getClass().getName());
    InputFormat<K, V> inputFormat = new RCFileInputFormat<>();
    RecordReader<K, V> recordReader = inputFormat.getRecordReader(
            new FileSplit(new Path(tempFile.getFile().getAbsolutePath()), 0, tempFile.getFile().length(), (String[]) null),
            configuration,
            NULL);
    try {
        K key = recordReader.createKey();
        V value = recordReader.createValue();
        StructObjectInspector rowInspector = (StructObjectInspector) deserializer.getObjectInspector();
        StructField field = rowInspector.getStructFieldRef("test");
        Iterator<?> iterator = expectedValues.iterator();
        while (recordReader.next(key, value)) {
            Object expectedValue = iterator.next();
            Object rowData = deserializer.deserialize(value);
            Object actualValue = rowInspector.getStructFieldData(rowData, field);
            actualValue = decodeRecordReaderValue(type, actualValue);
            assertColumnValueEquals(type, actualValue, expectedValue);
        }
        assertFalse(iterator.hasNext());
    }
    finally {
        recordReader.close();
    }
}
/**
 * Converts a value produced by the Hive record reader (Hadoop Writables and
 * Hive lazy objects) into the plain Java / Presto Sql* representation used by
 * {@code assertColumnValueEquals}. Structured values are decoded recursively.
 * Values of unrecognized types are returned unchanged.
 */
private static Object decodeRecordReaderValue(Type type, Object actualValue)
{
// unwrap Hive lazy primitives to their Writable form first
if (actualValue instanceof LazyPrimitive) {
actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
}
if (actualValue instanceof BooleanWritable) {
actualValue = ((BooleanWritable) actualValue).get();
}
else if (actualValue instanceof ByteWritable) {
actualValue = ((ByteWritable) actualValue).get();
}
else if (actualValue instanceof BytesWritable) {
actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
}
else if (actualValue instanceof DateWritable) {
actualValue = new SqlDate(((DateWritable) actualValue).getDays());
}
else if (actualValue instanceof DoubleWritable) {
actualValue = ((DoubleWritable) actualValue).get();
}
else if (actualValue instanceof FloatWritable) {
actualValue = ((FloatWritable) actualValue).get();
}
else if (actualValue instanceof IntWritable) {
actualValue = ((IntWritable) actualValue).get();
}
else if (actualValue instanceof LongWritable) {
actualValue = ((LongWritable) actualValue).get();
}
else if (actualValue instanceof ShortWritable) {
actualValue = ((ShortWritable) actualValue).get();
}
else if (actualValue instanceof HiveDecimalWritable) {
DecimalType decimalType = (DecimalType) type;
HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
// writable messes with the scale so rescale the values to the Presto type
BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
}
else if (actualValue instanceof Text) {
actualValue = actualValue.toString();
}
else if (actualValue instanceof TimestampWritable) {
TimestampWritable timestamp = (TimestampWritable) actualValue;
// legacy-timestamp sessions attach the UTC zone key to the value
if (SESSION.isLegacyTimestamp()) {
actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), UTC_KEY);
}
else {
actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L));
}
}
else if (actualValue instanceof StructObject) {
StructObject structObject = (StructObject) actualValue;
actualValue = decodeRecordReaderStruct(type, structObject.getFieldsAsList());
}
else if (actualValue instanceof LazyBinaryArray) {
actualValue = decodeRecordReaderList(type, ((LazyBinaryArray) actualValue).getList());
}
else if (actualValue instanceof LazyBinaryMap) {
actualValue = decodeRecordReaderMap(type, ((LazyBinaryMap) actualValue).getMap());
}
else if (actualValue instanceof LazyArray) {
actualValue = decodeRecordReaderList(type, ((LazyArray) actualValue).getList());
}
else if (actualValue instanceof LazyMap) {
actualValue = decodeRecordReaderMap(type, ((LazyMap) actualValue).getMap());
}
else if (actualValue instanceof List) {
actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
}
return actualValue;
}
// Recursively decodes every element of a Hive list using the array's element type.
private static List<Object> decodeRecordReaderList(Type type, List<?> list)
{
    Type elementType = type.getTypeParameters().get(0);
    List<Object> decoded = new ArrayList<>(list.size());
    for (Object element : list) {
        decoded.add(decodeRecordReaderValue(elementType, element));
    }
    return decoded;
}
// Recursively decodes every key and value of a Hive map using the map's
// declared key and value types.
private static Object decodeRecordReaderMap(Type type, Map<?, ?> map)
{
    Type keyType = type.getTypeParameters().get(0);
    Type valueType = type.getTypeParameters().get(1);
    Map<Object, Object> decoded = new HashMap<>();
    map.forEach((key, value) ->
            decoded.put(decodeRecordReaderValue(keyType, key), decodeRecordReaderValue(valueType, value)));
    return decoded;
}
// Recursively decodes each struct field against its corresponding field type,
// preserving field order.
private static List<Object> decodeRecordReaderStruct(Type type, List<?> fields)
{
    List<Type> fieldTypes = type.getTypeParameters();
    List<Object> decoded = new ArrayList<>(fields.size());
    int fieldId = 0;
    for (Object field : fields) {
        decoded.add(decodeRecordReaderValue(fieldTypes.get(fieldId), field));
        fieldId++;
    }
    return decoded;
}
/**
 * Writes {@code values} as a single-column RCFile using the legacy Hive
 * writer path (Hive serializer + RCFileOutputFormat), after converting each
 * value to its Hive object form.
 *
 * @return size of the written file
 */
private static DataSize writeRcFileColumnOld(File outputFile, Format format, Compression compression, Type type, Iterator<?> values)
throws Exception
{
ObjectInspector columnObjectInspector = getJavaObjectInspector(type);
RecordWriter recordWriter = createRcFileWriterOld(outputFile, compression, columnObjectInspector);
SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", columnObjectInspector);
// one reusable row object; the single field is overwritten per value
Object row = objectInspector.create();
List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
@SuppressWarnings("deprecation") Serializer serializer = format.createSerializer();
Properties tableProperties = new Properties();
tableProperties.setProperty("columns", "test");
tableProperties.setProperty("columns.types", objectInspector.getTypeName());
serializer.initialize(new JobConf(false), tableProperties);
while (values.hasNext()) {
Object value = values.next();
value = preprocessWriteValueOld(type, value);
objectInspector.setStructFieldData(row, fields.get(0), value);
Writable record = serializer.serialize(row, objectInspector);
recordWriter.write(record);
}
// NOTE(review): close is not in a finally block, so the writer leaks if a
// write throws; acceptable in tests but worth confirming.
recordWriter.close(false);
return new DataSize(outputFile.length(), BYTE).convertToMostSuccinctDataSize();
}
/**
 * Maps a Presto type to the Hive ObjectInspector for its plain-Java
 * representation, recursing into array/map/row parameter types.
 *
 * @throws IllegalArgumentException if the type has no Hive equivalent here
 */
private static ObjectInspector getJavaObjectInspector(Type type)
{
if (type.equals(BOOLEAN)) {
return javaBooleanObjectInspector;
}
else if (type.equals(BIGINT)) {
return javaLongObjectInspector;
}
else if (type.equals(INTEGER)) {
return javaIntObjectInspector;
}
else if (type.equals(SMALLINT)) {
return javaShortObjectInspector;
}
else if (type.equals(TINYINT)) {
return javaByteObjectInspector;
}
else if (type.equals(REAL)) {
return javaFloatObjectInspector;
}
else if (type.equals(DOUBLE)) {
return javaDoubleObjectInspector;
}
// instanceof: covers both bounded and unbounded varchar
else if (type instanceof VarcharType) {
return javaStringObjectInspector;
}
else if (type.equals(VARBINARY)) {
return javaByteArrayObjectInspector;
}
else if (type.equals(DATE)) {
return javaDateObjectInspector;
}
else if (type.equals(TIMESTAMP)) {
return javaTimestampObjectInspector;
}
else if (type instanceof DecimalType) {
DecimalType decimalType = (DecimalType) type;
return getPrimitiveJavaObjectInspector(new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale()));
}
else if (type.getTypeSignature().getBase().equals(ARRAY)) {
return ObjectInspectorFactory.getStandardListObjectInspector(getJavaObjectInspector(type.getTypeParameters().get(0)));
}
else if (type.getTypeSignature().getBase().equals(MAP)) {
ObjectInspector keyObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(0));
ObjectInspector valueObjectInspector = getJavaObjectInspector(type.getTypeParameters().get(1));
return ObjectInspectorFactory.getStandardMapObjectInspector(keyObjectInspector, valueObjectInspector);
}
else if (type.getTypeSignature().getBase().equals(ROW)) {
return getStandardStructObjectInspector(
type.getTypeSignature().getParameters().stream()
.map(parameter -> parameter.getNamedTypeSignature().getName().get())
.collect(toList()),
type.getTypeParameters().stream()
.map(RcFileTester::getJavaObjectInspector)
.collect(toList()));
}
throw new IllegalArgumentException("unsupported type: " + type);
}
/**
 * Converts a Presto test value into the plain-Java form expected by the Hive
 * object inspectors used by the legacy writer, recursing into array/map/row.
 *
 * <p>Fix: the TIMESTAMP branch narrowed {@code getMillisUtc()} (a long) to
 * {@code int} before widening back, silently truncating any timestamp whose
 * millis value does not fit in 32 bits; the cast has been removed.
 *
 * @param type Presto type of {@code value}
 * @param value test value (SqlDate, SqlTimestamp, SqlDecimal, ...); may be null
 * @return the Hive-compatible Java object, or null
 * @throws IllegalArgumentException if the type is not supported
 */
private static Object preprocessWriteValueOld(Type type, Object value)
{
    if (value == null) {
        return null;
    }
    if (type.equals(BOOLEAN)) {
        return value;
    }
    else if (type.equals(TINYINT)) {
        return ((Number) value).byteValue();
    }
    else if (type.equals(SMALLINT)) {
        return ((Number) value).shortValue();
    }
    else if (type.equals(INTEGER)) {
        return ((Number) value).intValue();
    }
    else if (type.equals(BIGINT)) {
        return ((Number) value).longValue();
    }
    else if (type.equals(REAL)) {
        return ((Number) value).floatValue();
    }
    else if (type.equals(DOUBLE)) {
        return ((Number) value).doubleValue();
    }
    else if (type instanceof VarcharType) {
        return value;
    }
    else if (type.equals(VARBINARY)) {
        return ((SqlVarbinary) value).getBytes();
    }
    else if (type.equals(DATE)) {
        // convert epoch days to a java.sql.Date at local-zone midnight
        int days = ((SqlDate) value).getDays();
        LocalDate localDate = LocalDate.ofEpochDay(days);
        ZonedDateTime zonedDateTime = localDate.atStartOfDay(ZoneId.systemDefault());
        long millis = zonedDateTime.toEpochSecond() * 1000;
        Date date = new Date(0);
        // millis must be set separately to avoid masking
        date.setTime(millis);
        return date;
    }
    else if (type.equals(TIMESTAMP)) {
        // bug fix: previous code applied an (int) cast here, truncating the long
        long millisUtc = ((SqlTimestamp) value).getMillisUtc();
        return new Timestamp(millisUtc);
    }
    else if (type instanceof DecimalType) {
        return HiveDecimal.create(((SqlDecimal) value).toBigDecimal());
    }
    else if (type.getTypeSignature().getBase().equals(ARRAY)) {
        Type elementType = type.getTypeParameters().get(0);
        return ((List<?>) value).stream()
                .map(element -> preprocessWriteValueOld(elementType, element))
                .collect(toList());
    }
    else if (type.getTypeSignature().getBase().equals(MAP)) {
        Type keyType = type.getTypeParameters().get(0);
        Type valueType = type.getTypeParameters().get(1);
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(keyType, entry.getKey()), preprocessWriteValueOld(valueType, entry.getValue()));
        }
        return newMap;
    }
    else if (type.getTypeSignature().getBase().equals(ROW)) {
        List<?> fieldValues = (List<?>) value;
        List<Type> fieldTypes = type.getTypeParameters();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(fieldTypes.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IllegalArgumentException("unsupported type: " + type);
}
private static RecordWriter createRcFileWriterOld(File outputFile, Compression compression, ObjectInspector columnObjectInspector)
throws IOException
{
JobConf jobConf = new JobConf(false);
Optional<String> codecName = compression.getCodecName();
codecName.ifPresent(s -> jobConf.set(COMPRESS_CODEC, s));
return new RCFileOutputFormat().getHiveRecordWriter(
jobConf,
new Path(outputFile.toURI()),
Text.class,
codecName.isPresent(),
createTableProperties("test", columnObjectInspector.getTypeName()),
() -> {});
}
private static SettableStructObjectInspector createSettableStructObjectInspector(String name, ObjectInspector objectInspector)
{
return getStandardStructObjectInspector(ImmutableList.of(name), ImmutableList.of(objectInspector));
}
@SuppressWarnings("SpellCheckingInspection")
private static Properties createTableProperties(String name, String type)
{
Properties orderTableProperties = new Properties();
orderTableProperties.setProperty("columns", name);
orderTableProperties.setProperty("columns.types", type);
orderTableProperties.setProperty("file.inputformat", RCFileInputFormat.class.getName());
return orderTableProperties;
}
    /**
     * Disposable working directory holding a single RCFile for one test run.
     * Closing the instance removes the directory and everything inside it.
     */
    private static class TempFile
            implements Closeable
    {
        // root temp directory; removed recursively on close
        private final File tempDir;
        // the RCFile written inside tempDir
        private final File file;

        private TempFile()
        {
            tempDir = createTempDir();
            tempDir.mkdirs();
            file = new File(tempDir, "data.rcfile");
        }

        /** Returns the RCFile path; the file itself may not exist yet. */
        public File getFile()
        {
            return file;
        }

        @Override
        public void close()
                throws IOException
        {
            // hadoop creates crc files that must be deleted also, so just delete the whole directory
            deleteRecursively(tempDir.toPath(), ALLOW_INSECURE);
        }
    }
private static <T> Iterable<T> insertNullEvery(int n, Iterable<T> iterable)
{
return () -> new AbstractIterator<T>()
{
private final Iterator<T> delegate = iterable.iterator();
private int position;
@Override
protected T computeNext()
{
position++;
if (position > n) {
position = 0;
return null;
}
if (!delegate.hasNext()) {
return endOfData();
}
return delegate.next();
}
};
}
private static RowType createRowType(Type type)
{
return RowType.from(ImmutableList.of(
RowType.field("a", type),
RowType.field("b", type),
RowType.field("c", type)));
}
private static Object toHiveStruct(Object input)
{
List<Object> data = new ArrayList<>();
data.add(input);
data.add(input);
data.add(input);
return data;
}
private static MapType createMapType(Type type)
{
return (MapType) TYPE_MANAGER.getParameterizedType(StandardTypes.MAP, ImmutableList.of(
TypeSignatureParameter.of(type.getTypeSignature()),
TypeSignatureParameter.of(type.getTypeSignature())));
}
private static Object toHiveMap(Object nullKeyValue, Object input)
{
Map<Object, Object> map = new HashMap<>();
if (input == null) {
// json doesn't support null keys, so just write the nullKeyValue
map.put(nullKeyValue, null);
}
else {
map.put(input, input);
}
return map;
}
    /**
     * Wraps the given element type in a Presto array type.
     */
    private static ArrayType createListType(Type type)
    {
        return new ArrayType(type);
    }
private static Object toHiveList(Object input)
{
ArrayList<Object> list = new ArrayList<>(4);
for (int i = 0; i < 4; i++) {
list.add(input);
}
return list;
}
}
| |
//License
/***
* Java Modbus Library (jamod)
* Copyright (c) 2002-2004, jamod development team
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the author nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS ``AS
* IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
***/
package com.ghgande.j2mod.modbus.io;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import com.ghgande.j2mod.modbus.Modbus;
import com.ghgande.j2mod.modbus.ModbusCoupler;
import com.ghgande.j2mod.modbus.ModbusIOException;
import com.ghgande.j2mod.modbus.msg.ModbusMessage;
import com.ghgande.j2mod.modbus.msg.ModbusRequest;
import com.ghgande.j2mod.modbus.msg.ModbusResponse;
import com.ghgande.j2mod.modbus.util.ModbusUtil;
/**
* Class that implements the Modbus/BIN transport
* flavor.
*
* @author Dieter Wimberger
* @version 1.2rc1 (09/11/2004)
*/
public class ModbusBINTransport
    extends ModbusSerialTransport {

  private DataInputStream m_InputStream;     //used to read from
  private ASCIIOutputStream m_OutputStream;  //used to write to
  private byte[] m_InBuffer;
  private BytesInputStream m_ByteIn;         //to read message from
  private BytesOutputStream m_ByteInOut;     //to buffer message to
  private BytesOutputStream m_ByteOut;       //write frames

  /**
   * Constructs a new <tt>ModbusBINTransport</tt> instance.
   */
  public ModbusBINTransport() {
  }//constructor

  /**
   * Closes the underlying input and output streams.
   * Safe to call even if {@link #prepareStreams} was never invoked.
   *
   * @throws IOException if closing a stream fails.
   */
  public void close() throws IOException {
    // guard against close() being called before prepareStreams()
    if (m_InputStream != null) {
      m_InputStream.close();
    }
    if (m_OutputStream != null) {
      m_OutputStream.close();
    }
  }//close

  /**
   * Returns a new serial transaction bound to this transport flavor.
   */
  public ModbusTransaction createTransaction() {
    return new ModbusSerialTransaction();
  }

  /**
   * Writes the given message as a BIN frame:
   * FRAME_START, headless PDU bytes, CRC low byte, CRC high byte, FRAME_END.
   * When RS485 echo is enabled, the echoed frame is consumed afterwards.
   *
   * @param msg the message to write.
   * @throws ModbusIOException if writing the frame fails.
   */
  public void writeMessage(ModbusMessage msg)
      throws ModbusIOException {
    try {
      int len;
      synchronized (m_ByteOut) {
        //serialize message (headless, i.e. without transport header) to byte out
        msg.setHeadless();
        msg.writeTo(m_ByteOut);
        byte[] buf = m_ByteOut.getBuffer();
        len = m_ByteOut.size();
        //write message
        m_OutputStream.write(FRAME_START);                 //FRAMESTART
        m_OutputStream.write(buf, 0, len);                 //PDU
        int[] crc = ModbusUtil.calculateCRC(buf, 0, len);  //CRC
        m_OutputStream.write(crc[0]);
        m_OutputStream.write(crc[1]);
        m_OutputStream.write(FRAME_END);                   //FRAMEEND
        m_OutputStream.flush();
        m_ByteOut.reset();
      }
      // clears out the echoed message
      // for RS485
      if (m_Echo) {
        // read back the echoed message: frame start + PDU + CRC + frame end
        readEcho(len + 4);
      }
    } catch (Exception ex) {
      throw new ModbusIOException("I/O failed to write");
    }
  }//writeMessage

  /**
   * Reads a request frame from the wire, validating CRC and unit identifier.
   * Frames with a bad CRC or a foreign unit identifier are silently skipped
   * and the next frame is awaited.
   *
   * @return the parsed request.
   * @throws ModbusIOException if an I/O error occurs.
   */
  public ModbusRequest readRequest()
      throws ModbusIOException {
    boolean done = false;
    ModbusRequest request = null;
    int in = -1;
    try {
      do {
        //1. Skip to FRAME_START
        while ((in = m_InputStream.read()) != FRAME_START) {
          if (in == -1) {
            //fail instead of spinning forever on a closed/exhausted stream
            throw new IOException("premature end of stream");
          }
        }
        //2. Read to FRAME_END
        synchronized (m_InBuffer) {
          m_ByteInOut.reset();
          while ((in = m_InputStream.read()) != FRAME_END) {
            if (in == -1) {
              throw new IOException("premature end of stream");
            }
            m_ByteInOut.writeByte(in);
          }
          //check CRC (the last two buffered bytes are the transmitted CRC)
          int[] crc = ModbusUtil.calculateCRC(m_InBuffer, 0, m_ByteInOut.size() - 2);
          if (!(
              m_InBuffer[m_ByteInOut.size() - 2] == crc[0]     //low byte first
              && m_InBuffer[m_ByteInOut.size() - 1] == crc[1]  //hibyte
              )) {
            continue;
          }
          m_ByteIn.reset(m_InBuffer, m_ByteInOut.size());
          in = m_ByteIn.readUnsignedByte();
          //check unit identifier
          if (in != ModbusCoupler.getReference().getUnitID()) {
            continue;
          }
          in = m_ByteIn.readUnsignedByte();
          //create request for the function code, then parse the full frame
          request = ModbusRequest.createModbusRequest(in);
          request.setHeadless();
          //read message
          m_ByteIn.reset(m_InBuffer, m_ByteInOut.size());
          request.readFrom(m_ByteIn);
        }
        done = true;
      } while (!done);
      return request;
    } catch (Exception ex) {
      if (Modbus.debug) System.out.println(ex.getMessage());
      throw new ModbusIOException("I/O exception - failed to read.");
    }
  }//readRequest

  /**
   * Reads a response frame from the wire, validating CRC and unit identifier.
   * Frames with a bad CRC or a foreign unit identifier are silently skipped
   * and the next frame is awaited.
   *
   * @return the parsed response.
   * @throws ModbusIOException if an I/O error occurs.
   */
  public ModbusResponse readResponse()
      throws ModbusIOException {
    boolean done = false;
    ModbusResponse response = null;
    int in = -1;
    try {
      do {
        //1. Skip to FRAME_START
        while ((in = m_InputStream.read()) != FRAME_START) {
          if (in == -1) {
            //fail instead of spinning forever on a closed/exhausted stream
            throw new IOException("premature end of stream");
          }
        }
        //2. Read to FRAME_END
        synchronized (m_InBuffer) {
          m_ByteInOut.reset();
          while ((in = m_InputStream.read()) != FRAME_END) {
            if (in == -1) {
              throw new IOException("premature end of stream");
            }
            m_ByteInOut.writeByte(in);
          }
          //check CRC (the last two buffered bytes are the transmitted CRC)
          int[] crc = ModbusUtil.calculateCRC(m_InBuffer, 0, m_ByteInOut.size() - 2);
          if (!(
              m_InBuffer[m_ByteInOut.size() - 2] == crc[0]     //low byte first
              && m_InBuffer[m_ByteInOut.size() - 1] == crc[1]  //hibyte
              )) {
            continue;
          }
          m_ByteIn.reset(m_InBuffer, m_ByteInOut.size());
          in = m_ByteIn.readUnsignedByte();
          //check unit identifier (the previous duplicated reset-and-recheck of
          //this same byte has been removed; it could never change the outcome)
          if (in != ModbusCoupler.getReference().getUnitID()) {
            continue;
          }
          in = m_ByteIn.readUnsignedByte();
          //create response for the function code, then parse the full frame
          response = ModbusResponse.createModbusResponse(in);
          response.setHeadless();
          //read message
          m_ByteIn.reset(m_InBuffer, m_ByteInOut.size());
          response.readFrom(m_ByteIn);
        }
        done = true;
      } while (!done);
      return response;
    } catch (Exception ex) {
      if (Modbus.debug) System.out.println(ex.getMessage());
      throw new ModbusIOException("I/O exception - failed to read.");
    }
  }//readResponse

  /**
   * Prepares the input and output streams of this
   * <tt>ModbusBINTransport</tt> instance.
   * The raw input stream will be wrapped into a
   * filtered <tt>DataInputStream</tt>.
   *
   * @param in the input stream to be used for reading.
   * @param out the output stream to be used for writing.
   * @throws java.io.IOException if an I/O related error occurs.
   */
  public void prepareStreams(InputStream in, OutputStream out) throws IOException {
    m_InputStream = new DataInputStream(new ASCIIInputStream(in));
    m_OutputStream = new ASCIIOutputStream(out);
    m_ByteOut = new BytesOutputStream(Modbus.MAX_MESSAGE_LENGTH);
    m_InBuffer = new byte[Modbus.MAX_MESSAGE_LENGTH];
    m_ByteIn = new BytesInputStream(m_InBuffer);
    m_ByteInOut = new BytesOutputStream(m_InBuffer);
  }//prepareStreams

  /**
   * Defines a virtual number for the FRAME START token (COLON).
   */
  public static final int FRAME_START = 1000;

  /**
   * Defines a virtual number for the FRAME_END token (CR LF).
   */
  public static final int FRAME_END = 2000;

  /**
   * Defines the frame start token <tt>{</tt>.
   */
  public static final int FRAME_START_TOKEN = 123;

  /**
   * Defines the frame end token <tt>}</tt>.
   */
  public static final int FRAME_END_TOKEN = 125;

}//class ModbusBINTransport
| |
package com.rackspacecloud.blueflood.tools.ops;
import com.google.common.collect.Sets;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.RowCallback;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.connectionpool.impl.FixedRetryBackoffStrategy;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.ByteBufferRange;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.retry.RetryNTimes;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.RangeBuilder;
import com.rackspacecloud.blueflood.io.CassandraModel;
import com.rackspacecloud.blueflood.types.Locator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import javax.xml.bind.DatatypeConverter;
import java.io.PrintStream;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Command line tool that copies the columns of one Blueflood metric column
 * family from a source Cassandra cluster to a destination cluster, with
 * optional skipping, rate limiting, and sampled verification of copied data.
 */
public class Migration {
    private static final Options cliOptions = new Options();
    // double literal (not float) so the constant is exactly half of one percent.
    private static final double VERIFY_PERCENT = 0.005; // half of one percent.

    private static final String SRC = "src";
    private static final String DST = "dst";
    private static final String FROM = "from";
    private static final String TO = "to";
    private static final String COLUMN_FAMILY = "cf";
    private static final String SKIP = "skip";
    private static final String LIMIT = "limit";
    private static final String TTL = "ttl";
    private static final String WRITE_THREADS = "writethreads";
    private static final String READ_THREADS = "readthreads";
    private static final String BATCH_SIZE = "batchsize";
    private static final String VERIFY = "verify";
    private static final String DISCOVER = "discover";
    private static final String RATE = "rate";

    private static final PrintStream out = System.out;

    static {
        cliOptions.addOption(OptionBuilder.isRequired().hasArg(true).withValueSeparator(',').withDescription("[required] Source cassandra cluster (host:port:keyspace).").create(SRC));
        cliOptions.addOption(OptionBuilder.isRequired().hasArg(true).withValueSeparator(',').withDescription("[required] Destination cassandra cluster (host:port:keyspace).").create(DST));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] ISO 8601 datetime (or millis since epoch) of when to start migrating data. defaults to one year ago.").create(FROM));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] ISO 8601 datetime (or millis since epoch) Datetime of when to stop migrating data. defaults to right now.").create(TO));
        cliOptions.addOption(OptionBuilder.isRequired().hasArg().withValueSeparator(',').withDescription("[required] Which column family to migrate").create(COLUMN_FAMILY));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] Number of keys to skip before processing. default=0.").create(SKIP));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] Maximum number of keys to process. default=MAX_INT.").create(LIMIT));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] ttl in seconds for new data. default=5x the default for the column family.").create(TTL));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] number of read threads to use. default=1").create(READ_THREADS));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] number of write threads to use. default=1").create(WRITE_THREADS));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] number of rows to read per query. default=100").create(BATCH_SIZE));
        cliOptions.addOption(OptionBuilder.withDescription("[optional] verify a sampling 0.5% of data copied").create(VERIFY));
        cliOptions.addOption(OptionBuilder.withDescription("[optional] discover and query other cassandra nodes").create(DISCOVER));
        cliOptions.addOption(OptionBuilder.hasArg().withDescription("[optional] maximum number of columns per/second to transfer. default=500").create(RATE));
    }

    /** Seconds since the epoch (wall clock). */
    private static long nowInSeconds() {
        return System.currentTimeMillis() / 1000;
    }

    /**
     * Entry point: parses options, connects to both clusters, then streams all
     * rows of the chosen column family from source to destination.
     */
    public static void main(String[] args) {
        nullRouteAllLog4j();

        Map<String, Object> options = parseOptions(args);

        final int readThreads = (Integer)options.get(READ_THREADS);
        final int writeThreads = (Integer)options.get(WRITE_THREADS);
        final int keyLimit = (Integer)options.get(LIMIT);
        final int batchSize = (Integer)options.get(BATCH_SIZE);
        final int skip = (Integer)options.get(SKIP);
        final int ttl = (Integer)options.get(TTL);
        final int rate = (Integer)options.get(RATE);
        NodeDiscoveryType discovery = (NodeDiscoveryType)options.get(DISCOVER);

        // connect to src cluster.
        String[] srcParts = options.get(SRC).toString().split(":", -1);
        final AstyanaxContext<Keyspace> srcContext = connect(srcParts[0], Integer.parseInt(srcParts[1]), srcParts[2], readThreads, discovery);
        final Keyspace srcKeyspace = srcContext.getEntity();

        // connect to dst cluster.
        String[] dstParts = options.get(DST).toString().split(":", -1);
        final AstyanaxContext<Keyspace> dstContext = connect(dstParts[0], Integer.parseInt(dstParts[1]), dstParts[2], writeThreads, discovery);
        final Keyspace dstKeyspace = dstContext.getEntity();

        final AtomicLong columnsTransferred = new AtomicLong(0);
        final long startClockTime = nowInSeconds();

        // establish column range.
        final ByteBufferRange range = new RangeBuilder()
                .setStart((Long) options.get(FROM))
                .setEnd((Long) options.get(TO)).build();

        // create a threadpool that will write stuff into the destination.
        final ThreadPoolExecutor destWriteExecutor = new ThreadPoolExecutor(writeThreads, writeThreads,
                0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<Runnable>());
        // this threadpool ensures single-threaded output statements.
        final ThreadPoolExecutor postExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
        // this threadpool performs verifications.
        final ThreadPoolExecutor verifyExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());

        // keep track of the number of keys that have been copied.
        final AtomicInteger processedKeys = new AtomicInteger(0);
        // keep track of the number of keys that have been iterated over (includes skips).
        final AtomicInteger iteratedKeys = new AtomicInteger(0);
        final AtomicBoolean skipping = new AtomicBoolean(true);
        // sentinel that indicates it is time to stop doing everything.
        final AtomicBoolean stopAll = new AtomicBoolean(false);
        final AtomicLong heartbeat = new AtomicLong(System.currentTimeMillis());
        final boolean verify = (Boolean)options.get(VERIFY);
        final Random random = new Random(System.nanoTime());

        // indicate what's going to happen.
        out.println(String.format("Will process roughly %d keys from %s to %s for dates %s to %s",
                keyLimit,
                options.get(SRC),
                options.get(DST),
                new Date((Long)options.get(FROM)),
                new Date((Long)options.get(TO))));
        if (skip > 0) {
            out.println("Be patient while I skip " + skip + " keys");
        }

        try {
            final ColumnFamily<Locator, Long> columnFamily = (ColumnFamily<Locator, Long>)options.get(COLUMN_FAMILY);

            // when we skip, we'd like the batch size to be larger. that way, if the user specified a small batch size,
            // we don't spend a lot of time iterating through nothing. this should reduce the number of round trips to
            // one per thread.
            int realizedBatchSize = batchSize;
            if (skip > 0 && iteratedKeys.get() < skip) {
                realizedBatchSize = skip / readThreads;
            }
            realizedBatchSize = Math.min(5000, realizedBatchSize); // not too big though.

            // we have no way of knowing when we've processed all rows (no callbacks or anything); this thread makes
            // sure that work is being done. when it sees a 30s period of nothing, it shuts things down.
            new Thread("Stagnation") {
                public void run() {
                    while (!destWriteExecutor.isShutdown()) {
                        if (System.currentTimeMillis() - heartbeat.get() > 30*1000) {
                            if (!skipping.get()) {
                                out.println("It looks like we're done");
                                destWriteExecutor.shutdown();
                                postExecutor.shutdown();
                                verifyExecutor.shutdown();
                                srcContext.shutdown();
                                dstContext.shutdown();
                                break;
                            }
                        }
                        try { sleep(1000L); } catch (Exception ex) {}
                    }
                }
            }.start();

            // get all the data.
            srcKeyspace.prepareQuery(columnFamily)
                    .getAllRows()
                    .setRowLimit(realizedBatchSize)
                    .setRepeatLastToken(false)
                    .withColumnRange(range)
                    .setConcurrencyLevel(readThreads)
                    .executeWithCallback(new RowCallback<Locator, Long>() {
                        @Override
                        public void success(Rows<Locator, Long> rows) {
                            if (skipping.get()) {
                                out.println(String.format("skipping... " + iteratedKeys.get()));
                            }
                            for (Locator locator : rows.getKeys()) {
                                // short circuit quit if we need to.
                                if (stopAll.get()) break;

                                // do skipping if needed.
                                int overallKey = iteratedKeys.incrementAndGet();
                                if (overallKey < skip) {
                                    continue;
                                }
                                skipping.set(false);

                                // shut things down when we reach the key limit.
                                if (processedKeys.get() >= keyLimit && !stopAll.get()) {
                                    out.println("Reached key limit.");
                                    stopAll.set(true);
                                    destWriteExecutor.shutdownNow();
                                    verifyExecutor.shutdownNow();
                                    postExecutor.submit(new Runnable() {
                                        public void run() {
                                            srcContext.shutdown();
                                            dstContext.shutdown();
                                        }
                                    });
                                    postExecutor.shutdown();
                                    break;
                                }

                                final Locator locatorCapture = locator;
                                final Row<Locator, Long> row = rows.getRow(locator);

                                // send copy commands to the write thread pool.
                                destWriteExecutor.submit(new Runnable() {
                                    public void run() {
                                        // back out if we've processed our quota of rows.
                                        if (processedKeys.get() >= keyLimit) {
                                            return;
                                        }

                                        // copy the columns.
                                        MutationBatch batch = dstKeyspace.prepareMutationBatch();
                                        ColumnListMutation<Long> mutation = batch.withRow(columnFamily, locatorCapture);
                                        assert ttl != 0;
                                        long colCount = 0;
                                        for (Column<Long> c : row.getColumns()) {
                                            mutation.putColumn(c.getName(), c.getByteBufferValue(), ttl);
                                            colCount += 1;
                                        }
                                        columnsTransferred.addAndGet(colCount);

                                        // save it, submit a log message to be shown later.
                                        try {
                                            batch.execute();
                                            if (verify && random.nextFloat() < VERIFY_PERCENT) {
                                                verifyExecutor.submit(new Runnable() {public void run() {
                                                    try {
                                                        ColumnList<Long> srcData = srcKeyspace.prepareQuery(columnFamily).getKey(locatorCapture)
                                                                .withColumnRange(range)
                                                                .execute()
                                                                .getResult();
                                                        ColumnList<Long> dstData = dstKeyspace.prepareQuery(columnFamily).getKey(locatorCapture)
                                                                .withColumnRange(range)
                                                                .execute()
                                                                .getResult();
                                                        checkSameResults(srcData, dstData);
                                                        postExecutor.submit(new Runnable() {public void run() {
                                                            out.println(String.format("verified %s", locatorCapture.toString()));
                                                        }});
                                                    } catch (ConnectionException ex) {
                                                        stopAll.set(true);
                                                        out.println("There was an error verifying data: " + ex.getMessage());
                                                        ex.printStackTrace(out);
                                                    } catch (Exception ex) {
                                                        stopAll.set(true);
                                                        out.println(ex.getMessage() + " " + locatorCapture.toString());
                                                    }
                                                }});
                                            }
                                            final long fColCount = colCount;
                                            postExecutor.submit(new Runnable() {
                                                public void run() {
                                                    int rowIteration = processedKeys.incrementAndGet();
                                                    long colsPerSecond = columnsTransferred.get() / Math.max(1, (nowInSeconds() - startClockTime));
                                                    out.println(String.format("%d copied %d for %s (%d m/s), %d", rowIteration, fColCount, locatorCapture.toString(), colsPerSecond, columnsTransferred.get()));
                                                    heartbeat.set(System.currentTimeMillis());
                                                }
                                            });

                                            // possibly throttle if we've sent a lot of columns.
                                            // guard the denominator: during the first second of the run the
                                            // elapsed time is zero and the unguarded division threw
                                            // ArithmeticException (same guard as the log line above uses).
                                            while (columnsTransferred.get() / Math.max(1L, nowInSeconds() - startClockTime) > rate) {
                                                try { Thread.sleep(200); } catch (Exception ex) {}
                                            }
                                        }
                                        catch (ConnectionException ex) {
                                            stopAll.set(true);
                                            out.println("There was an error A: " + ex.getMessage());
                                            ex.printStackTrace(out);
                                        }
                                    }
                                });
                            }
                        }

                        @Override
                        public boolean failure(ConnectionException ex) {
                            // if we were stopped, then it was either done cleanly, or the error was already logged.
                            if (!stopAll.get()) {
                                stopAll.set(true);
                                out.println("There was an error iterating rows: " + ex.getMessage());
                                ex.printStackTrace(out);
                            }
                            return false;
                        }
                    });
        } catch (ConnectionException ex) {
            if (!stopAll.get()) {
                stopAll.set(true);
                out.println("There was an error C: " + ex.getMessage());
                ex.printStackTrace(out);
            }
        }
    }

    /**
     * Verifies that two column lists have identical column names and (sampled)
     * identical values; only every third byte of each value is compared.
     *
     * @throws Exception if any mismatch is detected.
     */
    private static void checkSameResults(ColumnList<Long> x, ColumnList<Long> y) throws Exception {
        if (x.size() != y.size()) {
            throw new Exception("source and destination column lengths do not match");
        }
        if (Sets.difference(new HashSet<Long>(x.getColumnNames()), new HashSet<Long>(y.getColumnNames())).size() != 0) {
            throw new Exception("source and destination did not contain the same column names");
        }

        for (int i = 0; i < x.size(); i++) {
            byte[] bx = x.getColumnByIndex(i).getByteArrayValue();
            byte[] by = y.getColumnByIndex(i).getByteArrayValue();
            if (bx.length != by.length) {
                throw new Exception("source and destination column values did not match for column " + i);
            }
            // only examine every third byte.
            for (int j = 0; j < bx.length; j += 3) {
                if (bx[j] != by[j]) {
                    throw new Exception("source and destination column values did not match for column " + i);
                }
            }
        }
    }

    /** Silences all log4j output so only this tool's println output is shown. */
    private static void nullRouteAllLog4j() {
        List<Logger> loggers = Collections.<Logger>list(LogManager.getCurrentLoggers());
        loggers.add(LogManager.getRootLogger());
        for ( Logger logger : loggers ) {
            logger.setLevel(Level.OFF);
        }
    }

    /**
     * Builds and starts an Astyanax context for one cluster.
     *
     * @param threads used to size the connection pool (2 connections per thread).
     */
    private static AstyanaxContext<Keyspace> connect(String host, int port, String keyspace, int threads, NodeDiscoveryType discovery) {
        AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
                .forKeyspace(keyspace)
                .withAstyanaxConfiguration(new AstyanaxConfigurationImpl()
                        .setDiscoveryType(discovery)
                        .setRetryPolicy(new RetryNTimes(10)))
                .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl(host + ":" + keyspace)
                        .setMaxConns(threads * 2)
                        .setSeeds(host)
                        .setPort(port)
                        .setRetryBackoffStrategy(new FixedRetryBackoffStrategy(1000, 1000)))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        context.start();
        return context;
    }

    // construct a well-formed options map. There should be no guesswork/checking for null after this point. All defaults
    // should be populated.
    private static Map<String, Object> parseOptions(String[] args) {
        final GnuParser parser = new GnuParser();
        final Map<String, Object> options = new HashMap<String, Object>();
        try {
            final long now = System.currentTimeMillis();
            CommandLine line = parser.parse(cliOptions, args);

            options.put(SRC, line.getOptionValue(SRC));
            options.put(DST, line.getOptionValue(DST));

            // default range is one year ago until now.
            options.put(FROM, line.hasOption(FROM) ? parseDateTime(line.getOptionValue(FROM)) : now-(365L*24L*60L*60L*1000L));
            options.put(TO, line.hasOption(TO) ? parseDateTime(line.getOptionValue(TO)) : now);

            options.put(LIMIT, line.hasOption(LIMIT) ? Integer.parseInt(line.getOptionValue(LIMIT)) : Integer.MAX_VALUE);
            options.put(SKIP, line.hasOption(SKIP) ? Integer.parseInt(line.getOptionValue(SKIP)) : 0);
            options.put(BATCH_SIZE, line.hasOption(BATCH_SIZE) ? Integer.parseInt(line.getOptionValue(BATCH_SIZE)) : 100);

            // create a mapping of all cf names -> cf.
            // then determine which column family to process.
            Map<String, ColumnFamily<Locator, Long>> nameToCf = new HashMap<String, ColumnFamily<Locator, Long>>() {{
                for (CassandraModel.MetricColumnFamily cf : CassandraModel.getMetricColumnFamilies()) {
                    put(cf.getName(), cf);
                }
            }};
            if (nameToCf.get(line.getOptionValue(COLUMN_FAMILY)) == null) {
                throw new ParseException("Invalid column family");
            }
            CassandraModel.MetricColumnFamily columnFamily = (CassandraModel.MetricColumnFamily)nameToCf.get(line.getOptionValue(COLUMN_FAMILY));
            options.put(COLUMN_FAMILY, columnFamily);

            // default TTL is five times the column family's default.
            options.put(TTL, line.hasOption(TTL) ? Integer.parseInt(line.getOptionValue(TTL)) : (int)(5 * columnFamily.getDefaultTTL().toSeconds()));

            options.put(READ_THREADS, line.hasOption(READ_THREADS) ? Integer.parseInt(line.getOptionValue(READ_THREADS)) : 1);
            options.put(WRITE_THREADS, line.hasOption(WRITE_THREADS) ? Integer.parseInt(line.getOptionValue(WRITE_THREADS)) : 1);
            options.put(VERIFY, line.hasOption(VERIFY));
            options.put(DISCOVER, line.hasOption(DISCOVER) ? NodeDiscoveryType.RING_DESCRIBE : NodeDiscoveryType.NONE);
            options.put(RATE, line.hasOption(RATE) ? Integer.parseInt(line.getOptionValue(RATE)) : 500);
        } catch (ParseException ex) {
            HelpFormatter helpFormatter = new HelpFormatter();
            helpFormatter.printHelp("bf-migrate", cliOptions);
            System.exit(-1);
        }

        return options;
    }

    /**
     * Parses a timestamp that is either raw millis since the epoch or an
     * ISO 8601 datetime string.
     */
    private static long parseDateTime(String s) {
        try {
            return Long.parseLong(s);
        } catch (NumberFormatException ex) {
            // convert from an ISO 8601 date string.
            return DatatypeConverter.parseDateTime(s).getTime().getTime();
        }
    }
}
| |
package com.unibo.koci.moneytracking.Activities;
import android.app.DatePickerDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.location.places.Place;
import com.google.android.gms.location.places.PlaceBuffer;
import com.google.android.gms.location.places.Places;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.unibo.koci.moneytracking.Adapters.PlaceAdapter;
import com.unibo.koci.moneytracking.Database.DBHelper;
import com.unibo.koci.moneytracking.Entities.Category;
import com.unibo.koci.moneytracking.Entities.Location;
import com.unibo.koci.moneytracking.Entities.MoneyItem;
import com.unibo.koci.moneytracking.Entities.PlannedItem;
import com.unibo.koci.moneytracking.MainActivity;
import com.unibo.koci.moneytracking.R;
import org.joda.time.LocalDate;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
/**
* Created by koale on 15/08/17.
*/
/**
 * Screen for creating either a one-off {@link MoneyItem} or, when started with the
 * boolean intent extra {@code "planned"} set to {@code true}, a recurring
 * {@link PlannedItem}. The location field is autocompleted through the Google
 * Places GeoData API; a free-form address (no selected {@link Place}) is stored
 * with zero coordinates.
 */
public class NewItemActivity extends AppCompatActivity implements
        GoogleApiClient.OnConnectionFailedListener,
        GoogleApiClient.ConnectionCallbacks {
    // google api
    private static final int GOOGLE_API_CLIENT_ID = 0;
    private GoogleApiClient mGoogleApiClient;
    private PlaceAdapter mPlaceArrayAdapter;
    /** Autocomplete bias bounds (degenerate box — both corners are the same point). */
    private static final LatLngBounds BOUNDS_MOUNTAIN_VIEW = new LatLngBounds(new LatLng(44.4833333, 11.3333333), new LatLng(44.4833333, 11.3333333));

    // object view
    private AutoCompleteTextView addLocation;
    private EditText nameAdd;
    private EditText descriptionAdd;
    private EditText amountAdd;
    private Button buttonAdd;
    private EditText dateInputText;
    private Spinner categorySpinner;
    private Toolbar toolbar;
    private LinearLayout li_planned;
    private EditText repeatPlanned;
    private Spinner occurrenceSpinner;

    // Place chosen from the autocomplete dropdown; null when the user typed free-form text.
    private Place place;
    // Id of the category currently selected in the spinner.
    private long catid;
    DBHelper dbHelper;
    Boolean isPlanned = false;
    String occurrence_type = "";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_new_item);
        // Guard against a missing extras bundle instead of crashing with an NPE;
        // default to a regular (non-planned) item.
        Bundle extras = getIntent().getExtras();
        Boolean planned = extras == null ? null : (Boolean) extras.getSerializable("planned");
        isPlanned = planned != null && planned;
        dbHelper = new DBHelper(this);
        init_editText();
        init_placeAPI();
        init_toolbar();
        init_dateinput();
        init_addbutton();
        init_selectategory();
        if (isPlanned) {
            li_planned.setVisibility(View.VISIBLE);
            init_planned_occurrence();
        } else {
            li_planned.setVisibility(View.GONE);
        }
    }

    /** Binds all view references; the planned-only widgets are looked up lazily. */
    private void init_editText() {
        li_planned = (LinearLayout) findViewById(R.id.planned_layout);
        nameAdd = (EditText) findViewById(R.id.add_name);
        descriptionAdd = (EditText) findViewById(R.id.add_description);
        amountAdd = (EditText) findViewById(R.id.add_amount);
        buttonAdd = (Button) findViewById(R.id.add_button);
        dateInputText = (EditText) findViewById(R.id.check_date);
        categorySpinner = (Spinner) findViewById(R.id.add_category);
        toolbar = (Toolbar) findViewById(R.id.toolbar2);
        if (isPlanned) {
            occurrenceSpinner = (Spinner) findViewById(R.id.add_occurrence);
            repeatPlanned = (EditText) findViewById(R.id.add_repeat);
        }
    }

    /** Populates the occurrence spinner from resources and tracks the selection. */
    private void init_planned_occurrence() {
        final String[] stringArray = getResources().getStringArray(R.array.occurrence);
        final ArrayAdapter<String> spinnerArrayAdapter = new ArrayAdapter<String>(this, R.layout.spinner_row, R.id.text_spinner, stringArray);
        spinnerArrayAdapter.setDropDownViewResource(R.layout.spinner_row);
        occurrenceSpinner.setAdapter(spinnerArrayAdapter);
        occurrenceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View selectedItemView, int position, long id) {
                occurrence_type = stringArray[position];
            }

            @Override
            public void onNothingSelected(AdapterView<?> parentView) {
            }
        });
    }

    /** Installs the toolbar as action bar with an "up" affordance. */
    private void init_toolbar() {
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }

    /**
     * Wires the date field to a {@link DatePickerDialog}. Planned items must be
     * dated tomorrow or later; regular items may not be dated in the future.
     */
    private void init_dateinput() {
        dateInputText.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Calendar mcurrentDate = Calendar.getInstance();
                int mYear = mcurrentDate.get(Calendar.YEAR);
                int mMonth = mcurrentDate.get(Calendar.MONTH);
                int mDay = mcurrentDate.get(Calendar.DAY_OF_MONTH);
                DatePickerDialog mDatePicker = new DatePickerDialog(NewItemActivity.this, new DatePickerDialog.OnDateSetListener() {
                    public void onDateSet(DatePicker datepicker, int selectedyear, int selectedmonth, int selectedday) {
                        // The picker reports the month as 0-11; LocalDate expects 1-12.
                        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
                        LocalDate lo = new LocalDate(selectedyear, (selectedmonth + 1), selectedday);
                        String date_string = sdf.format(lo.toDate());
                        dateInputText.setText(date_string);
                    }
                }, mYear, mMonth, mDay);
                mDatePicker.setTitle("Select date");
                if (isPlanned) {
                    // Planned items start from tomorrow onward.
                    LocalDate lo = LocalDate.fromDateFields(new Date());
                    lo = lo.plusDays(1);
                    mDatePicker.getDatePicker().setMinDate(lo.toDate().getTime());
                } else {
                    mDatePicker.getDatePicker().setMaxDate(new Date().getTime());
                }
                mDatePicker.show();
            }
        });
    }

    /** Forces the user to create a category first; finishes this activity afterwards. */
    private void force_create_new_category() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this, R.style.DialogStyle);
        builder.setMessage("There aren't categories, please add new category")
                .setCancelable(false)
                .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        startActivity(new Intent(NewItemActivity.this, CategoriesActivity.class));
                        finish();
                    }
                });
        AlertDialog alert = builder.create();
        alert.show();
    }

    /** Loads categories from the DB into the spinner and tracks the selected id. */
    private void init_selectategory() {
        List<String> listItems = new ArrayList<String>();
        final List<Category> categories_list = dbHelper.getDaoSession().getCategoryDao().loadAll();
        if (categories_list.size() == 0) {
            force_create_new_category();
        }
        // Simple for-each instead of the old size-comparison while loop.
        for (Category category : categories_list) {
            listItems.add(category.getName().toString());
        }
        final String[] categories_string = listItems.toArray(new String[listItems.size()]);
        final ArrayAdapter<String> spinnerArrayAdapter = new ArrayAdapter<String>(this, R.layout.spinner_row, R.id.text_spinner, categories_string);
        spinnerArrayAdapter.setDropDownViewResource(R.layout.spinner_row);
        categorySpinner.setAdapter(spinnerArrayAdapter);
        categorySpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View selectedItemView, int position, long id) {
                catid = categories_list.get(position).getCategoryID();
            }

            @Override
            public void onNothingSelected(AdapterView<?> parentView) {
            }
        });
    }

    /**
     * Validates the form and, when every field is valid, persists the location plus
     * either a {@link PlannedItem} or a {@link MoneyItem}, then returns to
     * {@link MainActivity}.
     */
    private void init_addbutton() {
        try {
            buttonAdd.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    String name, description;
                    double amount = 0;
                    Date date;
                    long locid;
                    boolean ok = true;
                    Location loc = new Location(null, "", 0, 0);
                    Integer repeat = 0;
                    name = nameAdd.getText().toString();
                    if (name.isEmpty()) {
                        nameAdd.setError("Insert name");
                        ok = false;
                    }
                    description = descriptionAdd.getText().toString();
                    if (description.isEmpty()) {
                        descriptionAdd.setError("Insert Description");
                        ok = false;
                    }
                    // getSelectedItem() is null when the adapter is empty (no categories yet).
                    Object selectedCategory = categorySpinner.getSelectedItem();
                    if (selectedCategory == null || selectedCategory.toString().isEmpty()) {
                        ok = false;
                    }
                    if (place == null) {
                        if (addLocation.getText().toString().isEmpty()) {
                            addLocation.setError("Insert Location");
                            ok = false;
                        } else {
                            // Free-form address, no coordinates available.
                            loc = new Location(null, addLocation.getText().toString(), 0, 0);
                        }
                    } else {
                        loc = new Location(null, place.getAddress().toString(), place.getLatLng().latitude, place.getLatLng().longitude);
                    }
                    // BUG FIX: Date.toString() is never empty, so the old check
                    // `date.toString().isEmpty()` could never fail. Validate the raw
                    // text instead, before parsing it.
                    String dateText = dateInputText.getText().toString();
                    if (dateText.isEmpty()) {
                        dateInputText.setError("Insert Date");
                        ok = false;
                    }
                    date = getDate(dateText);
                    if (amountAdd.getText().toString().isEmpty()) {
                        ok = false;
                    } else {
                        try {
                            // Accept a comma as decimal separator.
                            amount = Double.valueOf((amountAdd.getText().toString().replace(',', '.')));
                        } catch (NumberFormatException nfe) {
                            // BUG FIX: malformed input (e.g. "1.2.3") used to crash here.
                            amountAdd.setError("Invalid amount");
                            ok = false;
                        }
                    }
                    if (isPlanned) {
                        if (repeatPlanned.getText().toString().isEmpty()) {
                            repeatPlanned.setError("Insert Repeat time values");
                            ok = false;
                        } else {
                            try {
                                repeat = Integer.valueOf(repeatPlanned.getText().toString());
                            } catch (NumberFormatException nfe) {
                                repeatPlanned.setError("Invalid repeat value");
                                ok = false;
                            }
                        }
                        if (occurrence_type.isEmpty()) {
                            ok = false;
                        }
                    }
                    if (ok) {
                        locid = dbHelper.getDaoSession().insert(loc);
                        if (isPlanned) {
                            PlannedItem pi = new PlannedItem(null, name, description, date, amount, catid, locid, occurrence_type, repeat);
                            dbHelper.getDaoSession().insert(pi);
                        } else {
                            MoneyItem mi = new MoneyItem(null, name, description, date, amount, catid, locid);
                            dbHelper.getDaoSession().insert(mi);
                        }
                        Toast.makeText(NewItemActivity.this, "Added", Toast.LENGTH_LONG).show();
                        Intent intent = new Intent(NewItemActivity.this, MainActivity.class);
                        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                        startActivity(intent);
                    } else {
                        Toast.makeText(NewItemActivity.this, "Please fill all input", Toast.LENGTH_LONG).show();
                    }
                }
            });
        } catch (NullPointerException x) {
            // Defensive: only fires if a view failed to bind during listener registration.
            Toast.makeText(NewItemActivity.this, "Please fill all input!", Toast.LENGTH_LONG).show();
        }
    }

    /**
     * Parses a {@code dd/MM/yyyy} string.
     *
     * @param datestring Date text as shown in the input field.
     * @return Parsed date, or the current date when the input cannot be parsed
     *         (lenient fallback kept for backward compatibility).
     */
    public static Date getDate(String datestring) {
        SimpleDateFormat format = new SimpleDateFormat("dd/MM/yyyy");
        Date date;
        date = new Date();
        try {
            date = format.parse(datestring);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        return date;
    }

    /** Connects the Places GeoData API and wires the autocomplete adapter. */
    private void init_placeAPI() {
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Places.GEO_DATA_API)
                .enableAutoManage(this, GOOGLE_API_CLIENT_ID, this)
                .addConnectionCallbacks(this)
                .build();
        addLocation = (AutoCompleteTextView) findViewById(R.id
                .addLocation);
        // Start suggesting after three typed characters.
        addLocation.setThreshold(3);
        addLocation.setOnItemClickListener(mAutocompleteClickListener);
        mPlaceArrayAdapter = new PlaceAdapter(this, android.R.layout.simple_list_item_1,
                BOUNDS_MOUNTAIN_VIEW, null);
        addLocation.setAdapter(mPlaceArrayAdapter);
    }

    /** Resolves the tapped autocomplete suggestion to full place details. */
    private AdapterView.OnItemClickListener mAutocompleteClickListener
            = new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            final PlaceAdapter.PlaceAutocomplete item = mPlaceArrayAdapter.getItem(position);
            final String placeId = String.valueOf(item.placeId);
            PendingResult<PlaceBuffer> placeResult = Places.GeoDataApi.getPlaceById(mGoogleApiClient, placeId);
            placeResult.setResultCallback(mUpdatePlaceDetailsCallback);
        }
    };

    /** Stores the resolved place; silently ignores failed lookups. */
    private ResultCallback<PlaceBuffer> mUpdatePlaceDetailsCallback
            = new ResultCallback<PlaceBuffer>() {
        @Override
        public void onResult(PlaceBuffer places) {
            if (!places.getStatus().isSuccess()) {
                return;
            }
            place = places.get(0);
        }
    };

    @Override
    public void onConnected(Bundle bundle) {
        mPlaceArrayAdapter.setGoogleApiClient(mGoogleApiClient);
    }

    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
        Toast.makeText(this, "Google Places API connection failed with error code:" + connectionResult.getErrorCode(), Toast.LENGTH_LONG).show();
    }

    @Override
    public void onConnectionSuspended(int i) {
        mPlaceArrayAdapter.setGoogleApiClient(null);
    }

    @Override
    public boolean onSupportNavigateUp() {
        onBackPressed();
        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Set;
import javax.cache.CacheException;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.query.Query;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.internal.IgniteClientDisconnectedCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.ClusterGroupEmptyCheckedException;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheInvalidStateException;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheAdapter;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtUnreservedPartitionException;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionTopology;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal;
import org.apache.ignite.internal.processors.cache.mvcc.MvccQueryTracker;
import org.apache.ignite.internal.processors.cache.mvcc.MvccSnapshot;
import org.apache.ignite.internal.processors.cache.mvcc.MvccUtils;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.internal.util.GridCloseableIteratorAdapter;
import org.apache.ignite.internal.util.GridEmptyCloseableIterator;
import org.apache.ignite.internal.util.lang.GridCloseableIterator;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteReducer;
import org.apache.ignite.plugin.security.SecurityPermission;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.cache.CacheMode.LOCAL;
import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.INDEX;
import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.SCAN;
import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.SET;
import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.SPI;
import static org.apache.ignite.internal.processors.cache.query.GridCacheQueryType.SQL_FIELDS;
/**
* Query adapter.
*/
public class GridCacheQueryAdapter<T> implements CacheQuery<T> {
/** Cache context. */
private final GridCacheContext<?, ?> cctx;

/** Query type. */
private final GridCacheQueryType type;

/** Logger. */
private final IgniteLogger log;

/** Class name in case of binary query. */
private final String clsName;

/** Query clause; marked sensitive for toString output. */
@GridToStringInclude(sensitive = true)
private final String clause;

/** Description of IndexQuery. */
private final IndexQueryDesc idxQryDesc;

/** Key-value filter. */
private final IgniteBiPredicate<Object, Object> filter;

/** Limits returned records quantity. 0 means no limit. */
private int limit;

/** Transformer. */
private IgniteClosure<?, ?> transform;

/** Partition. */
private Integer part;

/** Include metadata flag. */
private final boolean incMeta;

/** Page size. */
private volatile int pageSize = Query.DFLT_PAGE_SIZE;

/** Query timeout. */
private volatile long timeout;

/** Include backups flag. */
private volatile boolean incBackups;

/** Local query. */
private boolean forceLocal;

/** Dedup flag. */
private volatile boolean dedup;

/** Cluster group projection. */
private volatile ClusterGroup prj;

/** Keep binary flag. */
private boolean keepBinary;

/** Task hash. */
private int taskHash;

/** Mvcc snapshot. */
private MvccSnapshot mvccSnapshot;

/** Data page scan flag. */
private Boolean dataPageScanEnabled;
/**
 * Cache query adapter for SCAN query.
 *
 * @param cctx Context.
 * @param type Query type.
 * @param filter Scan filter.
 * @param transform Optional entry transformer.
 * @param part Partition.
 * @param keepBinary Keep binary flag.
 * @param forceLocal Flag to force local query.
 * @param dataPageScanEnabled Flag to enable data page scan.
 */
public GridCacheQueryAdapter(
    GridCacheContext<?, ?> cctx,
    GridCacheQueryType type,
    @Nullable IgniteBiPredicate<Object, Object> filter,
    @Nullable IgniteClosure<Map.Entry, Object> transform,
    @Nullable Integer part,
    boolean keepBinary,
    boolean forceLocal,
    Boolean dataPageScanEnabled
) {
    assert cctx != null;
    assert type != null;
    assert part == null || part >= 0;

    this.cctx = cctx;
    this.type = type;
    this.filter = filter;
    this.transform = transform;
    this.part = part;
    this.keepBinary = keepBinary;
    this.forceLocal = forceLocal;
    this.dataPageScanEnabled = dataPageScanEnabled;

    log = cctx.logger(getClass());

    // Not applicable to SCAN queries.
    this.incMeta = false;
    this.clsName = null;
    this.clause = null;
    this.idxQryDesc = null;
}
/**
 * Cache query adapter for SET, SPI, TEXT queries.
 *
 * @param cctx Context.
 * @param type Query type.
 * @param clsName Class name.
 * @param clause Clause.
 * @param filter Scan filter.
 * @param part Partition.
 * @param incMeta Include metadata flag.
 * @param keepBinary Keep binary flag.
 * @param dataPageScanEnabled Flag to enable data page scan.
 */
public GridCacheQueryAdapter(
    GridCacheContext<?, ?> cctx,
    GridCacheQueryType type,
    @Nullable String clsName,
    @Nullable String clause,
    @Nullable IgniteBiPredicate<Object, Object> filter,
    @Nullable Integer part,
    boolean incMeta,
    boolean keepBinary,
    Boolean dataPageScanEnabled
) {
    assert cctx != null;
    assert type != null;
    assert part == null || part >= 0;

    this.cctx = cctx;
    this.type = type;
    this.clsName = clsName;
    this.clause = clause;
    this.filter = filter;
    this.part = part;
    this.incMeta = incMeta;
    this.keepBinary = keepBinary;
    this.dataPageScanEnabled = dataPageScanEnabled;

    log = cctx.logger(getClass());

    // Index query descriptor is only used by INDEX queries.
    this.idxQryDesc = null;
}
/**
* Cache query adapter for local query processing.
*
* @param cctx Context.
* @param type Query type.
* @param log Logger.
* @param pageSize Page size.
* @param timeout Timeout.
* @param incBackups Include backups flag.
* @param dedup Enable dedup flag.
* @param prj Grid projection.
* @param filter Key-value filter.
* @param part Partition.
* @param clsName Class name.
* @param clause Clause.
* @param limit Response limit. Set to 0 for no limits.
* @param incMeta Include metadata flag.
* @param keepBinary Keep binary flag.
* @param taskHash Task hash.
* @param mvccSnapshot Mvcc version.
* @param dataPageScanEnabled Flag to enable data page scan.
*/
public GridCacheQueryAdapter(
GridCacheContext<?, ?> cctx,
GridCacheQueryType type,
IgniteLogger log,
int pageSize,
long timeout,
boolean incBackups,
boolean dedup,
ClusterGroup prj,
IgniteBiPredicate<Object, Object> filter,
@Nullable Integer part,
@Nullable String clsName,
String clause,
IndexQueryDesc idxQryDesc,
int limit,
boolean incMeta,
boolean keepBinary,
int taskHash,
MvccSnapshot mvccSnapshot,
Boolean dataPageScanEnabled
) {
this.cctx = cctx;
this.type = type;
this.log = log;
this.pageSize = pageSize;
this.timeout = timeout;
this.incBackups = incBackups;
this.dedup = dedup;
this.prj = prj;
this.filter = filter;
this.part = part;
this.clsName = clsName;
this.clause = clause;
this.idxQryDesc = idxQryDesc;
this.limit = limit;
this.incMeta = incMeta;
this.keepBinary = keepBinary;
this.taskHash = taskHash;
this.mvccSnapshot = mvccSnapshot;
this.dataPageScanEnabled = dataPageScanEnabled;
}
/**
 * Cache query adapter for INDEX query.
 *
 * @param cctx Context.
 * @param type Query type.
 * @param idxQryDesc Index query descriptor.
 * @param clsName Class name.
 * @param filter Index query remote filter.
 */
public GridCacheQueryAdapter(
    GridCacheContext<?, ?> cctx,
    GridCacheQueryType type,
    IndexQueryDesc idxQryDesc,
    @Nullable String clsName,
    @Nullable IgniteBiPredicate<Object, Object> filter
) {
    this.cctx = cctx;
    this.type = type;
    this.clsName = clsName;
    this.idxQryDesc = idxQryDesc;
    this.filter = filter;

    log = cctx.logger(getClass());

    // Clause and metadata are not used by INDEX queries.
    clause = null;
    incMeta = false;
}
/**
 * @return Flag to enable data page scan ({@code null} when not explicitly set).
 */
public Boolean isDataPageScanEnabled() {
    return dataPageScanEnabled;
}

/**
 * @return MVCC snapshot.
 */
@Nullable MvccSnapshot mvccSnapshot() {
    return mvccSnapshot;
}

/**
 * @return Type.
 */
public GridCacheQueryType type() {
    return type;
}

/**
 * @return Class name.
 */
@Nullable public String queryClassName() {
    return clsName;
}

/**
 * @return Clause.
 */
@Nullable public String clause() {
    return clause;
}

/**
 * @return Include metadata flag.
 */
public boolean includeMetadata() {
    return incMeta;
}

/**
 * @return {@code True} if binary should not be deserialized.
 */
public boolean keepBinary() {
    return keepBinary;
}

/**
 * Forces query to keep binary object representation even if query was created on plain projection.
 *
 * @param keepBinary Keep binary flag.
 */
public void keepBinary(boolean keepBinary) {
    this.keepBinary = keepBinary;
}

/**
 * @return {@code True} if the query is forced local.
 */
public boolean forceLocal() {
    return forceLocal;
}

/**
 * @return Task hash.
 */
public int taskHash() {
    return taskHash;
}

/** {@inheritDoc} Requires {@code pageSize > 0}. */
@Override public CacheQuery<T> pageSize(int pageSize) {
    A.ensure(pageSize > 0, "pageSize > 0");

    this.pageSize = pageSize;

    return this;
}

/**
 * @return Page size.
 */
public int pageSize() {
    return pageSize;
}

/** {@inheritDoc} Requires {@code timeout >= 0}. */
@Override public CacheQuery<T> timeout(long timeout) {
    A.ensure(timeout >= 0, "timeout >= 0");

    this.timeout = timeout;

    return this;
}

/**
 * @return Response limit. Returns 0 for no limits.
 **/
public int limit() {
    return limit;
}

/** {@inheritDoc} */
@Override public CacheQuery<T> limit(int limit) {
    this.limit = limit;

    return this;
}

/**
 * @return Timeout.
 */
public long timeout() {
    return timeout;
}

/** {@inheritDoc} */
@Override public CacheQuery<T> includeBackups(boolean incBackups) {
    this.incBackups = incBackups;

    return this;
}

/**
 * @return Include backups.
 */
public boolean includeBackups() {
    return incBackups;
}

/** {@inheritDoc} */
@Override public CacheQuery<T> enableDedup(boolean dedup) {
    this.dedup = dedup;

    return this;
}

/**
 * @return Enable dedup flag.
 */
public boolean enableDedup() {
    return dedup;
}

/** {@inheritDoc} */
@Override public CacheQuery<T> projection(ClusterGroup prj) {
    this.prj = prj;

    return this;
}

/**
 * @return Grid projection.
 */
public ClusterGroup projection() {
    return prj;
}

/**
 * @return Key-value filter.
 */
@Nullable public <K, V> IgniteBiPredicate<K, V> scanFilter() {
    return (IgniteBiPredicate<K, V>)filter;
}

/**
 * @return Transformer.
 */
@Nullable public <K, V> IgniteClosure<Map.Entry<K, V>, Object> transform() {
    return (IgniteClosure<Map.Entry<K, V>, Object>)transform;
}

/**
 * @return Partition.
 */
@Nullable public Integer partition() {
    return part;
}

/**
 * @return Index query description.
 */
@Nullable public IndexQueryDesc idxQryDesc() { return idxQryDesc; }
/**
 * Checks that this query may run on the cache: every type except SCAN, SET,
 * SPI and INDEX requires indexing to be enabled on the cache configuration.
 *
 * @throws IgniteCheckedException If query is invalid.
 */
public void validate() throws IgniteCheckedException {
    boolean idxRequired = type != SCAN && type != SET && type != SPI && type != INDEX;

    if (idxRequired && !QueryUtils.isEnabled(cctx.config()))
        throw new IgniteCheckedException("Indexing is disabled for cache: " + cctx.cache().name());
}
/** {@inheritDoc} */
@Override public CacheQueryFuture<T> execute(@Nullable Object... args) {
    // No reducer: results are returned as-is.
    return execute0(null, args);
}

/** {@inheritDoc} */
@Override public <R> CacheQueryFuture<R> execute(IgniteReducer<T, R> rmtReducer, @Nullable Object... args) {
    return execute0(rmtReducer, args);
}
/**
 * Executes a non-SCAN query: resolves the target nodes, performs the security
 * check, registers deployment classes when peer class loading is enabled, and
 * routes to the local or distributed path of the query manager.
 *
 * @param rmtReducer Optional reducer.
 * @param args Arguments.
 * @return Future.
 */
@SuppressWarnings({"IfMayBeConditional"})
private <R> CacheQueryFuture<R> execute0(@Nullable IgniteReducer<T, R> rmtReducer, @Nullable Object... args) {
    // SCAN queries go through executeScanQuery() instead.
    assert type != SCAN : this;

    Collection<ClusterNode> nodes;

    try {
        nodes = nodes();
    }
    catch (IgniteCheckedException e) {
        return new GridCacheQueryErrorFuture<>(cctx.kernalContext(), e);
    }

    cctx.checkSecurity(SecurityPermission.CACHE_READ);

    if (nodes.isEmpty())
        return new GridCacheQueryErrorFuture<>(cctx.kernalContext(), new ClusterGroupEmptyCheckedException());

    if (log.isDebugEnabled())
        log.debug("Executing query [query=" + this + ", nodes=" + nodes + ']');

    if (cctx.deploymentEnabled()) {
        try {
            cctx.deploy().registerClasses(filter, rmtReducer);
            cctx.deploy().registerClasses(args);
        }
        catch (IgniteCheckedException e) {
            return new GridCacheQueryErrorFuture<>(cctx.kernalContext(), e);
        }
    }

    taskHash = cctx.kernalContext().job().currentTaskNameHash();

    final GridCacheQueryBean bean = new GridCacheQueryBean(this, (IgniteReducer<Object, Object>)rmtReducer,
        null, args);

    final GridCacheQueryManager qryMgr = cctx.queries();

    // Single-node targeting the local node means we can skip the distributed path.
    boolean loc = nodes.size() == 1 && F.first(nodes).id().equals(cctx.localNodeId());

    if (type == SQL_FIELDS || type == SPI)
        return (CacheQueryFuture<R>)(loc ? qryMgr.queryFieldsLocal(bean) :
            qryMgr.queryFieldsDistributed(bean, nodes));
    else
        return (CacheQueryFuture<R>)(loc ? qryMgr.queryLocal(bean) : qryMgr.queryDistributed(bean, nodes));
}
/**
 * {@inheritDoc}
 *
 * Executes a SCAN query: rejects the query when the target partition (or, for a
 * full scan, any partition) is lost, resolves the target nodes and chooses the
 * local, per-partition-fallback or distributed execution path. Wraps the result
 * in an MVCC-tracking iterator when a tracker was opened here.
 */
@Override public GridCloseableIterator executeScanQuery() throws IgniteCheckedException {
    assert type == SCAN : "Wrong processing of query: " + type;

    if (!cctx.isLocal()) {
        GridDhtCacheAdapter<?, ?> cacheAdapter = cctx.isNear() ? cctx.near().dht() : cctx.dht();

        Set<Integer> lostParts = cacheAdapter.topology().lostPartitions();

        if (!lostParts.isEmpty()) {
            if (part == null || lostParts.contains(part)) {
                // Fixed garbled message: "has been lostParts" -> "has been lost".
                throw new CacheException(new CacheInvalidStateException("Failed to execute query because cache partition " +
                    "has been lost [cacheName=" + cctx.name() +
                    ", part=" + (part == null ? lostParts.iterator().next() : part) + ']'));
            }
        }
    }

    // Affinity nodes snapshot.
    Collection<ClusterNode> nodes = new ArrayList<>(nodes());

    cctx.checkSecurity(SecurityPermission.CACHE_READ);

    if (nodes.isEmpty()) {
        if (part != null) {
            if (forceLocal) {
                throw new IgniteCheckedException("No queryable nodes for partition " + part
                    + " [forced local query=" + this + "]");
            }
        }

        return new GridEmptyCloseableIterator();
    }

    if (log.isDebugEnabled())
        log.debug("Executing query [query=" + this + ", nodes=" + nodes + ']');

    if (cctx.deploymentEnabled())
        cctx.deploy().registerClasses(filter);

    taskHash = cctx.kernalContext().job().currentTaskNameHash();

    final GridCacheQueryManager qryMgr = cctx.queries();

    MvccQueryTracker mvccTracker = null;

    if (cctx.mvccEnabled() && mvccSnapshot == null) {
        // Reuse the snapshot of an active user transaction when present,
        // otherwise open a dedicated tracker for this query.
        GridNearTxLocal tx = cctx.tm().userTx();

        if (tx != null)
            mvccSnapshot = MvccUtils.requestSnapshot(tx);
        else {
            mvccTracker = MvccUtils.mvccTracker(cctx, null);

            mvccSnapshot = mvccTracker.snapshot();
        }

        assert mvccSnapshot != null;
    }

    boolean loc = nodes.size() == 1 && F.first(nodes).id().equals(cctx.localNodeId());

    GridCloseableIterator it;

    if (loc)
        it = qryMgr.scanQueryLocal(this, true);
    else if (part != null)
        it = new ScanQueryFallbackClosableIterator(part, this, qryMgr, cctx);
    else
        it = qryMgr.scanQueryDistributed(this, nodes);

    return mvccTracker != null ? new MvccTrackingIterator(it, mvccTracker) : it;
}
/**
 * Resolves the nodes this query should execute on, based on the cache mode,
 * the configured projection and the optional target partition.
 *
 * @return Nodes to execute on.
 * @throws IgniteCheckedException If the partition number is invalid.
 */
private Collection<ClusterNode> nodes() throws IgniteCheckedException {
    CacheMode cacheMode = cctx.config().getCacheMode();

    Integer part = partition();

    switch (cacheMode) {
        case LOCAL:
            if (prj != null)
                U.warn(log, "Ignoring query projection because it's executed over LOCAL cache " +
                    "(only local node will be queried): " + this);

            // Removed redundant re-check of the cache mode: this branch is
            // only reached when cacheMode == LOCAL.
            if (type == SCAN && part != null && part >= cctx.affinity().partitions())
                throw new IgniteCheckedException("Invalid partition number: " + part);

            return Collections.singletonList(cctx.localNode());

        case REPLICATED:
            if (prj != null || part != null)
                return nodes(cctx, prj, part);

            GridDhtPartitionTopology topology = cctx.topology();

            if (cctx.affinityNode() && !topology.localPartitionMap().hasMovingPartitions())
                return Collections.singletonList(cctx.localNode());

            topology.readLock();

            try {
                // Prefer a single fully-rebalanced node (shuffled to spread load);
                // fall back to all affinity nodes when none qualifies.
                Collection<ClusterNode> affNodes = nodes(cctx, null, null);

                List<ClusterNode> nodes = new ArrayList<>(affNodes);

                Collections.shuffle(nodes);

                for (ClusterNode node : nodes) {
                    if (!topology.partitions(node.id()).hasMovingPartitions())
                        return Collections.singletonList(node);
                }

                return affNodes;
            }
            finally {
                topology.readUnlock();
            }

        case PARTITIONED:
            return nodes(cctx, prj, part);

        default:
            throw new IllegalStateException("Unknown cache distribution mode: " + cacheMode);
    }
}
/**
 * @param cctx Cache context.
 * @param prj Projection (optional).
 * @param part Partition (optional).
 * @return Collection of data nodes in provided projection (if any).
 * @throws IgniteCheckedException If partition number is invalid.
 */
private static Collection<ClusterNode> nodes(final GridCacheContext<?, ?> cctx,
    @Nullable final ClusterGroup prj, @Nullable final Integer part) throws IgniteCheckedException {
    assert cctx != null;

    final AffinityTopologyVersion topVer = cctx.affinity().affinityTopologyVersion();

    Collection<ClusterNode> affNodes = CU.affinityNodes(cctx, topVer);

    // Fast path: no projection or partition filtering requested.
    if (prj == null && part == null)
        return affNodes;

    if (part != null && part >= cctx.affinity().partitions())
        throw new IgniteCheckedException("Invalid partition number: " + part);

    // Owners of the requested partition; empty when no partition was given.
    final Set<ClusterNode> owners =
        part == null ? Collections.<ClusterNode>emptySet() : new HashSet<>(cctx.topology().owners(part, topVer));

    // Keep only affinity nodes that match the projection and (if set) own the partition.
    return F.view(affNodes, new P1<ClusterNode>() {
        @Override public boolean apply(ClusterNode n) {
            return cctx.discovery().cacheAffinityNode(n, cctx.name()) &&
                (prj == null || prj.node(n.id()) != null) &&
                (part == null || owners.contains(n));
        }
    });
}

/** {@inheritDoc} */
@Override public String toString() {
    return S.toString(GridCacheQueryAdapter.class, this);
}
/**
* Wrapper for queries with fallback.
*/
private static class ScanQueryFallbackClosableIterator extends GridCloseableIteratorAdapter {
/** */
private static final long serialVersionUID = 0L;
/** Query future. */
private volatile T2<GridCloseableIterator<Object>, GridCacheQueryFutureAdapter> tuple;
/** Backups. */
private volatile Queue<ClusterNode> nodes;
/** Topology version of the last detected {@link GridDhtUnreservedPartitionException}. */
private volatile AffinityTopologyVersion unreservedTopVer;
/** Number of times to retry the query on the nodes failed with {@link GridDhtUnreservedPartitionException}. */
private volatile int unreservedNodesRetryCnt = 5;
/** Bean. */
private final GridCacheQueryAdapter qry;
/** Query manager. */
private final GridCacheQueryManager qryMgr;
/** Cache context. */
private final GridCacheContext cctx;
/** Partition. */
private final int part;
/** Flag indicating that a first item has been returned to a user. */
private boolean firstItemReturned;
/** */
private Object cur;
/**
* @param part Partition.
* @param qry Query.
* @param qryMgr Query manager.
* @param cctx Cache context.
*/
private ScanQueryFallbackClosableIterator(int part, GridCacheQueryAdapter qry,
GridCacheQueryManager qryMgr, GridCacheContext cctx) {
this.qry = qry;
this.qryMgr = qryMgr;
this.cctx = cctx;
this.part = part;
nodes = fallbacks(cctx.shared().exchange().readyAffinityVersion());
if (F.isEmpty(nodes))
throw new ClusterTopologyException("Failed to execute the query " +
"(all affinity nodes left the grid) [cache=" + cctx.name() +
", qry=" + qry +
", curTopVer=" + qryMgr.queryTopologyVersion().topologyVersion() + ']');
init();
}
/**
* @param topVer Topology version.
* @return Nodes for query execution.
*/
private Queue<ClusterNode> fallbacks(AffinityTopologyVersion topVer) {
Deque<ClusterNode> fallbacks = new LinkedList<>();
Collection<ClusterNode> owners = new HashSet<>();
for (ClusterNode node : cctx.topology().owners(part, topVer)) {
if (node.isLocal())
fallbacks.addFirst(node);
else
fallbacks.add(node);
owners.add(node);
}
for (ClusterNode node : cctx.topology().moving(part)) {
if (!owners.contains(node))
fallbacks.add(node);
}
return fallbacks;
}
/**
*
*/
@SuppressWarnings("unchecked")
private void init() {
final ClusterNode node = nodes.poll();
if (node.isLocal()) {
try {
GridCloseableIterator it = qryMgr.scanQueryLocal(qry, true);
tuple = new T2(it, null);
}
catch (IgniteClientDisconnectedCheckedException e) {
throw CU.convertToCacheException(e);
}
catch (IgniteCheckedException e) {
retryIfPossible(e);
}
}
else {
final GridCacheQueryBean bean = new GridCacheQueryBean(qry, null, qry.transform, null);
GridCacheQueryFutureAdapter fut =
(GridCacheQueryFutureAdapter)qryMgr.queryDistributed(bean, Collections.singleton(node));
tuple = new T2(null, fut);
}
}
/** {@inheritDoc} */
@Override protected Object onNext() throws IgniteCheckedException {
if (!onHasNext())
throw new NoSuchElementException();
assert cur != null;
Object e = cur;
cur = null;
return e;
}
/** {@inheritDoc} */
@Override protected boolean onHasNext() throws IgniteCheckedException {
while (true) {
if (cur != null)
return true;
T2<GridCloseableIterator<Object>, GridCacheQueryFutureAdapter> t = tuple;
GridCloseableIterator<Object> iter = t.get1();
if (iter != null) {
boolean hasNext = iter.hasNext();
if (hasNext)
cur = iter.next();
return hasNext;
}
else {
GridCacheQueryFutureAdapter fut = t.get2();
assert fut != null;
if (firstItemReturned)
return (cur = convert(fut.next())) != null;
try {
fut.awaitFirstItemAvailable();
firstItemReturned = true;
return (cur = convert(fut.next())) != null;
}
catch (IgniteClientDisconnectedCheckedException e) {
throw CU.convertToCacheException(e);
}
catch (IgniteCheckedException e) {
retryIfPossible(e);
}
}
}
}
/**
 * Converts a raw query result into the form handed to the user.
 *
 * @param obj Entry to convert.
 * @return The object unchanged when a transform is configured; otherwise a
 *      {@code CacheQueryEntry} wrapping the key/value pair, or {@code null}
 *      when the input is {@code null}.
 */
private Object convert(Object obj) {
    // Transformed results are already in their final shape.
    if (qry.transform() != null)
        return obj;

    if (obj == null)
        return null;

    Map.Entry entry = (Map.Entry)obj;

    return new CacheQueryEntry(entry.getKey(), entry.getValue());
}
/**
 * Attempts to recover from a query failure by waiting for a suitable topology
 * version and re-running {@code init()} on a fallback node. Rethrows as a cache
 * exception when the failure is not topology-related or the retry budget is
 * exhausted.
 *
 * @param e Exception for query run.
 */
private void retryIfPossible(IgniteCheckedException e) {
    try {
        IgniteInternalFuture<?> retryFut;
        GridDhtUnreservedPartitionException partErr = X.cause(e, GridDhtUnreservedPartitionException.class);
        if (partErr != null) {
            // Partition could not be reserved - wait until affinity is ready
            // on the version the exception asks for.
            AffinityTopologyVersion waitVer = partErr.topologyVersion();
            assert waitVer != null;
            retryFut = cctx.shared().exchange().affinityReadyFuture(waitVer);
        }
        else if (e.hasCause(ClusterTopologyCheckedException.class)) {
            ClusterTopologyCheckedException topEx = X.cause(e, ClusterTopologyCheckedException.class);
            retryFut = topEx.retryReadyFuture();
        }
        else if (e.hasCause(ClusterGroupEmptyCheckedException.class)) {
            ClusterGroupEmptyCheckedException ex = X.cause(e, ClusterGroupEmptyCheckedException.class);
            retryFut = ex.retryReadyFuture();
        }
        else
            // Not topology-related - no retry is possible.
            throw CU.convertToCacheException(e);
        if (F.isEmpty(nodes)) {
            // All fallback nodes were tried; refresh the list if retries remain.
            if (--unreservedNodesRetryCnt > 0) {
                if (retryFut != null)
                    retryFut.get();
                nodes = fallbacks(unreservedTopVer == null ? cctx.shared().exchange().readyAffinityVersion() : unreservedTopVer);
                unreservedTopVer = null;
                init();
            }
            else
                throw CU.convertToCacheException(e);
        }
        else
            // More fallback nodes are queued - retry on the next one immediately.
            init();
    }
    catch (IgniteCheckedException ex) {
        throw CU.convertToCacheException(ex);
    }
}
/** {@inheritDoc} */
@Override protected void onClose() throws IgniteCheckedException {
    super.onClose();

    T2<GridCloseableIterator<Object>, GridCacheQueryFutureAdapter> t = tuple;

    if (t == null)
        return;

    // Close whichever execution path was active: local iterator or remote future.
    GridCloseableIterator<Object> iter = t.get1();

    if (iter != null)
        iter.close();

    GridCacheQueryFutureAdapter fut = t.get2();

    if (fut != null)
        fut.cancel();
}
}
/**
 * Wrapper for an MVCC-related iterators. Delegates iteration to an underlying
 * iterator and releases the MVCC tracker when the iterator is closed or exhausted.
 */
private static class MvccTrackingIterator implements GridCloseableIterator {
    /** Serial version uid. */
    private static final long serialVersionUID = -1905248502802333832L;

    /** Underlying iterator. */
    private final GridCloseableIterator delegate;

    /** Query MVCC tracker. */
    private final MvccQueryTracker tracker;

    /**
     * Constructor.
     *
     * @param it Underlying iterator.
     * @param mvccTracker Query MVCC tracker.
     */
    MvccTrackingIterator(GridCloseableIterator it, MvccQueryTracker mvccTracker) {
        assert it != null && mvccTracker != null;

        delegate = it;
        tracker = mvccTracker;
    }

    /** {@inheritDoc} */
    @Override public void close() throws IgniteCheckedException {
        if (isClosed())
            return;

        try {
            delegate.close();
        }
        finally {
            // Release the MVCC tracker even when closing the iterator fails.
            tracker.onDone();
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isClosed() {
        return delegate.isClosed();
    }

    /** {@inheritDoc} */
    @Override public boolean hasNext() {
        if (delegate.hasNext())
            return true;

        // Exhausted - eagerly close so the MVCC tracker is released.
        try {
            close();
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }

        return false;
    }

    /** {@inheritDoc} */
    @Override public boolean hasNextX() throws IgniteCheckedException {
        if (delegate.hasNext())
            return true;

        close();

        return false;
    }

    /** {@inheritDoc} */
    @Override public Object nextX() throws IgniteCheckedException {
        return delegate.nextX();
    }

    /** {@inheritDoc} */
    @Override public void removeX() throws IgniteCheckedException {
        delegate.removeX();
    }

    /** {@inheritDoc} */
    @NotNull @Override public Iterator iterator() {
        return this;
    }

    /** {@inheritDoc} */
    @Override public Object next() {
        return delegate.next();
    }
}
}
| |
/*
* Copyright 2016 Pinpoint contributors and NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.jboss;
import java.security.ProtectionDomain;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod;
import com.navercorp.pinpoint.bootstrap.instrument.Instrumentor;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformCallback;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplate;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplateAware;
import com.navercorp.pinpoint.bootstrap.logging.PLogger;
import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory;
import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPlugin;
import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPluginSetupContext;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.plugin.jboss.interceptor.ContextInvocationInterceptor;
import com.navercorp.pinpoint.plugin.jboss.interceptor.MethodInvocationHandlerInterceptor;
import com.navercorp.pinpoint.plugin.jboss.interceptor.RequestStartAsyncInterceptor;
import com.navercorp.pinpoint.plugin.jboss.interceptor.StandardHostValveInvokeInterceptor;
/**
 * The Class JbossPlugin. Registers the JBoss-specific bytecode transformers when
 * the agent detects (or is explicitly configured for) a JBoss application server.
 *
 * @author <a href="mailto:suraj.raturi89@gmail.com">Suraj Raturi</a>
 * @author jaehong.kim
 */
public class JbossPlugin implements ProfilerPlugin, TransformTemplateAware {

    private final PLogger logger = PLoggerFactory.getLogger(this.getClass());

    /**
     * The transform template.
     */
    private TransformTemplate transformTemplate;

    /**
     * Reads the plugin configuration, resolves the application type (running
     * detection when none is configured) and installs the transformers when the
     * resolved type is JBoss.
     *
     * @param context the profiler plugin setup context
     */
    @Override
    public void setup(final ProfilerPluginSetupContext context) {
        final JbossConfig config = new JbossConfig(context.getConfig());
        if (!config.isEnable()) {
            logger.info("{} disabled", this.getClass().getSimpleName());
            return;
        }
        logger.info("{} config:{}", this.getClass().getSimpleName(), config);

        ServiceType applicationType = context.getConfiguredApplicationType();
        if (ServiceType.UNDEFINED.equals(applicationType)) {
            final JbossDetector jbossDetector = new JbossDetector(config.getBootstrapMains());
            if (jbossDetector.detect()) {
                logger.info("Detected application type : {}", JbossConstants.JBOSS);
                if (context.registerApplicationType(JbossConstants.JBOSS)) {
                    applicationType = JbossConstants.JBOSS;
                } else {
                    logger.info("Application type [{}] already set, skipping [{}] registration.", context.getApplicationType(), JbossConstants.JBOSS);
                }
            }
        }

        if (JbossConstants.JBOSS.equals(applicationType)) {
            logger.info("Adding JBoss transformers");
            addTransformers(config);
        } else {
            logger.info("Not adding JBoss transformers");
        }
    }

    /**
     * Installs the transformers, depending on whether EJB tracing or
     * servlet/REST tracing is configured.
     *
     * @param jbossConfig the plugin configuration
     */
    private void addTransformers(final JbossConfig jbossConfig) {
        // Instrumenting class on the base of ejb based application or rest based application.
        if (jbossConfig.isTraceEjb()) {
            addMethodInvocationMessageHandlerEditor();
        } else {
            // Add async listener. Servlet 3.0
            addRequestEditor();
            addContextInvocationEditor();
            // Hide pinpoint headers
            requestFacade();
            // Clear bind trace. defense code
            addStandardHostValveEditor();
        }
    }

    /**
     * Registers the transformer that hides Pinpoint headers on the request facade.
     */
    private void requestFacade() {
        transformTemplate.transform("org.apache.catalina.connector.RequestFacade", RequestFacadeTransform.class);
    }

    /**
     * Weaves the header-hiding aspect into {@code RequestFacade} when enabled.
     */
    public static class RequestFacadeTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(final Instrumentor instrumentor, final ClassLoader classLoader, final String className, final Class<?> classBeingRedefined,
                                    final ProtectionDomain protectionDomain, final byte[] classfileBuffer) throws InstrumentException {
            final JbossConfig jbossConfig = new JbossConfig(instrumentor.getProfilerConfig());
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            if (jbossConfig.isHidePinpointHeader()) {
                // Hide pinpoint headers
                target.weave("com.navercorp.pinpoint.plugin.jboss.aspect.RequestFacadeAspect");
            }
            return target.toBytecode();
        }
    }

    /**
     * Registers the transformer that intercepts Servlet 3.0 async dispatch.
     */
    private void addRequestEditor() {
        transformTemplate.transform("org.apache.catalina.connector.Request", RequestTransform.class);
    }

    /**
     * Adds the async-listener interceptor to {@code Request.startAsync}.
     */
    public static class RequestTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(final Instrumentor instrumentor, final ClassLoader classLoader, final String className, final Class<?> classBeingRedefined,
                                    final ProtectionDomain protectionDomain, final byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            // Add async listener. Servlet 3.0
            final InstrumentMethod startAsyncMethodEditor = target.getDeclaredMethod("startAsync", "javax.servlet.ServletRequest", "javax.servlet.ServletResponse");
            if (startAsyncMethodEditor != null) {
                startAsyncMethodEditor.addInterceptor(RequestStartAsyncInterceptor.class);
            }
            return target.toBytecode();
        }
    }

    /**
     * Adds the method invoke message handler editor.
     */
    private void addMethodInvocationMessageHandlerEditor() {
        transformTemplate.transform("org.jboss.as.ejb3.remote.protocol.versionone.MethodInvocationMessageHandler", MethodInvocationMessageHandlerTransform.class);
    }

    /**
     * Adds the remote-EJB invocation interceptor to {@code invokeMethod}.
     */
    public static class MethodInvocationMessageHandlerTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(final Instrumentor instrumentor, final ClassLoader classLoader, final String className, final Class<?> classBeingRedefined,
                                    final ProtectionDomain protectionDomain,
                                    final byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            // Support EJB
            final InstrumentMethod method =
                target.getDeclaredMethod("invokeMethod", "short", "org.jboss.as.ee.component.ComponentView", "java.lang.reflect.Method", "java.lang.Object[]",
                    "org.jboss.ejb.client.EJBLocator", "java.util.Map");
            if (method != null) {
                method.addInterceptor(MethodInvocationHandlerInterceptor.class);
            }
            return target.toBytecode();
        }
    }

    /**
     * Adds the context invocation editor.
     */
    private void addContextInvocationEditor() {
        transformTemplate.transform("org.jboss.as.ejb3.tx.EjbBMTInterceptor", EjbBMTInterceptorTransform.class);
    }

    /**
     * Adds the bean-managed-transaction interceptor to {@code handleInvocation}.
     */
    public static class EjbBMTInterceptorTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(final Instrumentor instrumentor, final ClassLoader classLoader, final String className, final Class<?> classBeingRedefined,
                                    final ProtectionDomain protectionDomain,
                                    final byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            // EJB
            final InstrumentMethod method = target.getDeclaredMethod("handleInvocation", "org.jboss.invocation.InterceptorContext");
            if (method != null) {
                method.addInterceptor(ContextInvocationInterceptor.class);
            }
            return target.toBytecode();
        }
    }

    /**
     * Adds the standard host valve editor.
     */
    private void addStandardHostValveEditor() {
        transformTemplate.transform("org.apache.catalina.core.StandardHostValve", StandardHostValveTransform.class);
    }

    /**
     * Adds the trace-clearing interceptor to {@code StandardHostValve.invoke}.
     */
    public static class StandardHostValveTransform implements TransformCallback {
        @Override
        public byte[] doInTransform(final Instrumentor instrumentor, final ClassLoader classLoader, final String className, final Class<?> classBeingRedefined,
                                    final ProtectionDomain protectionDomain,
                                    final byte[] classfileBuffer) throws InstrumentException {
            final InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
            // Clear bind trace
            final InstrumentMethod invokeMethod = target.getDeclaredMethod("invoke", "org.apache.catalina.connector.Request", "org.apache.catalina.connector.Response");
            if (invokeMethod != null) {
                invokeMethod.addInterceptor(StandardHostValveInvokeInterceptor.class);
            }
            return target.toBytecode();
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplateAware#setTransformTemplate(com.navercorp.
     * pinpoint.bootstrap.instrument.transformer.TransformTemplate)
     */
    @Override
    public void setTransformTemplate(final TransformTemplate transformTemplate) {
        this.transformTemplate = transformTemplate;
    }
}
| |
//
// Copyright (c) eProtectioneers 2016/17. All rights reserved.
// Licensed under the MIT License. See LICENSE file in the project root for full license information.
//
package org.eprotectioneers.panacea.contactmanagement.view;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import org.eprotectioneers.panacea.contactmanagement.components.RoundRectangleButton;
/**
 * The super class, to visualize an Object (Contact/Group)
 * @author eProtectioneers
 */
public abstract class Item_Object extends RoundRectangleButton {
    // Lazily built context menu; generated on first popup trigger (see PopUpGenerator).
    protected JPopupMenu popupMenu;
    // True when a plain left-click (without Ctrl) toggles selection.
    private boolean selectable=false;
    // Globally suppresses the regular popup menu for all items.
    private static boolean hidepopup=false;
    // Current selection state of this item.
    private boolean selected=false;
    private boolean editable=true;
    // Colors applied while an item is selected (shared by all items).
    private static Color bg_selected=Color.BLACK;
    private static Color fg_selected=Color.WHITE;
    // Menu shown instead of the regular popup while the item is selected.
    private JPopupMenu selectedPopup;
    // Shared listener handling the Ctrl-key hand-cursor behavior.
    private static SelectionListener sl=new SelectionListener();
    // Background worker generating the "other object" submenu.
    private static Thread t1;
    // True once the popup of the other Object has been generated.
    private boolean puoogenerated;
    /**
     * Marks whether the popup of the other Object has been generated.
     * @param puoogenerated the new flag value
     */
    public void setPUOOGenerated(boolean puoogenerated){
        this.puoogenerated=puoogenerated;
    }
    /**
     * @return the bg_selected
     */
    public static Color getBg_selected() {
        return bg_selected;
    }
    /**
     * Sets the background color used for selected items.
     * @param bg_selected
     */
    public static void setBg_selected(Color bg_selected) {
        Item_Object.bg_selected = bg_selected;
    }
    /**
     * @return the fg_selected
     */
    public static Color getFg_selected() {
        return fg_selected;
    }
    /**
     * Sets the foreground color used for selected items.
     * @param fg_selected
     */
    public static void setFg_selected(Color fg_selected) {
        Item_Object.fg_selected = fg_selected;
    }
    /**
     * @return if it is selected
     */
    public boolean isSelected() {
        return selected;
    }
    /**
     * Selects or deselects this item, updating its colors accordingly.
     * @param selected
     */
    public void setSelected(boolean selected) {
        if(selected)new Item_ObjectMouseListener().selectItem();
        else new Item_ObjectMouseListener().deselectItem();
    }
    /**
     * @return the hidePopup
     */
    public static boolean isHidePopup() {
        return hidepopup;
    }
    /**
     * @param hidePopup the hidePopup to set
     */
    public static void setHidePopup(boolean hidePopup) {
        hidepopup = hidePopup;
    }
    /**
     * @return the selectable
     */
    public boolean isSelectable() {
        return selectable;
    }
    /**
     * Sets whether a plain click selects the item and switches the cursor
     * to a hand cursor while selectable.
     * @param selectable the selectable to set
     */
    public void setSelectable(boolean selectable) {
        if(selectable)setCursor(new Cursor(Cursor.HAND_CURSOR));
        else setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
        this.selectable = selectable;
    }
    /**
     * @return the editable
     */
    public boolean isEditable() {
        return editable;
    }
    /**
     * @param editable the editable to set
     */
    public void setEditable(boolean editable) {
        this.editable = editable;
    }
    /**
     * @return the selectedPopup
     */
    public JPopupMenu getSelectedPopup() {
        return selectedPopup;
    }
    /**
     * @param selectedPopup the selectedPopup to set
     */
    public void setSelectedPopup(JPopupMenu selectedPopup) {
        this.selectedPopup = selectedPopup;
    }
    /**
     * Constructor, assigns the label, font and fixed 27px row height, and wires
     * up the mouse listeners for selection and popup handling.
     * @param text the button label
     * @param radius the corner radius of the rounded rectangle
     */
    public Item_Object(String text, int radius) {
        super(text, radius);
        super.setHorizontalAlignment(SwingConstants.LEFT);
        super.setFont(new Font("Calibri", Font.PLAIN, 18));
        super.addActionListener(new DoubleClickListener());
        super.setFocusPainted(false);
        // Fixed 27px row height; width may grow freely.
        super.setMaximumSize(new Dimension(99999,27));
        super.setMinimumSize(new Dimension(0,27));
        super.setIconTextGap(15);
        this.addMouseListener(new Item_ObjectMouseListener());
        this.addMouseListener(sl);
    }
    /**
     * @return the IO's shown text
     */
    public abstract String getShownText();
    /**
     * Generates this item's regular popup menu (called lazily by PopUpGenerator).
     */
    abstract protected void generatePopup();
    /**
     * Generates the PopupMenu of the other Object (Contact-Group/Group-Object)
     * @param bg
     * @param fg
     * @param borderpainted
     * @param mnOObject
     * @param tooltipset
     */
    abstract public void generatePopupOObject(Color bg, Color fg, boolean borderpainted, JComponent mnOObject,boolean tooltipset);
    /**
     * @param o
     * @param bg
     * @param fg
     * @param borderpainted
     * @return a Array of default MenuItems of the other Object (Contact-Group/Group-Object)
     */
    abstract protected JMenuItem[] getDefaultOObjectMI(Object o,Color bg, Color fg, boolean borderpainted);
    /**
     * Is running after clicking twice in a short period of time
     */
    abstract protected void doubleClickServiceRoutine();
    /**
     * MenuListener, which generates the Menus of the other Object (Contact-Group/Group-Object)
     * in a background thread when the menu is first selected.
     * @author eProtectioneers
     */
    protected class GenerateOObjectMenuListener implements MenuListener,Runnable{
        private Color _bg,_fg;
        private boolean _borderpainted;
        private JComponent _mnOObject;
        protected GenerateOObjectMenuListener(Color bg, Color fg, boolean borderpainted, JComponent mnOObject){
            this._bg=bg;
            this._fg=fg;
            this._borderpainted=borderpainted;
            this._mnOObject=mnOObject;
            // Force regeneration on the next menuSelected().
            puoogenerated=false;
        }
        @Override
        public void run() {
            // Disable the menu while its entries are being generated.
            _mnOObject.setEnabled(false);
            generatePopupOObject(_bg,_fg,_borderpainted,_mnOObject,true);
            _mnOObject.setEnabled(true);
        }
        @Override
        public void menuCanceled(MenuEvent arg0) {
        }
        @Override
        public void menuDeselected(MenuEvent arg0) {
        }
        @Override
        public void menuSelected(MenuEvent arg0) {
            if(!puoogenerated){
                // NOTE(review): Thread.stop() is deprecated and unsafe; consider
                // an interrupt-based cancellation of the previous worker instead.
                if(t1!=null&&t1.isAlive())t1.stop();
                t1=new Thread(this);
                t1.start();
                puoogenerated=true;
            }
        }
    }
    /**
     * Listener, which is being activated completely, if it has been activated twice in a short period of time
     * @author eProtectioneers
     */
    protected class DoubleClickListener implements ActionListener, Runnable{
        // Clicks registered within the current detection window.
        private int doubleclickcounter;
        // While alive, this thread marks the ~300ms double-click window.
        private Thread threadwait=new Thread(this);
        @Override
        public void run() {
            try {
                // The sleep duration defines the double-click time window.
                Thread.sleep(300);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        @Override
        public void actionPerformed(ActionEvent e) {
            if(doubleClickHappened()){
                doubleClickServiceRoutine();
            }
        }
        /**
         * @return true, if the click was a doubleclick
         */
        public boolean doubleClickHappened(){
            // Clicks on an already selected item never count as double-clicks.
            if(selected){
                doubleclickcounter=0;
                return false;
            }
            if(!threadwait.isAlive()){
                // Window expired - start a new one and reset the counter.
                threadwait=new Thread(this);
                threadwait.start();
                doubleclickcounter=0;
            }
            doubleclickcounter++;
            if(doubleclickcounter>1){doubleclickcounter=0; return true;}
            return false;
        }
    }
    /**
     * MouseListener, which sets the io selected and shows the PuMn
     * @author eProtectioneers
     */
    protected class Item_ObjectMouseListener extends MouseAdapter {
        // Generates and shows the popup menu on first use.
        private PopUpGenerator pug;
        // Colors to restore when the item is deselected.
        private Color _bg,_fg;
        public Item_ObjectMouseListener(){
            this._bg=getBackground();
            this._fg=getForeground();
        }
        @Override
        public void mousePressed(MouseEvent e) {
            // Ctrl+click (or plain click when selectable) toggles selection.
            if((e.isControlDown()||selectable)&&e.getButton()==MouseEvent.BUTTON1){
                if(selected){
                    deselectItem();
                }else{
                    selectItem();
                }
            }
            // Popup trigger must be checked on press AND release (platform dependent).
            popupTriggered(e);
        }
        @Override
        public void mouseReleased(MouseEvent e) {
            popupTriggered(e);
        }
        /**
         * Triggeres the Popup
         * @param e
         */
        public void popupTriggered(MouseEvent e){
            if(e.isPopupTrigger()){
                if(pug==null)pug=new PopUpGenerator(e);
                if(!selected&&!hidepopup){
                    pug.setMouseEvent(e);
                    new Thread(pug).start();
                }else if(selected&&selectedPopup!=null){
                    showMenu(selectedPopup,e);
                }
            }
        }
        /**
         * selects the Item and applies the shared selection colors
         */
        public void selectItem(){
            selected=true;
            // Remember the current colors so deselectItem() can restore them.
            _bg=getBackground();
            _fg=getForeground();
            setBackground(bg_selected);
            setForeground(fg_selected);
        }
        /**
         * deselects the Item and restores the previous colors
         */
        public void deselectItem(){
            selected=false;
            setBackground(_bg);
            setForeground(_fg);
        }
        // Shows the given popup menu at the mouse position.
        public void showMenu(JPopupMenu popumenu,MouseEvent e) {
            popumenu.show(e.getComponent(), e.getX(),e.getY());
        }
    }
    /**
     * Sets the Tooltiptext to this
     * @author eProtectioneers
     */
    protected class AddToolTipText implements Runnable{
        JComponent _c;
        String _s;
        public AddToolTipText(JComponent c, String text) {
            this._c=c;
            this._s=text;
        }
        @Override
        public void run() {
            _c.setToolTipText(_s);
        }
    }
    /**
     * Generator, which generates the PopupMenu once and afterwards only re-shows it
     * @author eProtectioneers
     */
    private class PopUpGenerator implements Runnable{
        private boolean generated=false;
        MouseEvent _me;
        public void setMouseEvent(MouseEvent me){
            this._me=me;
        }
        public PopUpGenerator(MouseEvent me){
            this._me=me;
        }
        @Override
        public void run() {
            Cursor c=getCursor();
            if(!generated){
                // Show a wait cursor while the (potentially slow) menu is built.
                setCursor(new Cursor(Cursor.WAIT_CURSOR));
                generatePopup();
                setCursor(c);
                generated=true;
            }
            // NOTE(review): runs on a plain Thread, i.e. off the EDT - Swing calls
            // here should presumably go through SwingUtilities.invokeLater; confirm.
            new Item_ObjectMouseListener().showMenu(popupMenu,_me);
        }
    }
    /**
     * MouseListener, which sets the Cursor to a HandCursor, if the IO can be selected by clicking
     * (i.e. while the Ctrl key is held over a non-selectable item)
     * @author eProtectioneers
     */
    private static class SelectionListener extends MouseAdapter{
        private static boolean handCursor=false;
        // Key bindings for Ctrl pressed / released.
        private static KeyStroke keyStrokeP = KeyStroke.getKeyStroke(KeyEvent.VK_CONTROL, InputEvent.CTRL_MASK,false);
        private static KeyStroke keyStrokeR = KeyStroke.getKeyStroke(KeyEvent.VK_CONTROL, 0,true);
        // The item currently under the mouse.
        private static Item_Object io;
        @Override
        public void mouseEntered(MouseEvent e) {
            io=(Item_Object)e.getSource();
            if(!io.selectable){
                io.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(keyStrokeP, "hand");
                io.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(keyStrokeR, "default");
                io.getActionMap().put("hand", actionSetHand);
                io.getActionMap().put("default", actionSetDefault);
            }
        }
        // Switches to the hand cursor while Ctrl is held down.
        private static Action actionSetHand = new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent ae) {
                if(!handCursor){
                    handCursor=true;
                    io.setCursor(new Cursor(Cursor.HAND_CURSOR));
                }
            }
        };
        // Restores the default cursor when Ctrl is released.
        private static Action actionSetDefault = new AbstractAction() {
            @Override
            public void actionPerformed(ActionEvent ae) {
                if(handCursor){
                    handCursor=false;
                    io.setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
                }
            }
        };
        @Override
        public void mouseExited(MouseEvent e) {
            if(!io.selectable){
                if(handCursor){
                    handCursor=false;
                    io.setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
                }
                // Remove the bindings so other components are unaffected.
                io.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(keyStrokeP, "none");
                io.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(keyStrokeR, "none");
            }
        }
    }
}
| |
/*
* Copyright 2016 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.world.block.internal;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import gnu.trove.iterator.TObjectShortIterator;
import gnu.trove.map.TObjectShortMap;
import gnu.trove.map.TShortObjectMap;
import gnu.trove.map.hash.TObjectShortHashMap;
import gnu.trove.map.hash.TShortObjectHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.assets.ResourceUrn;
import org.terasology.assets.management.AssetManager;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.block.BlockUri;
import org.terasology.world.block.BlockUriParseException;
import org.terasology.world.block.family.BlockFamily;
import org.terasology.world.block.loader.BlockFamilyDefinition;
import org.terasology.world.block.shapes.BlockShape;
import org.terasology.world.block.tiles.WorldAtlas;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Default {@link BlockManager} implementation. Maintains the registry of block
 * families and blocks, assigning numeric ids either from persisted mappings or
 * (when {@code generateNewIds} is set) from a monotonically increasing counter.
 */
public class BlockManagerImpl extends BlockManager {

    private static final Logger logger = LoggerFactory.getLogger(BlockManagerImpl.class);

    // This is the id we assign to blocks whose mappings are missing. This shouldn't happen, but in case it does
    // we set them to the last id (don't want to use 0 as they would override air)
    private static final short UNKNOWN_ID = (short) 65535;
    private static final int MAX_ID = 65534;
    private static final ResourceUrn CUBE_SHAPE_URN = new ResourceUrn("engine:cube");

    private AssetManager assetManager;
    private BlockBuilder blockBuilder;

    // Guards all mutation of registeredBlockInfo; readers go lock-free via the AtomicReference.
    private ReentrantLock lock = new ReentrantLock();
    private AtomicReference<RegisteredState> registeredBlockInfo = new AtomicReference<>(new RegisteredState());

    private Set<BlockRegistrationListener> listeners = Sets.newLinkedHashSet();

    private boolean generateNewIds;
    private int nextId = 1;

    // Cache this for performance reasons because a lookup by BlockURI happens the first time a block is set when getting the previous block.
    // This causes performance problems eventually down the line when it then uses the ResourceUrn's hashcode to do a lookup into the block map.
    private Block airBlock;

    public BlockManagerImpl(WorldAtlas atlas, AssetManager assetManager) {
        this(atlas, assetManager, true);
    }

    /**
     * @param atlas          world texture atlas used to build block appearances
     * @param assetManager   source of block family definitions and shapes
     * @param generateNewIds whether unknown blocks may be assigned fresh ids
     */
    public BlockManagerImpl(WorldAtlas atlas,
                            AssetManager assetManager,
                            boolean generateNewIds) {
        this.generateNewIds = generateNewIds;
        this.assetManager = assetManager;
        this.blockBuilder = new BlockBuilder(atlas);
    }

    /**
     * Loads and registers the given block families, reusing persisted block ids
     * where available and otherwise assigning fresh ids (or {@code UNKNOWN_ID}).
     *
     * @param registeredBlockFamilies family URIs to load
     * @param knownBlockMappings      persisted block URI -&gt; id mappings
     */
    public void initialise(List<String> registeredBlockFamilies,
                           Map<String, Short> knownBlockMappings) {
        if (knownBlockMappings.size() >= MAX_ID) {
            nextId = UNKNOWN_ID;
        } else if (knownBlockMappings.size() > 0) {
            // NOTE(review): seeds the counter with the mapping count; if persisted ids
            // are non-contiguous this could reissue an id already in use - TODO confirm.
            nextId = knownBlockMappings.size();
        }
        registeredBlockInfo.set(new RegisteredState());

        for (String rawFamilyUri : registeredBlockFamilies) {
            try {
                BlockUri familyUri = new BlockUri(rawFamilyUri);
                Optional<BlockFamily> family = loadFamily(familyUri);
                if (family.isPresent()) {
                    for (Block block : family.get().getBlocks()) {
                        Short id = knownBlockMappings.get(block.getURI().toString());
                        if (id != null) {
                            block.setId(id);
                        } else {
                            logger.error("Missing id for block {} in provided family {}", block.getURI(), family.get().getURI());
                            if (generateNewIds) {
                                block.setId(getNextId());
                            } else {
                                block.setId(UNKNOWN_ID);
                            }
                        }
                    }
                    registerFamily(family.get());
                }
            } catch (BlockUriParseException e) {
                logger.error("Failed to parse block family, skipping", e);
            }
        }
    }

    public void dispose() {
        // No resources to release at present.
    }

    /**
     * @return the next free block id, or {@code UNKNOWN_ID} once the id space is exhausted
     */
    private short getNextId() {
        if (nextId > MAX_ID) {
            return UNKNOWN_ID;
        }
        return (short) nextId++;
    }

    /** Lazily caches the air block to avoid repeated URI lookups (see field comment). */
    private Block getAirBlock() {
        if (airBlock == null) {
            airBlock = getBlock(AIR_ID);
        }
        return airBlock;
    }

    public void subscribe(BlockRegistrationListener listener) {
        this.listeners.add(listener);
    }

    public void unsubscribe(BlockRegistrationListener listener) {
        this.listeners.remove(listener);
    }

    /**
     * Registers a family with externally supplied block ids (e.g. from a server);
     * blocks without a mapping get {@code UNKNOWN_ID}.
     *
     * @param familyUri    family to load and register
     * @param registration block URI -&gt; id mapping for the family's blocks
     */
    public void receiveFamilyRegistration(BlockUri familyUri, Map<String, Integer> registration) {
        Optional<BlockFamily> family = loadFamily(familyUri);
        if (family.isPresent()) {
            lock.lock();
            try {
                for (Block block : family.get().getBlocks()) {
                    Integer id = registration.get(block.getURI().toString());
                    if (id != null) {
                        block.setId((short) id.intValue());
                    } else {
                        logger.error("Missing id for block {} in registered family {}", block.getURI(), familyUri);
                        block.setId(UNKNOWN_ID);
                    }
                }
                registerFamily(family.get());
            } finally {
                lock.unlock();
            }
        }
    }

    /**
     * Publishes a family (and its blocks) into a new registry snapshot and
     * notifies subscribed listeners.
     */
    @VisibleForTesting
    protected void registerFamily(BlockFamily family) {
        Preconditions.checkNotNull(family);
        logger.info("Registered {}", family);
        lock.lock();
        try {
            // Copy-on-write: build a new state so concurrent readers see a consistent snapshot.
            RegisteredState newState = new RegisteredState(registeredBlockInfo.get());
            newState.registeredFamilyByUri.put(family.getURI(), family);
            for (Block block : family.getBlocks()) {
                registerBlock(block, newState);
            }
            registeredBlockInfo.set(newState);
        } finally {
            lock.unlock();
        }
        for (BlockRegistrationListener listener : listeners) {
            listener.onBlockFamilyRegistered(family);
        }
    }

    private void registerBlock(Block block, RegisteredState newState) {
        if (block.getId() != UNKNOWN_ID) {
            logger.info("Registered Block {} with id {}", block, block.getId());
            newState.blocksById.put(block.getId(), block);
            newState.idByUri.put(block.getURI(), block.getId());
        } else {
            // Fixed: previous message had one placeholder but two arguments.
            logger.info("Failed to register block {} - no id", block);
        }
        newState.blocksByUri.put(block.getURI(), block);
    }

    @Override
    public Map<String, Short> getBlockIdMap() {
        Map<String, Short> result = Maps.newHashMapWithExpectedSize(registeredBlockInfo.get().idByUri.size());
        TObjectShortIterator<BlockUri> iterator = registeredBlockInfo.get().idByUri.iterator();
        while (iterator.hasNext()) {
            iterator.advance();
            result.put(iterator.key().toString(), iterator.value());
        }
        return result;
    }

    @Override
    public BlockFamily getBlockFamily(String uri) {
        if (!uri.contains(":")) {
            // No module prefix - attempt to resolve the simple name to a unique definition.
            Set<ResourceUrn> resourceUrns = assetManager.resolve(uri, BlockFamilyDefinition.class);
            if (resourceUrns.size() == 1) {
                return getBlockFamily(new BlockUri(resourceUrns.iterator().next()));
            } else {
                if (resourceUrns.size() > 0) {
                    logger.error("Failed to resolve block family '{}', too many options - {}", uri, resourceUrns);
                } else {
                    logger.error("Failed to resolve block family '{}'", uri);
                }
            }
        } else {
            try {
                BlockUri blockUri = new BlockUri(uri);
                return getBlockFamily(blockUri);
            } catch (BlockUriParseException e) {
                logger.error("Failed to resolve block family '{}', invalid uri", uri);
            }
        }
        // Fall back to the air family on any resolution failure.
        return getBlockFamily(AIR_ID);
    }

    @Override
    public BlockFamily getBlockFamily(BlockUri uri) {
        // A cube shape is the canonical shapeless form - normalize before lookup.
        if (uri.getShapeUrn().isPresent() && uri.getShapeUrn().get().equals(CUBE_SHAPE_URN)) {
            return getBlockFamily(uri.getShapelessUri());
        }
        BlockFamily family = registeredBlockInfo.get().registeredFamilyByUri.get(uri);
        if (family == null && generateNewIds) {
            Optional<BlockFamily> newFamily = loadFamily(uri);
            if (newFamily.isPresent()) {
                lock.lock();
                try {
                    for (Block block : newFamily.get().getBlocks()) {
                        block.setId(getNextId());
                    }
                    registerFamily(newFamily.get());
                } finally {
                    lock.unlock();
                }
                return newFamily.get();
            }
        }
        return family;
    }

    /**
     * Loads a family definition, instantiating freeform families with the
     * requested (or default cube) shape.
     */
    private Optional<BlockFamily> loadFamily(BlockUri uri) {
        Optional<BlockFamilyDefinition> familyDef = assetManager.getAsset(uri.getBlockFamilyDefinitionUrn(), BlockFamilyDefinition.class);
        if (familyDef.isPresent() && familyDef.get().isLoadable()) {
            if (familyDef.get().isFreeform()) {
                ResourceUrn shapeUrn;
                if (uri.getShapeUrn().isPresent()) {
                    shapeUrn = uri.getShapeUrn().get();
                } else {
                    shapeUrn = CUBE_SHAPE_URN;
                }
                Optional<BlockShape> shape = assetManager.getAsset(shapeUrn, BlockShape.class);
                if (shape.isPresent()) {
                    return Optional.of(familyDef.get().createFamily(shape.get(), blockBuilder));
                }
            } else if (!familyDef.get().isFreeform()) {
                return Optional.of(familyDef.get().createFamily(blockBuilder));
            }
        } else {
            logger.error("Family not available: {}", uri);
        }
        return Optional.empty();
    }

    @Override
    public Block getBlock(String uri) {
        try {
            return getBlock(new BlockUri(uri));
        } catch (BlockUriParseException e) {
            logger.error("Attempt to fetch block with illegal uri '{}'", uri);
            return getAirBlock();
        }
    }

    @Override
    public Block getBlock(BlockUri uri) {
        if (uri.getShapeUrn().isPresent() && uri.getShapeUrn().get().equals(CUBE_SHAPE_URN)) {
            return getBlock(uri.getShapelessUri());
        }
        Block block = registeredBlockInfo.get().blocksByUri.get(uri);
        if (block == null) {
            // Check if partially registered by getting the block family
            BlockFamily family = getBlockFamily(uri.getFamilyUri());
            if (family != null) {
                block = family.getBlockFor(uri);
            }
            if (block == null) {
                return getAirBlock();
            }
        }
        return block;
    }

    @Override
    public Block getBlock(short id) {
        Block result = registeredBlockInfo.get().blocksById.get(id);
        if (result == null) {
            return getAirBlock();
        }
        return result;
    }

    @Override
    public Collection<BlockUri> listRegisteredBlockUris() {
        return Collections.unmodifiableCollection(registeredBlockInfo.get().registeredFamilyByUri.keySet());
    }

    @Override
    public Collection<BlockFamily> listRegisteredBlockFamilies() {
        return Collections.unmodifiableCollection(registeredBlockInfo.get().registeredFamilyByUri.values());
    }

    @Override
    public int getBlockFamilyCount() {
        return registeredBlockInfo.get().registeredFamilyByUri.size();
    }

    @Override
    public Collection<Block> listRegisteredBlocks() {
        return ImmutableList.copyOf(registeredBlockInfo.get().blocksById.valueCollection());
    }

    /**
     * Immutable-after-construction snapshot of the block registry, swapped
     * atomically on every registration.
     */
    private static class RegisteredState {
        private final Map<BlockUri, BlockFamily> registeredFamilyByUri;

        /* Blocks */
        private final Map<BlockUri, Block> blocksByUri;
        private final TShortObjectMap<Block> blocksById;
        private final TObjectShortMap<BlockUri> idByUri;

        RegisteredState() {
            this.registeredFamilyByUri = Maps.newHashMap();
            this.blocksByUri = Maps.newHashMap();
            this.blocksById = new TShortObjectHashMap<>();
            this.idByUri = new TObjectShortHashMap<>();
        }

        RegisteredState(RegisteredState oldState) {
            this.registeredFamilyByUri = Maps.newHashMap(oldState.registeredFamilyByUri);
            this.blocksByUri = Maps.newHashMap(oldState.blocksByUri);
            this.blocksById = new TShortObjectHashMap<>(oldState.blocksById);
            this.idByUri = new TObjectShortHashMap<>(oldState.idByUri);
        }
    }
}
| |
package com.afollestad.materialdialogs.prefs;
import android.annotation.TargetApi;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.preference.ListPreference;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ListView;
import com.afollestad.materialdialogs.DialogAction;
import com.afollestad.materialdialogs.MaterialDialog;
import com.afollestad.materialdialogs.commons.R;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
 * A {@link ListPreference} that shows its single-choice entry list in a
 * {@link MaterialDialog} instead of the platform AlertDialog, while reusing
 * the superclass's value persistence and state save/restore machinery.
 *
 * @author Marc Holder Kluver (marchold), Aidan Follestad (afollestad)
 */
public class MaterialListPreference extends ListPreference {

    private Context context;
    // The dialog we manage ourselves, replacing the one DialogPreference would build.
    private MaterialDialog mDialog;

    public MaterialListPreference(Context context) {
        super(context);
        init(context, null);
    }

    public MaterialListPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public MaterialListPreference(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs);
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public MaterialListPreference(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        init(context, attrs);
    }

    // Shared constructor body: remember the context and apply the library's
    // preference layout. On very old platforms the widget layout is cleared.
    private void init(Context context, AttributeSet attrs) {
        this.context = context;
        PrefUtil.setLayoutResource(this, attrs);
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1)
            setWidgetLayoutResource(0);
    }

    @Override
    public void setEntries(CharSequence[] entries) {
        super.setEntries(entries);
        // Keep an already-built dialog in sync with the new entry list.
        if (mDialog != null)
            mDialog.setItems(entries);
    }

    @Override
    public Dialog getDialog() {
        // Expose our MaterialDialog in place of DialogPreference's internal dialog.
        return mDialog;
    }

    /** Returns the list view of the currently built dialog, or null if none. */
    public ListView getListView() {
        if (getDialog() == null) return null;
        return ((MaterialDialog) getDialog()).getListView();
    }

    @Override
    protected void showDialog(Bundle state) {
        if (getEntries() == null || getEntryValues() == null) {
            throw new IllegalStateException(
                    "ListPreference requires an entries array and an entryValues array.");
        }
        // Index of the currently persisted value, used to pre-check the list item.
        int preselect = findIndexOfValue(getValue());
        MaterialDialog.Builder builder = new MaterialDialog.Builder(context)
                .title(getDialogTitle())
                .icon(getDialogIcon())
                .dismissListener(this)
                .onAny(new MaterialDialog.SingleButtonCallback() {
                    @Override
                    public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                        // Forward MaterialDialog button presses to the DialogPreference
                        // onClick so the superclass records which button closed the dialog.
                        switch (which) {
                            default:
                                MaterialListPreference.this.onClick(dialog, DialogInterface.BUTTON_POSITIVE);
                                break;
                            case NEUTRAL:
                                MaterialListPreference.this.onClick(dialog, DialogInterface.BUTTON_NEUTRAL);
                                break;
                            case NEGATIVE:
                                MaterialListPreference.this.onClick(dialog, DialogInterface.BUTTON_NEGATIVE);
                                break;
                        }
                    }
                })
                .negativeText(getNegativeButtonText())
                .items(getEntries())
                .autoDismiss(true) // immediately close the dialog after selection
                .itemsCallbackSingleChoice(preselect, new MaterialDialog.ListCallbackSingleChoice() {
                    @Override
                    public boolean onSelection(MaterialDialog dialog, View itemView, int which, CharSequence text) {
                        // Treat a list selection like pressing the positive button.
                        onClick(null, DialogInterface.BUTTON_POSITIVE);
                        if (which >= 0 && getEntryValues() != null) {
                            try {
                                // Write the chosen index into ListPreference's private
                                // mClickedDialogEntryIndex so the superclass persists
                                // the matching entry value when the dialog closes.
                                Field clickedIndex = ListPreference.class.getDeclaredField("mClickedDialogEntryIndex");
                                clickedIndex.setAccessible(true);
                                clickedIndex.set(MaterialListPreference.this, which);
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                        }
                        return true;
                    }
                });
        // An optional custom dialog view takes precedence over the plain message.
        final View contentView = onCreateDialogView();
        if (contentView != null) {
            onBindDialogView(contentView);
            builder.customView(contentView, false);
        } else {
            builder.content(getDialogMessage());
        }
        // Make sure the dialog is dismissed if the hosting activity is destroyed.
        PrefUtil.registerOnActivityDestroyListener(this, this);
        mDialog = builder.build();
        if (state != null)
            mDialog.onRestoreInstanceState(state);
        // Preset the recorded button to "negative" so that dismissing the dialog
        // without an explicit choice is treated as a cancel
        // (NOTE(review): mirrors DialogPreference's default-cancel behavior — confirm).
        onClick(mDialog, DialogInterface.BUTTON_NEGATIVE);
        mDialog.show();
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        // Stop listening for activity destruction once the dialog is gone.
        PrefUtil.unregisterOnActivityDestroyListener(this, this);
    }

    @Override
    public void onActivityDestroy() {
        super.onActivityDestroy();
        // Avoid a leaked window if the activity dies while the dialog is up.
        if (mDialog != null && mDialog.isShowing())
            mDialog.dismiss();
    }

    @Override
    protected Parcelable onSaveInstanceState() {
        final Parcelable superState = super.onSaveInstanceState();
        Dialog dialog = getDialog();
        if (dialog == null || !dialog.isShowing()) {
            // Nothing extra to save when no dialog is showing.
            return superState;
        }
        // Capture the dialog's state so it can be re-shown after e.g. rotation.
        final SavedState myState = new SavedState(superState);
        myState.isDialogShowing = true;
        myState.dialogBundle = dialog.onSaveInstanceState();
        return myState;
    }

    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if (state == null || !state.getClass().equals(SavedState.class)) {
            // Didn't save state for us in onSaveInstanceState
            super.onRestoreInstanceState(state);
            return;
        }
        SavedState myState = (SavedState) state;
        super.onRestoreInstanceState(myState.getSuperState());
        if (myState.isDialogShowing) {
            // Re-show the dialog with its previously saved state.
            showDialog(myState.dialogBundle);
        }
    }

    // From DialogPreference
    // Parcelable wrapper carrying whether the dialog was showing plus its state bundle.
    private static class SavedState extends BaseSavedState {
        boolean isDialogShowing;
        Bundle dialogBundle;

        public SavedState(Parcel source) {
            super(source);
            isDialogShowing = source.readInt() == 1;
            dialogBundle = source.readBundle();
        }

        @Override
        public void writeToParcel(@NonNull Parcel dest, int flags) {
            super.writeToParcel(dest, flags);
            dest.writeInt(isDialogShowing ? 1 : 0);
            dest.writeBundle(dialogBundle);
        }

        public SavedState(Parcelable superState) {
            super(superState);
        }

        public static final Parcelable.Creator<SavedState> CREATOR =
                new Parcelable.Creator<SavedState>() {
                    public SavedState createFromParcel(Parcel in) {
                        return new SavedState(in);
                    }

                    public SavedState[] newArray(int size) {
                        return new SavedState[size];
                    }
                };
    }
}
| |
package net.minecraft.inventory;
import java.util.Iterator;
import java.util.Map;
import net.minecraft.block.BlockAnvil;
import net.minecraft.block.state.IBlockState;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.player.Player;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.util.BlockPos;
import net.minecraft.world.World;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Container backing the anvil GUI: two input slots (item to work on, repair
 * material / second item / enchanted book) and one output slot. Recomputes the
 * result and its experience-level cost whenever the inputs or the typed name
 * change. Decompiled code — variable names are compiler-generated.
 */
public class ContainerRepair extends Container
{
    private static final Logger logger = LogManager.getLogger();

    /** Output slot: holds the merged and/or renamed result item. */
    private IInventory outputSlot = new InventoryCraftResult();

    /**
     * The two slots where you put the items you want to merge and/or rename.
     */
    private IInventory inputSlots = new InventoryBasic("Repair", true, 2)
    {
        public void markDirty()
        {
            super.markDirty();
            // Recompute the anvil result whenever either input slot changes.
            ContainerRepair.this.onCraftMatrixChanged(this);
        }
    };

    private World theWorld;
    private BlockPos selfPosition;

    /** The maximum cost of repairing/renaming in the anvil. */
    public int maximumCost;

    /** determined by damage of input item and stackSize of repair materials */
    private int materialCost;

    // Name currently typed into the anvil's text field; null/blank clears the name.
    private String repairedItemName;

    /** The player that has this container open. */
    private final Player thePlayer;

    public ContainerRepair(InventoryPlayer playerInventory, final World worldIn, final BlockPos blockPosIn, Player player)
    {
        this.selfPosition = blockPosIn;
        this.theWorld = worldIn;
        this.thePlayer = player;
        // Slots 0 and 1: the two anvil inputs.
        this.addSlotToContainer(new Slot(this.inputSlots, 0, 27, 47));
        this.addSlotToContainer(new Slot(this.inputSlots, 1, 76, 47));
        // Slot 2: the output; items can only be taken out, never placed in.
        this.addSlotToContainer(new Slot(this.outputSlot, 2, 134, 47)
        {
            public boolean isItemValid(ItemStack stack)
            {
                // Nothing may be inserted into the result slot.
                return false;
            }

            public boolean canTakeStack(Player playerIn)
            {
                // Taking requires enough experience levels (creative bypasses),
                // a positive cost, and an actual result present.
                return (playerIn.capabilities.isCreativeMode || playerIn.experienceLevel >= ContainerRepair.this.maximumCost) && ContainerRepair.this.maximumCost > 0 && this.getHasStack();
            }

            public void onPickupFromSlot(Player playerIn, ItemStack stack)
            {
                // Charge the experience cost (creative players are exempt).
                if (!playerIn.capabilities.isCreativeMode)
                {
                    playerIn.addExperienceLevel(-ContainerRepair.this.maximumCost);
                }

                // Consume the worked-on item entirely.
                ContainerRepair.this.inputSlots.setInventorySlotContents(0, (ItemStack)null);

                if (ContainerRepair.this.materialCost > 0)
                {
                    // Only part of the material stack was used; shrink it.
                    ItemStack itemstack = ContainerRepair.this.inputSlots.getStackInSlot(1);

                    if (itemstack != null && itemstack.stackSize > ContainerRepair.this.materialCost)
                    {
                        itemstack.stackSize -= ContainerRepair.this.materialCost;
                        ContainerRepair.this.inputSlots.setInventorySlotContents(1, itemstack);
                    }
                    else
                    {
                        ContainerRepair.this.inputSlots.setInventorySlotContents(1, (ItemStack)null);
                    }
                }
                else
                {
                    // Second input was fully consumed (merge/book case).
                    ContainerRepair.this.inputSlots.setInventorySlotContents(1, (ItemStack)null);
                }

                ContainerRepair.this.maximumCost = 0;
                IBlockState iblockstate = worldIn.getBlockState(blockPosIn);

                // Server-side: 12% chance the anvil takes damage on use
                // (creative use never damages it).
                if (!playerIn.capabilities.isCreativeMode && !worldIn.isRemote && iblockstate.getBlock() == Blocks.anvil && playerIn.getRNG().nextFloat() < 0.12F)
                {
                    int l = ((Integer)iblockstate.getValue(BlockAnvil.DAMAGE)).intValue();
                    ++l;

                    if (l > 2)
                    {
                        // Third damage stage destroys the anvil.
                        worldIn.setBlockToAir(blockPosIn);
                        worldIn.playAuxSFX(1020, blockPosIn, 0);
                    }
                    else
                    {
                        worldIn.setBlockState(blockPosIn, iblockstate.withProperty(BlockAnvil.DAMAGE, Integer.valueOf(l)), 2);
                        worldIn.playAuxSFX(1021, blockPosIn, 0);
                    }
                }
                else if (!worldIn.isRemote)
                {
                    // Normal anvil-use sound.
                    worldIn.playAuxSFX(1021, blockPosIn, 0);
                }
            }
        });

        // Player main inventory (3 rows x 9 columns).
        for (int i = 0; i < 3; ++i)
        {
            for (int j = 0; j < 9; ++j)
            {
                this.addSlotToContainer(new Slot(playerInventory, j + i * 9 + 9, 8 + j * 18, 84 + i * 18));
            }
        }

        // Player hotbar (9 slots).
        for (int k = 0; k < 9; ++k)
        {
            this.addSlotToContainer(new Slot(playerInventory, k, 8 + k * 18, 142));
        }
    }

    /**
     * Callback for when the crafting matrix is changed.
     */
    public void onCraftMatrixChanged(IInventory inventoryIn)
    {
        super.onCraftMatrixChanged(inventoryIn);

        if (inventoryIn == this.inputSlots)
        {
            this.updateRepairOutput();
        }
    }

    /**
     * called when the Anvil Input Slot changes, calculates the new result and puts it in the output slot
     *
     * Cost bookkeeping (decompiled locals):
     *   i2 = prior repair cost carried by both inputs,
     *   l1 = cost of the work performed this time (repair/enchant/rename units),
     *   j2 = 1 if only a rename was performed, else 0,
     *   maximumCost = i2 + l1.
     */
    public void updateRepairOutput()
    {
        int i = 0;
        int j = 1;
        int k = 1;
        int l = 1;
        int i1 = 2;
        int j1 = 1;
        int k1 = 1;
        ItemStack itemstack = this.inputSlots.getStackInSlot(0);
        this.maximumCost = 1;
        int l1 = 0;
        int i2 = 0;
        int j2 = 0;

        if (itemstack == null)
        {
            // No input item: clear the output and the cost.
            this.outputSlot.setInventorySlotContents(0, (ItemStack)null);
            this.maximumCost = 0;
        }
        else
        {
            // Work on a copy so the input stack itself is never mutated.
            ItemStack itemstack1 = itemstack.copy();
            ItemStack itemstack2 = this.inputSlots.getStackInSlot(1);
            Map<Integer, Integer> map = EnchantmentHelper.getEnchantments(itemstack1);
            boolean flag = false;
            // Accumulated prior-work penalty from both inputs.
            i2 = i2 + itemstack.getRepairCost() + (itemstack2 == null ? 0 : itemstack2.getRepairCost());
            this.materialCost = 0;

            if (itemstack2 != null)
            {
                // flag == true: second input is an enchanted book with stored enchantments.
                flag = itemstack2.getItem() == Items.enchanted_book && Items.enchanted_book.getEnchantments(itemstack2).tagCount() > 0;

                if (itemstack1.isItemStackDamageable() && itemstack1.getItem().getIsRepairable(itemstack, itemstack2))
                {
                    // Repair with raw material: each unit restores up to 1/4 max durability.
                    int j4 = Math.min(itemstack1.getItemDamage(), itemstack1.getMaxDamage() / 4);

                    if (j4 <= 0)
                    {
                        // Nothing to repair: no valid result.
                        this.outputSlot.setInventorySlotContents(0, (ItemStack)null);
                        this.maximumCost = 0;
                        return;
                    }

                    int l4;

                    // Consume material units until fully repaired or stack exhausted.
                    for (l4 = 0; j4 > 0 && l4 < itemstack2.stackSize; ++l4)
                    {
                        int j5 = itemstack1.getItemDamage() - j4;
                        itemstack1.setItemDamage(j5);
                        ++l1;
                        j4 = Math.min(itemstack1.getItemDamage(), itemstack1.getMaxDamage() / 4);
                    }

                    this.materialCost = l4;
                }
                else
                {
                    // Merge path: requires a book, or the same damageable item type.
                    if (!flag && (itemstack1.getItem() != itemstack2.getItem() || !itemstack1.isItemStackDamageable()))
                    {
                        this.outputSlot.setInventorySlotContents(0, (ItemStack)null);
                        this.maximumCost = 0;
                        return;
                    }

                    if (itemstack1.isItemStackDamageable() && !flag)
                    {
                        // Combine durabilities of both items plus a 12% bonus.
                        int k2 = itemstack.getMaxDamage() - itemstack.getItemDamage();
                        int l2 = itemstack2.getMaxDamage() - itemstack2.getItemDamage();
                        int i3 = l2 + itemstack1.getMaxDamage() * 12 / 100;
                        int j3 = k2 + i3;
                        int k3 = itemstack1.getMaxDamage() - j3;

                        if (k3 < 0)
                        {
                            k3 = 0;
                        }

                        if (k3 < itemstack1.getMetadata())
                        {
                            itemstack1.setItemDamage(k3);
                            l1 += 2;
                        }
                    }

                    // Transfer/merge enchantments from the second input.
                    Map<Integer, Integer> map1 = EnchantmentHelper.getEnchantments(itemstack2);
                    Iterator iterator1 = map1.keySet().iterator();

                    while (iterator1.hasNext())
                    {
                        int i5 = ((Integer)iterator1.next()).intValue();
                        Enchantment enchantment = Enchantment.getEnchantmentById(i5);

                        if (enchantment != null)
                        {
                            int k5 = map.containsKey(Integer.valueOf(i5)) ? ((Integer)map.get(Integer.valueOf(i5))).intValue() : 0;
                            int l3 = ((Integer)map1.get(Integer.valueOf(i5))).intValue();
                            int i6;

                            // Equal levels combine into level+1; otherwise the higher wins.
                            if (k5 == l3)
                            {
                                ++l3;
                                i6 = l3;
                            }
                            else
                            {
                                i6 = Math.max(l3, k5);
                            }

                            l3 = i6;
                            boolean flag1 = enchantment.canApply(itemstack);

                            // Creative players and book targets accept any enchantment.
                            if (this.thePlayer.capabilities.isCreativeMode || itemstack.getItem() == Items.enchanted_book)
                            {
                                flag1 = true;
                            }

                            // Incompatible existing enchantments block the transfer
                            // but still add to the cost.
                            Iterator iterator = map.keySet().iterator();

                            while (iterator.hasNext())
                            {
                                int i4 = ((Integer)iterator.next()).intValue();

                                if (i4 != i5 && !enchantment.canApplyTogether(Enchantment.getEnchantmentById(i4)))
                                {
                                    flag1 = false;
                                    ++l1;
                                }
                            }

                            if (flag1)
                            {
                                if (l3 > enchantment.getMaxLevel())
                                {
                                    l3 = enchantment.getMaxLevel();
                                }

                                map.put(Integer.valueOf(i5), Integer.valueOf(l3));
                                // Per-level cost multiplier derived from enchantment rarity.
                                int l5 = 0;

                                switch (enchantment.getWeight())
                                {
                                    case 1:
                                        l5 = 8;
                                        break;

                                    case 2:
                                        l5 = 4;
                                        // note: intentional fall-through to the default break

                                    case 3:
                                    case 4:
                                    case 6:
                                    case 7:
                                    case 8:
                                    case 9:
                                    default:
                                        break;

                                    case 5:
                                        l5 = 2;
                                        break;

                                    case 10:
                                        l5 = 1;
                                }

                                if (flag)
                                {
                                    // Book enchantments cost half (minimum 1).
                                    l5 = Math.max(1, l5 / 2);
                                }

                                l1 += l5 * l3;
                            }
                        }
                    }
                }
            }

            // Renaming: blank clears a custom name, a new name adds 1 to the cost.
            if (StringUtils.isBlank(this.repairedItemName))
            {
                if (itemstack.hasDisplayName())
                {
                    j2 = 1;
                    l1 += j2;
                    itemstack1.clearCustomName();
                }
            }
            else if (!this.repairedItemName.equals(itemstack.getDisplayName()))
            {
                j2 = 1;
                l1 += j2;
                itemstack1.setStackDisplayName(this.repairedItemName);
            }

            this.maximumCost = i2 + l1;

            if (l1 <= 0)
            {
                // No actual work performed: no result.
                itemstack1 = null;
            }

            // A pure rename is capped at 39 levels so it always stays affordable.
            if (j2 == l1 && j2 > 0 && this.maximumCost >= 40)
            {
                this.maximumCost = 39;
            }

            // "Too expensive": survival players cannot pay 40+ levels.
            if (this.maximumCost >= 40 && !this.thePlayer.capabilities.isCreativeMode)
            {
                itemstack1 = null;
            }

            if (itemstack1 != null)
            {
                // Double the prior-work penalty for the next use of this item.
                int k4 = itemstack1.getRepairCost();

                if (itemstack2 != null && k4 < itemstack2.getRepairCost())
                {
                    k4 = itemstack2.getRepairCost();
                }

                k4 = k4 * 2 + 1;
                itemstack1.setRepairCost(k4);
                EnchantmentHelper.setEnchantments(map, itemstack1);
            }

            this.outputSlot.setInventorySlotContents(0, itemstack1);
            this.detectAndSendChanges();
        }
    }

    // Push the current cost to a newly attached GUI listener.
    public void onCraftGuiOpened(ICrafting listener)
    {
        super.onCraftGuiOpened(listener);
        listener.sendProgressBarUpdate(this, 0, this.maximumCost);
    }

    /**
     * Called when the container is closed.
     */
    public void onContainerClosed(Player playerIn)
    {
        super.onContainerClosed(playerIn);

        // Server-side: return any items left in the input slots to the player.
        if (!this.theWorld.isRemote)
        {
            for (int i = 0; i < this.inputSlots.getSizeInventory(); ++i)
            {
                ItemStack itemstack = this.inputSlots.removeStackFromSlot(i);

                if (itemstack != null)
                {
                    playerIn.dropPlayerItemWithRandomChoice(itemstack, false);
                }
            }
        }
    }

    // Usable only while the anvil block still exists and the player is within 8 blocks.
    public boolean canInteractWith(Player playerIn)
    {
        return this.theWorld.getBlockState(this.selfPosition).getBlock() != Blocks.anvil ? false : playerIn.getDistanceSq((double)this.selfPosition.getX() + 0.5D, (double)this.selfPosition.getY() + 0.5D, (double)this.selfPosition.getZ() + 0.5D) <= 64.0D;
    }

    /**
     * Take a stack from the specified inventory slot.
     * Shift-click routing: output (2) -> player inventory; player inventory
     * (3..38) -> anvil inputs (0..1); inputs -> player inventory.
     */
    public ItemStack transferStackInSlot(Player playerIn, int index)
    {
        ItemStack itemstack = null;
        Slot slot = (Slot)this.inventorySlots.get(index);

        if (slot != null && slot.getHasStack())
        {
            ItemStack itemstack1 = slot.getStack();
            itemstack = itemstack1.copy();

            if (index == 2)
            {
                // From the output slot into the player inventory (reverse fill order).
                if (!this.mergeItemStack(itemstack1, 3, 39, true))
                {
                    return null;
                }

                slot.onSlotChange(itemstack1, itemstack);
            }
            else if (index != 0 && index != 1)
            {
                // From the player inventory into the anvil inputs.
                if (index >= 3 && index < 39 && !this.mergeItemStack(itemstack1, 0, 2, false))
                {
                    return null;
                }
            }
            else if (!this.mergeItemStack(itemstack1, 3, 39, false))
            {
                // From an input slot back into the player inventory.
                return null;
            }

            if (itemstack1.stackSize == 0)
            {
                slot.putStack((ItemStack)null);
            }
            else
            {
                slot.onSlotChanged();
            }

            if (itemstack1.stackSize == itemstack.stackSize)
            {
                // Nothing moved.
                return null;
            }

            slot.onPickupFromSlot(playerIn, itemstack1);
        }

        return itemstack;
    }

    /**
     * used by the Anvil GUI to update the Item Name being typed by the player
     */
    public void updateItemName(String newName)
    {
        this.repairedItemName = newName;

        // Apply the name to any result already sitting in the output slot.
        if (this.getSlot(2).getHasStack())
        {
            ItemStack itemstack = this.getSlot(2).getStack();

            if (StringUtils.isBlank(newName))
            {
                itemstack.clearCustomName();
            }
            else
            {
                itemstack.setStackDisplayName(this.repairedItemName);
            }
        }

        // Recompute the result and cost with the new name.
        this.updateRepairOutput();
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package org.jetbrains.java.decompiler.modules.decompiler.stats;
import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.code.InstructionSequence;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer;
import org.jetbrains.java.decompiler.main.collectors.CounterContainer;
import org.jetbrains.java.decompiler.modules.decompiler.StatEdge;
import org.jetbrains.java.decompiler.modules.decompiler.StatEdge.EdgeType;
import org.jetbrains.java.decompiler.modules.decompiler.StrongConnectivityHelper;
import org.jetbrains.java.decompiler.modules.decompiler.exps.Exprent;
import org.jetbrains.java.decompiler.modules.decompiler.stats.DoStatement.LoopType;
import org.jetbrains.java.decompiler.struct.match.IMatchable;
import org.jetbrains.java.decompiler.struct.match.MatchEngine;
import org.jetbrains.java.decompiler.struct.match.MatchNode;
import org.jetbrains.java.decompiler.struct.match.MatchNode.RuleValue;
import org.jetbrains.java.decompiler.util.TextBuffer;
import org.jetbrains.java.decompiler.util.VBStyleCollection;
import java.util.*;
import java.util.Map.Entry;
public class Statement implements IMatchable {
public static final int DIRECTION_BACKWARD = 0;
public static final int DIRECTION_FORWARD = 1;
public static final int TYPE_GENERAL = 0;
public static final int TYPE_IF = 2;
public static final int TYPE_DO = 5;
public static final int TYPE_SWITCH = 6;
public static final int TYPE_TRY_CATCH = 7;
public static final int TYPE_BASIC_BLOCK = 8;
//public static final int TYPE_FINALLY = 9;
public static final int TYPE_SYNCHRONIZED = 10;
public static final int TYPE_PLACEHOLDER = 11;
public static final int TYPE_CATCH_ALL = 12;
public static final int TYPE_ROOT = 13;
public static final int TYPE_DUMMY_EXIT = 14;
public static final int TYPE_SEQUENCE = 15;
public static final int LASTBASICTYPE_IF = 0;
public static final int LASTBASICTYPE_SWITCH = 1;
public static final int LASTBASICTYPE_GENERAL = 2;
// *****************************************************************************
// public fields
// *****************************************************************************
public int type;
public Integer id;
// *****************************************************************************
// private fields
// *****************************************************************************
private final Map<EdgeType, List<StatEdge>> mapSuccEdges = new HashMap<>();
private final Map<EdgeType, List<StatEdge>> mapPredEdges = new HashMap<>();
private final Map<EdgeType, List<Statement>> mapSuccStates = new HashMap<>();
private final Map<EdgeType, List<Statement>> mapPredStates = new HashMap<>();
// statement as graph
protected final VBStyleCollection<Statement, Integer> stats = new VBStyleCollection<>();
protected Statement parent;
protected Statement first;
protected List<Exprent> exprents;
protected final HashSet<StatEdge> labelEdges = new HashSet<>();
protected final List<Exprent> varDefinitions = new ArrayList<>();
// copied statement, s. deobfuscating of irreducible CFGs
private boolean copied = false;
// relevant for the first stage of processing only
// set to null after initializing of the statement structure
protected Statement post;
protected int lastBasicType = LASTBASICTYPE_GENERAL;
protected boolean isMonitorEnter;
protected boolean containsMonitorExit;
protected HashSet<Statement> continueSet = new HashSet<>();
// *****************************************************************************
// initializers
// *****************************************************************************
{
    // Assign every statement a unique id from the global statement counter.
    id = DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.STATEMENT_COUNTER);
}
// Creates a statement of the given TYPE_* kind; the id is set by the instance initializer.
Statement(int type) {
    this.type = type;
}
// *****************************************************************************
// public methods
// *****************************************************************************
/**
 * Drops data that was only needed during the initial structuring phase:
 * the post statement, continue sets, copy/monitor flags, and all
 * exception-edge bookkeeping in the four edge/state maps.
 */
public void clearTempInformation() {
    post = null;
    continueSet = null;
    copied = false;
    // FIXME: used in FlattenStatementsHelper.flattenStatement()! check and remove
    //lastBasicType = LASTBASICTYPE_GENERAL;
    isMonitorEnter = false;
    containsMonitorExit = false;
    processMap(mapSuccEdges);
    processMap(mapPredEdges);
    processMap(mapSuccStates);
    processMap(mapPredStates);
}
// Strips exception entries from an edge/state map and collapses ALL down to
// a copy of DIRECT_ALL (or removes it when there are no direct entries).
private static <T> void processMap(Map<EdgeType, List<T>> map) {
    map.remove(EdgeType.EXCEPTION);
    List<T> direct = map.get(EdgeType.DIRECT_ALL);
    if (direct == null) {
        map.remove(EdgeType.ALL);
    }
    else {
        map.put(EdgeType.ALL, new ArrayList<>(direct));
    }
}
/**
 * Replaces the set of child statements contained in {@code stat} with
 * {@code stat} itself, rewiring break/continue/exception edges so the
 * surrounding graph stays consistent.
 */
public void collapseNodesToStatement(Statement stat) {
    Statement head = stat.getFirst();
    Statement post = stat.getPost();
    VBStyleCollection<Statement, Integer> setNodes = stat.getStats();
    // post edges: edges from inside the collapsed set to the post statement become breaks
    if (post != null) {
        for (StatEdge edge : post.getEdges(EdgeType.DIRECT_ALL, DIRECTION_BACKWARD)) {
            if (stat.containsStatementStrict(edge.getSource())) {
                edge.getSource().changeEdgeType(DIRECTION_FORWARD, edge, EdgeType.BREAK);
                stat.addLabeledEdge(edge);
            }
        }
    }
    // regular head edges: back-edges from inside the set become continues;
    // all predecessors of the head are redirected to the new statement
    for (StatEdge prededge : head.getAllPredecessorEdges()) {
        if (prededge.getType() != EdgeType.EXCEPTION &&
            stat.containsStatementStrict(prededge.getSource())) {
            prededge.getSource().changeEdgeType(DIRECTION_FORWARD, prededge, EdgeType.CONTINUE);
            stat.addLabeledEdge(prededge);
        }
        head.removePredecessor(prededge);
        prededge.getSource().changeEdgeNode(DIRECTION_FORWARD, prededge, stat);
        stat.addPredecessor(prededge);
    }
    // if the collapsed set contained our first statement, the new node becomes first
    if (setNodes.containsKey(first.id)) {
        first = stat;
    }
    // exception edges: handlers shared by ALL collapsed nodes are lifted to the new node
    Set<Statement> setHandlers = new HashSet<>(head.getNeighbours(EdgeType.EXCEPTION, DIRECTION_FORWARD));
    for (Statement node : setNodes) {
        setHandlers.retainAll(node.getNeighbours(EdgeType.EXCEPTION, DIRECTION_FORWARD));
    }
    if (!setHandlers.isEmpty()) {
        for (StatEdge edge : head.getEdges(EdgeType.EXCEPTION, DIRECTION_FORWARD)) {
            Statement handler = edge.getDestination();
            if (setHandlers.contains(handler)) {
                if (!setNodes.containsKey(handler.id)) {
                    // handler lives outside the collapsed set: attach it to the new node
                    stat.addSuccessor(new StatEdge(stat, handler, edge.getExceptions()));
                }
            }
        }
        // drop the now-redundant per-node exception edges to the shared handlers
        for (Statement node : setNodes) {
            for (StatEdge edge : node.getEdges(EdgeType.EXCEPTION, DIRECTION_FORWARD)) {
                if (setHandlers.contains(edge.getDestination())) {
                    node.removeSuccessor(edge);
                }
            }
        }
    }
    if (post != null &&
        !stat.getNeighbours(EdgeType.EXCEPTION, DIRECTION_FORWARD).contains(post)) { // TODO: second condition redundant?
        stat.addSuccessor(new StatEdge(EdgeType.REGULAR, stat, post));
    }
    // adjust statement collection
    for (Statement st : setNodes) {
        stats.removeWithKey(st.id);
    }
    stats.addWithKey(stat, stat.id);
    stat.setAllParent();
    stat.setParent(this);
    stat.buildContinueSet();
    // monitorenter and monitorexit
    stat.buildMonitorFlags();
    if (stat.type == TYPE_SWITCH) {
        // special case switch, sorting leaf nodes
        ((SwitchStatement)stat).sortEdgesAndNodes();
    }
}
// Makes this statement the parent of every direct child.
public void setAllParent() {
    for (Statement child : stats) {
        child.setParent(this);
    }
}
/**
 * Takes ownership of a labeled edge: detaches it from its previous closure
 * statement (if any) and registers this statement as the new closure.
 */
public void addLabeledEdge(StatEdge edge) {
    if (edge.closure != null) {
        edge.closure.getLabelEdges().remove(edge);
    }
    edge.closure = this;
    this.getLabelEdges().add(edge);
}
// Appends one edge (and its far-end statement) to the index-aligned
// edge/state lists for a single edge category and direction.
private void addEdgeDirectInternal(int direction, StatEdge edge, EdgeType edgetype) {
    boolean backward = direction == DIRECTION_BACKWARD;
    Map<EdgeType, List<StatEdge>> edges = backward ? mapPredEdges : mapSuccEdges;
    Map<EdgeType, List<Statement>> states = backward ? mapPredStates : mapSuccStates;
    Statement farEnd = backward ? edge.getSource() : edge.getDestination();
    edges.computeIfAbsent(edgetype, k -> new ArrayList<>()).add(edge);
    states.computeIfAbsent(edgetype, k -> new ArrayList<>()).add(farEnd);
}
// Registers an edge under every category it belongs to: exception edges go
// under ALL+EXCEPTION, everything else under ALL+DIRECT_ALL+its own type.
private void addEdgeInternal(int direction, StatEdge edge) {
    EdgeType type = edge.getType();
    EdgeType[] categories = type == EdgeType.EXCEPTION
                            ? new EdgeType[]{EdgeType.ALL, EdgeType.EXCEPTION}
                            : new EdgeType[]{EdgeType.ALL, EdgeType.DIRECT_ALL, type};
    for (EdgeType category : categories) {
        addEdgeDirectInternal(direction, edge, category);
    }
}
// Removes one edge (and its index-aligned far-end statement) from the lists
// of a single edge category and direction; a missing category is a no-op.
private void removeEdgeDirectInternal(int direction, StatEdge edge, EdgeType edgetype) {
    boolean backward = direction == DIRECTION_BACKWARD;
    Map<EdgeType, List<StatEdge>> edges = backward ? mapPredEdges : mapSuccEdges;
    Map<EdgeType, List<Statement>> states = backward ? mapPredStates : mapSuccStates;
    List<StatEdge> bucket = edges.get(edgetype);
    if (bucket == null) {
        return;
    }
    int position = bucket.indexOf(edge);
    if (position >= 0) {
        // Edge and state lists are kept index-aligned; drop both entries.
        bucket.remove(position);
        states.get(edgetype).remove(position);
    }
}
// Unregisters an edge from every category it was filed under
// (mirror image of addEdgeInternal).
private void removeEdgeInternal(int direction, StatEdge edge) {
    EdgeType type = edge.getType();
    EdgeType[] categories = type == EdgeType.EXCEPTION
                            ? new EdgeType[]{EdgeType.ALL, EdgeType.EXCEPTION}
                            : new EdgeType[]{EdgeType.ALL, EdgeType.DIRECT_ALL, type};
    for (EdgeType category : categories) {
        removeEdgeDirectInternal(direction, edge, category);
    }
}
// Registers an incoming edge on this statement.
public void addPredecessor(StatEdge edge) {
    addEdgeInternal(DIRECTION_BACKWARD, edge);
}
// Unregisters an incoming edge; silently ignores null.
public void removePredecessor(StatEdge edge) {
    if (edge != null) { // FIXME: redundant?
        removeEdgeInternal(DIRECTION_BACKWARD, edge);
    }
}
/**
 * Registers an outgoing edge, re-adds it to its closure's label set (if any),
 * and mirrors it as a predecessor on the destination statement.
 */
public void addSuccessor(StatEdge edge) {
    addEdgeInternal(DIRECTION_FORWARD, edge);
    if (edge.closure != null) {
        edge.closure.getLabelEdges().add(edge);
    }
    edge.getDestination().addPredecessor(edge);
}
/**
 * Unregisters an outgoing edge, removes it from its closure's label set,
 * and mirrors the removal on the destination's predecessor lists.
 */
public void removeSuccessor(StatEdge edge) {
    if (edge == null) {
        return;
    }
    removeEdgeInternal(DIRECTION_FORWARD, edge);
    if (edge.closure != null) {
        edge.closure.getLabelEdges().remove(edge);
    }
    if (edge.getDestination() != null) { // TODO: redundant?
        edge.getDestination().removePredecessor(edge);
    }
}
// TODO: make obsolete and remove
// TODO: make obsolete and remove
// Removes every outgoing edge whose destination is the given statement.
public void removeAllSuccessors(Statement stat) {
    if (stat == null) {
        return;
    }
    for (StatEdge edge : getAllSuccessorEdges()) {
        if (edge.getDestination() != stat) {
            continue;
        }
        removeSuccessor(edge);
    }
}
/**
 * Recomputes, bottom-up, the set of basic heads reachable via CONTINUE edges
 * from inside this statement; a DO statement absorbs continues to its own head.
 *
 * @return the rebuilt continue set of this statement
 */
public HashSet<Statement> buildContinueSet() {
    continueSet.clear();
    for (Statement st : stats) {
        continueSet.addAll(st.buildContinueSet());
        if (st != first) {
            // continues targeting a non-first child are resolved inside this statement
            continueSet.remove(st.getBasichead());
        }
    }
    for (StatEdge edge : getEdges(EdgeType.CONTINUE, DIRECTION_FORWARD)) {
        continueSet.add(edge.getDestination().getBasichead());
    }
    if (type == TYPE_DO) {
        // a loop handles continues to its own head itself
        continueSet.remove(first.getBasichead());
    }
    return continueSet;
}
/**
 * Recomputes, bottom-up, whether this statement ends with a monitorenter
 * and whether it contains a monitorexit anywhere inside.
 */
public void buildMonitorFlags() {
    for (Statement st : stats) {
        st.buildMonitorFlags();
    }
    switch (type) {
        case TYPE_BASIC_BLOCK:
            // leaf case: scan the raw instruction sequence
            BasicBlockStatement bblock = (BasicBlockStatement)this;
            InstructionSequence seq = bblock.getBlock().getSeq();
            if (seq != null && seq.length() > 0) {
                for (int i = 0; i < seq.length(); i++) {
                    if (seq.getInstr(i).opcode == CodeConstants.opc_monitorexit) {
                        containsMonitorExit = true;
                        break;
                    }
                }
                isMonitorEnter = (seq.getLastInstr().opcode == CodeConstants.opc_monitorenter);
            }
            break;
        case TYPE_SEQUENCE:
        case TYPE_IF:
            // aggregate the flag over all children
            containsMonitorExit = false;
            for (Statement st : stats) {
                containsMonitorExit |= st.isContainsMonitorExit();
            }
            break;
        case TYPE_SYNCHRONIZED:
        case TYPE_ROOT:
        case TYPE_GENERAL:
            // these types keep their previously computed flags untouched
            break;
        default:
            containsMonitorExit = false;
            for (Statement st : stats) {
                containsMonitorExit |= st.isContainsMonitorExit();
            }
    }
}
// Reverse post order of the child graph starting at the first statement.
public List<Statement> getReversePostOrderList() {
    return getReversePostOrderList(first);
}
// Reverse post order of the child graph starting at the given statement.
public List<Statement> getReversePostOrderList(Statement stat) {
    List<Statement> ordered = new ArrayList<>();
    addToReversePostOrderListIterative(stat, ordered);
    return ordered;
}
// Post reverse post order with the exit representatives computed on demand.
public List<Statement> getPostReversePostOrderList() {
    return getPostReversePostOrderList(null);
}
/**
 * Builds a post reverse post order over the children by walking regular and
 * exception edges backwards from the exit representatives.
 *
 * @param lstexits exit statements to start from, or null to derive them via
 *                 strong-connectivity analysis
 * @throws RuntimeException if the traversal did not reach every child
 */
public List<Statement> getPostReversePostOrderList(List<Statement> lstexits) {
    List<Statement> res = new ArrayList<>();
    if (lstexits == null) {
        lstexits = new StrongConnectivityHelper(this).getExitReps();
    }
    HashSet<Statement> setVisited = new HashSet<>();
    for (Statement exit : lstexits) {
        addToPostReversePostOrderList(exit, res, setVisited);
    }
    // sanity check: the ordering must cover the whole child graph
    if (res.size() != stats.size()) {
        throw new RuntimeException("computing post reverse post order failed!");
    }
    return res;
}
// True if stat is this statement itself or nested anywhere inside it.
public boolean containsStatement(Statement stat) {
    return this == stat || containsStatementStrict(stat);
}
// True if stat is a direct child or nested in any descendant (but not this itself).
public boolean containsStatementStrict(Statement stat) {
    return stats.contains(stat)
           || stats.stream().anyMatch(child -> child.containsStatementStrict(stat));
}
// Renders this statement as Java source; concrete subclasses must override.
public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) {
    throw new RuntimeException("not implemented");
}
// TODO: make obsolete and remove
// TODO: make obsolete and remove
// Children in sequential order; subclasses mix in exprents where applicable.
public List<Object> getSequentialObjects() {
    return new ArrayList<>(stats);
}
// Hook for subclasses to build their expression lists; base class has none.
public void initExprents() {
    // do nothing
}
// Hook for subclasses to swap one expression for another; base class has none.
public void replaceExprent(Exprent oldexpr, Exprent newexpr) {
    // do nothing
}
// Creates a shallow copy of this statement; concrete subclasses must override.
public Statement getSimpleCopy() {
    throw new RuntimeException("not implemented");
}
// Finishes a simple copy by pointing 'first' at the initial child, if any.
public void initSimpleCopy() {
    if (!stats.isEmpty()) {
        first = stats.get(0);
    }
}
/**
 * Substitutes one child statement for another in place: transfers all
 * predecessor and successor edges, the position in the child collection,
 * the 'first' pointer, and ownership of labeled edges.
 */
public void replaceStatement(Statement oldstat, Statement newstat) {
    // move incoming edges to the replacement
    for (StatEdge edge : oldstat.getAllPredecessorEdges()) {
        oldstat.removePredecessor(edge);
        edge.getSource().changeEdgeNode(DIRECTION_FORWARD, edge, newstat);
        newstat.addPredecessor(edge);
    }
    // move outgoing edges to the replacement
    for (StatEdge edge : oldstat.getAllSuccessorEdges()) {
        oldstat.removeSuccessor(edge);
        edge.setSource(newstat);
        newstat.addSuccessor(edge);
    }
    // swap the child at the same position in the collection
    int statindex = stats.getIndexByKey(oldstat.id);
    stats.removeWithKey(oldstat.id);
    stats.addWithKeyAndIndex(statindex, newstat, newstat.id);
    newstat.setParent(this);
    newstat.post = oldstat.post;
    if (first == oldstat) {
        first = newstat;
    }
    // re-home labeled edges owned by the old statement
    List<StatEdge> lst = new ArrayList<>(oldstat.getLabelEdges());
    for (int i = lst.size() - 1; i >= 0; i--) {
        StatEdge edge = lst.get(i);
        if (edge.getSource() != newstat) {
            newstat.addLabeledEdge(edge);
        }
        else {
            if (this == edge.getDestination() || this.containsStatementStrict(edge.getDestination())) {
                // edge is now fully internal: no closure needed
                edge.closure = null;
            }
            else {
                this.addLabeledEdge(edge);
            }
        }
    }
    oldstat.getLabelEdges().clear();
}
// *****************************************************************************
// private methods
// *****************************************************************************
/**
 * Iterative DFS over REGULAR and EXCEPTION successor edges that prepends each
 * node after its successors are exhausted, yielding a reverse post order in
 * {@code lst}. The explicit node/index stacks replace recursion.
 */
private static void addToReversePostOrderListIterative(Statement root, List<? super Statement> lst) {
    LinkedList<Statement> stackNode = new LinkedList<>();
    LinkedList<Integer> stackIndex = new LinkedList<>();
    HashSet<Statement> setVisited = new HashSet<>();
    stackNode.add(root);
    stackIndex.add(0);
    while (!stackNode.isEmpty()) {
        Statement node = stackNode.getLast();
        // resume scanning this node's successors from where we left off
        int index = stackIndex.removeLast();
        setVisited.add(node);
        List<StatEdge> lstEdges = node.getAllSuccessorEdges();
        for (; index < lstEdges.size(); index++) {
            StatEdge edge = lstEdges.get(index);
            Statement succ = edge.getDestination();
            if (!setVisited.contains(succ) &&
                (edge.getType() == EdgeType.REGULAR || edge.getType() == EdgeType.EXCEPTION)) { // TODO: edge filter?
                // descend: remember our resume point, then push the successor
                stackIndex.add(index + 1);
                stackNode.add(succ);
                stackIndex.add(0);
                break;
            }
        }
        if (index == lstEdges.size()) {
            // all successors processed: emit the node in front (post order reversed)
            lst.add(0, node);
            stackNode.removeLast();
        }
    }
}
/**
 * Recursively prepends {@code stat} and all of its unvisited REGULAR/EXCEPTION
 * predecessors to {@code lst}, visiting predecessors first, which yields a
 * reverse post-order of the reversed graph.
 *
 * @param stat       statement to process
 * @param lst        receiving list; each statement is inserted at index 0 after
 *                   its predecessors have been handled
 * @param setVisited statements already emitted, to cut cycles
 */
private static void addToPostReversePostOrderList(Statement stat, List<? super Statement> lst, HashSet<? super Statement> setVisited) {
  if (setVisited.contains(stat)) { // because of not considered exception edges, s. isExitComponent. Should be rewritten, if possible.
    return;
  }
  setVisited.add(stat);
  // Visit all regular and exception predecessors before emitting this node.
  for (StatEdge prededge : stat.getEdges(EdgeType.REGULAR.unite(EdgeType.EXCEPTION), DIRECTION_BACKWARD)) {
    Statement pred = prededge.getSource();
    if (!setVisited.contains(pred)) {
      addToPostReversePostOrderList(pred, lst, setVisited);
    }
  }
  lst.add(0, stat);
}
// *****************************************************************************
// getter and setter methods
// *****************************************************************************
/**
 * Replaces the statement referenced by {@code edge} on the given side of this
 * statement with {@code value}, keeping the per-type edge lists and the parallel
 * statement lists in sync. For DIRECTION_BACKWARD the edge's source is replaced,
 * for DIRECTION_FORWARD its destination.
 *
 * @param direction DIRECTION_BACKWARD (predecessor side) or DIRECTION_FORWARD (successor side)
 * @param edge      the edge whose endpoint is being redirected
 * @param value     the statement that becomes the new endpoint
 */
public void changeEdgeNode(int direction, StatEdge edge, Statement value) {
  Map<EdgeType, List<StatEdge>> mapEdges = direction == DIRECTION_BACKWARD ? mapPredEdges : mapSuccEdges;
  Map<EdgeType, List<Statement>> mapStates = direction == DIRECTION_BACKWARD ? mapPredStates : mapSuccStates;
  EdgeType type = edge.getType();
  // An edge is registered under several aggregate categories; all of them must be
  // updated. Exception edges are not part of DIRECT_ALL.
  EdgeType[] arrtypes;
  if (type == EdgeType.EXCEPTION) {
    arrtypes = new EdgeType[]{EdgeType.ALL, EdgeType.EXCEPTION};
  }
  else {
    arrtypes = new EdgeType[]{EdgeType.ALL, EdgeType.DIRECT_ALL, type};
  }
  // The statement list is parallel to the edge list: replace the entry at the
  // position where this edge is stored.
  for (EdgeType edgetype : arrtypes) {
    List<StatEdge> lst = mapEdges.get(edgetype);
    if (lst != null) {
      int index = lst.indexOf(edge);
      if (index >= 0) {
        mapStates.get(edgetype).set(index, value);
      }
    }
  }
  // Finally update the endpoint stored on the edge object itself.
  if (direction == DIRECTION_BACKWARD) {
    edge.setSource(value);
  }
  else {
    edge.setDestination(value);
  }
}
/**
 * Re-registers {@code edge} under {@code newtype} on this statement (and, when
 * called on the source side, recursively on the destination), then updates the
 * type stored in the edge. Exception edges cannot change type.
 *
 * @param direction DIRECTION_FORWARD when this statement is the edge's source,
 *                  DIRECTION_BACKWARD when it is the destination
 * @param edge      the edge to retype
 * @param newtype   the new edge type
 * @throws RuntimeException if either the old or the new type is EXCEPTION
 */
public void changeEdgeType(int direction, StatEdge edge, EdgeType newtype) {
  EdgeType oldtype = edge.getType();
  if (oldtype == newtype) {
    return;
  }
  // Exception edges live in separate structures and may not be converted.
  if (oldtype == EdgeType.EXCEPTION || newtype == EdgeType.EXCEPTION) {
    throw new RuntimeException("Invalid edge type!");
  }
  removeEdgeDirectInternal(direction, edge, oldtype);
  addEdgeDirectInternal(direction, edge, newtype);
  // Propagate the change to the destination's predecessor bookkeeping exactly once.
  if (direction == DIRECTION_FORWARD) {
    edge.getDestination().changeEdgeType(DIRECTION_BACKWARD, edge, newtype);
  }
  edge.setType(newtype);
}
/**
 * Collects the edges of the requested type(s) attached to this statement.
 * {@code type} may be a single edge category or a mask combining several; in the
 * combined case the result is the concatenation of all matching categories.
 * A fresh list is returned, so callers may mutate it freely.
 *
 * @param type      edge category or combination of categories
 * @param direction DIRECTION_BACKWARD for predecessor edges, otherwise successor edges
 * @return a new list with the matching edges (possibly empty)
 */
private List<StatEdge> getEdges(EdgeType type, int direction) {
  // Select the map for the requested direction.
  Map<EdgeType, List<StatEdge>> edgeMap =
      direction == DIRECTION_BACKWARD ? mapPredEdges : mapSuccEdges;
  int mask = type.mask();
  // A single-bit mask maps to exactly one bucket: copy it directly.
  if ((mask & (mask - 1)) == 0) {
    List<StatEdge> bucket = edgeMap.get(type);
    return bucket == null ? new ArrayList<>() : new ArrayList<>(bucket);
  }
  // Composite mask: gather every category whose bit is set.
  List<StatEdge> result = new ArrayList<>();
  for (EdgeType category : EdgeType.types()) {
    if ((mask & category.mask()) != 0) {
      List<StatEdge> bucket = edgeMap.get(category);
      if (bucket != null) {
        result.addAll(bucket);
      }
    }
  }
  return result;
}
/**
 * Collects the neighbouring statements reachable through edges of the requested
 * type(s). Mirrors {@link #getEdges}: a single-category type is looked up
 * directly, a combined mask concatenates all matching categories. The returned
 * list is a fresh copy and may contain duplicates if several edges share a
 * neighbour.
 *
 * @param type      edge category or combination of categories
 * @param direction DIRECTION_BACKWARD for predecessors, otherwise successors
 * @return a new list with the matching neighbour statements (possibly empty)
 */
public List<Statement> getNeighbours(EdgeType type, int direction) {
  // Select the statement map for the requested direction.
  Map<EdgeType, List<Statement>> stateMap =
      direction == DIRECTION_BACKWARD ? mapPredStates : mapSuccStates;
  int mask = type.mask();
  // Single-bit mask: one bucket, copied defensively.
  if ((mask & (mask - 1)) == 0) {
    List<Statement> bucket = stateMap.get(type);
    return bucket == null ? new ArrayList<>() : new ArrayList<>(bucket);
  }
  // Composite mask: union of every category whose bit is set.
  List<Statement> result = new ArrayList<>();
  for (EdgeType category : EdgeType.types()) {
    if ((mask & category.mask()) != 0) {
      List<Statement> bucket = stateMap.get(category);
      if (bucket != null) {
        result.addAll(bucket);
      }
    }
  }
  return result;
}
/** Same as {@link #getNeighbours} but deduplicated into a set. */
public Set<Statement> getNeighboursSet(EdgeType type, int direction) {
  return new HashSet<>(getNeighbours(type, direction));
}
/** Successor edges of the given type (fresh list). */
public List<StatEdge> getSuccessorEdges(EdgeType type) {
  return getEdges(type, DIRECTION_FORWARD);
}
/** Predecessor edges of the given type (fresh list). */
public List<StatEdge> getPredecessorEdges(EdgeType type) {
  return getEdges(type, DIRECTION_BACKWARD);
}
/** All successor edges regardless of type (fresh list). */
public List<StatEdge> getAllSuccessorEdges() {
  return getEdges(EdgeType.ALL, DIRECTION_FORWARD);
}
/** All predecessor edges regardless of type (fresh list). */
public List<StatEdge> getAllPredecessorEdges() {
  return getEdges(EdgeType.ALL, DIRECTION_BACKWARD);
}
/** Entry (head) statement of this compound statement. */
public Statement getFirst() {
  return first;
}
public void setFirst(Statement first) {
  this.first = first;
}
/** Statement executed after this one, if recorded. */
public Statement getPost() {
  return post;
}
/** Direct child statements, keyed by their ids. */
public VBStyleCollection<Statement, Integer> getStats() {
  return stats;
}
public int getLastBasicType() {
  return lastBasicType;
}
public HashSet<Statement> getContinueSet() {
  return continueSet;
}
public boolean isContainsMonitorExit() {
  return containsMonitorExit;
}
public boolean isMonitorEnter() {
  return isMonitorEnter;
}
/**
 * First basic block reached by descending through {@code first} children;
 * a basic block statement returns itself.
 */
public BasicBlockStatement getBasichead() {
  if (type == TYPE_BASIC_BLOCK) {
    return (BasicBlockStatement)this;
  }
  else {
    return first.getBasichead();
  }
}
/** True if any explicit labeled edge targets this statement. */
public boolean isLabeled() {
  for (StatEdge edge : labelEdges) {
    if (edge.labeled && edge.explicit) { // FIXME: consistent setting
      return true;
    }
  }
  return false;
}
/**
 * True if this statement kind can own a direct (fall-through) successor edge:
 * basic blocks, plain if-statements and non-do loops.
 */
public boolean hasBasicSuccEdge() {
  // FIXME: default switch
  return type == TYPE_BASIC_BLOCK || (type == TYPE_IF &&
                                      ((IfStatement)this).iftype == IfStatement.IFTYPE_IF) ||
         (type == TYPE_DO && ((DoStatement)this).getLoopType() != LoopType.DO);
}
public Statement getParent() {
  return parent;
}
public void setParent(Statement parent) {
  this.parent = parent;
}
/** Labeled edges registered on this statement (live view, not a copy). */
public HashSet<StatEdge> getLabelEdges() { // FIXME: why HashSet?
  return labelEdges;
}
/** Variable-definition expressions hoisted to this statement. */
public List<Exprent> getVarDefinitions() {
  return varDefinitions;
}
/** Flat expression list, or null while this statement is still structured. */
public List<Exprent> getExprents() {
  return exprents;
}
public void setExprents(List<Exprent> exprents) {
  this.exprents = exprents;
}
public boolean isCopied() {
  return copied;
}
public void setCopied(boolean copied) {
  this.copied = copied;
}
// helper methods
public String toString() {
  return id.toString();
}
// *****************************************************************************
// IMatchable implementation
// *****************************************************************************
/**
 * Resolves the child object a pattern-match node refers to: a child statement
 * for MATCHNODE_STATEMENT, an expression for MATCHNODE_EXPRENT. If the node
 * carries an explicit POSITION rule, that (possibly negative, Python-style)
 * index wins; otherwise the supplied {@code index} is used.
 *
 * @param matchNode the pattern node being matched
 * @param index     fallback child index when no position rule is present
 * @return the selected child, or null if nothing applies
 */
@Override
public IMatchable findObject(MatchNode matchNode, int index) {
  int node_type = matchNode.getType();
  if (node_type == MatchNode.MATCHNODE_STATEMENT && !this.stats.isEmpty()) {
    String position = (String)matchNode.getRuleValue(MatchProperties.STATEMENT_POSITION);
    if (position != null) {
      if (position.matches("-?\\d+")) {
        return this.stats.get((this.stats.size() + Integer.parseInt(position)) % this.stats.size()); // care for negative positions
      }
    }
    else if (index < this.stats.size()) { // use 'index' parameter
      return this.stats.get(index);
    }
  }
  else if (node_type == MatchNode.MATCHNODE_EXPRENT && this.exprents != null && !this.exprents.isEmpty()) {
    String position = (String)matchNode.getRuleValue(MatchProperties.EXPRENT_POSITION);
    if (position != null) {
      if (position.matches("-?\\d+")) {
        return this.exprents.get((this.exprents.size() + Integer.parseInt(position)) % this.exprents.size()); // care for negative positions
      }
    }
    else if (index < this.exprents.size()) { // use 'index' parameter
      return this.exprents.get(index);
    }
  }
  return null;
}
/**
 * Checks this statement against a MATCHNODE_STATEMENT pattern node: every rule
 * on the node must hold (statement type, child count, expression count, or
 * binding the statement to a pattern variable via STATEMENT_RET).
 *
 * @param matchNode the pattern node; must be of statement kind
 * @param engine    match engine used to record variable bindings
 * @return true if all rules are satisfied
 */
@Override
public boolean match(MatchNode matchNode, MatchEngine engine) {
  if (matchNode.getType() != MatchNode.MATCHNODE_STATEMENT) {
    return false;
  }
  for (Entry<MatchProperties, RuleValue> rule : matchNode.getRules().entrySet()) {
    switch (rule.getKey()) {
      case STATEMENT_TYPE:
        if (this.type != (Integer)rule.getValue().value) {
          return false;
        }
        break;
      case STATEMENT_STATSIZE:
        if (this.stats.size() != (Integer)rule.getValue().value) {
          return false;
        }
        break;
      case STATEMENT_EXPRSIZE:
        // -1 means "must not be in expression form" (exprents == null).
        int exprsize = (Integer)rule.getValue().value;
        if (exprsize == -1) {
          if (this.exprents != null) {
            return false;
          }
        }
        else {
          if (this.exprents == null || this.exprents.size() != exprsize) {
            return false;
          }
        }
        break;
      case STATEMENT_RET:
        // Bind this statement to the named pattern variable; fails on conflict.
        if (!engine.checkAndSetVariableValue((String)rule.getValue().value, this)) {
          return false;
        }
        break;
    }
  }
  return true;
}
}
| |
package backend.codegen;
/*
* Extremely Compiler Collection
* Copyright (c) 2015-2020, Jianping Zeng.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
import backend.analysis.MachineDomTree;
import backend.analysis.MachineLoopInfo;
import backend.pass.AnalysisUsage;
import backend.support.EquivalenceClass;
import backend.support.MachineFunctionPass;
import backend.mc.MCRegisterClass;
import backend.target.TargetRegisterInfo;
import gnu.trove.set.hash.TIntHashSet;
import tools.BitMap;
import tools.Util;
import java.util.*;
import static backend.target.TargetRegisterInfo.isPhysicalRegister;
import static backend.target.TargetRegisterInfo.isVirtualRegister;
/**
* This class implements a MachineFunctionPass used for performing linear scan
* register allocation on each MachineFunction.
* <pre>
* If you want to learn more information, consult the paper
* Poletto, Massimiliano, and Vivek Sarkar. "Linear scan register allocation.".
* </pre>
*
* @author Jianping Zeng
* @version 0.4
*/
public class RegAllocLinearScan extends MachineFunctionPass {
private TreeSet<LiveInterval> unhandled;
private ArrayList<LiveInterval> fixed;
private ArrayList<LiveInterval> active;
private ArrayList<LiveInterval> inactive;
private LinkedList<LiveInterval> handled;
private LiveIntervalAnalysis li;
private PhysRegTracker prt;
private TargetRegisterInfo tri;
private MachineRegisterInfo mri;
private VirtRegMap vrm;
private MachineFunction mf;
private float[] spillWeights;
private VirtRegRewriter rewriter;
private LiveStackSlot ls;
/**
* This structure is built the first time a function is
* compiled, and keeps track of which register classes have registers that
* belong to multiple classes or have aliases that are in other classes.
*/
private EquivalenceClass<MCRegisterClass> relatedRegisterClasses;
private HashMap<Integer, MCRegisterClass> oneClassForEachPhysReg;
/**
 * Declares the analyses this pass needs (live intervals, coalescing, live stack
 * slots, loop info) and the ones it preserves; the CFG is not modified.
 */
@Override
public void getAnalysisUsage(AnalysisUsage au) {
  au.setPreservesCFG();
  au.addRequired(LiveIntervalAnalysis.class);
  au.addRequired(LiveIntervalCoalescing.class);
  au.addRequired(LiveStackSlot.class);
  au.addPreserved(LiveStackSlot.class);
  au.addRequired(MachineLoopInfo.class);
  au.addPreserved(MachineLoopInfo.class);
  au.addPreserved(MachineDomTree.class);
  super.getAnalysisUsage(au);
}
/** Human-readable pass name shown in pass manager dumps. */
@Override
public String getPassName() {
  return "Linear scan register allocator";
}
/**
 * Seeds the work lists from the live-interval analysis: physical-register
 * intervals become fixed constraints, virtual-register intervals go to the
 * unhandled queue to be allocated.
 */
private void initIntervalSet() {
  Util.assertion(unhandled.isEmpty() && fixed.isEmpty()
      && active.isEmpty() && inactive.isEmpty());
  for (LiveInterval interval : li.getReg2LiveInterval().values()) {
    // Add live interval of physical register to fixed set.
    if (TargetRegisterInfo.isPhysicalRegister(interval.register)) {
      fixed.add(interval);
      mri.setPhysRegUsed(interval.register);
    } else {
      unhandled.add(interval);
    }
  }
}
/**
 * Creates or extends the live interval of the stack slot assigned to
 * {@code cur}, merging the register interval's ranges into it under a single
 * value number. No-op when {@code cur} was not spilled to a slot.
 *
 * @param cur the (possibly spilled) register live interval
 * @param ls  live stack slot analysis owning slot intervals
 * @param li  live interval analysis, used to look up cur's interval
 * @param mri machine register info, for the register class of the slot
 * @param vrm virtual register map recording the reg→slot assignment
 */
private static void addStackInterval(
    LiveInterval cur,
    LiveStackSlot ls,
    LiveIntervalAnalysis li,
    MachineRegisterInfo mri,
    VirtRegMap vrm) {
  if (!vrm.hasStackSlot(cur.register))
    return;
  int ss = vrm.getStackSlot(cur.register);
  MCRegisterClass rc = mri.getRegClass(cur.register);
  LiveInterval slotInterval = ls.getOrCreateInterval(ss, rc);
  // Reuse the slot's existing value number so all spills of this vreg share one value.
  int valNumber;
  if (slotInterval.hasAtLeastOneValue())
    valNumber = slotInterval.getRange(0).valId;
  else
    valNumber = slotInterval.getNextValue();
  LiveInterval regInterval = li.getInterval(cur.register);
  slotInterval.mergeRangesInAsValue(regInterval, valNumber);
}
/**
 * Main loop of the linear scan algorithm: repeatedly takes the unhandled
 * interval with the smallest start point, expires or deactivates intervals in
 * the active/inactive lists relative to that start point, and then either
 * reserves the register (fixed physical intervals) or allocates a register or
 * stack slot for the virtual interval.
 */
private void linearScan() {
  while (!unhandled.isEmpty()) {
    // remove and obtains the first live interval whose start is first.
    LiveInterval cur = unhandled.pollFirst();
    Util.assertion(cur != null);
    // Expire finished intervals and move no-longer-live ones to inactive.
    for (int i = 0; i < active.size(); i++) {
      LiveInterval li = active.get(i);
      if (li.expiredAt(cur.beginNumber())) {
        active.remove(i);
        --i;
        prt.delRegUse(occupiedPhysReg(li));
      } else if (!li.isLiveAt(cur.beginNumber())) {
        active.remove(i);
        --i;
        inactive.add(li);
        prt.delRegUse(occupiedPhysReg(li));
      }
    }
    // Expire finished inactive intervals and re-activate live ones.
    for (int i = 0; i < inactive.size(); i++) {
      LiveInterval li = inactive.get(i);
      if (li.expiredAt(cur.beginNumber())) {
        inactive.remove(i);
        --i;
      } else if (li.isLiveAt(cur.beginNumber())) {
        inactive.remove(i);
        --i;
        active.add(li);
        prt.addRegUse(occupiedPhysReg(li));
      }
    }
    // if this register is fixed we are done
    if (TargetRegisterInfo.isPhysicalRegister(cur.register)) {
      prt.addRegUse(cur.register);
      active.add(cur);
      handled.add(cur);
    } else {
      // otherwise we are allocating a virtual register. try to find
      // a free physical register or spill an interval in order to
      // assign it one (we could spill the current though).
      assignRegOrStackSlot(cur);
    }
  }
}

/**
 * Physical register occupied by {@code li}: the register itself for a physical
 * interval, otherwise the register the VirtRegMap assigned to the vreg.
 * Physical intervals do appear in {@code active} (added above and re-queued by
 * the rollback in assignRegOrStackSlot), so calling vrm.getPhys unconditionally
 * on li.register — as the previous code did — was incorrect for them.
 */
private int occupiedPhysReg(LiveInterval li) {
  int reg = li.register;
  return isVirtualRegister(reg) ? vrm.getPhys(reg) : reg;
}
/**
 * Adds {@code weight} to the spill weight of {@code reg} and of every register
 * aliasing it, making registers whose intervals overlap more live ranges more
 * expensive to evict.
 *
 * @param reg    physical register number
 * @param weight spill weight contributed by one live interval
 */
private void updateSpillWeights(int reg, float weight) {
  spillWeights[reg] += weight;
  // getAliasSet() can return null for registers without aliases (the null
  // checks in buildRelatedRegClasses() rely on exactly that), so guard the
  // iteration to avoid an NPE.
  int[] aliases = tri.getAliasSet(reg);
  if (aliases != null) {
    for (int alias : aliases)
      spillWeights[alias] += weight;
  }
}
/**
 * Allocates a register or stack slot for the virtual interval {@code cur}.
 * First computes spill weights for all registers blocked by overlapping
 * active/inactive/fixed intervals; if a free register exists it is assigned.
 * Otherwise either {@code cur} itself is spilled (when it is the cheapest), or
 * the occupants of the cheapest register are spilled and the allocation state
 * is rolled back to the earliest start point of a spilled interval so those
 * decisions can be re-made.
 *
 * @param cur the current virtual-register live interval
 */
private void assignRegOrStackSlot(LiveInterval cur) {
  spillWeights = new float[tri.getNumRegs()];
  // The register class for current live interval.
  MCRegisterClass rc1 = mri.getRegClass(cur.register);
  // Update spill weight.
  for (LiveInterval li : active) {
    int reg = li.register;
    if (isVirtualRegister(reg))
      reg = vrm.getPhys(reg);
    updateSpillWeights(reg, li.weight);
  }
  // for every interval in inactive we overlap with, mark the
  // register as not free and update spill weights.
  EquivalenceClass.ECNode<MCRegisterClass> node =
      relatedRegisterClasses.findLeading(rc1), node2;
  MCRegisterClass leadingRC = node != null ? node.getValue() : null;
  for (LiveInterval li : inactive) {
    // Only intervals whose register class is related to cur's can conflict.
    node2 = relatedRegisterClasses.findLeading(mri.getRegClass(li.register));
    MCRegisterClass rcs = node2 != null ? node2.getValue() : null;
    if (leadingRC == rcs && leadingRC != null && cur.overlaps(li)) {
      int reg = li.register;
      if (isVirtualRegister(reg))
        reg = vrm.getPhys(reg);
      updateSpillWeights(reg, li.weight);
      prt.addRegUse(reg);
    }
  }
  // for every interval in fixed we overlap with,
  // mark the register as not free and update spill weights
  node = relatedRegisterClasses.findLeading(rc1);
  leadingRC = node != null ? node.getValue() : null;
  for (LiveInterval li : fixed) {
    Util.assertion(oneClassForEachPhysReg.containsKey(li.register));
    node2 = relatedRegisterClasses.findLeading(oneClassForEachPhysReg.get(li.register));
    MCRegisterClass rcs = node2 != null ? node2.getValue() : null;
    if (leadingRC != null && leadingRC == rcs && cur.overlaps(li)) {
      int reg = li.register;
      updateSpillWeights(reg, li.weight);
      prt.addRegUse(reg);
    }
  }
  // if we find a free register, we are done: assign this virtual to
  // the free physical register and add this interval to the active
  // list.
  int phyReg = getFreePhysReg(cur);
  if (phyReg != 0) {
    vrm.assignVirt2Phys(cur.register, phyReg);
    prt.addRegUse(phyReg);
    active.add(cur);
    handled.add(cur);
    return;
  }
  if (Util.DEBUG) {
    System.err.print("no free register\n");
    System.err.print("\tassigning stack slot at interval");
    cur.print(System.err, tri);
    System.err.println(":");
  }
  // Find the allocatable register with the smallest accumulated spill weight.
  float minWeigth = Float.MAX_VALUE;
  int minReg = 0;
  MCRegisterClass rc = mri.getRegClass(cur.register);
  BitMap allocatable = tri.getAllocatableSet(mf, rc);
  for (int reg = allocatable.findFirst(); reg > 0; reg = allocatable.findNext(reg+1)) {
    if (spillWeights[reg] <= minWeigth) {
      minWeigth = spillWeights[reg];
      minReg = reg;
    }
  }
  if (Util.DEBUG) {
    System.err.printf("\tregister with min weight: %s (%f)\n",
        tri.getName(minReg),
        minWeigth);
  }
  // if the current has the minimum weight, we need to spill it and
  // add any added intervals back to unhandled, and restart
  // linearscan.
  if (cur.weight < minWeigth) {
    if (Util.DEBUG) {
      System.err.print("\t\t\tspilling(c):");
      cur.print(System.err, tri);
      System.err.println();
    }
    int slot = vrm.assignVirt2StackSlot(cur.register);
    ArrayList<LiveInterval> added = li.addIntervalsForSpills(cur, vrm, slot);
    addStackInterval(cur, ls, li, mri, vrm);
    if (added.isEmpty())
      return; // Early exit if all spills were folded.
    // Merge added with unhandled. Note that we know that
    // addIntervalsForSpills returns intervals sorted by their starting
    // point.
    unhandled.addAll(added);
    return;
  }
  // push the current interval back to unhandled since we are going
  // to re-run at least this iteration. Since we didn't modify it it
  // should go back right in the front of the list
  unhandled.add(cur);
  ArrayList<LiveInterval> added = new ArrayList<>();
  Util.assertion(isPhysicalRegister(minReg), "didn't choose a register to spill?");
  // Mark minReg and all of its aliases for spilling.
  boolean[] toSpill = new boolean[tri.getNumRegs()];
  toSpill[minReg] = true;
  // NOTE(review): getAliasSet() is null-checked elsewhere in this class
  // (buildRelatedRegClasses); if it can return null here too, this loop NPEs
  // — confirm against TargetRegisterInfo's contract.
  for (int alias : tri.getAliasSet(minReg))
    toSpill[alias] = true;
  int earliestStart = cur.beginNumber();
  // set of spilled vregs(used later to rollback properly).
  TIntHashSet spilled = new TIntHashSet();
  // spill live intervals of virtual regs mapped to the physical
  // register we want to clear (and its aliases). we only spill
  // those that overlap with the current interval as the rest do not
  // affect its allocation. we also keep track of the earliest start
  // of all spilled live intervals since this will mark our rollback
  // point
  for (LiveInterval interval : active) {
    int reg = interval.register;
    if (isVirtualRegister(reg) && toSpill[vrm.getPhys(reg)]
        && cur.overlaps(interval)) {
      if (Util.DEBUG) {
        System.err.print("\t\t\tspilling(a): ");
        interval.print(System.err, tri);
        System.err.println();
      }
      earliestStart = Math.min(earliestStart, interval.beginNumber());
      int slot = vrm.assignVirt2StackSlot(reg);
      ArrayList<LiveInterval> newIS = li.addIntervalsForSpills(interval, vrm, slot);
      addStackInterval(interval, ls, li, mri, vrm);
      added.addAll(newIS);
      spilled.add(reg);
    }
  }
  for (LiveInterval interval : inactive) {
    int reg = interval.register;
    if (isVirtualRegister(reg) && toSpill[vrm.getPhys(reg)]
        && cur.overlaps(interval)) {
      if (Util.DEBUG) {
        System.err.print("\t\t\tspilling(a): ");
        interval.print(System.err, tri);
        System.err.println();
      }
      earliestStart = Math.min(earliestStart, interval.beginNumber());
      int slot = vrm.assignVirt2StackSlot(reg);
      ArrayList<LiveInterval> newIS = li.addIntervalsForSpills(interval, vrm, slot);
      addStackInterval(interval, ls, li, mri, vrm);
      added.addAll(newIS);
      spilled.add(reg);
    }
  }
  // Starting to rollback.
  if (Util.DEBUG)
    System.err.printf("\t\trolling back to: %d\n", earliestStart);
  /**
   * Scan handled in reverse order up to the earliest start of a spilled live
   * interval and undo each one, restore the state of unhandled.
   */
  while (!handled.isEmpty()) {
    LiveInterval interval = handled.getLast();
    // If the begining number of interval is less than
    // earliest start, just break out.
    if (interval.beginNumber() < earliestStart)
      break;
    // Remove it from the handled list.
    handled.removeLast();
    int idx = -1;
    // when undoing a live interval allocation we must know if it
    // is active or inactive to properly update the PhysRegTracker
    // and the virtRegMap
    if ((idx = active.indexOf(interval)) != -1) {
      active.remove(idx);
      int reg = interval.register;
      if (isPhysicalRegister(reg)) {
        prt.delRegUse(reg);
        unhandled.add(interval);
      } else {
        // Spilled vregs stay out of unhandled: their replacement
        // intervals are in 'added'.
        if (!spilled.contains(reg))
          unhandled.add(interval);
        prt.delRegUse(vrm.getPhys(reg));
        vrm.clearVirt(reg);
      }
    } else if ((idx = inactive.indexOf(interval)) != -1) {
      inactive.remove(idx);
      int reg = interval.register;
      if (isPhysicalRegister(reg)) {
        prt.delRegUse(reg);
        unhandled.add(interval);
      } else {
        if (!spilled.contains(reg))
          unhandled.add(interval);
        // FIXME prt.delRegUse(vrm.getPhys(reg)); why?
        vrm.clearVirt(reg);
      }
    } else {
      int reg = interval.register;
      if (isVirtualRegister(reg))
        vrm.clearVirt(reg);
      unhandled.add(interval);
    }
  }
  // Re-activate the surviving handled intervals that already expired at cur's
  // start but not at the rollback point, restoring the register tracker.
  for (Iterator<LiveInterval> itr = handled.iterator();
       itr.hasNext(); ) {
    LiveInterval interval = itr.next();
    if (!interval.expiredAt(earliestStart) &&
        interval.expiredAt(cur.beginNumber())) {
      active.add(interval);
      int reg = interval.register;
      if (Util.DEBUG)
        System.err.printf("\t\t\tundo register: %s\n",
            li.getRegisterName(reg));
      if (isPhysicalRegister(reg))
        prt.addRegUse(reg);
      else
        prt.addRegUse(vrm.getPhys(reg));
    }
  }
  // Add all of live intervals that are caused by
  // spilling code.
  unhandled.addAll(added);
}
/**
 * Looks for a currently unused physical register able to hold values of
 * {@code cur}'s register class.
 *
 * @param cur the interval needing a register
 * @return the first free allocatable register, or 0 when none is available
 */
private int getFreePhysReg(LiveInterval cur) {
  MCRegisterClass regClass = mri.getRegClass(cur.register);
  // Registers the target allows for this class in this function.
  BitMap candidates = tri.getAllocatableSet(mf, regClass);
  int reg = candidates.findFirst();
  while (reg > 0) {
    if (prt.isRegAvail(reg))
      return reg;
    reg = candidates.findNext(reg + 1);
  }
  return 0;
}
/**
 * Creates an allocator with empty work lists. Instances are obtained via
 * {@link #createLinearScanRegAllocator()}.
 */
private RegAllocLinearScan() {
  // Order the unhandled queue by ascending interval start. A tie-breaker is
  // mandatory: TreeSet treats elements as duplicates when the comparator
  // returns 0, so ordering by begin number alone silently drops every
  // interval that starts at the same program point as one already queued.
  unhandled = new TreeSet<>(
      Comparator.comparingInt(LiveInterval::beginNumber)
          .thenComparingInt(interval -> interval.register));
  fixed = new ArrayList<>();
  active = new ArrayList<>();
  inactive = new ArrayList<>();
  handled = new LinkedList<>();
  relatedRegisterClasses = new EquivalenceClass<>();
  oneClassForEachPhysReg = new HashMap<>();
}
/**
 * Builds equivalence classes of register classes that can interfere with each
 * other: classes sharing a physical register, or (when aliases exist) classes
 * containing aliasing registers, end up in the same equivalence class. Used to
 * decide whether two intervals' register classes can actually conflict.
 */
private void buildRelatedRegClasses() {
  if (tri.getRegClasses() == null || tri.getRegClasses().length <= 0)
    return;
  boolean hasAlias = false;
  for (MCRegisterClass rc : tri.getRegClasses()) {
    if (rc.getRegs() == null || rc.getRegs().length <= 0)
      continue;
    relatedRegisterClasses.insert(rc);
    for (int reg : rc.getRegs()) {
      // Remember whether any register has aliases; the second pass below only
      // runs in that case.
      hasAlias = hasAlias || tri.getAliasSet(reg) != null
          && tri.getAliasSet(reg).length > 0;
      // First class claiming a register becomes its representative; any later
      // class sharing the register is unioned with it.
      if (!oneClassForEachPhysReg.containsKey(reg))
        oneClassForEachPhysReg.put(reg, rc);
      else
        relatedRegisterClasses.union(oneClassForEachPhysReg.get(reg), rc);
    }
  }
  // Second pass: classes whose registers alias each other are related too.
  if (hasAlias) {
    for (int reg : oneClassForEachPhysReg.keySet()) {
      MCRegisterClass rc = oneClassForEachPhysReg.get(reg);
      int[] alias = tri.getAliasSet(reg);
      if (alias != null && alias.length > 0) {
        for (int aliasReg : alias)
          relatedRegisterClasses.union(rc, oneClassForEachPhysReg.get(aliasReg));
      }
    }
  }
}
/**
 * Entry point of the pass: initializes per-function state, runs linear scan,
 * rewrites virtual registers to their assignments, and clears the work lists
 * for the next function.
 *
 * @param mf the function to allocate registers for
 * @return true — the function is always modified
 */
@Override
public boolean runOnMachineFunction(MachineFunction mf) {
  this.mf = mf;
  li = (LiveIntervalAnalysis) getAnalysisToUpDate(LiveIntervalAnalysis.class);
  ls = (LiveStackSlot) getAnalysisToUpDate(LiveStackSlot.class);
  tri = mf.getSubtarget().getRegisterInfo();
  mri = mf.getMachineRegisterInfo();
  prt = new PhysRegTracker(tri);
  // The register-class relations depend only on the target; build them once.
  if (relatedRegisterClasses.isEmpty())
    buildRelatedRegClasses();
  // Step#1: Initialize interval set.
  initIntervalSet();
  vrm = new VirtRegMap(mf);
  // Step#2:
  linearScan();
  if (rewriter == null)
    rewriter = VirtRegRewriter.createVirtRegRewriter();
  // Step#3: Inserts load code for loading data from memory before use, or
  // store data to memory after define it.
  rewriter.runOnMachineFunction(mf, vrm);
  // Reset per-function state so the pass can be reused on the next function.
  unhandled.clear();
  fixed.clear();
  active.clear();
  inactive.clear();
  handled.clear();
  relatedRegisterClasses.clear();
  oneClassForEachPhysReg.clear();
  return true;
}
/** Factory used by the pass framework to instantiate this allocator. */
public static RegAllocLinearScan createLinearScanRegAllocator() {
  return new RegAllocLinearScan();
}
}
| |
/* The MIT License
*
* Copyright (c) 2010-2014 Jeevanandam M. (myjeeva.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.myjeeva.digitalocean.pojo;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.gson.annotations.SerializedName;
/**
* Represents Droplet attributes of DigitalOcean
*
* @author Jeevanandam M. (jeeva@myjeeva.com)
*/
public class Droplet {

  // Fields mirror the DigitalOcean v1 API JSON; @SerializedName maps the
  // snake_case wire names, @JsonIgnore excludes list fields from Jackson.
  private Integer id;
  private String name;
  @SerializedName("image_id")
  private Integer imageId;
  @SerializedName("region_id")
  private Integer regionId;
  @SerializedName("size_id")
  private Integer sizeId;
  @SerializedName("backups_active")
  private boolean backupsActive;
  @JsonIgnore
  private List<Backup> backups;
  @JsonIgnore
  private List<Snapshot> snapshots;
  @SerializedName("ip_address")
  private String ipAddress;
  @SerializedName("private_ip_address")
  private String privateIpAddress;
  private boolean locked;
  private String status;
  @SerializedName("created_at")
  private String createdDate;
  @SerializedName("event_id")
  private Long eventId;

  /** @return the id */
  public Integer getId() {
    return id;
  }

  /** @param id the id to set */
  public void setId(Integer id) {
    this.id = id;
  }

  /** @return the name */
  public String getName() {
    return name;
  }

  /** @param name the name to set */
  public void setName(String name) {
    this.name = name;
  }

  /** @return the imageId */
  public Integer getImageId() {
    return imageId;
  }

  /** @param imageId the imageId to set */
  public void setImageId(Integer imageId) {
    this.imageId = imageId;
  }

  /** @return the regionId */
  public Integer getRegionId() {
    return regionId;
  }

  /** @param regionId the regionId to set */
  public void setRegionId(Integer regionId) {
    this.regionId = regionId;
  }

  /** @return the sizeId */
  public Integer getSizeId() {
    return sizeId;
  }

  /** @param sizeId the sizeId to set */
  public void setSizeId(Integer sizeId) {
    this.sizeId = sizeId;
  }

  /** @return the backupsActive */
  public boolean isBackupsActive() {
    return backupsActive;
  }

  /** @param backupsActive the backupsActive to set */
  public void setBackupsActive(boolean backupsActive) {
    this.backupsActive = backupsActive;
  }

  /** @return the backups */
  @JsonIgnore
  public List<Backup> getBackups() {
    return backups;
  }

  /** @param backups the backups to set */
  public void setBackups(List<Backup> backups) {
    this.backups = backups;
  }

  /** @return the snapshots */
  @JsonIgnore
  public List<Snapshot> getSnapshots() {
    return snapshots;
  }

  /** @param snapshots the snapshots to set */
  public void setSnapshots(List<Snapshot> snapshots) {
    this.snapshots = snapshots;
  }

  /** @return the ipAddress */
  public String getIpAddress() {
    return ipAddress;
  }

  /** @param ipAddress the ipAddress to set */
  public void setIpAddress(String ipAddress) {
    this.ipAddress = ipAddress;
  }

  /** @return the privateIpAddress */
  public String getPrivateIpAddress() {
    return privateIpAddress;
  }

  /** @param privateIpAddress the privateIpAddress to set */
  public void setPrivateIpAddress(String privateIpAddress) {
    this.privateIpAddress = privateIpAddress;
  }

  /** @return the locked */
  public boolean isLocked() {
    return locked;
  }

  /** @param locked the locked to set */
  public void setLocked(boolean locked) {
    this.locked = locked;
  }

  /** @return the status */
  public String getStatus() {
    return status;
  }

  /** @param status the status to set */
  public void setStatus(String status) {
    this.status = status;
  }

  /** @return the createdDate */
  public String getCreatedDate() {
    return createdDate;
  }

  /** @param createdDate the createdDate to set */
  public void setCreatedDate(String createdDate) {
    this.createdDate = createdDate;
  }

  /** @return the eventId */
  public Long getEventId() {
    return eventId;
  }

  /** @param eventId the eventId to set */
  public void setEventId(Long eventId) {
    this.eventId = eventId;
  }
}
| |
/**
* Code contributed to the Learning Layers project
* http://www.learning-layers.eu
* Development is partly funded by the FP7 Programme of the European Commission under
* Grant Agreement FP7-ICT-318209.
* Copyright (c) 2014, Graz University of Technology - KTI (Knowledge Technologies Institute).
* For a list of contributors see the AUTHORS file at the top-level directory of this distribution.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.tugraz.sss.serv.db.api;
import at.tugraz.sss.serv.reg.SSServErrReg;
import at.tugraz.sss.serv.util.*;
import at.tugraz.sss.serv.datatype.SSAuthor;
import at.tugraz.sss.serv.datatype.SSTextComment;
import at.tugraz.sss.serv.datatype.SSEntity;
import at.tugraz.sss.serv.datatype.*;
import at.tugraz.sss.serv.datatype.SSErr;
import at.tugraz.sss.serv.datatype.enums.SSErrE;
import at.tugraz.sss.serv.datatype.enums.SSSpaceE;
import at.tugraz.sss.serv.datatype.enums.*;
import at.tugraz.sss.serv.datatype.api.SSEntityA;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.MultivaluedMap;
public abstract class SSDBSQLFctA extends SSDBFctA{
protected final SSDBSQLI dbSQL;
/** Stores the SQL access service shared by all helper methods. */
public SSDBSQLFctA(
  final SSDBSQLI dbSQL){
  super();
  this.dbSQL = dbSQL;
}
/** Adds a column/value pair to an insert map; the value is stringified. */
protected static void insert(
  final Map<String, String> inserts,
  final String key,
  final Object value) throws SSErr{
  inserts.put(key, value.toString());
}

/** Adds a column/enum pair to an insert map; the enum's name is stored. */
protected static void insert(
  final Map<String, String> inserts,
  final String key,
  final Enum value) throws SSErr{
  inserts.put(key, value.toString());
}
// Overloads adding a column/value pair to an update map, stringifying the
// value as needed.
protected static void update(
  final Map<String, String> updates,
  final String key,
  final String value) throws SSErr{
  updates.put(key, value);
}
protected static void update(
  final Map<String, String> updates,
  final String key,
  final Enum value) throws SSErr{
  updates.put(key, value.toString());
}
protected static void update(
  final Map<String, String> updates,
  final String key,
  final Number value) throws SSErr{
  updates.put(key, value.toString());
}
protected static void update(
  final Map<String, String> updates,
  final String key,
  final boolean value) throws SSErr{
  updates.put(key, Boolean.toString(value));
}
protected static void update(
  final Map<String, String> updates,
  final String key,
  final SSEntityA value) throws SSErr{
  updates.put(key, value.toString());
}
// Overloads adding a column/value pair identifying the row(s) affected by an
// update (the "unique key" constraint), stringifying the value as needed.
protected static void uniqueKey(
  final Map<String, String> uniqueKeys,
  final String key,
  final Enum value) throws SSErr{
  uniqueKeys.put(key, value.toString());
}
protected static void uniqueKey(
  final Map<String, String> uniqueKeys,
  final String key,
  final String value) throws SSErr{
  uniqueKeys.put(key, value);
}
protected static void uniqueKey(
  final Map<String, String> uniqueKeys,
  final String key,
  final SSEntityA value) throws SSErr{
  uniqueKeys.put(key, value.toString());
}

/** Registers a column to be used in a MATCH clause. */
protected static void match(
  final List<String> matches,
  final String key) throws SSErr{
  matches.add(key);
}
// Overloads adding a WHERE condition; variants taking a table prefix the
// column with "table.". The MultivaluedMap variant allows several conditions
// on the same column.
protected static void where(
  final Map<String, String> wheres,
  final String key,
  final Object value) throws SSErr{
  wheres.put(key, value.toString());
}
protected static void where(
  final Map<String, String> wheres,
  final SSSQLTableI table,
  final String key,
  final SSEntityA value) throws SSErr{
  wheres.put(table + SSStrU.dot + key, value.toString());
}
protected static void where(
  final Map<String, String> wheres,
  final SSSQLTableI table,
  final String key,
  final Object value) throws SSErr{
  wheres.put(table + SSStrU.dot + key, value.toString());
}
protected static void where(
  final Map<String, String> wheres,
  final String key,
  final Enum value) throws SSErr{
  wheres.put(key, value.toString());
}
protected static void where(
  final Map<String, String> wheres,
  final SSSQLTableI table,
  final String key,
  final Enum value) throws SSErr{
  wheres.put(table + SSStrU.dot + key, value.toString());
}
protected static void where(
  final MultivaluedMap<String, String> wheres,
  final SSSQLTableI table,
  final String key,
  final Object value) throws SSErr{
  wheres.add(table + SSStrU.dot + key, value.toString());
}
protected static void table(
final List<SSSQLTableI> tables,
final SSSQLTableI table) throws SSErr{
tables.add(table);
}
/** Adds a plain column name to the select list. */
protected static void column(final List<String> columns, final String column) throws SSErr {
  columns.add(column);
}
/** Adds a table-qualified column ({@code table.key}) to the select list. */
protected static void column(final List<String> columns, final SSSQLTableI table, final String key) throws SSErr {
  final String qualifiedKey = table + SSStrU.dot + key;
  columns.add(qualifiedKey);
}
/** Adds a join condition of the form {@code table1.key1 = table2.key2}. */
protected static void tableCon(final List<String> tableCons, final SSSQLTableI table1, final String key1, final SSSQLTableI table2, final String key2) throws SSErr {
  final String left = table1 + SSStrU.dot + key1;
  final String right = table2 + SSStrU.dot + key2;
  tableCons.add(left + SSStrU.equal + right);
}
/**
 * Moves the cursor to the first row and reports whether one exists.
 * Registers/throws an SSErr (sqlDefaultErr) on JDBC failure.
 * NOTE(review): ResultSet.first() requires a scrollable result set — TODO confirm
 * the statements producing these result sets are created scrollable.
 */
protected static boolean existsFirstResult(final ResultSet resultSet) throws SSErr {
  try {
    return resultSet.first();
  } catch (SQLException sqlError) {
    SSServErrReg.regErrThrow(SSErrE.sqlDefaultErr, sqlError);
    return false;
  }
}
/**
 * Collects the {@code key} column of every remaining row as a list of strings.
 * Registers/throws an SSErr (sqlDefaultErr) on JDBC failure.
 */
protected static List<String> getStringsFromResult(final ResultSet resultSet, final String key) throws SSErr {
  try {
    final List<String> result = new ArrayList<>();
    while (resultSet.next()) {
      result.add(bindingStr(resultSet, key));
    }
    return result;
  } catch (SQLException sqlError) {
    SSServErrReg.regErrThrow(SSErrE.sqlDefaultErr, sqlError);
    return null;
  }
}
/**
 * Collects the {@code key} column of every remaining row as a list of text comments.
 * Registers/throws an SSErr (sqlDefaultErr) on JDBC failure.
 */
protected static List<SSTextComment> getTextCommentsFromResult(final ResultSet resultSet, final String key) throws SSErr {
  try {
    final List<SSTextComment> result = new ArrayList<>();
    while (resultSet.next()) {
      result.add(bindingStrToTextComment(resultSet, key));
    }
    return result;
  } catch (SQLException sqlError) {
    SSServErrReg.regErrThrow(SSErrE.sqlDefaultErr, sqlError);
    return null;
  }
}
/**
 * Collects the {@code key} column of every remaining row as a list of URIs.
 * Registers/throws an SSErr (sqlDefaultErr) on JDBC failure.
 */
protected static List<SSUri> getURIsFromResult(final ResultSet resultSet, final String key) throws SSErr {
  try {
    final List<SSUri> result = new ArrayList<>();
    while (resultSet.next()) {
      result.add(bindingStrToUri(resultSet, key));
    }
    return result;
  } catch (SQLException sqlError) {
    SSServErrReg.regErrThrow(SSErrE.sqlDefaultErr, sqlError);
    return null;
  }
}
/**
 * Collects the {@code key} column of every remaining row as generic entities
 * (type {@code SSEntityE.entity}). Registers/throws an SSErr (sqlDefaultErr) on JDBC failure.
 */
protected static List<SSEntity> getEntitiesFromResult(final ResultSet resultSet, final String key) throws SSErr {
  try {
    final List<SSEntity> result = new ArrayList<>();
    while (resultSet.next()) {
      final SSUri entityUri = bindingStrToUri(resultSet, key);
      result.add(SSEntity.get(entityUri, SSEntityE.entity));
    }
    return result;
  } catch (SQLException sqlError) {
    SSServErrReg.regErrThrow(SSErrE.sqlDefaultErr, sqlError);
    return null;
  }
}
/** Builds a generic entity (type {@code SSEntityE.entity}) from the current row's {@code key} column. */
protected static SSEntity getEntityFromResult(final ResultSet resultSet, final String key) throws SSErr {
  final SSUri entityUri = bindingStrToUri(resultSet, key);
  return SSEntity.get(entityUri, SSEntityE.entity);
}
/**
 * Builds an author from the current row's {@code binding} column; null if the column is empty.
 * The result's own author field is set to the hard-coded system author (http://sss.eu/system).
 */
protected static SSAuthor bindingStrToAuthor(final ResultSet resultSet, final String binding) throws SSErr {
  final String uriStr = bindingStr(resultSet, binding);
  if (SSStrU.isEmpty(uriStr)) {
    return null;
  }
  final SSAuthor author = SSAuthor.get(SSUri.get(uriStr));
  author.author = SSAuthor.get(SSUri.get("http://sss.eu/system"), SSLabel.get("system"));
  return author;
}
/** Builds an entity of the given {@code type} from the current row's {@code binding} column; null if empty. */
protected static SSEntity bindingStrToEntity(final ResultSet resultSet, final String binding, final SSEntityE type) throws SSErr {
  final String uriStr = bindingStr(resultSet, binding);
  if (SSStrU.isEmpty(uriStr)) {
    return null;
  }
  return SSEntity.get(SSUri.get(uriStr), type);
}
/** Converts the current row's {@code binding} column into a URI; null if the column is empty. */
protected static SSUri bindingStrToUri(final ResultSet resultSet, final String binding) throws SSErr {
  final String uriStr = bindingStr(resultSet, binding);
  if (SSStrU.isEmpty(uriStr)) {
    return null;
  }
  return SSUri.get(uriStr);
}
/** Converts the current row's {@code binding} column into a space enum value. */
protected static SSSpaceE bindingStrToSpace(final ResultSet resultSet, final String binding) throws SSErr {
  final String spaceStr = bindingStr(resultSet, binding);
  return SSSpaceE.get(spaceStr);
}
/**
 * Reads the string value of column {@code binding} from the current row.
 * Registers/throws an SSErr (sqlDefaultErr) if the JDBC access fails.
 * NOTE(review): a SQL NULL column yields null without an exception — the numeric
 * converters built on top of this assume non-null; TODO confirm callers guarantee that.
 */
protected static String bindingStr(final ResultSet resultSet, final String binding) throws SSErr {
  try {
    return resultSet.getString(binding);
  } catch (SQLException sqlError) {
    SSServErrReg.regErrThrow(SSErrE.sqlDefaultErr, sqlError);
    return null;
  }
}
/** Converts the current row's {@code binding} column into a label. */
protected static SSLabel bindingStrToLabel(final ResultSet resultSet, final String binding) throws SSErr {
  final String labelStr = bindingStr(resultSet, binding);
  return SSLabel.get(labelStr);
}
/** Parses the current row's {@code binding} column as a boolean (SQL NULL parses to false). */
protected static boolean bindingStrToBoolean(final ResultSet resultSet, final String binding) throws SSErr {
  final String booleanStr = bindingStr(resultSet, binding);
  return Boolean.parseBoolean(booleanStr);
}
/** Converts the current row's {@code binding} column into a text comment. */
protected static SSTextComment bindingStrToTextComment(final ResultSet resultSet, final String binding) throws SSErr {
  final String commentStr = bindingStr(resultSet, binding);
  return SSTextComment.get(commentStr);
}
/** Converts the current row's {@code binding} column into an entity-type enum value. */
protected static SSEntityE bindingStrToEntityType(final ResultSet resultSet, final String binding) throws SSErr {
  final String typeStr = bindingStr(resultSet, binding);
  return SSEntityE.get(typeStr);
}
/**
 * Parses the current row's {@code binding} column as a double.
 * NOTE(review): throws NullPointerException if the column is SQL NULL — TODO confirm callers guarantee non-null.
 */
protected static Double bindingStrToDouble(final ResultSet resultSet, final String binding) throws SSErr {
  final String doubleStr = bindingStr(resultSet, binding);
  return Double.parseDouble(doubleStr);
}
/**
 * Parses the current row's {@code binding} column as a float.
 * NOTE(review): throws NullPointerException if the column is SQL NULL — TODO confirm callers guarantee non-null.
 */
protected static Float bindingStrToFloat(final ResultSet resultSet, final String binding) throws SSErr {
  final String floatStr = bindingStr(resultSet, binding);
  return Float.parseFloat(floatStr);
}
/**
 * Parses the current row's {@code binding} column as an int.
 * NOTE(review): throws NullPointerException if the column is SQL NULL — TODO confirm callers guarantee non-null.
 */
protected static Integer bindingStrToInteger(final ResultSet resultSet, final String binding) throws SSErr {
  final String intStr = bindingStr(resultSet, binding);
  return Integer.parseInt(intStr);
}
/**
 * Parses the current row's {@code binding} column as a long.
 * NOTE(review): throws NullPointerException if the column is SQL NULL — TODO confirm callers guarantee non-null.
 */
protected static Long bindingStrToLong(final ResultSet resultSet, final String binding) throws SSErr {
  final String longStr = bindingStr(resultSet, binding);
  return Long.parseLong(longStr);
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.refactoring.convertToJava;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.GrListOrMap;
import org.jetbrains.plugins.groovy.lang.psi.api.signatures.GrClosureSignature;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.arguments.GrNamedArgument;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.types.GrClosureParameter;
import org.jetbrains.plugins.groovy.lang.psi.impl.signatures.GrClosureSignatureUtil;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import org.jetbrains.plugins.groovy.refactoring.GroovyRefactoringUtil;
import java.util.Arrays;
/**
* @author Maxim.Medvedev
*/
// Translates a Groovy call's argument list (positional, named, and trailing closure
// arguments) into Java source appended to a shared StringBuilder.
class ArgumentListGenerator {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.plugins.groovy.refactoring.convertToJava.ArgumentListGenerator");
// Output buffer the generated Java argument list is appended to.
private final StringBuilder myBuilder;
// Generates Java source for individual Groovy expressions into the same buffer.
private final ExpressionGenerator myExpressionGenerator;
ArgumentListGenerator(StringBuilder builder, ExpressionContext context) {
myBuilder = builder;
myExpressionGenerator = new ExpressionGenerator(builder, context);
}
// Appends "(arg1, arg2, ...)" for the given call. If the arguments can be mapped
// to the signature's parameters, each is generated per-parameter (with casts and
// multi-arg packing); otherwise falls back to generateSimple().
public void generate(@Nullable GrClosureSignature signature,
@NotNull GrExpression[] exprs,
@NotNull GrNamedArgument[] namedArgs,
@NotNull GrClosableBlock[] clArgs,
@NotNull GroovyPsiElement context) {
// First try a strict mapping of arguments to parameters...
GrClosureSignatureUtil.ArgInfo<PsiElement>[] argInfos =
signature == null ? null : GrClosureSignatureUtil.mapParametersToArguments(signature, namedArgs, exprs, clArgs, context, false, false);
// ...then retry with relaxed flags if the strict mapping failed.
if (argInfos == null && signature != null) {
argInfos = GrClosureSignatureUtil.mapParametersToArguments(signature, namedArgs, exprs, clArgs, context, true, true);
}
final PsiSubstitutor substitutor = signature == null ? PsiSubstitutor.EMPTY : signature.getSubstitutor();
// No usable mapping (or a hole in it): emit the arguments in source order instead.
if (argInfos == null || ArrayUtil.contains(null, argInfos)) {
generateSimple(exprs, namedArgs, clArgs, context, substitutor);
return;
}
final GrClosureParameter[] params = signature.getParameters();
final Project project = context.getProject();
myBuilder.append('(');
// Tracks whether a trailing ", " separator needs to be removed after the loop.
boolean hasCommaAtEnd = false;
for (int i = 0; i < argInfos.length; i++) {
GrClosureSignatureUtil.ArgInfo<PsiElement> arg = argInfos[i];
if (arg == null) continue;
final GrClosureParameter param = params[i];
// Multi-arg entries pack several arguments into one parameter (varargs/array/list).
boolean generated = arg.isMultiArg ? generateMultiArg(arg, param, substitutor, project, context) : generateSingeArg(arg, param);
if (generated) {
hasCommaAtEnd = true;
myBuilder.append(", ");
}
}
if (hasCommaAtEnd) {
// Drop the trailing ", " separator appended after the last generated argument.
myBuilder.delete(myBuilder.length() - 2, myBuilder.length());
//myBuilder.removeFromTheEnd(2);
}
myBuilder.append(')');
}
// Emits one positional argument, inserting a cast when the expression's declared
// type is not assignable to the parameter type. Returns false (emits nothing)
// when the argument is absent. NOTE: method name keeps its historical spelling.
private boolean generateSingeArg(GrClosureSignatureUtil.ArgInfo<PsiElement> arg, GrClosureParameter param) {
boolean argExists = !arg.args.isEmpty() && arg.args.get(0) != null;
if (argExists) {
final PsiElement actual = arg.args.get(0);
LOG.assertTrue(actual instanceof GrExpression);
final PsiType type = param.getType();
final PsiType declaredType = GenerationUtil.getDeclaredType((GrExpression)actual, myExpressionGenerator.getContext());
// Cast only when both types are known and method-call conversion would not apply.
if (type != null && declaredType != null && !TypesUtil.isAssignableByMethodCallConversion(type, declaredType,(GrExpression)actual
)) {
myBuilder.append('(');
TypeWriter.writeType(myBuilder, type, actual);
myBuilder.append(')');
}
((GrExpression)actual).accept(myExpressionGenerator);
return true;
}
else {
/*final GrExpression initializer = param.getInitializerGroovy();
if (initializer != null) {
initializer.accept(myExpressionGenerator);
}
else {
myBuilder.append("???"); //todo add something more consistent
}*/
return false;
}
}
// Emits several arguments mapped to a single parameter: inlined for varargs,
// wrapped in "new T[]{...}" for array parameters, or packed into a list/map
// expression otherwise. Returns false when nothing was emitted.
private boolean generateMultiArg(GrClosureSignatureUtil.ArgInfo<PsiElement> arg,
GrClosureParameter param,
PsiSubstitutor substitutor,
Project project,
GroovyPsiElement context) {
final PsiType type = param.getType();
//todo find out if param is array in case of it has declared type
if (type instanceof PsiEllipsisType) {
// Varargs: emit the arguments directly, comma-separated.
for (PsiElement element : arg.args) {
LOG.assertTrue(element instanceof GrExpression);
((GrExpression)element).accept(myExpressionGenerator);
myBuilder.append(", ");
}
if (!arg.args.isEmpty()) {
// Drop the trailing ", " separator.
myBuilder.delete(myBuilder.length() - 2, myBuilder.length());
return true;
}
else {
return false;
}
}
else if (type instanceof PsiArrayType) {
// Array parameter: wrap the arguments in an explicit array-creation expression.
myBuilder.append("new ");
if (arg.args.isEmpty()) {
// No arguments: pass an empty array of the component type.
TypeWriter.writeType(myBuilder, ((PsiArrayType)type).getComponentType(), context);
myBuilder.append("[0]");
}
else {
TypeWriter.writeTypeForNew(myBuilder, type, context);
myBuilder.append("{");
for (PsiElement element : arg.args) {
LOG.assertTrue(element instanceof GrExpression);
((GrExpression)element).accept(myExpressionGenerator);
myBuilder.append(", ");
}
if (!arg.args.isEmpty()) myBuilder.delete(myBuilder.length() - 2, myBuilder.length());
//if (arg.args.size() > 0) myBuilder.removeFromTheEnd(2);
myBuilder.append('}');
}
}
else {
// Any other parameter type: pack the arguments into a generated list/map literal.
final GrExpression listOrMap = GroovyRefactoringUtil.generateArgFromMultiArg(substitutor, arg.args, type, project);
LOG.assertTrue(listOrMap instanceof GrListOrMap);
listOrMap.accept(myExpressionGenerator);
}
return true;
}
// Fallback when arguments could not be mapped to parameters: emits named arguments
// as one list/map literal, then positional expressions, then closure arguments,
// all in source order.
private void generateSimple(GrExpression[] exprs,
GrNamedArgument[] namedArgs,
GrClosableBlock[] closures,
GroovyPsiElement context,
PsiSubstitutor substitutor) {
myBuilder.append('(');
if (namedArgs.length > 0) {
final GrExpression listOrMap =
GroovyRefactoringUtil.generateArgFromMultiArg(substitutor, Arrays.asList(namedArgs), null, context.getProject());
LOG.assertTrue(listOrMap instanceof GrListOrMap);
listOrMap.accept(myExpressionGenerator);
myBuilder.append(", ");
}
for (GrExpression expr : exprs) {
expr.accept(myExpressionGenerator);
myBuilder.append(", ");
}
for (GrClosableBlock closure : closures) {
closure.accept(myExpressionGenerator);
myBuilder.append(", ");
}
if (namedArgs.length + exprs.length + closures.length > 0) {
// Drop the trailing ", " separator.
myBuilder.delete(myBuilder.length()-2, myBuilder.length());
//myBuilder.removeFromTheEnd(2);
}
myBuilder.append(')');
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.management.exception.ManagementException;
import com.azure.resourcemanager.authorization.models.BuiltInRole;
import com.azure.resourcemanager.compute.models.CachingTypes;
import com.azure.resourcemanager.compute.models.KnownLinuxVirtualMachineImage;
import com.azure.resourcemanager.compute.models.PowerState;
import com.azure.resourcemanager.compute.models.VirtualMachine;
import com.azure.resourcemanager.compute.models.VirtualMachineImage;
import com.azure.resourcemanager.compute.models.VirtualMachineOffer;
import com.azure.resourcemanager.compute.models.VirtualMachinePublisher;
import com.azure.resourcemanager.compute.models.VirtualMachineSizeTypes;
import com.azure.resourcemanager.compute.models.VirtualMachineSku;
import com.azure.resourcemanager.containerinstance.models.Container;
import com.azure.resourcemanager.containerinstance.models.ContainerGroup;
import com.azure.resourcemanager.containerinstance.models.ContainerGroupRestartPolicy;
import com.azure.resourcemanager.containerinstance.models.Operation;
import com.azure.resourcemanager.containerinstance.models.ResourceIdentityType;
import com.azure.resourcemanager.msi.MsiManager;
import com.azure.resourcemanager.msi.models.Identity;
import com.azure.resourcemanager.network.models.Access;
import com.azure.resourcemanager.network.models.ConnectionMonitor;
import com.azure.resourcemanager.network.models.ConnectionMonitorQueryResult;
import com.azure.resourcemanager.network.models.ConnectivityCheck;
import com.azure.resourcemanager.network.models.Direction;
import com.azure.resourcemanager.network.models.FlowLogSettings;
import com.azure.resourcemanager.network.models.IpFlowProtocol;
import com.azure.resourcemanager.network.models.NetworkSecurityGroup;
import com.azure.resourcemanager.network.models.NetworkWatcher;
import com.azure.resourcemanager.network.models.NextHop;
import com.azure.resourcemanager.network.models.NextHopType;
import com.azure.resourcemanager.network.models.PacketCapture;
import com.azure.resourcemanager.network.models.PcProtocol;
import com.azure.resourcemanager.network.models.PcStatus;
import com.azure.resourcemanager.network.models.SecurityGroupView;
import com.azure.resourcemanager.network.models.Topology;
import com.azure.resourcemanager.network.models.VerificationIPFlow;
import com.azure.resourcemanager.storage.models.StorageAccountSkuType;
import com.azure.resourcemanager.test.utils.TestUtilities;
import com.azure.resourcemanager.resources.fluentcore.arm.CountryIsoCode;
import com.azure.core.management.Region;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import com.azure.core.management.profile.AzureProfile;
import com.azure.resourcemanager.resources.fluentcore.utils.HttpPipelineProvider;
import com.azure.resourcemanager.resources.fluentcore.utils.ResourceManagerUtils;
import com.azure.resourcemanager.resources.models.Deployment;
import com.azure.resourcemanager.resources.models.DeploymentMode;
import com.azure.resourcemanager.resources.models.GenericResource;
import com.azure.resourcemanager.resources.models.Location;
import com.azure.resourcemanager.resources.models.RegionCategory;
import com.azure.resourcemanager.resources.models.RegionType;
import com.azure.resourcemanager.resources.models.Subscription;
import com.azure.resourcemanager.storage.models.StorageAccount;
import java.io.IOException;
import java.text.MessageFormat;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import com.azure.resourcemanager.test.ResourceManagerTestBase;
import com.azure.resourcemanager.test.utils.TestDelayProvider;
import com.azure.resourcemanager.test.utils.TestIdentifierProvider;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
public class AzureResourceManagerTests extends ResourceManagerTestBase {
private AzureResourceManager azureResourceManager;
private MsiManager msiManager;
@Override
protected HttpPipeline buildHttpPipeline(
    TokenCredential credential,
    AzureProfile profile,
    HttpLogOptions httpLogOptions,
    List<HttpPipelinePolicy> policies,
    HttpClient httpClient) {
    // Honor the service's Retry-After header (interpreted in seconds) between retry attempts.
    final RetryPolicy retryPolicy = new RetryPolicy("Retry-After", ChronoUnit.SECONDS);
    return HttpPipelineProvider.buildHttpPipeline(
        credential, profile, null, httpLogOptions, null, retryPolicy, policies, httpClient);
}
@Override
protected void initializeClients(HttpPipeline httpPipeline, AzureProfile profile) {
// Disable real delays when replaying recorded sessions (playback mode).
ResourceManagerUtils.InternalRuntimeContext.setDelayProvider(new TestDelayProvider(!isPlaybackMode()));
ResourceManagerUtils.InternalRuntimeContext internalContext = new ResourceManagerUtils.InternalRuntimeContext();
// NOTE(review): the identifier function ignores its 'name' argument and always builds
// the provider from testResourceNamer — presumably intentional for record/playback determinism; confirm.
internalContext.setIdentifierFunction(name -> new TestIdentifierProvider(testResourceNamer));
AzureResourceManager.Authenticated azureAuthed = AzureResourceManager.authenticate(httpPipeline, profile);
azureResourceManager = azureAuthed.withDefaultSubscription();
this.msiManager = MsiManager.authenticate(httpPipeline, profile);
// Inject the deterministic runtime context into both managers under test.
setInternalContext(internalContext, azureResourceManager, msiManager);
}
@Override
protected void cleanUpResources() {
// Intentionally empty: the tests in this class appear to delete the resource groups
// they create themselves (via beginDeleteByName), so no shared teardown is done here.
}
/**
 * Stress-tests the resilience of ExpandableEnum to multi-threaded access by
 * concurrently reading from and writing to {@code CountryIsoCode} and
 * {@code PowerState}, then verifying the resulting value counts.
 *
 * @throws Exception if interrupted while waiting for the worker tasks to finish
 */
@Test
public void testExpandableEnum() throws Exception {
// Define some threads that read from enum
Runnable reader1 =
new Runnable() {
@Override
public void run() {
Assertions.assertEquals(CountryIsoCode.AFGHANISTAN, CountryIsoCode.fromString("AF"));
Assertions.assertEquals(CountryIsoCode.ANTARCTICA, CountryIsoCode.fromString("AQ"));
Assertions.assertEquals(CountryIsoCode.ANDORRA, CountryIsoCode.fromString("AD"));
Assertions.assertEquals(CountryIsoCode.ARGENTINA, CountryIsoCode.fromString("AR"));
Assertions.assertEquals(CountryIsoCode.ALBANIA, CountryIsoCode.fromString("AL"));
Assertions.assertEquals(CountryIsoCode.ALGERIA, CountryIsoCode.fromString("DZ"));
Assertions.assertEquals(CountryIsoCode.AMERICAN_SAMOA, CountryIsoCode.fromString("AS"));
Assertions.assertEquals(CountryIsoCode.ANGOLA, CountryIsoCode.fromString("AO"));
Assertions.assertEquals(CountryIsoCode.ANGUILLA, CountryIsoCode.fromString("AI"));
Assertions.assertEquals(CountryIsoCode.ANTIGUA_AND_BARBUDA, CountryIsoCode.fromString("AG"));
Assertions.assertEquals(CountryIsoCode.ARMENIA, CountryIsoCode.fromString("AM"));
Assertions.assertEquals(CountryIsoCode.ARUBA, CountryIsoCode.fromString("AW"));
Assertions.assertEquals(CountryIsoCode.AUSTRALIA, CountryIsoCode.fromString("AU"));
Assertions.assertEquals(CountryIsoCode.AUSTRIA, CountryIsoCode.fromString("AT"));
Assertions.assertEquals(CountryIsoCode.AZERBAIJAN, CountryIsoCode.fromString("AZ"));
Assertions.assertEquals(PowerState.DEALLOCATED, PowerState.fromString("PowerState/deallocated"));
Assertions.assertEquals(PowerState.DEALLOCATING, PowerState.fromString("PowerState/deallocating"));
Assertions.assertEquals(PowerState.RUNNING, PowerState.fromString("PowerState/running"));
}
};
Runnable reader2 =
new Runnable() {
@Override
public void run() {
Assertions.assertEquals(CountryIsoCode.BAHAMAS, CountryIsoCode.fromString("BS"));
Assertions.assertEquals(CountryIsoCode.BAHRAIN, CountryIsoCode.fromString("BH"));
Assertions.assertEquals(CountryIsoCode.BANGLADESH, CountryIsoCode.fromString("BD"));
Assertions.assertEquals(CountryIsoCode.BARBADOS, CountryIsoCode.fromString("BB"));
Assertions.assertEquals(CountryIsoCode.BELARUS, CountryIsoCode.fromString("BY"));
Assertions.assertEquals(CountryIsoCode.BELGIUM, CountryIsoCode.fromString("BE"));
Assertions.assertEquals(PowerState.STARTING, PowerState.fromString("PowerState/starting"));
Assertions.assertEquals(PowerState.STOPPED, PowerState.fromString("PowerState/stopped"));
Assertions.assertEquals(PowerState.STOPPING, PowerState.fromString("PowerState/stopping"));
Assertions.assertEquals(PowerState.UNKNOWN, PowerState.fromString("PowerState/unknown"));
}
};
// Define some threads that write to enum
Runnable writer1 =
new Runnable() {
@Override
public void run() {
for (int i = 1; i <= 10; i++) {
CountryIsoCode.fromString("CountryIsoCode" + i);
PowerState.fromString("PowerState" + i);
}
}
};
Runnable writer2 =
new Runnable() {
@Override
public void run() {
for (int i = 1; i <= 20; i++) {
CountryIsoCode.fromString("CountryIsoCode" + i);
PowerState.fromString("PowerState" + i);
}
}
};
// Start the threads and repeat a few times
ExecutorService threadPool = Executors.newFixedThreadPool(4);
for (int repeat = 0; repeat < 10; repeat++) {
threadPool.submit(reader1);
threadPool.submit(reader2);
threadPool.submit(writer1);
threadPool.submit(writer2);
}
// FIX: awaitTermination never returns early unless shutdown() has been called, so the
// original code always blocked for the full timeout and leaked the pool's threads.
// Shut the pool down, then verify all submitted tasks actually finished in time.
threadPool.shutdown();
Assertions.assertTrue(threadPool.awaitTermination(10, TimeUnit.SECONDS), "worker tasks did not finish in time");
// Verify country ISO codes
Collection<CountryIsoCode> countryIsoCodes = CountryIsoCode.values();
System.out.println("\n## Country ISO codes: " + countryIsoCodes.size());
for (CountryIsoCode value : countryIsoCodes) {
System.out.println(value.toString());
}
Assertions.assertEquals(257, countryIsoCodes.size());
// Verify power states
Collection<PowerState> powerStates = PowerState.values();
System.out.println("\n## Power states: " + powerStates.size());
for (PowerState value : powerStates) {
System.out.println(value.toString());
}
Assertions.assertEquals(27, powerStates.size());
}
/**
 * Tests ARM template deployments: creates a deployment from a public quick-start
 * template (two-subnet VNet), then starts deletion of its resource group.
 *
 * @throws IOException
 * @throws ManagementException
 */
@Test
public void testDeployments() throws Exception {
// Random suffix keeps resource names unique across test runs (record/playback aware).
String testId = azureResourceManager.deployments().manager().resourceManager().internalContext().randomResourceName("", 8);
PagedIterable<Deployment> deployments = azureResourceManager.deployments().list();
System.out.println("Deployments: " + TestUtilities.getSize(deployments));
// Deploy a remote template + parameters file in COMPLETE mode into a fresh resource group.
Deployment deployment =
azureResourceManager
.deployments()
.define("depl" + testId)
.withNewResourceGroup("rg" + testId, Region.US_WEST)
.withTemplateLink(
"https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vnet-two-subnets/azuredeploy.json",
"1.0.0.0")
.withParametersLink(
"https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vnet-two-subnets/azuredeploy.parameters.json",
"1.0.0.0")
.withMode(DeploymentMode.COMPLETE)
.create();
System.out.println("Created deployment: " + deployment.correlationId());
// Fire-and-forget cleanup: begin deleting the resource group without waiting.
azureResourceManager.resourceGroups().beginDeleteByName("rg" + testId);
}
/**
 * Tests basic generic resources retrieval: creates an NSG and a public IP in the
 * same resource group, then verifies they can be listed and fetched both by ID
 * and by (group, provider, type, name).
 *
 * @throws Exception
 */
@Test
public void testGenericResources() throws Exception {
// Create some resources
NetworkSecurityGroup nsg =
azureResourceManager
.networkSecurityGroups()
.define(azureResourceManager.networkSecurityGroups().manager().resourceManager().internalContext().randomResourceName("nsg", 13))
.withRegion(Region.US_EAST)
.withNewResourceGroup()
.create();
// Second resource in the same (auto-generated) resource group.
azureResourceManager
.publicIpAddresses()
.define(azureResourceManager.networkSecurityGroups().manager().resourceManager().internalContext().randomResourceName("pip", 13))
.withRegion(Region.US_EAST)
.withExistingResourceGroup(nsg.resourceGroupName())
.create();
PagedIterable<GenericResource> resources =
azureResourceManager.genericResources().listByResourceGroup(nsg.resourceGroupName());
// Exactly the two resources created above should be listed.
Assertions.assertEquals(2, TestUtilities.getSize(resources));
GenericResource firstResource = resources.iterator().next();
GenericResource resourceById = azureResourceManager.genericResources().getById(firstResource.id());
GenericResource resourceByDetails =
azureResourceManager
.genericResources()
.get(
firstResource.resourceGroupName(),
firstResource.resourceProviderNamespace(),
firstResource.resourceType(),
firstResource.name());
// Both lookup paths must resolve to the same resource (IDs compared case-insensitively).
Assertions.assertTrue(resourceById.id().equalsIgnoreCase(resourceByDetails.id()));
// Fire-and-forget cleanup: begin deleting the resource group without waiting.
azureResourceManager.resourceGroups().beginDeleteByName(nsg.resourceGroupName());
}
// /**
// * Tests management locks.
// * NOTE: This requires the service principal to have an Owner role on the subscription
// *
// * @throws Exception
// */
// @Test
// public void testManagementLocks() throws Exception {
// // Prepare a VM
// final String password = ResourceManagerUtils.InternalRuntimeContext.randomResourceName("P@s", 14);
// final String rgName = ResourceManagerUtils.InternalRuntimeContext.randomResourceName("rg", 15);
// final String vmName = ResourceManagerUtils.InternalRuntimeContext.randomResourceName("vm", 15);
// final String storageName = ResourceManagerUtils.InternalRuntimeContext.randomResourceName("st", 15);
// final String diskName = ResourceManagerUtils.InternalRuntimeContext.randomResourceName("dsk", 15);
// final String netName = ResourceManagerUtils.InternalRuntimeContext.randomResourceName("net", 15);
// final Region region = Region.US_EAST;
//
// ResourceGroup resourceGroup = null;
// ManagementLock lockGroup = null,
// lockVM = null,
// lockStorage = null,
// lockDiskRO = null,
// lockDiskDel = null,
// lockSubnet = null;
// try {
// resourceGroup = azure.resourceGroups().define(rgName)
// .withRegion(region)
// .create();
// Assertions.assertNotNull(resourceGroup);
//
// Creatable<Network> netDefinition = azure.networks().define(netName)
// .withRegion(region)
// .withExistingResourceGroup(resourceGroup)
// .withAddressSpace("10.0.0.0/28");
//
// // Define a VM for testing VM locks
// Creatable<VirtualMachine> vmDefinition = azure.virtualMachines().define(vmName)
// .withRegion(region)
// .withExistingResourceGroup(resourceGroup)
// .withNewPrimaryNetwork(netDefinition)
// .withPrimaryPrivateIPAddressDynamic()
// .withoutPrimaryPublicIPAddress()
// .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
// .withRootUsername("tester")
// .withRootPassword(password)
// .withSize(VirtualMachineSizeTypes.BASIC_A1);
//
// // Define a managed disk for testing locks on that
// Creatable<Disk> diskDefinition = azure.disks().define(diskName)
// .withRegion(region)
// .withExistingResourceGroup(resourceGroup)
// .withData()
// .withSizeInGB(100);
//
// // Define a storage account for testing locks on that
// Creatable<StorageAccount> storageDefinition = azure.storageAccounts().define(storageName)
// .withRegion(region)
// .withExistingResourceGroup(resourceGroup);
//
// // Create resources in parallel to save time and money
// Observable.merge(
// storageDefinition.createAsync().subscribeOn(Schedulers.io()),
// vmDefinition.createAsync().subscribeOn(Schedulers.io()),
// diskDefinition.createAsync().subscribeOn(Schedulers.io()))
// .toBlocking().subscribe();
//
// VirtualMachine vm = (VirtualMachine) vmDefinition;
// StorageAccount storage = (StorageAccount) storageDefinition;
// Disk disk = (Disk) diskDefinition;
// Network network = vm.getPrimaryNetworkInterface().primaryIPConfiguration().getNetwork();
// Subnet subnet = network.subnets().values().iterator().next();
//
// // Lock subnet
// Creatable<ManagementLock> lockSubnetDef = azure.managementLocks().define("subnetLock")
// .withLockedResource(subnet.inner().id())
// .withLevel(LockLevel.READ_ONLY);
//
// // Lock VM
// Creatable<ManagementLock> lockVMDef = azure.managementLocks().define("vmlock")
// .withLockedResource(vm)
// .withLevel(LockLevel.READ_ONLY)
// .withNotes("vm readonly lock");
//
// // Lock resource group
// Creatable<ManagementLock> lockGroupDef = azure.managementLocks().define("rglock")
// .withLockedResource(resourceGroup.id())
// .withLevel(LockLevel.CAN_NOT_DELETE);
//
// // Lock storage
// Creatable<ManagementLock> lockStorageDef = azure.managementLocks().define("stLock")
// .withLockedResource(storage)
// .withLevel(LockLevel.CAN_NOT_DELETE);
//
// // Create locks in parallel
// @SuppressWarnings("unchecked")
// CreatedResources<ManagementLock> created = azure.managementLocks().create(lockVMDef, lockGroupDef,
// lockStorageDef, lockSubnetDef);
// lockVM = created.get(lockVMDef.key());
// lockStorage = created.get(lockStorageDef.key());
// lockGroup = created.get(lockGroupDef.key());
// lockSubnet = created.get(lockSubnetDef.key());
//
// // Lock disk synchronously
// lockDiskRO = azure.managementLocks().define("diskLockRO")
// .withLockedResource(disk)
// .withLevel(LockLevel.READ_ONLY)
// .create();
//
// lockDiskDel = azure.managementLocks().define("diskLockDel")
// .withLockedResource(disk)
// .withLevel(LockLevel.CAN_NOT_DELETE)
// .create();
//
// // Verify VM lock
// Assertions.assertEquals(2, azure.managementLocks().listForResource(vm.id()).size());
//
// Assertions.assertNotNull(lockVM);
// lockVM = azure.managementLocks().getById(lockVM.id());
// Assertions.assertNotNull(lockVM);
// TestUtils.print(lockVM);
// Assertions.assertEquals(LockLevel.READ_ONLY, lockVM.level());
// Assertions.assertTrue(vm.id().equalsIgnoreCase(lockVM.lockedResourceId()));
//
// // Verify resource group lock
// Assertions.assertNotNull(lockGroup);
// lockGroup = azure.managementLocks().getByResourceGroup(resourceGroup.name(), "rglock");
// Assertions.assertNotNull(lockGroup);
// TestUtils.print(lockVM);
// Assertions.assertEquals(LockLevel.CAN_NOT_DELETE, lockGroup.level());
// Assertions.assertTrue(resourceGroup.id().equalsIgnoreCase(lockGroup.lockedResourceId()));
//
// // Verify storage account lock
// Assertions.assertEquals(2, azure.managementLocks().listForResource(storage.id()).size());
//
// Assertions.assertNotNull(lockStorage);
// lockStorage = azure.managementLocks().getById(lockStorage.id());
// Assertions.assertNotNull(lockStorage);
// TestUtils.print(lockStorage);
// Assertions.assertEquals(LockLevel.CAN_NOT_DELETE, lockStorage.level());
// Assertions.assertTrue(storage.id().equalsIgnoreCase(lockStorage.lockedResourceId()));
//
// // Verify disk lock
// Assertions.assertEquals(3, azure.managementLocks().listForResource(disk.id()).size());
//
// Assertions.assertNotNull(lockDiskRO);
// lockDiskRO = azure.managementLocks().getById(lockDiskRO.id());
// Assertions.assertNotNull(lockDiskRO);
// TestUtils.print(lockDiskRO);
// Assertions.assertEquals(LockLevel.READ_ONLY, lockDiskRO.level());
// Assertions.assertTrue(disk.id().equalsIgnoreCase(lockDiskRO.lockedResourceId()));
//
// Assertions.assertNotNull(lockDiskDel);
// lockDiskDel = azure.managementLocks().getById(lockDiskDel.id());
// Assertions.assertNotNull(lockDiskDel);
// TestUtils.print(lockDiskDel);
// Assertions.assertEquals(LockLevel.CAN_NOT_DELETE, lockDiskDel.level());
// Assertions.assertTrue(disk.id().equalsIgnoreCase(lockDiskDel.lockedResourceId()));
//
// // Verify subnet lock
// Assertions.assertEquals(2, azure.managementLocks().listForResource(network.id()).size());
//
// lockSubnet = azure.managementLocks().getById(lockSubnet.id());
// Assertions.assertNotNull(lockSubnet);
// TestUtils.print(lockSubnet);
// Assertions.assertEquals(LockLevel.READ_ONLY, lockSubnet.level());
// Assertions.assertTrue(subnet.inner().id().equalsIgnoreCase(lockSubnet.lockedResourceId()));
//
// // Verify lock collection
// List<ManagementLock> locksSubscription = azure.managementLocks().list();
// List<ManagementLock> locksGroup = azure.managementLocks().listByResourceGroup(vm.resourceGroupName());
// Assertions.assertNotNull(locksSubscription);
// Assertions.assertNotNull(locksGroup);
//
// int locksAllCount = locksSubscription.size();
// System.out.println("All locks: " + locksAllCount);
// Assertions.assertTrue(6 <= locksAllCount);
//
// int locksGroupCount = locksGroup.size();
// System.out.println("Group locks: " + locksGroupCount);
// Assertions.assertEquals(6, locksGroup.size());
// } catch (Exception ex) {
// ex.printStackTrace(System.out);
// } finally {
// if (resourceGroup != null) {
// if (lockGroup != null) {
// azure.managementLocks().deleteById(lockGroup.id());
// }
// if (lockVM != null) {
// azure.managementLocks().deleteById(lockVM.id());
// }
// if (lockDiskRO != null) {
// azure.managementLocks().deleteById(lockDiskRO.id());
// }
// if (lockDiskDel != null) {
// azure.managementLocks().deleteById(lockDiskDel.id());
// }
// if (lockStorage != null) {
// azure.managementLocks().deleteById(lockStorage.id());
// }
// if (lockSubnet != null) {
// azure.managementLocks().deleteById(lockSubnet.id());
// }
// azure.resourceGroups().beginDeleteByName(resourceGroup.name());
// }
// }
// }
//
/**
* Tests VM images.
*
* @throws IOException
* @throws ManagementException
*/
@Test
public void testVMImages() throws ManagementException, IOException {
PagedIterable<VirtualMachinePublisher> publishers =
azureResourceManager.virtualMachineImages().publishers().listByRegion(Region.US_WEST);
Assertions.assertTrue(TestUtilities.getSize(publishers) > 0);
for (VirtualMachinePublisher p : publishers.stream().limit(5).toArray(VirtualMachinePublisher[]::new)) {
System.out.println(String.format("Publisher name: %s, region: %s", p.name(), p.region()));
for (VirtualMachineOffer o : p.offers().list().stream().limit(5).toArray(VirtualMachineOffer[]::new)) {
System.out.println(String.format("\tOffer name: %s", o.name()));
for (VirtualMachineSku s : o.skus().list().stream().limit(5).toArray(VirtualMachineSku[]::new)) {
System.out.println(String.format("\t\tSku name: %s", s.name()));
}
}
}
// TODO: limit vm images by filter
PagedIterable<VirtualMachineImage> images = azureResourceManager.virtualMachineImages().listByRegion(Region.US_WEST);
Assertions.assertTrue(TestUtilities.getSize(images) > 0);
// Seems to help avoid connection refused error on subsequent mock test
ResourceManagerUtils.sleep(Duration.ofSeconds(2));
}
/**
* Tests the network security group implementation.
*
* @throws Exception
*/
@Test
public void testNetworkSecurityGroups() throws Exception {
new TestNSG().runTest(azureResourceManager.networkSecurityGroups(), azureResourceManager.resourceGroups());
}
/**
* Tests the inbound NAT rule support in load balancers.
*
* @throws Exception
*/
@Test
public void testLoadBalancersNatRules() throws Exception {
new TestLoadBalancer().new InternetWithNatRule(azureResourceManager.virtualMachines().manager())
.runTest(azureResourceManager.loadBalancers(), azureResourceManager.resourceGroups());
}
/**
* Tests the inbound NAT pool support in load balancers.
*
* @throws Exception
*/
@Test
public void testLoadBalancersNatPools() throws Exception {
new TestLoadBalancer().new InternetWithNatPool(azureResourceManager.virtualMachines().manager())
.runTest(azureResourceManager.loadBalancers(), azureResourceManager.resourceGroups());
}
/**
* Tests the minimum Internet-facing load balancer with a load balancing rule only
*
* @throws Exception
*/
@Test
public void testLoadBalancersInternetMinimum() throws Exception {
new TestLoadBalancer().new InternetMinimal(azureResourceManager.virtualMachines().manager())
.runTest(azureResourceManager.loadBalancers(), azureResourceManager.resourceGroups());
}
/**
* Tests the minimum Internet-facing load balancer with a NAT rule only
*
* @throws Exception
*/
@Test
public void testLoadBalancersNatOnly() throws Exception {
new TestLoadBalancer().new InternetNatOnly(azureResourceManager.virtualMachines().manager())
.runTest(azureResourceManager.loadBalancers(), azureResourceManager.resourceGroups());
}
/**
* Tests the minimum internal load balancer.
*
* @throws Exception
*/
@Test
public void testLoadBalancersInternalMinimum() throws Exception {
new TestLoadBalancer().new InternalMinimal(azureResourceManager.virtualMachines().manager())
.runTest(azureResourceManager.loadBalancers(), azureResourceManager.resourceGroups());
}
/**
* Tests the internal load balancer with availability zone.
*
* @throws Exception
*/
@Test
@Disabled("Though valid scenario, NRP is failing")
public void testLoadBalancersInternalWithAvailabilityZone() throws Exception {
new TestLoadBalancer().new InternalWithZone(azureResourceManager.virtualMachines().manager())
.runTest(azureResourceManager.loadBalancers(), azureResourceManager.resourceGroups());
}
    /**
     * Verifies that managed data disks can be removed and added on a deallocated VM via update().
     *
     * @throws Exception if VM creation or update fails
     */
    @Test
    public void testManagedDiskVMUpdate() throws Exception {
        ResourceManagerUtils.InternalRuntimeContext context = azureResourceManager.disks().manager().resourceManager().internalContext();
        final String rgName = context.randomResourceName("rg", 13);
        final String linuxVM2Name = context.randomResourceName("vm" + "-", 10);
        final String linuxVM2Pip = context.randomResourceName("pip" + "-", 18);
        // Create a Linux VM with two implicitly-created managed data disks.
        VirtualMachine linuxVM2 =
            azureResourceManager
                .virtualMachines()
                .define(linuxVM2Name)
                .withRegion(Region.US_EAST)
                .withNewResourceGroup(rgName)
                .withNewPrimaryNetwork("10.0.0.0/28")
                .withPrimaryPrivateIPAddressDynamic()
                .withNewPrimaryPublicIPAddress(linuxVM2Pip)
                .withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
                .withRootUsername("tester")
                .withRootPassword("Abcdef.123456!")
                // Begin: Managed data disks
                .withNewDataDisk(100)
                .withNewDataDisk(100, 1, CachingTypes.READ_WRITE)
                // End: Managed data disks
                .withSize(VirtualMachineSizeTypes.STANDARD_D3_V2)
                .create();
        // Disk topology changes are exercised on a deallocated VM.
        linuxVM2.deallocate();
        // NOTE(review): withoutDataDisk(2) targets LUN 2, but only LUN 1 is explicitly
        // assigned above — confirm which LUN the first withNewDataDisk(100) received.
        linuxVM2.update().withoutDataDisk(2).withNewDataDisk(200).apply();
        // Clean-up is fire-and-forget (beginDeleteByName does not wait for completion).
        azureResourceManager.resourceGroups().beginDeleteByName(rgName);
    }
/**
* Tests the public IP address implementation.
*
* @throws Exception
*/
@Test
public void testPublicIPAddresses() throws Exception {
new TestPublicIPAddress().runTest(azureResourceManager.publicIpAddresses(), azureResourceManager.resourceGroups());
}
    /**
     * Tests the public IP prefix implementation.
     *
     * @throws Exception if the scenario fails
     */
    @Test
    public void testPublicIPPrefixes() throws Exception {
        new TestPublicIPPrefix().runTest(azureResourceManager.publicIpPrefixes(), azureResourceManager.resourceGroups());
    }
/**
* Tests the availability set implementation.
*
* @throws Exception
*/
@Test
public void testAvailabilitySets() throws Exception {
new TestAvailabilitySet().runTest(azureResourceManager.availabilitySets(), azureResourceManager.resourceGroups());
}
/**
* Tests the virtual network implementation.
*
* @throws Exception
*/
@Test
public void testNetworks() throws Exception {
new TestNetwork().new WithSubnets().runTest(azureResourceManager.networks(), azureResourceManager.resourceGroups());
}
    /**
     * Tests a virtual network configured to allow access from a service to a subnet
     * (service endpoint scenario).
     *
     * @throws Exception if the scenario fails
     */
    @Test
    public void testNetworkWithAccessFromServiceToSubnet() throws Exception {
        new TestNetwork().new WithAccessFromServiceToSubnet().runTest(azureResourceManager.networks(), azureResourceManager.resourceGroups());
    }
/**
* Tests virtual network peering
*
* @throws Exception
*/
@Test
public void testNetworkPeerings() throws Exception {
new TestNetwork().new WithPeering().runTest(azureResourceManager.networks(), azureResourceManager.resourceGroups());
}
/**
* Tests virtual network with DDoS protection plan
*
* @throws Exception
*/
@Test
public void testDdosAndVmProtection() throws Exception {
new TestNetwork().new WithDDosProtectionPlanAndVmProtection().runTest(azureResourceManager.networks(), azureResourceManager.resourceGroups());
}
/**
* Tests updateTags for virtual network.
*
* @throws Exception
*/
@Test
public void testNetworkUpdateTags() throws Exception {
new TestNetwork().new WithUpdateTags().runTest(azureResourceManager.networks(), azureResourceManager.resourceGroups());
}
/**
* Tests route tables.
*
* @throws Exception
*/
@Test
public void testRouteTables() throws Exception {
new TestRouteTables().new Minimal().runTest(azureResourceManager.routeTables(), azureResourceManager.resourceGroups());
}
/** Tests the regions enum. */
@Test
public void testRegions() {
// Show built-in regions
System.out.println("Built-in regions list:");
int regionsCount = Region.values().size();
for (Region region : Region.values()) {
System.out.println("Name: " + region.name() + ", Label: " + region.label());
}
// Look up built-in region
Region region = Region.fromName("westus");
Assertions.assertTrue(region == Region.US_WEST);
// Add a region
Region region2 = Region.fromName("madeUpRegion");
Assertions.assertNotNull(region2);
Assertions.assertTrue(region2.name().equalsIgnoreCase("madeUpRegion"));
Region region3 = Region.fromName("madeupregion");
Assertions.assertEquals(region3, region2);
Assertions.assertEquals(Region.values().size(), regionsCount + 1);
}
/**
* Tests the network interface implementation.
*
* @throws Exception
*/
@Test
public void testNetworkInterfaces() throws Exception {
new TestNetworkInterface().runTest(azureResourceManager.networkInterfaces(), azureResourceManager.resourceGroups());
}
/**
* Tests the network watcher implementation.
*
* @throws Exception
*/
@Test
public void testNetworkWatchers() throws Exception {
new TestNetworkWatcher().runTest(azureResourceManager.networkWatchers(), azureResourceManager.resourceGroups());
}
    /**
     * Exercises the Network Watcher diagnostic features end to end: connection monitor,
     * topology, security group view, flow log settings, next hop, IP flow verify,
     * packet capture and connectivity check. Two resource groups are created (one by the
     * fixture, one by the watcher) and both are deleted asynchronously in the finally block.
     *
     * @throws Exception if any service call fails
     */
    @Test
    @Disabled("Not stable test cases")
    public void testNetworkWatcherFunctions() throws Exception {
        String nwrg = null;
        String tnwrg = null;
        try {
            TestNetworkWatcher tnw = new TestNetworkWatcher();
            NetworkWatcher nw = tnw.createResource(azureResourceManager.networkWatchers());
            tnwrg = tnw.groupName();
            nwrg = nw.resourceGroupName();
            // pre-create VMs to show topology on
            VirtualMachine[] virtualMachines =
                tnw
                    .ensureNetwork(
                        azureResourceManager.networkWatchers().manager().networks(),
                        azureResourceManager.virtualMachines(),
                        azureResourceManager.networkInterfaces());
            // Connection monitor: created without auto-start, then started explicitly below.
            ConnectionMonitor connectionMonitor =
                nw
                    .connectionMonitors()
                    .define("NewConnectionMonitor")
                    .withSourceId(virtualMachines[0].id())
                    .withDestinationId(virtualMachines[1].id())
                    .withDestinationPort(80)
                    .withTag("tag1", "value1")
                    .withoutAutoStart()
                    .withMonitoringInterval(35)
                    .create();
            Assertions.assertEquals("value1", connectionMonitor.tags().get("tag1"));
            Assertions.assertEquals(35, connectionMonitor.monitoringIntervalInSeconds());
            Assertions.assertEquals("NotStarted", connectionMonitor.monitoringStatus());
            Assertions.assertEquals("NewConnectionMonitor", connectionMonitor.name());
            connectionMonitor.start();
            Assertions.assertEquals("Running", connectionMonitor.monitoringStatus());
            // Topology of the resource group containing the pre-created VMs.
            Topology topology = nw.topology().withTargetResourceGroup(virtualMachines[0].resourceGroupName()).execute();
            Assertions.assertEquals(11, topology.resources().size());
            Assertions
                .assertTrue(
                    topology
                        .resources()
                        .containsKey(virtualMachines[0].getPrimaryNetworkInterface().networkSecurityGroupId()));
            Assertions
                .assertEquals(
                    4, topology.resources().get(virtualMachines[0].primaryNetworkInterfaceId()).associations().size());
            // Security group view for the first VM.
            SecurityGroupView sgViewResult = nw.getSecurityGroupView(virtualMachines[0].id());
            Assertions.assertEquals(1, sgViewResult.networkInterfaces().size());
            Assertions
                .assertEquals(
                    virtualMachines[0].primaryNetworkInterfaceId(),
                    sgViewResult.networkInterfaces().keySet().iterator().next());
            // Flow log settings: enable logging into a fixture-provided storage account.
            FlowLogSettings flowLogSettings =
                nw.getFlowLogSettings(virtualMachines[0].getPrimaryNetworkInterface().networkSecurityGroupId());
            StorageAccount storageAccount = tnw.ensureStorageAccount(azureResourceManager.storageAccounts());
            flowLogSettings
                .update()
                .withLogging()
                .withStorageAccount(storageAccount.id())
                .withRetentionPolicyDays(5)
                .withRetentionPolicyEnabled()
                .apply();
            Assertions.assertEquals(true, flowLogSettings.enabled());
            Assertions.assertEquals(5, flowLogSettings.retentionDays());
            Assertions.assertEquals(storageAccount.id(), flowLogSettings.storageId());
            // Next hop: traffic from the VM to a public address should route to the Internet.
            NextHop nextHop =
                nw
                    .nextHop()
                    .withTargetResourceId(virtualMachines[0].id())
                    .withSourceIpAddress("10.0.0.4")
                    .withDestinationIpAddress("8.8.8.8")
                    .execute();
            Assertions.assertEquals("System Route", nextHop.routeTableId());
            Assertions.assertEquals(NextHopType.INTERNET, nextHop.nextHopType());
            Assertions.assertNull(nextHop.nextHopIpAddress());
            // IP flow verify: outbound TCP 443 should be allowed by the default rules.
            VerificationIPFlow verificationIPFlow =
                nw
                    .verifyIPFlow()
                    .withTargetResourceId(virtualMachines[0].id())
                    .withDirection(Direction.OUTBOUND)
                    .withProtocol(IpFlowProtocol.TCP)
                    .withLocalIPAddress("10.0.0.4")
                    .withRemoteIPAddress("8.8.8.8")
                    .withLocalPort("443")
                    .withRemotePort("443")
                    .execute();
            Assertions.assertEquals(Access.ALLOW, verificationIPFlow.access());
            Assertions
                .assertTrue(
                    "defaultSecurityRules/AllowInternetOutBound".equalsIgnoreCase(verificationIPFlow.ruleName()));
            // test packet capture
            PagedIterable<PacketCapture> packetCaptures = nw.packetCaptures().list();
            Assertions.assertEquals(0, TestUtilities.getSize(packetCaptures));
            PacketCapture packetCapture =
                nw
                    .packetCaptures()
                    .define("NewPacketCapture")
                    .withTarget(virtualMachines[0].id())
                    .withStorageAccountId(storageAccount.id())
                    .withTimeLimitInSeconds(1500)
                    .definePacketCaptureFilter()
                    .withProtocol(PcProtocol.TCP)
                    .withLocalIpAddresses(Arrays.asList("127.0.0.1", "127.0.0.5"))
                    .attach()
                    .create();
            packetCaptures = nw.packetCaptures().list();
            Assertions.assertEquals(1, TestUtilities.getSize(packetCaptures));
            Assertions.assertEquals("NewPacketCapture", packetCapture.name());
            Assertions.assertEquals(1500, packetCapture.timeLimitInSeconds());
            Assertions.assertEquals(PcProtocol.TCP, packetCapture.filters().get(0).protocol());
            Assertions.assertEquals("127.0.0.1;127.0.0.5", packetCapture.filters().get(0).localIpAddress());
            // Assertions.assertEquals("Running",
            // packetCapture.getStatus().packetCaptureStatus().toString());
            packetCapture.stop();
            Assertions.assertEquals(PcStatus.STOPPED, packetCapture.getStatus().packetCaptureStatus());
            nw.packetCaptures().deleteByName(packetCapture.name());
            // Connectivity check from VM[0] to VM[1]:80; result is not asserted (see note below).
            ConnectivityCheck connectivityCheck =
                nw
                    .checkConnectivity()
                    .toDestinationResourceId(virtualMachines[1].id())
                    .toDestinationPort(80)
                    .fromSourceVirtualMachine(virtualMachines[0].id())
                    .execute();
            // Assertions.assertEquals("Reachable", connectivityCheck.connectionStatus().toString()); //
            // not sure why it is Unknown now
            // NOTE(review): queryResult is never asserted — only exercises that query() succeeds.
            ConnectionMonitorQueryResult queryResult = connectionMonitor.query();
            azureResourceManager.virtualMachines().deleteById(virtualMachines[1].id());
            topology.execute();
            // Assertions.assertEquals(10, topology.resources().size()); // not sure why it is 18 now
        } finally {
            if (nwrg != null) {
                azureResourceManager.resourceGroups().beginDeleteByName(nwrg);
            }
            if (tnwrg != null) {
                azureResourceManager.resourceGroups().beginDeleteByName(tnwrg);
            }
        }
    }
/**
* Tests the local network gateway implementation.
*
* @throws Exception
*/
@Test
public void testLocalNetworkGateways() throws Exception {
new TestLocalNetworkGateway().runTest(azureResourceManager.localNetworkGateways(), azureResourceManager.resourceGroups());
}
/**
* Tests the express route circuit implementation.
*
* @throws Exception
*/
@Test
public void testExpressRouteCircuits() throws Exception {
new TestExpressRouteCircuit().new Basic().runTest(azureResourceManager.expressRouteCircuits(), azureResourceManager.resourceGroups());
}
/**
* Tests the express route circuit peerings implementation.
*
* @throws Exception
*/
@Test
public void testExpressRouteCircuitPeering() throws Exception {
new TestExpressRouteCircuit().new ExpressRouteCircuitPeering()
.runTest(azureResourceManager.expressRouteCircuits(), azureResourceManager.resourceGroups());
}
/**
* Tests virtual machines.
*
* @throws Exception
*/
@Test
@Disabled("osDiskSize is returned as 127 instead of 128 - known service bug")
public void testVirtualMachines() throws Exception {
// Future: This method needs to have a better specific name since we are going to include unit test for
// different vm scenarios.
new TestVirtualMachine().runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
/**
* Tests the virtual machine data disk implementation.
*
* @throws Exception
*/
@Test
public void testVirtualMachineDataDisk() throws Exception {
new TestVirtualMachineDataDisk().runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
/**
* Tests the virtual machine network interface implementation.
*
* @throws Exception
*/
@Test
public void testVirtualMachineNics() throws Exception {
new TestVirtualMachineNics(azureResourceManager.networks().manager()).runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
/**
* Tests virtual machine support for SSH.
*
* @throws Exception
*/
@Test
public void testVirtualMachineSSh() throws Exception {
new TestVirtualMachineSsh(azureResourceManager.publicIpAddresses()).runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
/**
* Tests virtual machine sizes.
*
* @throws Exception
*/
@Test
public void testVirtualMachineSizes() throws Exception {
new TestVirtualMachineSizes().runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
@Test
public void testVirtualMachineCustomData() throws Exception {
new TestVirtualMachineCustomData(azureResourceManager.publicIpAddresses())
.runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
@Test
public void testVirtualMachineInAvailabilitySet() throws Exception {
new TestVirtualMachineInAvailabilitySet().runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
@Test
public void testVirtualMachineSyncPoller() throws Exception {
new TestVirtualMachineSyncPoller(azureResourceManager.networks().manager())
.runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
/**
* Tests subscription listing.
*
* @throws Exception
*/
@Test
public void listSubscriptions() throws Exception {
Assertions.assertTrue(0 < TestUtilities.getSize(azureResourceManager.subscriptions().list()));
Subscription subscription = azureResourceManager.getCurrentSubscription();
Assertions.assertNotNull(subscription);
Assertions.assertTrue(azureResourceManager.subscriptionId().equalsIgnoreCase(subscription.subscriptionId()));
}
/**
* Tests location listing.
*
* @throws Exception
*/
@Test
public void listLocations() throws Exception {
Subscription subscription = azureResourceManager.getCurrentSubscription();
Assertions.assertNotNull(subscription);
for (Location location : subscription.listLocations()) {
Region region = Region.fromName(location.name());
Assertions.assertNotNull(region, "Could not find region " + location.name());
Assertions.assertEquals(region, location.region());
Assertions.assertEquals(region.name().toLowerCase(), location.name().toLowerCase());
}
Location location = subscription.getLocationByRegion(Region.US_WEST);
Assertions.assertNotNull(location);
Assertions.assertTrue(Region.US_WEST.name().equalsIgnoreCase(location.name()));
}
/**
* Tests resource group listing.
*
* @throws Exception
*/
@Test
public void listResourceGroups() throws Exception {
int groupCount = TestUtilities.getSize(azureResourceManager.resourceGroups().list());
System.out.println(String.format("Group count: %s", groupCount));
Assertions.assertTrue(0 < groupCount);
}
/**
* Tests storage account listing.
*
* @throws Exception
*/
@Test
public void listStorageAccounts() throws Exception {
Assertions.assertTrue(0 < TestUtilities.getSize(azureResourceManager.storageAccounts().list()));
}
@Test
public void createStorageAccount() throws Exception {
String storageAccountName = generateRandomResourceName("testsa", 12);
StorageAccount storageAccount =
azureResourceManager
.storageAccounts()
.define(storageAccountName)
.withRegion(Region.ASIA_EAST)
.withNewResourceGroup()
.withSku(StorageAccountSkuType.PREMIUM_LRS)
.create();
Assertions.assertEquals(storageAccount.name(), storageAccountName);
azureResourceManager.resourceGroups().beginDeleteByName(storageAccount.resourceGroupName());
}
// @Test
// public void testBatchAccount() throws Exception {
// new TestBatch().runTest(azure.batchAccounts(), azure.resourceGroups());
// }
@Test
public void testTrafficManager() throws Exception {
new TestTrafficManager(azureResourceManager.publicIpAddresses())
.runTest(azureResourceManager.trafficManagerProfiles(), azureResourceManager.resourceGroups());
}
@Test
public void testRedis() throws Exception {
new TestRedis().runTest(azureResourceManager.redisCaches(), azureResourceManager.resourceGroups());
}
// @Test
// public void testCdnManager() throws Exception {
// new TestCdn()
// .runTest(azure.cdnProfiles(), azure.resourceGroups());
// }
@Test
public void testDnsZones() throws Exception {
new TestDns().runTest(azureResourceManager.dnsZones(), azureResourceManager.resourceGroups());
}
@Test
public void testPrivateDnsZones() throws Exception {
new TestPrivateDns().runTest(azureResourceManager.privateDnsZones(), azureResourceManager.resourceGroups());
}
@Test
public void testSqlServer() throws Exception {
new TestSql().runTest(azureResourceManager.sqlServers(), azureResourceManager.resourceGroups());
}
@Test
public void testResourceStreaming() throws Exception {
new TestResourceStreaming(azureResourceManager.storageAccounts()).runTest(azureResourceManager.virtualMachines(), azureResourceManager.resourceGroups());
}
@Test
public void testKubernetesCluster() throws Exception {
new TestKubernetesCluster().runTest(azureResourceManager.kubernetesClusters(), azureResourceManager.resourceGroups());
}
@Test
public void testContainerInstanceWithPublicIpAddressWithSystemAssignedMsi() throws Exception {
new TestContainerInstanceWithPublicIpAddressWithSystemAssignedMSI()
.runTest(azureResourceManager.containerGroups(), azureResourceManager.resourceGroups(), azureResourceManager.subscriptionId());
}
    /**
     * Creates a two-container group (tomcat + nginx) with two user-assigned managed
     * identities and verifies its ports, containers, identity wiring, tags, DNS prefix
     * and listing operations.
     *
     * @throws Exception if any service call fails
     */
    @Test
    public void testContainerInstanceWithPublicIpAddressWithUserAssignedMsi() throws Exception {
        final String cgName = generateRandomResourceName("aci", 10);
        final String rgName = generateRandomResourceName("rgaci", 10);
        String identityName1 = generateRandomResourceName("msi-id", 15);
        String identityName2 = generateRandomResourceName("msi-id", 15);
        // First identity is created up front; the second stays a Creatable so that the
        // container group create() provisions it as part of the same flow.
        final Identity createdIdentity =
            msiManager
                .identities()
                .define(identityName1)
                .withRegion(Region.US_WEST)
                .withNewResourceGroup(rgName)
                .withAccessToCurrentResourceGroup(BuiltInRole.READER)
                .create();
        Creatable<Identity> creatableIdentity =
            msiManager
                .identities()
                .define(identityName2)
                .withRegion(Region.US_WEST)
                .withExistingResourceGroup(rgName)
                .withAccessToCurrentResourceGroup(BuiltInRole.CONTRIBUTOR);
        // NOTE(review): dnsServers is built but never passed into the definition below —
        // confirm whether DNS configuration was intended here.
        List<String> dnsServers = new ArrayList<String>();
        dnsServers.add("dnsServer1");
        ContainerGroup containerGroup =
            azureResourceManager
                .containerGroups()
                .define(cgName)
                .withRegion(Region.US_EAST2)
                .withExistingResourceGroup(rgName)
                .withLinux()
                .withPublicImageRegistryOnly()
                .withEmptyDirectoryVolume("emptydir1")
                .defineContainerInstance("tomcat")
                .withImage("tomcat")
                .withExternalTcpPort(8080)
                .withCpuCoreCount(1)
                .withEnvironmentVariable("ENV1", "value1")
                .attach()
                .defineContainerInstance("nginx")
                .withImage("nginx")
                .withExternalTcpPort(80)
                .withEnvironmentVariableWithSecuredValue("ENV2", "securedValue1")
                .attach()
                .withExistingUserAssignedManagedServiceIdentity(createdIdentity)
                .withNewUserAssignedManagedServiceIdentity(creatableIdentity)
                .withRestartPolicy(ContainerGroupRestartPolicy.NEVER)
                .withDnsPrefix(cgName)
                .withTag("tag1", "value1")
                .create();
        // Group-level properties.
        Assertions.assertEquals(cgName, containerGroup.name());
        Assertions.assertEquals("Linux", containerGroup.osType().toString());
        Assertions.assertEquals(0, containerGroup.imageRegistryServers().size());
        Assertions.assertEquals(1, containerGroup.volumes().size());
        Assertions.assertNotNull(containerGroup.volumes().get("emptydir1"));
        Assertions.assertNotNull(containerGroup.ipAddress());
        Assertions.assertTrue(containerGroup.isIPAddressPublic());
        Assertions.assertEquals(2, containerGroup.externalTcpPorts().length);
        Assertions.assertEquals(2, containerGroup.externalPorts().size());
        Assertions.assertEquals(2, containerGroup.externalTcpPorts().length);
        Assertions.assertEquals(8080, containerGroup.externalTcpPorts()[0]);
        Assertions.assertEquals(80, containerGroup.externalTcpPorts()[1]);
        Assertions.assertEquals(2, containerGroup.containers().size());
        // Per-container properties.
        Container tomcatContainer = containerGroup.containers().get("tomcat");
        Assertions.assertNotNull(tomcatContainer);
        Container nginxContainer = containerGroup.containers().get("nginx");
        Assertions.assertNotNull(nginxContainer);
        Assertions.assertEquals("tomcat", tomcatContainer.name());
        Assertions.assertEquals("tomcat", tomcatContainer.image());
        Assertions.assertEquals(1.0, tomcatContainer.resources().requests().cpu(), .1);
        Assertions.assertEquals(1.5, tomcatContainer.resources().requests().memoryInGB(), .1);
        Assertions.assertEquals(1, tomcatContainer.ports().size());
        Assertions.assertEquals(8080, tomcatContainer.ports().get(0).port());
        Assertions.assertNull(tomcatContainer.volumeMounts());
        Assertions.assertNull(tomcatContainer.command());
        Assertions.assertNotNull(tomcatContainer.environmentVariables());
        Assertions.assertEquals(1, tomcatContainer.environmentVariables().size());
        Assertions.assertEquals("nginx", nginxContainer.name());
        Assertions.assertEquals("nginx", nginxContainer.image());
        Assertions.assertEquals(1.0, nginxContainer.resources().requests().cpu(), .1);
        Assertions.assertEquals(1.5, nginxContainer.resources().requests().memoryInGB(), .1);
        Assertions.assertEquals(1, nginxContainer.ports().size());
        Assertions.assertEquals(80, nginxContainer.ports().get(0).port());
        Assertions.assertNull(nginxContainer.volumeMounts());
        Assertions.assertNull(nginxContainer.command());
        Assertions.assertNotNull(nginxContainer.environmentVariables());
        Assertions.assertEquals(1, nginxContainer.environmentVariables().size());
        // Identity wiring: user-assigned only, no system-assigned principal.
        Assertions.assertTrue(containerGroup.tags().containsKey("tag1"));
        Assertions.assertEquals(ContainerGroupRestartPolicy.NEVER, containerGroup.restartPolicy());
        Assertions.assertTrue(containerGroup.isManagedServiceIdentityEnabled());
        Assertions.assertEquals(ResourceIdentityType.USER_ASSIGNED, containerGroup.managedServiceIdentityType());
        Assertions.assertNull(containerGroup.systemAssignedManagedServiceIdentityPrincipalId()); // No Local MSI enabled
        // Ensure the "User Assigned (External) MSI" id can be retrieved from the virtual machine
        //
        Set<String> emsiIds = containerGroup.userAssignedManagedServiceIdentityIds();
        Assertions.assertNotNull(emsiIds);
        Assertions.assertEquals(2, emsiIds.size());
        Assertions.assertEquals(cgName, containerGroup.dnsPrefix());
        // TODO: add network and dns testing when questions have been answered
        // Listing and refresh round-trips.
        ContainerGroup containerGroup2 = azureResourceManager.containerGroups().getByResourceGroup(rgName, cgName);
        List<ContainerGroup> containerGroupList =
            azureResourceManager.containerGroups().listByResourceGroup(rgName).stream().collect(Collectors.toList());
        Assertions.assertTrue(containerGroupList.size() > 0);
        containerGroup.refresh();
        Set<Operation> containerGroupOperations =
            azureResourceManager.containerGroups().listOperations().stream().collect(Collectors.toSet());
        // Number of supported operation can change hence don't assert with a predefined number.
        Assertions.assertTrue(containerGroupOperations.size() > 0);
    }
@Disabled("Cannot run test due to unknown parameter")
@Test
public void testContainerInstanceWithPrivateIpAddress() throws Exception {
// LIVE ONLY TEST BECAUSE IT REQUIRES SUBSCRIPTION ID
if (!isPlaybackMode()) {
new TestContainerInstanceWithPrivateIpAddress()
.runTest(azureResourceManager.containerGroups(), azureResourceManager.resourceGroups(), azureResourceManager.subscriptionId());
}
}
@Test
public void testContainerRegistry() throws Exception {
new TestContainerRegistry().runTest(azureResourceManager.containerRegistries(), azureResourceManager.resourceGroups());
}
@Test
public void testCosmosDB() throws Exception {
new TestCosmosDB().runTest(azureResourceManager.cosmosDBAccounts(), azureResourceManager.resourceGroups());
}
// @Test
// public void testSearchServiceFreeSku() throws Exception {
// new TestSearchService.SearchServiceFreeSku()
// .runTest(azure.searchServices(), azure.resourceGroups());
// }
// @Test
// public void testSearchServiceBasicSku() throws Exception {
// new TestSearchService.SearchServiceBasicSku()
// .runTest(azure.searchServices(), azure.resourceGroups());
// }
//
// @Test
// public void testSearchServiceStandardSku() throws Exception {
// new TestSearchService.SearchServiceStandardSku()
// .runTest(azure.searchServices(), azure.resourceGroups());
// }
//
// @Test
// public void testSearchServiceAnySku() throws Exception {
// new TestSearchService.SearchServiceAnySku()
// .runTest(azure.searchServices(), azure.resourceGroups());
// }
    /**
     * Developer utility (normally disabled): scans the subscription's physical locations,
     * grouped by geography, and emits Java source for any {@code Region} constant that is
     * not yet declared. Fails (with the generated code as the message) when constants are
     * missing, so the output can be copied into the Region class.
     */
    @Test
    @Disabled("Util to generate missing regions")
    public void generateMissingRegion() {
        // Please double check generated code and make adjustment e.g. GERMANY_WEST_CENTRAL -> GERMANY_WESTCENTRAL
        StringBuilder sb = new StringBuilder();
        PagedIterable<Location> locations =
            azureResourceManager
                .getCurrentSubscription()
                .listLocations(); // note the region is not complete since it depends on current subscription
        List<Location> locationGroupByGeography = new ArrayList<>();
        List<String> geographies = Arrays.asList(
            "US", "Canada", "South America", "Europe", "Asia Pacific", "Middle East", "Africa");
        // First pass: collect physical locations in the preferred geography order.
        // NOTE(review): the PagedIterable is re-iterated once per geography — presumably
        // acceptable for a disabled utility, but each pass may re-fetch pages.
        for (String geography : geographies) {
            for (Location location : locations) {
                if (location.regionType() == RegionType.PHYSICAL) {
                    if (geography.equals(location.innerModel().metadata().geographyGroup())) {
                        locationGroupByGeography.add(location);
                    }
                }
            }
        }
        // Second pass: append physical locations from any geography not listed above.
        for (Location location : locations) {
            if (location.regionType() == RegionType.PHYSICAL) {
                if (!geographies.contains(location.innerModel().metadata().geographyGroup())) {
                    locationGroupByGeography.add(location);
                }
            }
        }
        // Emit a constant declaration for every location without a matching Region.
        for (Location location : locationGroupByGeography) {
            if (location.regionType() == RegionType.PHYSICAL) {
                Region region = findByLabelOrName(location.name());
                if (region == null) {
                    sb
                        .append("\n").append("/**")
                        .append("\n").append(MessageFormat.format(
                            " * {0} ({1})",
                            location.displayName(),
                            location.innerModel().metadata().geographyGroup()))
                        .append(location.innerModel().metadata().regionCategory() == RegionCategory.RECOMMENDED
                            ? " (recommended)" : "")
                        .append("\n").append(" */")
                        .append("\n").append(MessageFormat.format(
                            "public static final Region {0} = new Region(\"{1}\", \"{2}\");",
                            getLocationVariableName(location),
                            location.name(),
                            location.displayName()));
                }
            }
        }
        // Empty buffer means no regions are missing; otherwise the message is the code to add.
        Assertions.assertTrue(sb.length() == 0, sb.toString());
    }
private static Region findByLabelOrName(String labelOrName) {
if (labelOrName == null) {
return null;
}
String nameLowerCase = labelOrName.toLowerCase(Locale.ROOT).replace(" ", "");
return Region.values().stream()
.filter(r -> nameLowerCase.equals(r.name().toLowerCase(Locale.ROOT)))
.findFirst()
.orElse(null);
}
    /**
     * Derives the Java constant name for a location, e.g. "East US 2" -> "US_EAST2":
     * moves the geography token ("US", "Europe", "Asia", "India") to the front,
     * collapses a trailing " digit" into the preceding word, then upper-cases and
     * replaces spaces with underscores.
     */
    private static String getLocationVariableName(Location location) {
        final String geographyGroup = location.innerModel().metadata().geographyGroup();
        String displayName = location.displayName();
        // Reorder so the geography token leads, e.g. "East US" -> "US East".
        if ("US".equals(geographyGroup)) {
            if (displayName.contains(" US")) {
                displayName = displayName.replace(" US", "");
                displayName = "US " + displayName;
            }
        } else if ("Europe".equals(geographyGroup)) {
            if (displayName.contains(" Europe")) {
                displayName = displayName.replace(" Europe", "");
                displayName = "Europe " + displayName;
            }
        } else if ("Asia Pacific".equals(geographyGroup)) {
            if (displayName.contains(" Asia")) {
                displayName = displayName.replace(" Asia", "");
                displayName = "Asia " + displayName;
            } else if (displayName.contains(" India")) {
                displayName = displayName.replace(" India", "");
                displayName = "India " + displayName;
            }
        } else if ("Africa".equals(geographyGroup)) {
            if (displayName.startsWith("South Africa")) {
                displayName = displayName.replace("South Africa", "SouthAfrica");
            }
        }
        // Fuse a trailing " <digit>" into the previous word, e.g. "US East 2" -> "US East2".
        // NOTE(review): String.replace substitutes ALL occurrences of the two-character
        // suffix, not just the trailing one — confirm no display name repeats the same
        // " <digit>" pair earlier in the string.
        if (displayName.length() > 2 && displayName.charAt(displayName.length() - 1) >= '0'
            && displayName.charAt(displayName.length() - 1) <= '9'
            && displayName.charAt(displayName.length() - 2) == ' ') {
            displayName = displayName.replace(displayName.substring(displayName.length() - 2),
                displayName.substring(displayName.length() - 1));
        }
        return displayName.toUpperCase().replace(" ", "_");
    }
}
| |
/*
* Copyright 1997-2022 Optimatika (www.optimatika.se)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.ojalgo.benchmark.lab.library;
import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.CholeskyDecomposition;
import org.apache.commons.math3.linear.EigenDecomposition;
import org.apache.commons.math3.linear.LUDecomposition;
import org.apache.commons.math3.linear.QRDecomposition;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.SingularValueDecomposition;
import org.ojalgo.benchmark.MatrixBenchmarkLibrary;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.DecompositionOperation;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.MutatingBinaryMatrixMatrixOperation;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.MutatingBinaryMatrixScalarOperation;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.MutatingUnaryMatrixOperation;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.ProducingBinaryMatrixMatrixOperation;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.ProducingUnaryMatrixOperation;
import org.ojalgo.benchmark.MatrixBenchmarkOperation.PropertyOperation;
/**
 * Apache Commons Math (commons-math3) adapter for the ojAlgo matrix benchmark
 * suite. Each {@code getOperationXxx()} factory returns a lambda that performs
 * one benchmarked linear-algebra operation using Commons Math primitives.
 */
public class ACM extends MatrixBenchmarkLibrary<RealMatrix, Array2DRowRealMatrix> {

    @Override
    public MatrixBenchmarkLibrary<RealMatrix, Array2DRowRealMatrix>.MatrixBuilder getMatrixBuilder(final int numberOfRows, final int numberOfColumns) {
        return new MatrixBuilder() {

            // Backing dense matrix; populated entry-by-entry via set() and handed out by get().
            private final Array2DRowRealMatrix myMatrix = new Array2DRowRealMatrix(numberOfRows, numberOfColumns);

            public Array2DRowRealMatrix get() {
                return myMatrix;
            }

            @Override
            public MatrixBuilder set(final int row, final int col, final double value) {
                myMatrix.setEntry(row, col, value);
                return this;
            }
        };
    }

    /** Element-wise addition; the sum is copied into the preallocated result {@code c}. */
    @Override
    public MutatingBinaryMatrixMatrixOperation<RealMatrix, Array2DRowRealMatrix> getOperationAdd() {
        return (a, b, c) -> this.copy(a.add(b), c);
    }

    /** Determinant computed via LU decomposition. */
    @Override
    public PropertyOperation<RealMatrix, Array2DRowRealMatrix> getOperationDeterminant(final int dim) {
        return (matA) -> {
            final LUDecomposition lu = new LUDecomposition(matA);
            return lu.getDeterminant();
        };
    }

    /**
     * Selects a solver matching the system's shape: Cholesky for square SPD systems,
     * LU for general square systems, QR (least squares) for overdetermined systems.
     * Underdetermined systems are not supported and yield {@code null}.
     */
    @Override
    public ProducingBinaryMatrixMatrixOperation<RealMatrix, Array2DRowRealMatrix> getOperationEquationSystemSolver(final int numbEquations,
            final int numbVariables, final int numbSolutions, final boolean spd) {
        if (numbEquations == numbVariables) {
            if (spd) {
                return (body, rhs) -> {
                    final CholeskyDecomposition cholesky = new CholeskyDecomposition(body);
                    return cholesky.getSolver().solve(rhs);
                };
            } else {
                return (body, rhs) -> {
                    final LUDecomposition lu = new LUDecomposition(body);
                    return lu.getSolver().solve(rhs);
                };
            }
        } else if (numbEquations > numbVariables) {
            return (body, rhs) -> {
                final QRDecomposition qr = new QRDecomposition(body);
                return qr.getSolver().solve(rhs);
            };
        } else {
            return null;
        }
    }

    /**
     * Eigenvalue decomposition; returns {V, D, V^T} in the shared result array.
     * The array is allocated once and reused across invocations (benchmark convention).
     */
    @Override
    public DecompositionOperation<RealMatrix, RealMatrix> getOperationEvD(final int dim) {
        final RealMatrix[] ret = this.makeArray(3);
        return (matrix) -> {
            // Renamed from the copy-pasted 'svd' - this is an eigen, not singular value, decomposition.
            final EigenDecomposition evd = new EigenDecomposition(matrix);
            ret[0] = evd.getV();
            ret[1] = evd.getD();
            ret[2] = evd.getVT();
            return ret;
        };
    }

    /** Matrix multiply with optional transposition of either operand; result copied into {@code product}. */
    @Override
    public MutatingBinaryMatrixMatrixOperation<RealMatrix, Array2DRowRealMatrix> getOperationFillByMultiplying(final boolean transpL, final boolean transpR) {
        return (left, right, product) -> this.copy((transpL ? left.transpose() : left).multiply((transpR ? right.transpose() : right)), product);
    }

    /** Inversion via Cholesky when SPD, otherwise LU; inverse copied into {@code result}. */
    @Override
    public MutatingUnaryMatrixOperation<RealMatrix, Array2DRowRealMatrix> getOperationInvert(final int dim, final boolean spd) {
        if (spd) {
            return (matA, result) -> {
                final CholeskyDecomposition chol = new CholeskyDecomposition(matA);
                this.copy(chol.getSolver().getInverse(), result);
            };
        } else {
            return (matA, result) -> {
                final LUDecomposition lu = new LUDecomposition(matA);
                this.copy(lu.getSolver().getInverse(), result);
            };
        }
    }

    /** Plain matrix multiplication producing a new matrix. */
    @Override
    public ProducingBinaryMatrixMatrixOperation<RealMatrix, RealMatrix> getOperationMultiplyToProduce() {
        return (left, right) -> left.multiply(right);
    }

    /** Moore-Penrose pseudoinverse via SVD. */
    @Override
    public ProducingUnaryMatrixOperation<RealMatrix, Array2DRowRealMatrix> getOperationPseudoinverse(final int dim) {
        return (matrix) -> new SingularValueDecomposition(matrix).getSolver().getInverse();
    }

    /** Scalar multiplication; result copied into {@code b}. */
    @Override
    public MutatingBinaryMatrixScalarOperation<RealMatrix, Array2DRowRealMatrix> getOperationScale() {
        return (a, s, b) -> this.copy(a.scalarMultiply(s), b);
    }

    /**
     * Singular value decomposition; returns {U, S, V^T} in the shared result array.
     * The array is allocated once and reused across invocations (benchmark convention).
     */
    @Override
    public DecompositionOperation<RealMatrix, RealMatrix> getOperationSVD(final int dim) {
        final RealMatrix[] ret = this.makeArray(3);
        return (matrix) -> {
            final SingularValueDecomposition svd = new SingularValueDecomposition(matrix);
            ret[0] = svd.getU();
            ret[1] = svd.getS();
            ret[2] = svd.getVT();
            return ret;
        };
    }

    /** Transposition; result copied into {@code result}. */
    @Override
    public MutatingUnaryMatrixOperation<RealMatrix, Array2DRowRealMatrix> getOperationTranspose() {
        return (matA, result) -> this.copy(matA.transpose(), result);
    }

    /**
     * Copies {@code source} into {@code destination} entry by entry.
     * Assumes the two matrices have conforming dimensions (rows read from the
     * source, columns from the destination - TODO confirm callers always pass
     * equally-sized matrices).
     */
    @Override
    protected Array2DRowRealMatrix copy(final RealMatrix source, final Array2DRowRealMatrix destination) {
        for (int i = 0, rlim = source.getRowDimension(); i < rlim; i++) {
            for (int j = 0, clim = destination.getColumnDimension(); j < clim; j++) {
                destination.setEntry(i, j, source.getEntry(i, j));
            }
        }
        return destination;
    }

    @Override
    protected RealMatrix[] makeArray(final int length) {
        return new RealMatrix[length];
    }

    /** Left-to-right product of all factors. */
    @Override
    protected RealMatrix multiply(final RealMatrix... factors) {
        RealMatrix retVal = factors[0];
        for (int f = 1; f < factors.length; f++) {
            retVal = retVal.multiply(factors[f]);
        }
        return retVal;
    }

    /** Frobenius norm, used to compare benchmark results for correctness. */
    @Override
    protected double norm(final RealMatrix matrix) {
        return matrix.getFrobeniusNorm();
    }

    @Override
    protected RealMatrix subtract(final RealMatrix left, final RealMatrix right) {
        return left.subtract(right);
    }
}
| |
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sprintapi.api.http.header.adapter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import org.junit.Test;
import org.sprintapi.api.http.header.adapter.accept.HttpAcceptAdapter;
import org.sprintapi.api.http.lang.HttpLexScanner;
import org.sprintapi.api.meta.type.Accept;
import org.sprintapi.api.meta.type.MediaType;
/**
 * Tests for {@link HttpAcceptAdapter}: parsing of HTTP Accept headers (media ranges,
 * quality factors, ordering) and content negotiation via {@code Accept.accept(...)}.
 * Parsed items appear ordered by descending quality factor.
 */
public class HttpAcceptHeaderAdapterTest {

    // Two ranges with an explicit q on the first; items come back quality-ordered.
    @Test
    public void testRead1() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("audio/*; q=0.2, audio/basic"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(2, a.getItems().length);
        assertNotNull(a.getItems()[0]);
        assertNotNull(a.getItems()[0].getRange());
        assertEquals("audio", a.getItems()[0].getRange().getType());
        assertEquals("basic", a.getItems()[0].getRange().getSubtype());
        // FIX: was getItems()[1], duplicating the items[1] check below and leaving
        // items[0]'s parameters unverified (cf. the pattern used in testRead2).
        assertNull(a.getItems()[0].getRange().getParameters());
        assertEquals(1f, a.getItems()[0].getQualityFactor(), 0);
        assertNotNull(a.getItems()[1]);
        assertNotNull(a.getItems()[1].getRange());
        assertEquals("audio", a.getItems()[1].getRange().getType());
        assertEquals("*", a.getItems()[1].getRange().getSubtype());
        assertNull(a.getItems()[1].getRange().getParameters());
        assertEquals(0.2f, a.getItems()[1].getQualityFactor(), 0);
    }

    // Four ranges with mixed quality factors; verifies full quality-descending order.
    @Test
    public void testRead2() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("text/plain; q=0.5, text/html, text/x-dvi; q=0.8, text/x-c"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(4, a.getItems().length);
        assertNotNull(a.getItems()[0]);
        assertNotNull(a.getItems()[0].getRange());
        assertEquals("text", a.getItems()[0].getRange().getType());
        assertEquals("html", a.getItems()[0].getRange().getSubtype());
        assertNull(a.getItems()[0].getRange().getParameters());
        assertEquals(1f, a.getItems()[0].getQualityFactor(), 0);
        assertNotNull(a.getItems()[1]);
        assertNotNull(a.getItems()[1].getRange());
        assertEquals("text", a.getItems()[1].getRange().getType());
        assertEquals("x-c", a.getItems()[1].getRange().getSubtype());
        assertNull(a.getItems()[1].getRange().getParameters());
        assertEquals(1f, a.getItems()[1].getQualityFactor(), 0);
        assertNotNull(a.getItems()[2]);
        assertNotNull(a.getItems()[2].getRange());
        assertEquals("text", a.getItems()[2].getRange().getType());
        assertEquals("x-dvi", a.getItems()[2].getRange().getSubtype());
        assertNull(a.getItems()[2].getRange().getParameters());
        assertEquals(0.8f, a.getItems()[2].getQualityFactor(), 0);
        assertNotNull(a.getItems()[3]);
        assertNotNull(a.getItems()[3].getRange());
        assertEquals("text", a.getItems()[3].getRange().getType());
        assertEquals("plain", a.getItems()[3].getRange().getSubtype());
        assertNull(a.getItems()[3].getRange().getParameters());
        assertEquals(0.5f, a.getItems()[3].getQualityFactor(), 0);
    }

    // Media-range parameters (level=1) must not break item counting.
    @Test
    public void testRead3() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("text/*, text/html, text/html;level=1, */*"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(4, a.getItems().length);
    }

    // Mixed parameters and q-values on every range.
    @Test
    public void testRead4() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(5, a.getItems().length);
    }

    // Real-world Firefox default Accept header.
    @Test
    public void testRead5() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(4, a.getItems().length);
    }

    // Real-world Opera-style Accept header.
    @Test
    public void testRead6() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("application/xml,application/xhtml+xml,text/html;q=0.9, text/plain;q=0.8,image/png,*/*;q=0.5"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(6, a.getItems().length);
    }

    // Real-world IE-style Accept header with inconsistent spacing.
    @Test
    public void testRead7() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("image/jpeg, application/x-ms-application, image/gif,application/xaml+xml, image/pjpeg, application/x-ms-xbap,application/x-shockwave-flash, application/msword, */*"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(9, a.getItems().length);
    }

    // Long vendor-type header (10 ranges).
    @Test
    public void testRead8() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("application/vnd.ms-xpsdocument, application/xaml+xml, application/x-ms-xbap, application/x-shockwave-flash, application/x-silverlight-2-b2, application/x-silverlight, application/vnd.ms-excel, application/vnd.ms-powerpoint, application/msword, */*"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(10, a.getItems().length);
    }

    // Single bare media type.
    @Test
    public void testRead9() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("text/plain"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(1, a.getItems().length);
    }

    // Two media types, no parameters.
    @Test
    public void testRead10() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("application/json, application/xml"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(2, a.getItems().length);
    }

    // accept() ranks candidate media types; higher return value = better match, 0 = no match.
    @Test
    public void testAccept() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("application/vnd.x+json, application/xml; q=0.5, application/json"), null);
        assertNotNull(a);
        assertEquals(3, a.accept(new MediaType("application", "vnd.x+json")));
        assertEquals(2, a.accept(new MediaType("application", "json")));
        assertEquals(1, a.accept(new MediaType("application", "xml")));
        assertEquals(0, a.accept(new MediaType("text", "html")));
    }

    // Heavily folded header: linear whitespace, split q-values and long runs of
    // empty list elements must all be tolerated per the HTTP list grammar.
    @Test
    public void testReadHuge() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner(
                "text/plain ; q = 0\n"
                + " .1\n"
                + " ,,,,\n"
                + " ,,\n"
                + " ,,, ,,,\n"
                + " ,,, ,,,\n"
                + " ,,,,,,,,\n"
                + " ,,,,\n"
                + " ,,\n"
                + " text/* ; q =\n"
                + " 1 . 00"
                ), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(2, a.getItems().length);
        assertNotNull(a.getItems()[0]);
        assertNotNull(a.getItems()[0].getRange());
        assertEquals("text", a.getItems()[0].getRange().getType());
        assertEquals("*", a.getItems()[0].getRange().getSubtype());
        assertNull(a.getItems()[0].getRange().getParameters());
        assertEquals(1f, a.getItems()[0].getQualityFactor(), 0);
        assertNotNull(a.getItems()[1]);
        assertNotNull(a.getItems()[1].getRange());
        assertEquals("text", a.getItems()[1].getRange().getType());
        assertEquals("plain", a.getItems()[1].getRange().getSubtype());
        assertNull(a.getItems()[1].getRange().getParameters());
        assertEquals(0.1f, a.getItems()[1].getQualityFactor(), 0);
    }

    // Empty list elements between ranges are skipped, not counted.
    @Test
    public void testEmptyList() {
        Accept a = HttpAcceptAdapter.read(new HttpLexScanner("text/plain,,,,image/*"), null);
        assertNotNull(a);
        assertNotNull(a.getItems());
        assertEquals(2, a.getItems().length);
        assertNotNull(a.getItems()[0]);
        assertNotNull(a.getItems()[0].getRange());
        assertEquals("text", a.getItems()[0].getRange().getType());
        assertEquals("plain", a.getItems()[0].getRange().getSubtype());
        assertNull(a.getItems()[0].getRange().getParameters());
        assertEquals(1f, a.getItems()[0].getQualityFactor(), 0);
        assertNotNull(a.getItems()[1]);
        assertNotNull(a.getItems()[1].getRange());
        assertEquals("image", a.getItems()[1].getRange().getType());
        assertEquals("*", a.getItems()[1].getRange().getSubtype());
        assertNull(a.getItems()[1].getRange().getParameters());
        assertEquals(1f, a.getItems()[1].getQualityFactor(), 0);
    }
}
| |
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.tests.java.util.stream;
import java.util.Arrays;
import java.util.Optional;
import java.util.Spliterator;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.OpTestCase;
import java.util.stream.SpliteratorTestHelper;
import java.util.stream.Stream;
import java.util.stream.TestData;
import org.testng.annotations.Test;
/**
 * Primitive range tests
 *
 * @author Brian Goetz
 */
@Test
public class RangeTest extends OpTestCase {

    public void testInfiniteRangeFindFirst() {
        Integer first = Stream.iterate(0, i -> i + 1).filter(i -> i > 10000).findFirst().get();
        assertEquals(first, Stream.iterate(0, i -> i + 1).parallel().filter(i -> i > 10000).findFirst().get());
        // Limit is required to transform the infinite stream to a finite stream
        // since the exercising requires a finite stream
        withData(TestData.Factory.ofSupplier(
                "", () -> Stream.iterate(0, i -> i + 1).filter(i -> i > 10000).limit(20000))).
                terminal(s->s.findFirst()).expectedResult(Optional.of(10001)).exercise();
    }

    //

    public void testIntRange() {
        // Half-open
        for (int start : Arrays.asList(1, 10, -1, -10)) {
            setContext("start", start);
            for (int end : Arrays.asList(1, 10, -1, -10)) {
                setContext("end", end);
                int size = (start < end) ? end - start : 0;
                int[] exp = new int[size];
                for (int i = start, p = 0; i < end; i++, p++) {
                    exp[p] = i;
                }
                int[] inc = IntStream.range(start, end).toArray();
                assertEquals(inc.length, size);
                assertTrue(Arrays.equals(exp, inc));
                withData(intRangeData(start, end)).stream(s -> s).
                        expectedResult(exp).exercise();
            }
        }
        // Closed
        for (int start : Arrays.asList(1, 10, -1, -10)) {
            setContext("start", start);
            for (int end : Arrays.asList(1, 10, -1, -10)) {
                setContext("end", end);
                int size = (start <= end) ? end - start + 1 : 0;
                int[] exp = new int[size];
                for (int i = start, p = 0; i <= end; i++, p++) {
                    exp[p] = i;
                }
                int[] inc = IntStream.rangeClosed(start, end).toArray();
                assertEquals(inc.length, size);
                assertTrue(Arrays.equals(exp, inc));
                withData(intRangeClosedData(start, end)).stream(s -> s).
                        expectedResult(exp).exercise();
            }
        }
        // Closed, maximum upper bound of Integer.MAX_VALUE
        {
            int[] inc = IntStream.rangeClosed(Integer.MAX_VALUE - 1, Integer.MAX_VALUE).toArray();
            assertEquals(2, inc.length);
            assertEquals(Integer.MAX_VALUE - 1, inc[0]);
            assertEquals(Integer.MAX_VALUE, inc[1]);
            inc = IntStream.rangeClosed(Integer.MAX_VALUE, Integer.MAX_VALUE).toArray();
            assertEquals(1, inc.length);
            assertEquals(Integer.MAX_VALUE, inc[0]);
            SpliteratorTestHelper.testIntSpliterator(
                    () -> IntStream.rangeClosed(Integer.MAX_VALUE - 8, Integer.MAX_VALUE).spliterator());
        }
        // Range wider than Integer.MAX_VALUE
        {
            Spliterator.OfInt s = IntStream.rangeClosed(Integer.MIN_VALUE, Integer.MAX_VALUE).
                    spliterator();
            assertEquals(s.estimateSize(), 1L << 32);
        }
    }

    TestData.OfInt intRangeData(int start, int end) {
        return TestData.Factory.ofIntSupplier("int range", () -> IntStream.range(start, end));
    }

    TestData.OfInt intRangeClosedData(int start, int end) {
        return TestData.Factory.ofIntSupplier("int rangeClosed", () -> IntStream.rangeClosed(start, end));
    }

    // FIX: renamed from the typo'd "tesIntRangeReduce". TestNG discovers test
    // methods via the class-level @Test annotation, so the method ran under
    // either name; the rename restores naming consistency with testLongRangeReduce.
    public void testIntRangeReduce() {
        withData(intRangeData(0, 10000)).
                terminal(s -> s.reduce(0, Integer::sum)).exercise();
    }

    public void testIntInfiniteRangeLimit() {
        withData(TestData.Factory.ofIntSupplier(
                "int range", () -> IntStream.iterate(0, i -> i + 1).limit(10000))).
                terminal(s -> s.reduce(0, Integer::sum)).exercise();
    }

    public void testIntInfiniteRangeFindFirst() {
        int first = IntStream.iterate(0, i -> i + 1).filter(i -> i > 10000).findFirst().getAsInt();
        assertEquals(first, IntStream.iterate(0, i -> i + 1).parallel().filter(i -> i > 10000).findFirst().getAsInt());
    }

    //

    public void testLongRange() {
        // Half-open
        for (long start : Arrays.asList(1, 1000, -1, -1000)) {
            setContext("start", start);
            for (long end : Arrays.asList(1, 1000, -1, -1000)) {
                setContext("end", end);
                long size = start < end ? end - start : 0;
                long[] exp = new long[(int) size];
                for (long i = start, p = 0; i < end; i++, p++) {
                    exp[(int) p] = i;
                }
                long[] inc = LongStream.range(start, end).toArray();
                assertEquals(inc.length, size);
                assertTrue(Arrays.equals(exp, inc));
                withData(longRangeData(start, end)).stream(s -> s).
                        expectedResult(exp).exercise();
            }
        }
        // Closed
        for (long start : Arrays.asList(1, 1000, -1, -1000)) {
            setContext("start", start);
            for (long end : Arrays.asList(1, 1000, -1, -1000)) {
                setContext("end", end);
                long size = start <= end ? end - start + 1 : 0;
                long[] exp = new long[(int) size];
                for (long i = start, p = 0; i <= end; i++, p++) {
                    exp[(int) p] = i;
                }
                long[] inc = LongStream.rangeClosed(start, end).toArray();
                assertEquals(inc.length, size);
                assertTrue(Arrays.equals(exp, inc));
                withData(longRangeClosedData(start, end)).stream(s -> s).
                        expectedResult(exp).exercise();
            }
        }
        // Closed, maximum upper bound of Long.MAX_VALUE
        {
            long[] inc = LongStream.rangeClosed(Long.MAX_VALUE - 1, Long.MAX_VALUE).toArray();
            assertEquals(2, inc.length);
            assertEquals(Long.MAX_VALUE - 1, inc[0]);
            assertEquals(Long.MAX_VALUE, inc[1]);
            inc = LongStream.rangeClosed(Long.MAX_VALUE, Long.MAX_VALUE).toArray();
            assertEquals(1, inc.length);
            assertEquals(Long.MAX_VALUE, inc[0]);
            SpliteratorTestHelper.testLongSpliterator(
                    () -> LongStream.rangeClosed(Long.MAX_VALUE - 8, Long.MAX_VALUE).spliterator());
        }
    }

    TestData.OfLong longRangeData(long start, long end) {
        return TestData.Factory.ofLongSupplier("long range", () -> LongStream.range(start, end));
    }

    TestData.OfLong longRangeClosedData(long start, long end) {
        return TestData.Factory.ofLongSupplier("long rangeClosed", () -> LongStream.rangeClosed(start, end));
    }

    public void testLongRangeReduce() {
        withData(longRangeData(0, 10000)).
                terminal(s -> s.reduce(0, Long::sum)).exercise();
    }

    public void testLongInfiniteRangeLimit() {
        withData(TestData.Factory.ofLongSupplier(
                "long range", () -> LongStream.iterate(0, i -> i + 1).limit(10000))).
                terminal(s -> s.reduce(0, Long::sum)).exercise();
    }

    public void testLongInfiniteRangeFindFirst() {
        long first = LongStream.iterate(0, i -> i + 1).filter(i -> i > 10000).findFirst().getAsLong();
        assertEquals(first, LongStream.iterate(0, i -> i + 1).parallel().filter(i -> i > 10000).findFirst().getAsLong());
    }

    private static void assertSizedAndSubSized(Spliterator<?> s) {
        assertTrue(s.hasCharacteristics(Spliterator.SIZED | Spliterator.SUBSIZED));
    }

    private static void assertNotSizedAndSubSized(Spliterator<?> s) {
        assertFalse(s.hasCharacteristics(Spliterator.SIZED | Spliterator.SUBSIZED));
    }

    public void testLongLongRange() {
        // Test [Long.MIN_VALUE, Long.MAX_VALUE)
        // This will concatenate streams of three ranges
        //   [Long.MIN_VALUE, x) [x, 0) [0, Long.MAX_VALUE)
        // where x = Long.divideUnsigned(0 - Long.MIN_VALUE, 2) + 1
        {
            Spliterator.OfLong s = LongStream.range(Long.MIN_VALUE, Long.MAX_VALUE).spliterator();
            assertEquals(s.estimateSize(), Long.MAX_VALUE);
            assertNotSizedAndSubSized(s);
            Spliterator.OfLong s1 = s.trySplit();
            assertNotSizedAndSubSized(s1);
            assertSizedAndSubSized(s);
            Spliterator.OfLong s2 = s1.trySplit();
            assertSizedAndSubSized(s1);
            assertSizedAndSubSized(s2);
            assertTrue(s.estimateSize() == Long.MAX_VALUE);
            assertTrue(s1.estimateSize() < Long.MAX_VALUE);
            assertTrue(s2.estimateSize() < Long.MAX_VALUE);
            assertEquals(s.estimateSize() + s1.estimateSize() + s2.estimateSize(),
                         Long.MAX_VALUE - Long.MIN_VALUE);
        }
        long[][] ranges = { {Long.MIN_VALUE, 0}, {-1, Long.MAX_VALUE} };
        for (int i = 0; i < ranges.length; i++) {
            long start = ranges[i][0];
            long end = ranges[i][1];
            Spliterator.OfLong s = LongStream.range(start, end).spliterator();
            assertEquals(s.estimateSize(), Long.MAX_VALUE);
            assertNotSizedAndSubSized(s);
            Spliterator.OfLong s1 = s.trySplit();
            assertSizedAndSubSized(s1);
            assertSizedAndSubSized(s);
            assertTrue(s.estimateSize() < Long.MAX_VALUE);
            assertTrue(s1.estimateSize() < Long.MAX_VALUE);
            assertEquals(s.estimateSize() + s1.estimateSize(), end - start);
        }
    }

    public void testLongLongRangeClosed() {
        // Test [Long.MIN_VALUE, Long.MAX_VALUE]
        // This will concatenate streams of four ranges
        //   [Long.MIN_VALUE, x) [x, 0) [0, y) [y, Long.MAX_VALUE]
        // where x = Long.divideUnsigned(0 - Long.MIN_VALUE, 2) + 1
        //       y = Long.divideUnsigned(Long.MAX_VALUE, 2) + 1
        {
            Spliterator.OfLong s = LongStream.rangeClosed(Long.MIN_VALUE, Long.MAX_VALUE).spliterator();
            assertEquals(s.estimateSize(), Long.MAX_VALUE);
            assertNotSizedAndSubSized(s);
            Spliterator.OfLong s1 = s.trySplit();
            assertNotSizedAndSubSized(s1);
            assertNotSizedAndSubSized(s);
            Spliterator.OfLong s2 = s1.trySplit();
            assertSizedAndSubSized(s1);
            assertSizedAndSubSized(s2);
            Spliterator.OfLong s3 = s.trySplit();
            assertSizedAndSubSized(s3);
            assertSizedAndSubSized(s);
            assertTrue(s.estimateSize() < Long.MAX_VALUE);
            assertTrue(s3.estimateSize() < Long.MAX_VALUE);
            assertTrue(s1.estimateSize() < Long.MAX_VALUE);
            assertTrue(s2.estimateSize() < Long.MAX_VALUE);
            assertEquals(s.estimateSize() + s3.estimateSize() + s1.estimateSize() + s2.estimateSize(),
                         Long.MAX_VALUE - Long.MIN_VALUE + 1);
        }
        long[][] ranges = { {Long.MIN_VALUE, 0}, {-1, Long.MAX_VALUE} };
        for (int i = 0; i < ranges.length; i++) {
            long start = ranges[i][0];
            long end = ranges[i][1];
            Spliterator.OfLong s = LongStream.rangeClosed(start, end).spliterator();
            assertEquals(s.estimateSize(), Long.MAX_VALUE);
            assertNotSizedAndSubSized(s);
            Spliterator.OfLong s1 = s.trySplit();
            assertSizedAndSubSized(s1);
            assertSizedAndSubSized(s);
            assertTrue(s.estimateSize() < Long.MAX_VALUE);
            assertTrue(s1.estimateSize() < Long.MAX_VALUE);
            assertEquals(s.estimateSize() + s1.estimateSize(), end - start + 1);
        }
    }
}
| |
/**
* Copyright (C) 2014-2017 Xavier Witdouck
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zavtech.morpheus.frame;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import com.zavtech.morpheus.array.Array;
import com.zavtech.morpheus.util.Tuple;
/**
 * An interface that provides various functions to operate on the rows or columns of a DataFrame
 *
 * @param <X> the key type for this dimension
 * @param <Y> the other dimension key type
 * @param <R> the key type of the row dimension
 * @param <C> the key type of the column dimension
 * @param <V> the vector type for this dimension
 * @param <T> the concrete axis type returned by {@link #parallel()}, {@link #sequential()} and the filter methods
 * @param <G> the grouping result type returned by the groupBy() methods
 *
 * <p>This is open source software released under the <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache 2.0 License</a></p>
 *
 * @author Xavier Witdouck
 */
public interface DataFrameAxis<X,Y,R,C,V extends DataFrameVector,T extends DataFrameAxis,G> extends Iterable<V> {
/**
 * Identifies which dimension of the frame an axis represents.
 */
enum Type {
ROWS,
COLS;
/** Returns true if this is the row dimension. */
public boolean isRow() {
return this == ROWS;
}
/** Returns true if this is the column dimension. */
public boolean isCol() {
return this == COLS;
}
}
/**
 * Checks if this axis is empty, according to its number of entries.
 * @return true if the axis is empty, false otherwise
 */
boolean isEmpty();
/**
 * Returns the number of entries in this axis
 * @return the count for this axis
 */
int count();
/**
 * Returns a parallel implementation of this interface
 * @return a parallel implementation of this interface
 */
T parallel();
/**
 * Returns a sequential implementation of this interface
 * @return a sequential implementation of this interface
 */
T sequential();
/**
 * Returns the key type for this dimension
 * @return the key type for dimension
 */
Class<X> keyType();
/**
 * Returns a stream of row or column keys
 * @return a stream of row or column keys
 */
Stream<X> keys();
/**
 * Returns the keys for this dimension as array
 * @return array of keys for this dimension
 */
Array<X> keyArray();
/**
 * Returns a stream of the row or column ordinals for this axis.
 * @return the stream of row or column ordinals
 */
IntStream ordinals();
/**
 * Returns true if this axis is operating in parallel mode
 * @return true if operating in parallel mode
 */
boolean isParallel();
/**
 * Returns the array type for the row or column vector
 * @param key the key of the row or column vector
 * @return the array type definition
 */
Class<?> type(X key);
/**
 * Returns a stream of array types for the row or column vectors
 * @return a stream of types for the row or column vectors
 */
Stream<Class<?>> types();
/**
 * Returns the row or column key for the index specified
 * @param index the row or column index
 * @return the matching row or column key
 */
X key(int index);
/**
 * Returns the row or column ordinal for the key specified
 * @param key the row or column key
 * @return the row or column ordinal, -1 if no match
 */
int ordinalOf(X key);
/**
 * Returns the row or column ordinal for the key specified
 * @param key the row or column key
 * @param strict if true, throw an exception if no match for key
 * @return the row or column ordinal, -1 if no match
 */
int ordinalOf(X key, boolean strict);
/**
 * Returns true if this dimension contains the key specified
 * @param key the row or column key
 * @return true if key is matched
 */
boolean contains(X key);
/**
 * Returns true if this dimension contains all the keys specified
 * @param keys the row or column keys to match
 * @return true if all keys are matched
 */
boolean containsAll(Iterable<X> keys);
/**
 * Returns a filtered axis operator based on the keys specified
 * @param keys the keys to filter on
 * @return the filtered row operator
 */
T filter(X... keys);
/**
 * Returns a filtered axis operator based on the keys specified
 * @param keys the keys to filter on
 * @return the filtered row operator
 */
T filter(Iterable<X> keys);
/**
 * Returns a filtered axis operator based on the keys specified
 * @param predicate the predicate to filter on
 * @return the filtered row operator
 */
T filter(Predicate<V> predicate);
/**
 * Groups the DataFrame along this axis based on values in the vector specified
 * @param keys the keys of the vectors to group by
 * @return the resulting groups of DataFrames
 */
G groupBy(Y... keys);
/**
 * Groups rows according to the provided function and returns the groups
 * @param function the grouping function to apply
 * @return the resulting groups of DataFrames
 */
G groupBy(Function<V,Tuple> function);
/**
 * Returns a stream of vectors for this dimension
 * @return the stream of vectors for this dimension
 */
Stream<V> stream();
/**
 * Returns an iterator of vectors for this dimension
 * @return iterator of vectors
 */
Iterator<V> iterator();
/**
 * Returns an <code>Optional</code> on the first key in this dimension
 * @return <code>Optional</code> on the first key (row or column key)
 */
Optional<X> firstKey();
/**
 * Returns an <code>Optional</code> on the last key in this dimension
 * @return <code>Optional</code> on the last key (row or column key)
 */
Optional<X> lastKey();
/**
 * Returns an <code>Optional</code> on the first record in this dimension
 * @return <code>Optional</code> on the first record in this dimension
 */
Optional<V> first();
/**
 * Returns an <code>Optional</code> on the last record in this dimension
 * @return <code>Optional</code> on the last record in this dimension
 */
Optional<V> last();
/**
 * Returns an <code>Optional</code> on the first row that matches the predicate
 * @param predicate the predicate to match rows
 * @return <code>Optional</code> on the first row to match
 */
Optional<V> first(Predicate<V> predicate);
/**
 * Returns an <code>Optional</code> on the last row that matches the predicate
 * @param predicate the predicate to match rows
 * @return <code>Optional</code> on the last row to match
 */
Optional<V> last(Predicate<V> predicate);
/**
 * Returns an <code>Optional</code> on the greatest key strictly less than the given key
 * This operation only works if the index is sorted, otherwise result is undefined
 * @param key the key to find the next lower key
 * @return the <code>Optional</code> on the greatest key strictly less than the given key
 */
Optional<X> lowerKey(X key);
/**
 * Returns an <code>Optional</code> on the least key strictly greater than the given key
 * This operation only works if the index is sorted, otherwise result is undefined
 * @param key the key to find the next highest from
 * @return the <code>Optional</code> on the least key strictly greater than the given key
 */
Optional<X> higherKey(X key);
/**
 * Replaces an existing key with the new key in place
 * @param key the existing key to replace
 * @param newKey the replacement key, which must not already exist
 * @return the DataFrame reference
 */
DataFrame<R,C> replaceKey(X key, X newKey);
/**
 * Returns a DataFrame view containing only the row or column keys specified
 * @param keys the row or column keys to select
 * @return the <code>DataFrame</code> view
 */
DataFrame<R,C> select(X... keys);
/**
 * Returns a DataFrame view containing only the row or column keys specified
 * @param keys the row or column keys to select
 * @return the <code>DataFrame</code> view
 */
DataFrame<R,C> select(Iterable<X> keys);
/**
 * Returns a DataFrame which includes all rows that match the specified predicate
 * @param predicate the predicate to select matching rows
 * @return the <code>DataFrame</code> view
 */
DataFrame<R,C> select(Predicate<V> predicate);
/**
 * Sorts the DataFrame along this axis based on the keys in ascending/descending order
 * @param ascending true for ascending order, false for descending
 * @return the sorted DataFrame
 */
DataFrame<R,C> sort(boolean ascending);
/**
 * Sorts the DataFrame along this axis in ascending/descending order
 * @param ascending true for ascending, false for descending
 * @param key the row or column key to sort by
 * @return the sorted DataFrame
 */
DataFrame<R,C> sort(boolean ascending, Y key);
/**
 * Sorts the DataFrame along this axis in ascending/descending order
 * @param ascending true for ascending, false for descending
 * @param keys the list of row or column keys to sort by
 * @return the sorted DataFrame
 */
DataFrame<R,C> sort(boolean ascending, List<Y> keys);
/**
 * Sorts the DataFrame along this axis according to the comparator provided
 * @param comparator the comparator to sort rows, null to remove sorting
 * @return the sorted DataFrame
 */
DataFrame<R,C> sort(Comparator<V> comparator);
/**
 * Applies the consumer on every vector of the DataFrame
 * @param consumer the consumer to receive each vector
 * @return the DataFrame reference
 */
DataFrame<R,C> apply(Consumer<V> consumer);
/**
 * Subtracts the mean from each row / column defined by this axis
 * @param inPlace if true, demean in place
 * @return the resulting DataFrame
 */
DataFrame<R,C> demean(boolean inPlace);
/**
 * Returns the row/column that represents the minimum value given the comparator
 * @param comparator the comparator to compare rows/columns
 * @return the row/column, empty if size() == 0
 */
Optional<V> min(Comparator<V> comparator);
/**
 * Returns the row/column that represents the maximum value given the comparator
 * @param comparator the comparator to compare rows/columns
 * @return the row/column, empty if size() == 0
 */
Optional<V> max(Comparator<V> comparator);
/**
 * Adds all the rows or columns from the frame provided
 * Only data for intersecting rows will be copied into this frame
 * @param frame the frame to add rows or columns from
 * @return the keys of newly added rows or columns
 */
Array<X> addAll(DataFrame<R,C> frame);
}
| |
package com.reactnativenavigation.controllers;
import android.content.Intent;
import android.os.Bundle;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.WritableMap;
import com.reactnativenavigation.NavigationApplication;
import com.reactnativenavigation.params.ActivityParams;
import com.reactnativenavigation.params.FabParams;
import com.reactnativenavigation.params.LightBoxParams;
import com.reactnativenavigation.params.ScreenParams;
import com.reactnativenavigation.params.SlidingOverlayParams;
import com.reactnativenavigation.params.SnackbarParams;
import com.reactnativenavigation.params.TitleBarButtonParams;
import com.reactnativenavigation.params.TitleBarLeftButtonParams;
import com.reactnativenavigation.params.parsers.ActivityParamsParser;
import com.reactnativenavigation.params.parsers.ScreenParamsParser;
import com.reactnativenavigation.utils.OrientationHelper;
import com.reactnativenavigation.views.SideMenu.Side;
import java.util.List;
public class NavigationCommandsHandler {

    /** Intent extra key under which the serialized ActivityParams bundle is stored. */
    private static final String ACTIVITY_PARAMS_BUNDLE = "ACTIVITY_PARAMS_BUNDLE";

    /** A unit of work that runs on the main thread with the currently resumed activity. */
    private interface ActivityTask {
        void run(NavigationActivity activity);
    }

    /** A unit of work that runs on the main thread with the current activity and pre-parsed screen params. */
    private interface ScreenParamsTask {
        void run(NavigationActivity activity, ScreenParams params);
    }

    /**
     * Captures NavigationActivity.currentActivity exactly once; if an activity is alive,
     * posts the task to the main thread bound to that captured instance. Silently no-ops
     * otherwise, mirroring the inline null-checks this helper replaces.
     */
    private static void withCurrentActivity(final ActivityTask task) {
        final NavigationActivity currentActivity = NavigationActivity.currentActivity;
        if (currentActivity == null) {
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                task.run(currentActivity);
            }
        });
    }

    /**
     * Variant of {@link #withCurrentActivity(ActivityTask)} that also parses the screen
     * params bundle on the calling thread, after the null-check — preserving the original
     * ordering (no parsing happens when no activity is alive).
     */
    private static void withCurrentActivity(final Bundle screenParams, final ScreenParamsTask task) {
        final NavigationActivity currentActivity = NavigationActivity.currentActivity;
        if (currentActivity == null) {
            return;
        }
        final ScreenParams params = ScreenParamsParser.parse(screenParams);
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                task.run(currentActivity, params);
            }
        });
    }

    /** Extracts the ActivityParams previously stored on the intent by {@link #startApp}. */
    static ActivityParams parseActivityParams(Intent intent) {
        return ActivityParamsParser.parse(intent.getBundleExtra(NavigationCommandsHandler.ACTIVITY_PARAMS_BUNDLE));
    }

    /**
     * start a new activity with CLEAR_TASK | NEW_TASK
     *
     * @param params ActivityParams as bundle
     */
    public static void startApp(Bundle params) {
        Intent intent = new Intent(NavigationApplication.instance, NavigationActivity.class);
        IntentDataHandler.onStartApp(intent);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.putExtra(ACTIVITY_PARAMS_BUNDLE, params);
        intent.putExtra("animationType", params.getString("animationType"));
        NavigationApplication.instance.startActivity(intent);
    }

    /** Replaces the drawer's content screen. Parsing happens on the main thread, as before. */
    public static void updateDrawerToScreen(final Bundle params) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.updateDrawerToScreen(ScreenParamsParser.parse(params));
            }
        });
    }

    /** Pushes a screen onto the current stack. */
    public static void push(Bundle screenParams) {
        withCurrentActivity(screenParams, new ScreenParamsTask() {
            @Override
            public void run(NavigationActivity activity, ScreenParams params) {
                activity.push(params);
            }
        });
    }

    /** Pops the top screen off the current stack. */
    public static void pop(Bundle screenParams) {
        withCurrentActivity(screenParams, new ScreenParamsTask() {
            @Override
            public void run(NavigationActivity activity, ScreenParams params) {
                activity.pop(params);
            }
        });
    }

    /** Pops all screens above the root of the current stack. */
    public static void popToRoot(Bundle screenParams) {
        withCurrentActivity(screenParams, new ScreenParamsTask() {
            @Override
            public void run(NavigationActivity activity, ScreenParams params) {
                activity.popToRoot(params);
            }
        });
    }

    /** Resets the current stack to a single new screen. */
    public static void newStack(Bundle screenParams) {
        withCurrentActivity(screenParams, new ScreenParamsTask() {
            @Override
            public void run(NavigationActivity activity, ScreenParams params) {
                activity.newStack(params);
            }
        });
    }

    /** Shows or hides the top bar of the given screen. */
    public static void setTopBarVisible(final String screenInstanceID, final boolean hidden, final boolean animated) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setTopBarVisible(screenInstanceID, hidden, animated);
            }
        });
    }

    /** Shows or hides the bottom tab bar. */
    public static void setBottomTabsVisible(final boolean hidden, final boolean animated) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setBottomTabsVisible(hidden, animated);
            }
        });
    }

    /** Sets the title-bar title of the given screen. */
    public static void setScreenTitleBarTitle(final String screenInstanceId, final String title) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setTitleBarTitle(screenInstanceId, title);
            }
        });
    }

    /** Sets the title-bar subtitle of the given screen. */
    public static void setScreenTitleBarSubtitle(final String screenInstanceId, final String subtitle) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setTitleBarSubtitle(screenInstanceId, subtitle);
            }
        });
    }

    /** Shows a modal screen. Parsing happens on the main thread, as before. */
    public static void showModal(final Bundle params) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.showModal(ScreenParamsParser.parse(params));
            }
        });
    }

    /** Shows a light box overlay. */
    public static void showLightBox(final LightBoxParams params) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.showLightBox(params);
            }
        });
    }

    /** Dismisses the currently shown light box, if any. */
    public static void dismissLightBox() {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.dismissLightBox();
            }
        });
    }

    /** Sets the right-hand title-bar buttons of the given screen. */
    public static void setScreenTitleBarRightButtons(final String screenInstanceId,
                                                     final String navigatorEventId,
                                                     final List<TitleBarButtonParams> titleBarButtons) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setTitleBarButtons(screenInstanceId, navigatorEventId, titleBarButtons);
            }
        });
    }

    /** Sets the left title-bar button of the given screen. */
    public static void setScreenTitleBarLeftButtons(final String screenInstanceId,
                                                    final String navigatorEventId,
                                                    final TitleBarLeftButtonParams titleBarButtons) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setTitleBarLeftButton(screenInstanceId, navigatorEventId, titleBarButtons);
            }
        });
    }

    /** Attaches a floating action button to the given screen. */
    public static void setScreenFab(final String screenInstanceId, final String navigatorEventId, final FabParams fab) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setScreenFab(screenInstanceId, navigatorEventId, fab);
            }
        });
    }

    /** Applies new style params to the given screen. */
    public static void setScreenStyle(final String screenInstanceId, final Bundle styleParams) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setScreenStyle(screenInstanceId, styleParams);
            }
        });
    }

    /**
     * Dismisses the top-most modal.
     * NOTE(review): when no activity is alive the promise is never resolved (pre-existing
     * behavior, deliberately kept) — the JS caller will hang; confirm whether it should reject.
     */
    public static void dismissTopModal(final Promise promise) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.dismissTopModal(promise);
            }
        });
    }

    /** Dismisses all open modals. */
    public static void dismissAllModals() {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.dismissAllModals();
            }
        });
    }

    /** Toggles the side menu on the given side. */
    public static void toggleSideMenuVisible(final boolean animated, final Side side) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.toggleSideMenuVisible(animated, side);
            }
        });
    }

    /** Enables or disables the side-menu open gesture. */
    public static void disableOpenGesture(final boolean disableOpenGesture) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.disableOpenGesture(disableOpenGesture);
            }
        });
    }

    /** Opens or closes the side menu on the given side. */
    public static void setSideMenuVisible(final boolean animated, final boolean visible, final Side side) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.setSideMenuVisible(animated, visible, side);
            }
        });
    }

    /** Enables or disables hardware back navigation. */
    public static void disableBackNavigation(final boolean disableBackNavigation) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.disableBackNavigation(disableBackNavigation);
            }
        });
    }

    /** Selects a top tab by its index within the given screen. */
    public static void selectTopTabByTabIndex(final String screenInstanceId, final int index) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.selectTopTabByTabIndex(screenInstanceId, index);
            }
        });
    }

    /** Selects a top tab by its screen instance id. */
    public static void selectTopTabByScreen(final String screenInstanceId) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.selectTopTabByScreen(screenInstanceId);
            }
        });
    }

    /** Selects a bottom tab by its index. */
    public static void selectBottomTabByTabIndex(final Integer index) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.selectBottomTabByTabIndex(index);
            }
        });
    }

    /** Selects a bottom tab by its navigator id. */
    public static void selectBottomTabByNavigatorId(final String navigatorId) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.selectBottomTabByNavigatorId(navigatorId);
            }
        });
    }

    /** Replaces the bottom-tab button at the given index. */
    public static void setBottomTabButtonByIndex(final Integer index, final Bundle screenParams) {
        withCurrentActivity(screenParams, new ScreenParamsTask() {
            @Override
            public void run(NavigationActivity activity, ScreenParams params) {
                activity.setBottomTabButtonByIndex(index, params);
            }
        });
    }

    /** Replaces the bottom-tab button identified by navigator id. */
    public static void setBottomTabButtonByNavigatorId(final String navigatorId, final Bundle screenParams) {
        withCurrentActivity(screenParams, new ScreenParamsTask() {
            @Override
            public void run(NavigationActivity activity, ScreenParams params) {
                activity.setBottomTabButtonByNavigatorId(navigatorId, params);
            }
        });
    }

    /** Shows a sliding overlay. */
    public static void showSlidingOverlay(final SlidingOverlayParams params) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.showSlidingOverlay(params);
            }
        });
    }

    /** Hides the sliding overlay, if shown. */
    public static void hideSlidingOverlay() {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.hideSlidingOverlay();
            }
        });
    }

    /** Shows a snackbar. */
    public static void showSnackbar(final SnackbarParams params) {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.showSnackbar(params);
            }
        });
    }

    /** Dismisses the currently shown snackbar, if any. */
    public static void dismissSnackbar() {
        withCurrentActivity(new ActivityTask() {
            @Override
            public void run(NavigationActivity activity) {
                activity.dismissSnackbar();
            }
        });
    }

    /**
     * Resolves the promise with the current orientation.
     * NOTE(review): silently leaves the promise unresolved when no activity is alive
     * (pre-existing behavior, kept as-is).
     */
    public static void getOrientation(Promise promise) {
        final NavigationActivity currentActivity = NavigationActivity.currentActivity;
        if (currentActivity == null) {
            return;
        }
        promise.resolve(OrientationHelper.getOrientation(currentActivity));
    }

    /** Resolves the promise with whether a NavigationActivity is currently alive. */
    public static void isAppLaunched(Promise promise) {
        final boolean isAppLaunched = NavigationActivity.currentActivity != null;
        promise.resolve(isAppLaunched);
    }

    /**
     * Resolves the promise with a map {"screenId": ...} for the visible screen,
     * or with an empty string immediately when no activity is alive.
     * Kept inline (not via the helper) because the no-activity path resolves the promise.
     */
    public static void getCurrentlyVisibleScreenId(final Promise promise) {
        final NavigationActivity currentActivity = NavigationActivity.currentActivity;
        if (currentActivity == null) {
            promise.resolve("");
            return;
        }
        NavigationApplication.instance.runOnMainThread(new Runnable() {
            @Override
            public void run() {
                WritableMap map = Arguments.createMap();
                map.putString("screenId", currentActivity.getCurrentlyVisibleScreenId());
                promise.resolve(map);
            }
        });
    }
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package jme3tools.navigation;
/**
* This class represents the position of an entity in the world.
*
* @author Benjamin Jakobus (based on JMarine by Cormac Gebruers and Benjamin Jakobus)
* @version 1.0
* @since 1.0
*/
public class Position {

    /* the latitude (+ N/E) */
    private Coordinate lat;

    /* the longitude (-W/S) */
    private Coordinate lng;

    /* An optional time to associate with this position - for historical tracking */
    private String utcTimeStamp;

    /* Degree position — currently unused; only referenced by the commented-out
     * degree constructor and getDegree() below. */
    private double degree;

    /**
     * A new position expressed in decimal format
     * @param dblLat the latitude in decimal degrees
     * @param dblLng the longitude in decimal degrees
     * @throws InvalidPositionException if a Coordinate cannot be constructed from the given value
     * @since 1.0
     */
    public Position(double dblLat, double dblLng) throws InvalidPositionException {
        lat = new Coordinate(dblLat, Coordinate.LAT);
        lng = new Coordinate(dblLng, Coordinate.LNG);
    }

    /**
     * A new position expressed in decimal format and degrees
     * (dead code, kept for reference — the {@code degree} field is unused while this stays disabled)
     * @param dblLat
     * @param dblLng
     * @param degree
     * @since 1.0
     */
    // public Position(double dblLat, double dblLng, double degree) throws InvalidPositionException {
    // lat = new Coordinate(dblLat, Coordinate.LAT);
    // lng = new Coordinate(dblLng, Coordinate.LNG);
    // this.degree = degree;
    // }

    /**
     * A new position expressed in DegMin format
     * @param latDeg whole degrees of latitude
     * @param latMin minutes of latitude
     * @param latQuad latitude quadrant (presumably Coordinate.N or Coordinate.S — confirm)
     * @param lngDeg whole degrees of longitude
     * @param lngMin minutes of longitude
     * @param lngQuad longitude quadrant (presumably Coordinate.E or Coordinate.W — confirm)
     * @throws InvalidPositionException if a Coordinate cannot be constructed from the given values
     * @since 1.0
     */
    public Position(int latDeg, float latMin, int latQuad, int lngDeg,
            float lngMin, int lngQuad) throws InvalidPositionException {
        lat = new Coordinate(latDeg, latMin, Coordinate.LAT, latQuad);
        lng = new Coordinate(lngDeg, lngMin, Coordinate.LNG, lngQuad);
    }

    /**
     * A new position expressed in ALRS format
     * @param lat the latitude string
     * @param lng the longitude string
     * @throws InvalidPositionException if a Coordinate cannot be parsed from the given string
     * @since 1.0
     */
    public Position(String lat, String lng) throws InvalidPositionException {
        this.lat = new Coordinate(lat);
        this.lng = new Coordinate(lng);
    }

    /**
     * A new position expressed in NMEA GPS message format:
     * 4807.038,N,01131.000,E
     * Latitude is assumed to be ddmm.mmm and longitude dddmm.mmm (see the substring splits).
     * NOTE(review): an InvalidPositionException from either Coordinate is swallowed
     * (printStackTrace only), leaving lat/lng null and causing an NPE on later use — confirm intent.
     * @param latNMEAGPS latitude in NMEA ddmm.mmm form
     * @param latQuad "N" for north; anything else is treated as south
     * @param lngNMEAGPS longitude in NMEA dddmm.mmm form
     * @param lngQuad "E" for east; anything else is treated as west
     * @param utcTimeStamp the UTC time stamp to associate with this position
     * @since 1.0
     */
    public Position(String latNMEAGPS, String latQuad, String lngNMEAGPS, String lngQuad, String utcTimeStamp) {
        int quad;
        //LAT
        if (latQuad.compareTo("N") == 0) {
            quad = Coordinate.N;
        } else {
            quad = Coordinate.S;
        }
        try {
            this.lat = new Coordinate(Integer.valueOf(latNMEAGPS.substring(0, 2)), Float.valueOf(latNMEAGPS.substring(2)), Coordinate.LAT, quad);
        } catch (InvalidPositionException e) {
            e.printStackTrace();
        }
        //LNG
        if (lngQuad.compareTo("E") == 0) {
            quad = Coordinate.E;
        } else {
            quad = Coordinate.W;
        }
        try {
            this.lng = new Coordinate(Integer.valueOf(lngNMEAGPS.substring(0, 3)), Float.valueOf(lngNMEAGPS.substring(3)), Coordinate.LNG, quad);
        } catch (InvalidPositionException e) {
            e.printStackTrace();
        }
        //TIMESTAMP
        this.associateUTCTime(utcTimeStamp);
    }

    /**
     * Add a reference time for this position - useful for historical tracking
     * @param data the UTC time stamp to store (stored verbatim, no parsing)
     * @since 1.0
     */
    public void associateUTCTime(String data) {
        utcTimeStamp = data;
    }

    /**
     * Returns the UTC time stamp
     * @return the UTC timestamp previously associated, or null if none was set
     * @since 1.0
     */
    public String utcTimeStamp() {
        return utcTimeStamp;
    }

    /**
     * Prints out position using decimal format
     * @return the position in decimal format, latitude then longitude, space-separated
     */
    public String toStringDec() {
        return lat.toStringDec() + " " + lng.toStringDec();
    }

    /**
     * Return the position latitude in decimal format
     * @return the latitude in decimal format
     * @since 1.0
     */
    public double getLatitude() {
        return lat.decVal();
    }

    /**
     * Returns the degree of the entity
     * (dead code, kept for reference — see the disabled degree constructor above)
     * @return degree
     * @since 1.0
     */
    // public double getDegree() {
    // return degree;
    // }

    /**
     * Return the position longitude in decimal format
     * @return the longitude in decimal format
     * @since 1.0
     */
    public double getLongitude() {
        return lng.decVal();
    }

    /**
     * Prints out position using DegMin format
     * @return the position in DegMin format, latitude then longitude, space-separated
     * @since 1.0
     */
    public String toStringDegMin() {
        String output = "";
        output += lat.toStringDegMin();
        output += " " + lng.toStringDegMin();
        return output;
    }

    /**
     * Prints out the position latitude in DegMin format
     * @return the latitude as a string for display purposes
     * @since 1.0
     */
    public String toStringDegMinLat() {
        return lat.toStringDegMin();
    }

    /**
     * Prints out the position longitude in DegMin format
     * @return the longitude as a string for display purposes
     * @since 1.0
     */
    public String toStringDegMinLng() {
        return lng.toStringDegMin();
    }

    /**
     * Prints out the position latitude in decimal format
     * @return the latitude as a string for display purposes
     * @since 1.0
     */
    public String toStringDecLat() {
        return lat.toStringDec();
    }

    /**
     * Prints out the position longitude in decimal format
     * @return the longitude as a string for display purposes
     * @since 1.0
     */
    public String toStringDecLng() {
        return lng.toStringDec();
    }

    //TEST HARNESS - DO NOT DELETE!
    // Exercises the NMEA-format constructor and the accessors above.
    public static void main(String[] argsc) {
        //NMEA GPS Position format:
        Position p = new Position("4807.038", "N", "01131.000", "W", "123519");
        System.out.println(p.toStringDegMinLat());
        System.out.println(p.getLatitude());
        System.out.println(p.getLongitude());
        System.out.println(p.toStringDegMinLng());
        System.out.println(p.utcTimeStamp());
    }//main
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.views.image;
import javax.annotation.Nullable;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.drawable.Animatable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import com.facebook.common.util.UriUtil;
import com.facebook.drawee.controller.AbstractDraweeControllerBuilder;
import com.facebook.drawee.controller.BaseControllerListener;
import com.facebook.drawee.controller.ControllerListener;
import com.facebook.drawee.controller.ForwardingControllerListener;
import com.facebook.drawee.drawable.AutoRotateDrawable;
import com.facebook.drawee.drawable.ScalingUtils;
import com.facebook.drawee.generic.GenericDraweeHierarchy;
import com.facebook.drawee.generic.GenericDraweeHierarchyBuilder;
import com.facebook.drawee.generic.RoundingParams;
import com.facebook.drawee.view.GenericDraweeView;
import com.facebook.imagepipeline.common.ResizeOptions;
import com.facebook.imagepipeline.image.ImageInfo;
import com.facebook.imagepipeline.request.BasePostprocessor;
import com.facebook.imagepipeline.request.ImageRequest;
import com.facebook.imagepipeline.request.ImageRequestBuilder;
import com.facebook.imagepipeline.request.Postprocessor;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.common.SystemClock;
import com.facebook.react.uimanager.PixelUtil;
import com.facebook.react.uimanager.UIManagerModule;
import com.facebook.react.uimanager.events.EventDispatcher;
/**
* Wrapper class around Fresco's GenericDraweeView, enabling persisting props across multiple view
* update and consistent processing of both static and network images.
*/
public class ReactImageView extends GenericDraweeView {

    /** Fade-in duration applied to remote (network) images; local images fade for 0 ms. */
    public static final int REMOTE_IMAGE_FADE_DURATION_MS = 300;

    /*
     * Implementation note re rounded corners:
     *
     * Fresco's built-in rounded corners only work for 'cover' resize mode -
     * this is a limitation in Android itself. Fresco has a workaround for this, but
     * it requires knowing the background color.
     *
     * So for the other modes, we use a postprocessor.
     * Because the postprocessor uses a modified bitmap, that would just get cropped in
     * 'cover' mode, so we fall back to Fresco's normal implementation.
     */

    // Scratch matrices shared across all instances to avoid per-call allocation.
    // NOTE(review): static mutable state — assumes postprocessing never runs concurrently; confirm.
    private static final Matrix sMatrix = new Matrix();
    private static final Matrix sInverse = new Matrix();

    /** Rounds corners for the scale types Fresco's built-in rounding can't handle (see note above). */
    private class RoundedCornerPostprocessor extends BasePostprocessor {

        /** Maps the view-space border radius back into source-bitmap space via the inverse scale transform. */
        float getRadius(Bitmap source) {
            ScalingUtils.getTransform(
                sMatrix,
                new Rect(0, 0, source.getWidth(), source.getHeight()),
                source.getWidth(),
                source.getHeight(),
                0.0f,
                0.0f,
                mScaleType);
            sMatrix.invert(sInverse);
            return sInverse.mapRadius(mBorderRadius);
        }

        @Override
        public void process(Bitmap output, Bitmap source) {
            output.setHasAlpha(true);
            // Effectively-zero radius: just copy, no rounding work needed.
            if (mBorderRadius < 0.01f) {
                super.process(output, source);
                return;
            }
            // Paint the source through a rounded-rect so corners become transparent.
            Paint paint = new Paint();
            paint.setAntiAlias(true);
            paint.setShader(new BitmapShader(source, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP));
            Canvas canvas = new Canvas(output);
            float radius = getRadius(source);
            canvas.drawRoundRect(
                new RectF(0, 0, source.getWidth(), source.getHeight()),
                radius,
                radius,
                paint);
        }
    }

    // Props persisted across view updates; mIsDirty marks that the Drawee controller must be rebuilt.
    private @Nullable Uri mUri;
    private @Nullable Drawable mLoadingImageDrawable;
    private int mBorderColor;
    private int mOverlayColor;
    private float mBorderWidth;
    private float mBorderRadius;
    private ScalingUtils.ScaleType mScaleType;
    private boolean mIsDirty;
    private boolean mIsLocalImage;
    private final AbstractDraweeControllerBuilder mDraweeControllerBuilder;
    private final RoundedCornerPostprocessor mRoundedCornerPostprocessor;
    private @Nullable ControllerListener mControllerListener;
    private @Nullable ControllerListener mControllerForTesting;
    private final @Nullable Object mCallerContext;
    // -1 means "not set": fall back to 0 for local images, REMOTE_IMAGE_FADE_DURATION_MS otherwise.
    private int mFadeDurationMs = -1;
    private boolean mProgressiveRenderingEnabled;

    // We can't specify rounding in XML, so have to do so here
    private static GenericDraweeHierarchy buildHierarchy(Context context) {
        return new GenericDraweeHierarchyBuilder(context.getResources())
            .setRoundingParams(RoundingParams.fromCornersRadius(0))
            .build();
    }

    /**
     * @param context the React context this view lives in
     * @param draweeControllerBuilder shared, reusable Fresco controller builder (reset before each use)
     * @param callerContext opaque caller context forwarded to Fresco, may be null
     */
    public ReactImageView(
        Context context,
        AbstractDraweeControllerBuilder draweeControllerBuilder,
        @Nullable Object callerContext) {
        super(context, buildHierarchy(context));
        mScaleType = ImageResizeMode.defaultValue();
        mDraweeControllerBuilder = draweeControllerBuilder;
        mRoundedCornerPostprocessor = new RoundedCornerPostprocessor();
        mCallerContext = callerContext;
    }

    /**
     * Enables/disables JS load events. When enabled, installs a controller listener that
     * dispatches ON_LOAD_START on submit, ON_LOAD_END + ON_LOAD on success, and
     * ON_LOAD_END on failure. Marks the view dirty either way.
     */
    public void setShouldNotifyLoadEvents(boolean shouldNotify) {
        if (!shouldNotify) {
            mControllerListener = null;
        } else {
            final EventDispatcher mEventDispatcher = ((ReactContext) getContext()).
                getNativeModule(UIManagerModule.class).getEventDispatcher();
            mControllerListener = new BaseControllerListener<ImageInfo>() {
                @Override
                public void onSubmit(String id, Object callerContext) {
                    mEventDispatcher.dispatchEvent(
                        new ImageLoadEvent(getId(), SystemClock.nanoTime(), ImageLoadEvent.ON_LOAD_START)
                    );
                }
                @Override
                public void onFinalImageSet(
                    String id,
                    @Nullable final ImageInfo imageInfo,
                    @Nullable Animatable animatable) {
                    if (imageInfo != null) {
                        mEventDispatcher.dispatchEvent(
                            new ImageLoadEvent(getId(), SystemClock.nanoTime(), ImageLoadEvent.ON_LOAD_END)
                        );
                        mEventDispatcher.dispatchEvent(
                            new ImageLoadEvent(getId(), SystemClock.nanoTime(), ImageLoadEvent.ON_LOAD)
                        );
                    }
                }
                @Override
                public void onFailure(String id, Throwable throwable) {
                    mEventDispatcher.dispatchEvent(
                        new ImageLoadEvent(getId(), SystemClock.nanoTime(), ImageLoadEvent.ON_LOAD_END)
                    );
                }
            };
        }
        mIsDirty = true;
    }

    public void setBorderColor(int borderColor) {
        mBorderColor = borderColor;
        mIsDirty = true;
    }

    public void setOverlayColor(int overlayColor) {
        mOverlayColor = overlayColor;
        mIsDirty = true;
    }

    /** Border width arrives in DIPs; stored in pixels. */
    public void setBorderWidth(float borderWidth) {
        mBorderWidth = PixelUtil.toPixelFromDIP(borderWidth);
        mIsDirty = true;
    }

    /** Border radius arrives in DIPs; stored in pixels. */
    public void setBorderRadius(float borderRadius) {
        mBorderRadius = PixelUtil.toPixelFromDIP(borderRadius);
        mIsDirty = true;
    }

    public void setScaleType(ScalingUtils.ScaleType scaleType) {
        mScaleType = scaleType;
        mIsDirty = true;
    }

    /**
     * Sets the image source. A string with a URI scheme is parsed as a remote/content URI;
     * otherwise the string is resolved as a local drawable resource name.
     * NOTE(review): if neither parse nor resource lookup yields a URI, mUri stays null —
     * confirm that the downstream ImageRequestBuilder tolerates this.
     */
    public void setSource(
        @Nullable String source,
        ResourceDrawableIdHelper resourceDrawableIdHelper) {
        mUri = null;
        if (source != null) {
            try {
                mUri = Uri.parse(source);
                // Verify scheme is set, so that relative uri (used by static resources) are not handled.
                if (mUri.getScheme() == null) {
                    mUri = null;
                }
            } catch (Exception e) {
                // ignore malformed uri, then attempt to extract resource ID.
            }
            if (mUri == null) {
                mUri = resourceDrawableIdHelper.getResourceDrawableUri(getContext(), source);
                mIsLocalImage = true;
            } else {
                mIsLocalImage = false;
            }
        }
        mIsDirty = true;
    }

    /** Sets a local drawable (by name) to show, auto-rotating, while the image loads. */
    public void setLoadingIndicatorSource(
        @Nullable String name,
        ResourceDrawableIdHelper resourceDrawableIdHelper) {
        Drawable drawable = resourceDrawableIdHelper.getResourceDrawable(getContext(), name);
        mLoadingImageDrawable =
            drawable != null ? (Drawable) new AutoRotateDrawable(drawable, 1000) : null;
        mIsDirty = true;
    }

    public void setProgressiveRenderingEnabled(boolean enabled) {
        mProgressiveRenderingEnabled = enabled;
        // not worth marking as dirty if it already rendered..
    }

    public void setFadeDuration(int durationMs) {
        mFadeDurationMs = durationMs;
        // not worth marking as dirty if it already rendered..
    }

    /**
     * Rebuilds the Fresco hierarchy + controller from the accumulated props,
     * but only when something changed (mIsDirty) and, if resizing is needed,
     * only once a layout pass has provided a non-zero size.
     */
    public void maybeUpdateView() {
        if (!mIsDirty) {
            return;
        }
        boolean doResize = shouldResize(mUri);
        if (doResize && (getWidth() <= 0 || getHeight() <=0)) {
            // If need a resize and the size is not yet set, wait until the layout pass provides one
            return;
        }
        GenericDraweeHierarchy hierarchy = getHierarchy();
        hierarchy.setActualImageScaleType(mScaleType);
        if (mLoadingImageDrawable != null) {
            hierarchy.setPlaceholderImage(mLoadingImageDrawable, ScalingUtils.ScaleType.CENTER);
        }
        // Crop-style scale types round via Fresco's RoundingParams; the rest go
        // through the postprocessor (see the implementation note at the top).
        boolean usePostprocessorScaling =
            mScaleType != ScalingUtils.ScaleType.CENTER_CROP &&
            mScaleType != ScalingUtils.ScaleType.FOCUS_CROP;
        float hierarchyRadius = usePostprocessorScaling ? 0 : mBorderRadius;
        RoundingParams roundingParams = hierarchy.getRoundingParams();
        roundingParams.setCornersRadius(hierarchyRadius);
        roundingParams.setBorder(mBorderColor, mBorderWidth);
        if (mOverlayColor != Color.TRANSPARENT) {
            roundingParams.setOverlayColor(mOverlayColor);
        } else {
            // make sure the default rounding method is used.
            roundingParams.setRoundingMethod(RoundingParams.RoundingMethod.BITMAP_ONLY);
        }
        hierarchy.setRoundingParams(roundingParams);
        hierarchy.setFadeDuration(
            mFadeDurationMs >= 0
                ? mFadeDurationMs
                : mIsLocalImage ? 0 : REMOTE_IMAGE_FADE_DURATION_MS);
        Postprocessor postprocessor = usePostprocessorScaling ? mRoundedCornerPostprocessor : null;
        ResizeOptions resizeOptions = doResize ? new ResizeOptions(getWidth(), getHeight()) : null;
        ImageRequest imageRequest = ImageRequestBuilder.newBuilderWithSource(mUri)
            .setPostprocessor(postprocessor)
            .setResizeOptions(resizeOptions)
            .setAutoRotateEnabled(true)
            .setProgressiveRenderingEnabled(mProgressiveRenderingEnabled)
            .build();
        // This builder is reused
        mDraweeControllerBuilder.reset();
        mDraweeControllerBuilder
            .setAutoPlayAnimations(true)
            .setCallerContext(mCallerContext)
            .setOldController(getController())
            .setImageRequest(imageRequest);
        // Combine the load-event listener with the test listener when both are present.
        if (mControllerListener != null && mControllerForTesting != null) {
            ForwardingControllerListener combinedListener = new ForwardingControllerListener();
            combinedListener.addListener(mControllerListener);
            combinedListener.addListener(mControllerForTesting);
            mDraweeControllerBuilder.setControllerListener(combinedListener);
        } else if (mControllerForTesting != null) {
            mDraweeControllerBuilder.setControllerListener(mControllerForTesting);
        } else if (mControllerListener != null) {
            mDraweeControllerBuilder.setControllerListener(mControllerListener);
        }
        setController(mDraweeControllerBuilder.build());
        mIsDirty = false;
    }

    // VisibleForTesting
    public void setControllerListener(ControllerListener controllerListener) {
        mControllerForTesting = controllerListener;
        mIsDirty = true;
        maybeUpdateView();
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // A pending update may have been deferred until a real size was available (see maybeUpdateView).
        if (w > 0 && h > 0) {
            maybeUpdateView();
        }
    }

    /**
     * ReactImageViews only render a single image.
     */
    @Override
    public boolean hasOverlappingRendering() {
        return false;
    }

    private static boolean shouldResize(@Nullable Uri uri) {
        // Resizing is inferior to scaling. See http://frescolib.org/docs/resizing-rotating.html#_
        // We resize here only for images likely to be from the device's camera, where the app developer
        // has no control over the original size
        return uri != null && (UriUtil.isLocalContentUri(uri) || UriUtil.isLocalFileUri(uri));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.auth;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.security.auth.login.LoginException;
import javax.security.sasl.AuthenticationException;
import javax.security.sasl.Sasl;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.metastore.security.DBTokenStore;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSaslServerTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
 * given configuration as well as helps with authenticating requests.
 */
public class HiveAuthFactory {
  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);

  // SASL server bridge; non-null only when running SASL on kerberized Hadoop.
  private HadoopThriftAuthBridge.Server saslServer;
  // Configured HS2 authentication type (KERBEROS/NONE/LDAP/PAM/CUSTOM/NOSASL).
  private String authTypeStr;
  private final String transportMode;
  private final HiveConf conf;
  // Raw hadoop.security.authentication value ("simple" or "kerberos").
  private String hadoopAuth;
  // Manages HS2 delegation tokens; non-null only in the kerberos case.
  private MetastoreDelegationTokenManager delegationTokenManager = null;

  /**
   * Reads the authentication configuration and, when SASL over kerberized Hadoop is in
   * effect, creates the SASL server bridge and starts the delegation token manager.
   *
   * @throws TTransportException if the delegation token manager fails to start
   */
  public HiveAuthFactory(HiveConf conf) throws TTransportException {
    this.conf = conf;
    transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
    authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
    // ShimLoader.getHadoopShims().isSecurityEnabled() will only check that
    // hadoopAuth is not simple, it does not guarantee it is kerberos
    hadoopAuth = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
    // In http mode we use NOSASL as the default auth type
    if (authTypeStr == null) {
      if ("http".equalsIgnoreCase(transportMode)) {
        authTypeStr = HiveAuthConstants.AuthTypes.NOSASL.getAuthName();
      } else {
        authTypeStr = HiveAuthConstants.AuthTypes.NONE.getAuthName();
      }
    }
    if (isSASLWithKerberizedHadoop()) {
      saslServer =
          HadoopThriftAuthBridge.getBridge().createServer(
              conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
              conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL),
              conf.getVar(ConfVars.HIVE_SERVER2_CLIENT_KERBEROS_PRINCIPAL));
      // Start delegation token manager
      delegationTokenManager = new MetastoreDelegationTokenManager();
      try {
        Object baseHandler = null;
        String tokenStoreClass = MetaStoreServerUtils.getTokenStoreClassName(conf);
        if (tokenStoreClass.equals(DBTokenStore.class.getName())) {
          // IMetaStoreClient is needed to access token store if DBTokenStore is to be used. It
          // will be got via Hive.get(conf).getMSC in a thread where the DelegationTokenStore
          // is called. To avoid the cyclic reference, we pass the Hive class to DBTokenStore where
          // it is used to get a threadLocal Hive object with a synchronized MetaStoreClient using
          // Java reflection.
          // Note: there will be two HS2 life-long opened MSCs, one is stored in HS2 thread local
          // Hive object, the other is in a daemon thread spawned in DelegationTokenSecretManager
          // to remove expired tokens.
          baseHandler = Hive.class;
        }
        delegationTokenManager.startDelegationTokenSecretManager(conf, baseHandler,
            HadoopThriftAuthBridge.Server.ServerMode.HIVESERVER2);
        saslServer.setSecretManager(delegationTokenManager.getSecretManager());
      }
      catch (IOException e) {
        throw new TTransportException("Failed to start token manager", e);
      }
    }
  }

  /**
   * Builds the SASL properties (QOP and server-auth flag) derived from the configured
   * {@code hive.server2.thrift.sasl.qop}.
   */
  public Map<String, String> getSaslProperties() {
    Map<String, String> saslProps = new HashMap<>();
    SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
    saslProps.put(Sasl.QOP, saslQOP.toString());
    saslProps.put(Sasl.SERVER_AUTH, "true");
    return saslProps;
  }

  /**
   * Returns the Thrift transport factory matching the configured authentication type,
   * optionally wrapped for trusted-domain dual authentication.
   *
   * @throws LoginException if the auth type is unsupported or the SASL setup fails
   */
  public TTransportFactory getAuthTransFactory() throws LoginException {
    TTransportFactory transportFactory;
    TSaslServerTransport.Factory serverTransportFactory;
    if (isSASLWithKerberizedHadoop()) {
      try {
        serverTransportFactory = saslServer.createSaslServerTransportFactory(
            getSaslProperties());
      } catch (TTransportException e) {
        // Preserve the underlying transport failure as the cause instead of
        // flattening it to just its message (LoginException has no cause ctor).
        LoginException le = new LoginException(e.getMessage());
        le.initCause(e);
        throw le;
      }
      if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.KERBEROS.getAuthName())) {
        // no-op
      } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NONE.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.LDAP.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.PAM.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.CUSTOM.getAuthName())) {
        try {
          serverTransportFactory.addServerDefinition("PLAIN",
              authTypeStr, null, new HashMap<String, String>(),
              new PlainSaslHelper.PlainServerCallbackHandler(authTypeStr));
        } catch (AuthenticationException e) {
          // Keep the original exception as the cause rather than concatenating
          // it into the message.
          LoginException le = new LoginException("Error setting callback handler: " + e);
          le.initCause(e);
          throw le;
        }
      } else {
        throw new LoginException("Unsupported authentication type " + authTypeStr);
      }
      transportFactory = saslServer.wrapTransportFactory(serverTransportFactory);
    } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NONE.getAuthName()) ||
        authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.LDAP.getAuthName()) ||
        authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.PAM.getAuthName()) ||
        authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.CUSTOM.getAuthName())) {
      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
    } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.getAuthName())) {
      transportFactory = new TTransportFactory();
    } else {
      throw new LoginException("Unsupported authentication type " + authTypeStr);
    }
    String trustedDomain = HiveConf.getVar(conf, ConfVars.HIVE_SERVER2_TRUSTED_DOMAIN).trim();
    if (!trustedDomain.isEmpty()) {
      transportFactory = PlainSaslHelper.getDualPlainTransportFactory(transportFactory, trustedDomain);
    }
    return transportFactory;
  }

  /**
   * Returns the thrift processor factory for HiveServer2 running in binary mode.
   *
   * @param service the CLI service implementation to wrap
   * @return a kerberos-aware processor factory when SASL is active, a plain one otherwise
   * @throws LoginException declared for API compatibility
   */
  public TProcessorFactory getAuthProcFactory(TCLIService.Iface service) throws LoginException {
    if (isSASLWithKerberizedHadoop()) {
      return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
    } else {
      return PlainSaslHelper.getPlainProcessorFactory(service);
    }
  }

  /** Remote user of the current SASL connection, or null when SASL is not in use. */
  public String getRemoteUser() {
    return saslServer == null ? null : saslServer.getRemoteUser();
  }

  /** Remote client IP address of the current SASL connection, or null if unavailable. */
  public String getIpAddress() {
    if (saslServer == null || saslServer.getRemoteAddress() == null) {
      return null;
    } else {
      return saslServer.getRemoteAddress().getHostAddress();
    }
  }

  /** SASL mechanism the current user authenticated with, or null when SASL is not in use. */
  public String getUserAuthMechanism() {
    return saslServer == null ? null : saslServer.getUserAuthMechanism();
  }

  /** True when Hadoop security is kerberos and the HS2 auth type is not NOSASL. */
  public boolean isSASLWithKerberizedHadoop() {
    return "kerberos".equalsIgnoreCase(hadoopAuth)
        && !authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.getAuthName());
  }

  /** True when the connected user authenticated via KERBEROS or a delegation TOKEN. */
  public boolean isSASLKerberosUser() {
    return AuthMethod.KERBEROS.getMechanismName().equals(getUserAuthMechanism())
        || AuthMethod.TOKEN.getMechanismName().equals(getUserAuthMechanism());
  }

  /**
   * Performs kerberos login from the configured HS2 principal/keytab.
   * The "_HOST" placeholder in the principal is resolved via {@link SecurityUtil}.
   *
   * @throws IOException if principal/keytab are missing or the login fails
   */
  public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
    } else {
      UserGroupInformation.loginUserFromKeytab(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
    }
  }

  /**
   * Performs SPNEGO login from the configured principal/keytab and returns the resulting UGI.
   *
   * @throws IOException if principal/keytab are missing or the login fails
   */
  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
    throws IOException {
    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
    if (principal.isEmpty() || keyTabFile.isEmpty()) {
      throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
    } else {
      return UserGroupInformation.loginUserFromKeytabAndReturnUGI(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
    }
  }

  /**
   * Retrieves a delegation token for the given owner/renewer pair.
   * Only supported when kerberos authentication is active.
   */
  public String getDelegationToken(String owner, String renewer, String remoteAddr)
      throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication", "08S01");
    }
    try {
      String tokenStr = delegationTokenManager.getDelegationTokenWithService(owner, renewer,
          HiveAuthConstants.HS2_CLIENT_TOKEN, remoteAddr);
      if (tokenStr == null || tokenStr.isEmpty()) {
        throw new HiveSQLException(
            "Received empty retrieving delegation token for user " + owner, "08S01");
      }
      return tokenStr;
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error retrieving delegation token for user " + owner, "08S01", e);
    } catch (InterruptedException e) {
      throw new HiveSQLException("delegation token retrieval interrupted", "08S01", e);
    }
  }

  /** Cancels the given delegation token. Only supported over kerberos authentication. */
  public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication", "08S01");
    }
    try {
      delegationTokenManager.cancelDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error canceling delegation token " + delegationToken, "08S01", e);
    }
  }

  /** Renews the given delegation token. Only supported over kerberos authentication. */
  public void renewDelegationToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication", "08S01");
    }
    try {
      delegationTokenManager.renewDelegationToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error renewing delegation token " + delegationToken, "08S01", e);
    }
  }

  /** Verifies the given delegation token and returns the associated user. */
  public String verifyDelegationToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication", "08S01");
    }
    try {
      return delegationTokenManager.verifyDelegationToken(delegationToken);
    } catch (IOException e) {
      String msg = "Error verifying delegation token " + delegationToken;
      LOG.error(msg, e);
      throw new HiveSQLException(msg, "08S01", e);
    }
  }

  /** Extracts the owning user from the given delegation token. */
  public String getUserFromToken(String delegationToken) throws HiveSQLException {
    if (delegationTokenManager == null) {
      throw new HiveSQLException(
          "Delegation token only supported over kerberos authentication", "08S01");
    }
    try {
      return delegationTokenManager.getUserFromToken(delegationToken);
    } catch (IOException e) {
      throw new HiveSQLException(
          "Error extracting user from delegation token " + delegationToken, "08S01", e);
    }
  }

  /**
   * Verifies that {@code realUser} may act as a proxy for {@code proxyUser} from
   * {@code ipAddress}, consulting Hadoop's proxy-user configuration.
   *
   * @throws HiveSQLException if the proxy privilege check fails
   */
  public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
      HiveConf hiveConf) throws HiveSQLException {
    try {
      UserGroupInformation sessionUgi;
      if (UserGroupInformation.isSecurityEnabled()) {
        KerberosNameShim kerbName = ShimLoader.getHadoopShims().getKerberosNameShim(realUser);
        sessionUgi = UserGroupInformation.createProxyUser(
            kerbName.getServiceName(), UserGroupInformation.getLoginUser());
      } else {
        sessionUgi = UserGroupInformation.createRemoteUser(realUser);
      }
      if (!proxyUser.equalsIgnoreCase(realUser)) {
        ProxyUsers.refreshSuperUserGroupsConfiguration(hiveConf);
        ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi),
            ipAddress, hiveConf);
      }
    } catch (IOException e) {
      throw new HiveSQLException(
          "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, "08S01", e);
    }
  }
}
| |
//
// FPlayAndroid is distributed under the FreeBSD License
//
// Copyright (c) 2013-2014, Carlos Rafael Gimenes das Neves
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The views and conclusions contained in the software and documentation are those
// of the authors and should not be interpreted as representing official policies,
// either expressed or implied, of the FreeBSD Project.
//
// https://github.com/carlosrafaelgn/FPlayAndroid
//
package br.com.carlosrafaelgn.fplay.plugin;
import android.app.Activity;
import android.content.Context;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import br.com.carlosrafaelgn.fplay.R;
import br.com.carlosrafaelgn.fplay.activity.MainHandler;
import br.com.carlosrafaelgn.fplay.list.FileSt;
import br.com.carlosrafaelgn.fplay.list.Song;
import br.com.carlosrafaelgn.fplay.playback.Player;
import br.com.carlosrafaelgn.fplay.ui.UI;
import br.com.carlosrafaelgn.fplay.visualizer.SimpleVisualizerJni;
/**
 * Singleton bridge exposing FPlay's internal facilities (player control, threading,
 * UI helpers, visualizer hooks and JSON utilities) to dynamically loaded plugins
 * through the {@link FPlay} interface.
 */
public final class PluginManager implements FPlay {
	private static final PluginManager pluginManager = new PluginManager();

	// Gson instances are thread-safe and relatively expensive to create, so share a
	// single cached instance instead of allocating one per toJson()/fromJson() call.
	private static final Gson gson = new Gson();

	public static FPlay getFPlay() {
		return pluginManager;
	}

	private PluginManager() {
	}

	@Override
	public int getApiVersion() {
		return FPlayPlugin.API_VERSION;
	}

	@Override
	public int getFPlayVersionCode() {
		return UI.VERSION_CODE;
	}

	@Override
	public String getFPlayVersionName() {
		return UI.VERSION_NAME;
	}

	@Override
	public boolean isAlive() {
		return (Player.state == Player.STATE_ALIVE);
	}

	@Override
	public Object getApplicationContext() {
		return Player.theApplication;
	}

	@Override
	public boolean isOnMainThread() {
		return MainHandler.isOnMainThread();
	}

	@Override
	public void postToMainThread(Runnable runnable) {
		MainHandler.postToMainThread(runnable);
	}

	@Override
	public void postToMainThreadAtTime(Runnable runnable, long uptimeMillis) {
		MainHandler.postToMainThreadAtTime(runnable, uptimeMillis);
	}

	@Override
	public void sendMessage(Object callback, int what) {
		MainHandler.sendMessage((MainHandler.Callback)callback, what);
	}

	@Override
	public void sendMessage(Object callback, int what, int arg1, int arg2) {
		MainHandler.sendMessage((MainHandler.Callback)callback, what, arg1, arg2);
	}

	@Override
	public void sendMessageAtTime(Object callback, int what, int arg1, int arg2, long uptimeMillis) {
		MainHandler.sendMessageAtTime((MainHandler.Callback)callback, what, arg1, arg2, uptimeMillis);
	}

	@Override
	public void removeMessages(Object callback, int what) {
		MainHandler.removeMessages((MainHandler.Callback)callback, what);
	}

	@Override
	public boolean deviceHasTelephonyRadio() {
		return Player.deviceHasTelephonyRadio();
	}

	@Override
	public boolean isConnectedToTheInternet() {
		return Player.isConnectedToTheInternet();
	}

	@Override
	public boolean isInternetConnectedViaWiFi() {
		return Player.isInternetConnectedViaWiFi();
	}

	@Override
	public int getWiFiIpAddress() {
		return Player.getWiFiIpAddress();
	}

	@Override
	public String getWiFiIpAddressStr() {
		return Player.getWiFiIpAddressStr();
	}

	@Override
	public String emoji(CharSequence text) {
		return UI.emoji(text);
	}

	@Override
	public void toast(String message) {
		MainHandler.toast(message);
	}

	@Override
	public <E> ItemSelectorDialog<E> showItemSelectorDialog(final Object activity, final CharSequence title, final CharSequence loadingMessage, final CharSequence connectingMessage, final boolean progressBarVisible, final Class<E> clazz, final E[] initialElements, final ItemSelectorDialog.Observer<E> observer) {
		//I know this is *SUPER UGLY*! But it saves a few classes :)
		// Local adapts the plugin-facing ItemSelectorDialog API onto FPlay's internal
		// dialog implementation, acting as item wrapper, dialog facade and observer at once.
		final class Local extends br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog.Item implements ItemSelectorDialog<E>, br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog.Observer<Local> {
			private final br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog<Local> dialog;
			private final E item;

			// Wrapper constructor: wraps a single plugin item for display.
			Local(E item) {
				super(new FileSt("", (item == null) ? "" : item.toString()));
				this.dialog = null;
				this.item = item;
			}

			// Facade constructor: creates and shows the actual dialog.
			Local() {
				super(null);
				final Local[] localInitialElements = ((initialElements == null) ? null : new Local[initialElements.length]);
				if (initialElements != null) {
					for (int i = initialElements.length - 1; i >= 0; i--)
						localInitialElements[i] = new Local(initialElements[i]);
				}
				dialog = br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog.showDialog((Activity)activity, title, loadingMessage, connectingMessage, progressBarVisible, Local.class, localInitialElements, this);
				item = null;
			}

			@Override
			public void onItemSelectorDialogClosed(br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog<Local> itemSelectorDialog) {
				if (observer != null)
					observer.onItemSelectorDialogClosed(this);
			}

			@Override
			public void onItemSelectorDialogRefreshList(br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog<Local> itemSelectorDialog) {
				if (observer != null)
					observer.onItemSelectorDialogRefreshList(this);
			}

			@Override
			public void onItemSelectorDialogItemClicked(br.com.carlosrafaelgn.fplay.ui.ItemSelectorDialog<Local> itemSelectorDialog, int position, Local item) {
				if (observer != null)
					observer.onItemSelectorDialogItemClicked(this, position, item.item);
			}

			@Override
			public void add(E item) {
				if (dialog != null)
					dialog.add(new Local(item));
			}

			@Override
			public void clear() {
				if (dialog != null)
					dialog.clear();
			}

			@Override
			public void remove(int position) {
				if (dialog != null)
					dialog.remove(position);
			}

			@Override
			public void dismiss() {
				if (dialog != null)
					dialog.dismiss();
			}

			@Override
			public void cancel() {
				if (dialog != null)
					dialog.cancel();
			}

			@Override
			public boolean isCancelled() {
				return (dialog == null || dialog.isCancelled());
			}

			@Override
			public int getCount() {
				return (dialog == null ? 0 : dialog.getCount());
			}

			@Override
			public E getItem(int position) {
				if (dialog == null)
					return null;
				final Local item = dialog.getItem(position);
				return (item == null ? null : item.item);
			}

			@Override
			public void showProgressBar(boolean show) {
				if (dialog != null)
					dialog.showProgressBar(show);
			}

			@Override
			public void showConnecting(boolean connecting) {
				if (dialog != null)
					dialog.showConnecting(connecting);
			}
		}
		return new Local();
	}

	@Override
	public String getString(int str) {
		return (str == STR_VISUALIZER_NOT_SUPPORTED ? UI.emoji(Player.theApplication.getText(R.string.visualizer_not_supported)) : "");
	}

	@Override
	public void fixLocale(Object context) {
		if (context != null)
			UI.reapplyForcedLocaleOnPlugins((Context)context);
	}

	@Override
	public String formatIntAsFloat(int number, boolean useTwoDecimalPlaces, boolean removeDecimalPlacesIfExact) {
		return UI.formatIntAsFloat(number, useTwoDecimalPlaces, removeDecimalPlacesIfExact);
	}

	@Override
	public void formatIntAsFloat(StringBuilder sb, int number, boolean useTwoDecimalPlaces, boolean removeDecimalPlacesIfExact) {
		UI.formatIntAsFloat(sb, number, useTwoDecimalPlaces, removeDecimalPlacesIfExact);
	}

	@Override
	public int dpToPxI(float dp) {
		return UI.dpToPxI(dp);
	}

	@Override
	public int spToPxI(float sp) {
		return UI.spToPxI(sp);
	}

	@Override
	public VisualizerService createVisualizerService(Visualizer visualizer, VisualizerService.Observer observer) {
		return new br.com.carlosrafaelgn.fplay.playback.context.VisualizerService(visualizer, observer);
	}

	@Override
	public void visualizerSetSpeed(int speed) {
		SimpleVisualizerJni.commonSetSpeed(speed);
	}

	@Override
	public void visualizerSetColorIndex(int colorIndex) {
		SimpleVisualizerJni.commonSetColorIndex(colorIndex);
	}

	@Override
	public void visualizerUpdateMultiplier(boolean isVoice, boolean hq) {
		SimpleVisualizerJni.commonUpdateMultiplier(isVoice, hq);
	}

	@Override
	public int visualizerProcess(byte[] waveform, int opt) {
		return SimpleVisualizerJni.commonProcess(waveform, opt);
	}

	@Override
	public void previous() {
		Player.previous();
	}

	@Override
	public void pause() {
		Player.pause();
	}

	@Override
	public void resume() {
		Player.resume();
	}

	@Override
	public void playPause() {
		Player.playPause();
	}

	@Override
	public void next() {
		Player.next();
	}

	@Override
	public void setVolumeInPercentage(int percentage) {
		Player.setVolumeInPercentage(percentage);
	}

	@Override
	public int getVolumeInPercentage() {
		return Player.getVolumeInPercentage();
	}

	@Override
	public int increaseVolume() {
		return Player.increaseVolume();
	}

	@Override
	public int decreaseVolume() {
		return Player.decreaseVolume();
	}

	@Override
	public boolean isPlaying() {
		return Player.localPlaying;
	}

	@Override
	public boolean isPreparing() {
		return Player.isPreparing();
	}

	@Override
	public int getPlaybackPosition() {
		return Player.getPosition();
	}

	@Override
	public boolean isMediaButton(int keyCode) {
		return Player.isMediaButton(keyCode);
	}

	@Override
	public boolean handleMediaButton(int keyCode) {
		return Player.handleMediaButton(keyCode);
	}

	/**
	 * Copies the currently loaded song's metadata into {@code info}.
	 *
	 * @return false when no song is loaded (in which case {@code info} is untouched)
	 */
	@Override
	public boolean currentSongInfo(SongInfo info) {
		final Song song = Player.localSong;
		if (song == null)
			return false;
		song.info(info);
		return true;
	}

	@Override
	public String formatTime(int timeMS) {
		return Song.formatTime(timeMS);
	}

	@Override
	public int getPlaylistVersion() {
		return Player.songs.getModificationVersion();
	}

	@Override
	public int getPlaylistCount() {
		return Player.songs.getCount();
	}

	@Override
	public void getPlaylistSongInfo(int index, SongInfo info) {
		Player.songs.getItem(index).info(info);
	}

	@Override
	public String encodeAddressPort(int address, int port) {
		return Player.encodeAddressPort(address, port);
	}

	@Override
	public byte[] decodeAddressPort(String encodedAddressPort) {
		return Player.decodeAddressPort(encodedAddressPort);
	}

	/**
	 * Appends {@code str} to {@code builder}, escaping characters that are not valid
	 * inside a JSON string literal (backslash, quote, CR, LF, tab) and replacing NUL
	 * with a space.
	 */
	@Override
	public void adjustJsonString(StringBuilder builder, String str) {
		if (str != null) {
			final int len = str.length();
			for (int i = 0; i < len; i++) {
				final char c = str.charAt(i);
				switch (c) {
				case '\\':
					builder.append("\\\\");
					break;
				case '\"':
					builder.append("\\\"");
					break;
				case '\r':
					builder.append("\\r");
					break;
				case '\n':
					builder.append("\\n");
					break;
				case '\t':
					builder.append("\\t");
					break;
				case '\0':
					builder.append(' ');
					break;
				default:
					builder.append(c);
					break;
				}
			}
		}
	}

	@Override
	public String toJson(Object src) {
		return gson.toJson(src);
	}

	@Override
	public <T> T fromJson(String json, Class<T> clazz) throws JsonSyntaxException {
		return gson.fromJson(json, clazz);
	}
}
| |
/*
* Copyright (C) 2016 Robinhood Markets, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.robinhood.ticker;
import android.graphics.Canvas;
import android.graphics.Paint;
/**
* Represents a column of characters to be drawn on the screen. This class primarily handles
* animating within the column from one character to the next and drawing all of the intermediate
* states.
*
* @author Jin Cao, Robinhood
*/
class TickerColumn {
private TickerCharacterList[] characterLists;
private final TickerDrawMetrics metrics;
private char currentChar = TickerUtils.EMPTY_CHAR;
private char targetChar = TickerUtils.EMPTY_CHAR;
// The indices characters simply signify what positions are for the current and target
// characters in the assigned characterList. This tells us how to animate from the current
// to the target characters.
private char[] currentCharacterList;
private int startIndex;
private int endIndex;
// Drawing state variables that get updated whenever animation progress gets updated.
private int bottomCharIndex;
private float bottomDelta;
private float charHeight;
// Drawing state variables for handling size transition
private float sourceWidth, currentWidth, targetWidth, minimumRequiredWidth;
// The bottom delta variables signifies the vertical offset that the bottom drawn character
// is seeing. If the delta is 0, it means that the character is perfectly centered. If the
// delta is negative, it means that the bottom character is poking out from the bottom and
// part of the top character is visible. The delta should never be positive because it means
// that the bottom character is not actually the bottom character.
private float currentBottomDelta;
private float previousBottomDelta;
private int directionAdjustment;
// Creates a column bound to the shared draw metrics and the character lists
// that define the allowed scroll sequences.
TickerColumn(TickerCharacterList[] characterLists, TickerDrawMetrics metrics) {
    this.metrics = metrics;
    this.characterLists = characterLists;
}
/**
 * Swaps in a new set of character lists to use for subsequent animations
 * in this column.
 */
void setCharacterLists(TickerCharacterList[] characterLists) {
    this.characterLists = characterLists;
}
/**
 * Tells the column that the next character it should show is {@param targetChar}.
 * The change is either animated or instant depending on the animation progress
 * set by {@link #setAnimationProgress(float)}.
 */
void setTargetChar(char targetChar) {
    // Record the animation target and the width endpoints of the size transition.
    this.targetChar = targetChar;
    this.sourceWidth = this.currentWidth;
    this.targetWidth = metrics.getCharWidth(targetChar);
    this.minimumRequiredWidth = Math.max(this.sourceWidth, this.targetWidth);

    // Resolve start/end positions in the character list, then derive the
    // scroll direction from them (+1 when scrolling down, -1 when up).
    setCharacterIndices();
    directionAdjustment = (endIndex >= startIndex) ? 1 : -1;

    // If this call interrupted a running animation, remember the current offset
    // so the interruption can be blended in smoothly, then reset it.
    previousBottomDelta = currentBottomDelta;
    currentBottomDelta = 0f;
}
// Character currently shown (or animated away from) in this column.
char getCurrentChar() {
    return this.currentChar;
}
// Character this column is animating towards.
char getTargetChar() {
    return this.targetChar;
}
// Current (possibly mid-animation) width of the column; refreshes the cached
// widths first in case the draw metrics changed.
float getCurrentWidth() {
    checkForDrawMetricsChanges();
    return this.currentWidth;
}
// Widest extent needed during the current transition; refreshes the cached
// widths first in case the draw metrics changed.
float getMinimumRequiredWidth() {
    checkForDrawMetricsChanges();
    return this.minimumRequiredWidth;
}
/**
 * Populates {@link #startIndex} and {@link #endIndex} for the current and
 * target characters of the animation.
 */
private void setCharacterIndices() {
    currentCharacterList = null;
    // Scan every configured list. Note: no early break — when several lists
    // contain both characters, the match from the last list wins, mirroring
    // the original lookup order.
    for (TickerCharacterList list : characterLists) {
        final TickerCharacterList.CharacterIndices indices =
                list.getCharacterIndices(currentChar, targetChar, metrics.getPreferredScrollingDirection());
        if (indices != null) {
            currentCharacterList = list.getCharacterList();
            startIndex = indices.startIndex;
            endIndex = indices.endIndex;
        }
    }
    // No list contains both characters: fall back to a direct two-step
    // (or no-op) animation from source straight to target.
    if (currentCharacterList == null) {
        if (currentChar == targetChar) {
            currentCharacterList = new char[] {currentChar};
            startIndex = endIndex = 0;
        } else {
            currentCharacterList = new char[] {currentChar, targetChar};
            startIndex = 0;
            endIndex = 1;
        }
    }
}
// Called when the column's animation completes: collapse the required width
// back down to the settled current width.
void onAnimationEnd() {
    checkForDrawMetricsChanges();
    this.minimumRequiredWidth = this.currentWidth;
}
// Re-measures the target character and, when the metrics have changed, adopts
// the new width — but only once any in-flight width animation has settled
// (currentWidth == targetWidth).
private void checkForDrawMetricsChanges() {
    final float freshTargetWidth = metrics.getCharWidth(targetChar);
    if (currentWidth == targetWidth && targetWidth != freshTargetWidth) {
        targetWidth = freshTargetWidth;
        currentWidth = freshTargetWidth;
        minimumRequiredWidth = freshTargetWidth;
    }
}
/**
 * Updates the column's drawing state (bottom character index, vertical offset
 * and interpolated width) for the given animation progress in [0, 1].
 * At progress 1 the column snaps to its stable end state.
 */
void setAnimationProgress(float animationProgress) {
    if (animationProgress == 1f) {
        // Animation finished (or never started), set to stable state.
        this.currentChar = this.targetChar;
        currentBottomDelta = 0f;
        previousBottomDelta = 0f;
    }
    final float charHeight = metrics.getCharHeight();
    // First let's find the total height of this column between the start and end chars.
    final float totalHeight = charHeight * Math.abs(endIndex - startIndex);
    // The current base is then the part of the total height that we have progressed to
    // from the animation. For example, there might be 5 characters, each character is
    // 2px tall, so the totalHeight is 10. If we are at 50% progress, then our baseline
    // in this column is at 5 out of 10 (which is the 3rd character with a -50% offset
    // to the baseline).
    final float currentBase = animationProgress * totalHeight;
    // Given the current base, we now can find which character should drawn on the bottom.
    // Note that this position is a float. For example, if the bottomCharPosition is
    // 4.5, it means that the bottom character is the 4th character, and it has a -50%
    // offset relative to the baseline.
    final float bottomCharPosition = currentBase / charHeight;
    // By subtracting away the integer part of bottomCharPosition, we now have the
    // percentage representation of the bottom char's offset.
    final float bottomCharOffsetPercentage = bottomCharPosition - (int) bottomCharPosition;
    // We might have interrupted a previous animation if previousBottomDelta is not 0f.
    // If that's the case, we need to take this delta into account so that the previous
    // character offset won't be wiped away when we start a new animation.
    // We multiply by the inverse percentage so that the offset contribution from the delta
    // progresses along with the rest of the animation (from full delta to 0).
    final float additionalDelta = previousBottomDelta * (1f - animationProgress);
    // Now, using the bottom char's offset percentage and the delta we have from the
    // previous animation, we can now compute what's the actual offset of the bottom
    // character in the column relative to the baseline.
    bottomDelta = bottomCharOffsetPercentage * charHeight * directionAdjustment
            + additionalDelta;
    // Figure out what the actual character index is in the characterList, and then
    // draw the character with the computed offset.
    bottomCharIndex = startIndex + ((int) bottomCharPosition * directionAdjustment);
    this.charHeight = charHeight;
    // Linearly interpolate the column width between the source and target widths.
    this.currentWidth = sourceWidth + (targetWidth - sourceWidth) * animationProgress;
}
/**
 * Draw the current state of the column as it's animating from one character in the list
 * to another. This method will take into account various factors such as animation
 * progress and the previously interrupted animation state to render the characters
 * in the correct position on the canvas.
 */
void draw(Canvas canvas, Paint textPaint) {
    // drawText() returns true only when the index was in range and a character was
    // actually drawn; only in that case do we persist the state needed to resume
    // cleanly if this animation is interrupted.
    if (drawText(canvas, textPaint, currentCharacterList, bottomCharIndex, bottomDelta)) {
        // Save the current drawing state in case our animation gets interrupted
        if (bottomCharIndex >= 0) {
            currentChar = currentCharacterList[bottomCharIndex];
        }
        currentBottomDelta = bottomDelta;
    }
    // Draw the corresponding top and bottom characters if applicable: the neighbors
    // are offset by exactly one character height above/below the bottom character.
    drawText(canvas, textPaint, currentCharacterList, bottomCharIndex + 1,
            bottomDelta - charHeight);
    // Drawing the bottom character here might seem counter-intuitive because we've been
    // computing for the bottom character this entire time. But the bottom character
    // computed above might actually be above the baseline if we interrupted a previous
    // animation that gave us a positive additionalDelta.
    drawText(canvas, textPaint, currentCharacterList, bottomCharIndex - 1,
            bottomDelta + charHeight);
}
/**
 * Attempts to draw the single character at {@code index} of {@code characterList}
 * at the given vertical offset. Indices outside the list bounds are silently
 * skipped so callers can probe neighbor positions without bounds checks.
 *
 * @return whether the text was successfully drawn on the canvas
 */
private boolean drawText(Canvas canvas, Paint textPaint, char[] characterList,
        int index, float verticalOffset) {
    // Guard clause: nothing to draw for an out-of-range index.
    if (index < 0 || index >= characterList.length) {
        return false;
    }
    canvas.drawText(characterList, index, 1, 0f, verticalOffset, textPaint);
    return true;
}
}
| |
/*
* Copyright (c) 2001, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sbql4j8.com.sun.tools.javac.tree;
import sbql4j8.com.sun.tools.javac.util.*;
import sbql4j8.com.sun.tools.javac.tree.JCTree.*;
/** A subclass of Tree.Visitor, this class defines
* a general tree scanner pattern. Translation proceeds recursively in
* left-to-right order down a tree. There is one visitor method in this class
* for every possible kind of tree node. To obtain a specific
* scanner, it suffices to override those visitor methods which
* do some interesting work. The scanner class itself takes care of all
* navigational aspects.
*
* <p><b>This is NOT part of any supported API.
* If you write code that depends on this, you do so at your own risk.
* This code and its internal interfaces are subject to change or
* deletion without notice.</b>
*/
public class TreeScanner extends Visitor {

    /** Visitor method: Scan a single node.
     *  Null children are simply skipped, so individual visit methods never need
     *  their own null checks before delegating.
     */
    public void scan(JCTree tree) {
        if(tree!=null) tree.accept(this);
    }

    /** Visitor method: scan a list of nodes.
     *  Walks the javac cons-list (head/tail) iteratively, visiting elements in order.
     */
    public void scan(List<? extends JCTree> trees) {
        if (trees != null)
            for (List<? extends JCTree> l = trees; l.nonEmpty(); l = l.tail)
                scan(l.head);
    }

/* ***************************************************************************
 * Visitor methods
 ****************************************************************************/

    // Each visitXxx method below scans the node's children. Subclasses override only
    // the methods they care about; calling super keeps the traversal going.

    public void visitTopLevel(JCCompilationUnit tree) {
        scan(tree.packageAnnotations);
        scan(tree.pid);
        scan(tree.defs);
    }

    public void visitImport(JCImport tree) {
        scan(tree.qualid);
    }

    public void visitClassDef(JCClassDecl tree) {
        scan(tree.mods);
        scan(tree.typarams);
        scan(tree.extending);
        scan(tree.implementing);
        scan(tree.defs);
    }

    public void visitMethodDef(JCMethodDecl tree) {
        scan(tree.mods);
        scan(tree.restype);
        scan(tree.typarams);
        scan(tree.recvparam);
        scan(tree.params);
        scan(tree.thrown);
        scan(tree.defaultValue);
        scan(tree.body);
    }

    public void visitVarDef(JCVariableDecl tree) {
        scan(tree.mods);
        scan(tree.vartype);
        scan(tree.nameexpr);
        scan(tree.init);
    }

    // Empty statement (';') has no children to scan.
    public void visitSkip(JCSkip tree) {
    }

    public void visitBlock(JCBlock tree) {
        scan(tree.stats);
    }

    public void visitDoLoop(JCDoWhileLoop tree) {
        scan(tree.body);
        scan(tree.cond);
    }

    public void visitWhileLoop(JCWhileLoop tree) {
        scan(tree.cond);
        scan(tree.body);
    }

    public void visitForLoop(JCForLoop tree) {
        scan(tree.init);
        scan(tree.cond);
        scan(tree.step);
        scan(tree.body);
    }

    public void visitForeachLoop(JCEnhancedForLoop tree) {
        scan(tree.var);
        scan(tree.expr);
        scan(tree.body);
    }

    public void visitLabelled(JCLabeledStatement tree) {
        scan(tree.body);
    }

    public void visitSwitch(JCSwitch tree) {
        scan(tree.selector);
        scan(tree.cases);
    }

    public void visitCase(JCCase tree) {
        scan(tree.pat);
        scan(tree.stats);
    }

    public void visitSynchronized(JCSynchronized tree) {
        scan(tree.lock);
        scan(tree.body);
    }

    public void visitTry(JCTry tree) {
        scan(tree.resources);
        scan(tree.body);
        scan(tree.catchers);
        scan(tree.finalizer);
    }

    public void visitCatch(JCCatch tree) {
        scan(tree.param);
        scan(tree.body);
    }

    public void visitConditional(JCConditional tree) {
        scan(tree.cond);
        scan(tree.truepart);
        scan(tree.falsepart);
    }

    public void visitIf(JCIf tree) {
        scan(tree.cond);
        scan(tree.thenpart);
        scan(tree.elsepart);
    }

    public void visitExec(JCExpressionStatement tree) {
        scan(tree.expr);
    }

    public void visitBreak(JCBreak tree) {
    }

    public void visitContinue(JCContinue tree) {
    }

    public void visitReturn(JCReturn tree) {
        scan(tree.expr);
    }

    public void visitThrow(JCThrow tree) {
        scan(tree.expr);
    }

    public void visitAssert(JCAssert tree) {
        scan(tree.cond);
        scan(tree.detail);
    }

    public void visitApply(JCMethodInvocation tree) {
        scan(tree.typeargs);
        scan(tree.meth);
        scan(tree.args);
    }

    public void visitNewClass(JCNewClass tree) {
        scan(tree.encl);
        scan(tree.typeargs);
        scan(tree.clazz);
        scan(tree.args);
        scan(tree.def);
    }

    public void visitNewArray(JCNewArray tree) {
        scan(tree.annotations);
        scan(tree.elemtype);
        scan(tree.dims);
        // Per-dimension annotation lists are stored as a list of lists; scan each.
        for (List<JCAnnotation> annos : tree.dimAnnotations)
            scan(annos);
        scan(tree.elems);
    }

    public void visitLambda(JCLambda tree) {
        // NOTE: the body is scanned before the parameters here; the order is
        // preserved as-is since subclasses may rely on it.
        scan(tree.body);
        scan(tree.params);
    }

    public void visitParens(JCParens tree) {
        scan(tree.expr);
    }

    public void visitAssign(JCAssign tree) {
        scan(tree.lhs);
        scan(tree.rhs);
    }

    public void visitAssignop(JCAssignOp tree) {
        scan(tree.lhs);
        scan(tree.rhs);
    }

    public void visitUnary(JCUnary tree) {
        scan(tree.arg);
    }

    public void visitBinary(JCBinary tree) {
        scan(tree.lhs);
        scan(tree.rhs);
    }

    public void visitTypeCast(JCTypeCast tree) {
        scan(tree.clazz);
        scan(tree.expr);
    }

    public void visitTypeTest(JCInstanceOf tree) {
        scan(tree.expr);
        scan(tree.clazz);
    }

    public void visitIndexed(JCArrayAccess tree) {
        scan(tree.indexed);
        scan(tree.index);
    }

    public void visitSelect(JCFieldAccess tree) {
        scan(tree.selected);
    }

    public void visitReference(JCMemberReference tree) {
        scan(tree.expr);
        scan(tree.typeargs);
    }

    // Leaf nodes: identifiers, literals and primitive type trees have no children.
    public void visitIdent(JCIdent tree) {
    }

    public void visitLiteral(JCLiteral tree) {
    }

    public void visitTypeIdent(JCPrimitiveTypeTree tree) {
    }

    public void visitTypeArray(JCArrayTypeTree tree) {
        scan(tree.elemtype);
    }

    public void visitTypeApply(JCTypeApply tree) {
        scan(tree.clazz);
        scan(tree.arguments);
    }

    public void visitTypeUnion(JCTypeUnion tree) {
        scan(tree.alternatives);
    }

    public void visitTypeIntersection(JCTypeIntersection tree) {
        scan(tree.bounds);
    }

    public void visitTypeParameter(JCTypeParameter tree) {
        scan(tree.annotations);
        scan(tree.bounds);
    }

    @Override
    public void visitWildcard(JCWildcard tree) {
        scan(tree.kind);
        // An unbounded wildcard ('?') has no inner tree.
        if (tree.inner != null)
            scan(tree.inner);
    }

    @Override
    public void visitTypeBoundKind(TypeBoundKind that) {
    }

    public void visitModifiers(JCModifiers tree) {
        scan(tree.annotations);
    }

    public void visitAnnotation(JCAnnotation tree) {
        scan(tree.annotationType);
        scan(tree.args);
    }

    public void visitAnnotatedType(JCAnnotatedType tree) {
        scan(tree.annotations);
        scan(tree.underlyingType);
    }

    public void visitErroneous(JCErroneous tree) {
    }

    public void visitLetExpr(LetExpr tree) {
        scan(tree.defs);
        scan(tree.expr);
    }

    /** Fallback: reaching this means a node kind with no dedicated visit method. */
    public void visitTree(JCTree tree) {
        Assert.error();
    }
}
| |
package org.apache.solr.common.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.noggit.JSONUtil;
import org.noggit.JSONWriter;
import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
/**
* Models a Collection in zookeeper (but that Java name is obviously taken, hence "DocCollection")
*/
public class DocCollection extends ZkNodeProps {
  public static final String DOC_ROUTER = "router";
  public static final String SHARDS = "shards";
  public static final String STATE_FORMAT = "stateFormat";
  public static final String RULE = "rule";
  public static final String SNITCH = "snitch";

  // Version of the znode this state was read from; -1 means not (yet) read from ZK.
  private int znodeVersion = -1; // sentinel

  private final String name;
  private final Map<String, Slice> slices;
  private final Map<String, Slice> activeSlices;
  private final DocRouter router;
  private final String znode;

  private final Integer replicationFactor;
  private final Integer maxShardsPerNode;
  private final Boolean autoAddReplicas;

  public DocCollection(String name, Map<String, Slice> slices, Map<String, Object> props, DocRouter router) {
    this(name, slices, props, router, -1, ZkStateReader.CLUSTER_STATE);
  }

  /**
   * @param name The name of the collection
   * @param slices The logical shards of the collection.  This is used directly and a copy is not made.
   * @param props The properties of the slice. This is used directly and a copy is not made.
   * @param zkVersion version of the znode this state was read from, or -1 if unknown
   * @param znode path of the znode this state was read from; null means the shared cluster state node
   */
  public DocCollection(String name, Map<String, Slice> slices, Map<String, Object> props, DocRouter router, int zkVersion, String znode) {
    // The embedded assignment ensures the verifyProp calls below never see a null map.
    super(props == null ? props = new HashMap<String, Object>() : props);
    this.znodeVersion = zkVersion;
    this.name = name;

    this.slices = slices;
    this.activeSlices = new HashMap<>();
    this.replicationFactor = (Integer) verifyProp(props, REPLICATION_FACTOR);
    this.maxShardsPerNode = (Integer) verifyProp(props, MAX_SHARDS_PER_NODE);
    Boolean autoAddReplicas = (Boolean) verifyProp(props, AUTO_ADD_REPLICAS);
    this.autoAddReplicas = autoAddReplicas == null ? false : autoAddReplicas;
    // Validate types even for properties that are not cached in fields.
    verifyProp(props, RULE);
    verifyProp(props, SNITCH);

    // Cache the ACTIVE subset so getActiveSlices()/getActiveSlicesMap() are cheap.
    for (Map.Entry<String, Slice> slice : slices.entrySet()) {
      if (slice.getValue().getState() == Slice.State.ACTIVE) {
        this.activeSlices.put(slice.getKey(), slice.getValue());
      }
    }
    this.router = router;
    this.znode = znode == null ? ZkStateReader.CLUSTER_STATE : znode;
    assert name != null && slices != null;
  }

  /**
   * Parses and type-checks a known collection property.
   *
   * @return the normalized value ({@code Integer}, {@code Boolean} or {@code List}),
   *         or null if the property is absent
   * @throws SolrException if {@code propName} is not a recognized property
   */
  public static Object verifyProp(Map<String, Object> props, String propName) {
    Object o = props.get(propName);
    if (o == null) return null;
    switch (propName) {
      case MAX_SHARDS_PER_NODE:
      case REPLICATION_FACTOR:
        return Integer.parseInt(o.toString());
      case AUTO_ADD_REPLICAS:
        return Boolean.parseBoolean(o.toString());
      // Use the declared constants for consistency with the cases above.
      case SNITCH:
      case RULE:
        return (List) o;
      default:
        throw new SolrException(ErrorCode.SERVER_ERROR, "Unknown property " + propName);
    }
  }

  /**Use this to make an exact copy of DocCollection with a new set of Slices and every other property as is
   * @param slices the new set of Slices
   * @return the resulting DocCollection
   */
  public DocCollection copyWithSlices(Map<String, Slice> slices){
    return new DocCollection(getName(), slices, propMap, router, znodeVersion, znode);
  }

  /**
   * Return collection name.
   */
  public String getName() {
    return name;
  }

  public Slice getSlice(String sliceName) {
    return slices.get(sliceName);
  }

  /**
   * Gets the list of all slices for this collection.
   */
  public Collection<Slice> getSlices() {
    return slices.values();
  }

  /**
   * Return the list of active slices for this collection.
   */
  public Collection<Slice> getActiveSlices() {
    return activeSlices.values();
  }

  /**
   * Get the map of all slices (sliceName->Slice) for this collection.
   */
  public Map<String, Slice> getSlicesMap() {
    return slices;
  }

  /**
   * Get the map of active slices (sliceName->Slice) for this collection.
   */
  public Map<String, Slice> getActiveSlicesMap() {
    return activeSlices;
  }

  public int getZNodeVersion(){
    return znodeVersion;
  }

  /** @return 1 if the state lives in the shared cluster-state node, 2 for per-collection state. */
  public int getStateFormat() {
    return ZkStateReader.CLUSTER_STATE.equals(znode) ? 1 : 2;
  }

  /**
   * @return replication factor for this collection or null if no
   *         replication factor exists.
   */
  public Integer getReplicationFactor() {
    return replicationFactor;
  }

  public boolean getAutoAddReplicas() {
    return autoAddReplicas;
  }

  /** @throws SolrException if the property was not present in the cluster state */
  public int getMaxShardsPerNode() {
    if (maxShardsPerNode == null) {
      throw new SolrException(ErrorCode.BAD_REQUEST, MAX_SHARDS_PER_NODE + " is not in the cluster state.");
    }
    return maxShardsPerNode;
  }

  public String getZNode(){
    return znode;
  }

  public DocRouter getRouter() {
    return router;
  }

  @Override
  public String toString() {
    return "DocCollection("+name+")=" + JSONUtil.toJSON(this);
  }

  @Override
  public void write(JSONWriter jsonWriter) {
    // Serialize all collection properties plus the slices under the "shards" key.
    LinkedHashMap<String, Object> all = new LinkedHashMap<>(slices.size() + 1);
    all.putAll(propMap);
    all.put(SHARDS, slices);
    jsonWriter.write(all);
  }

  /** @return the replica with the given coreNodeName, searching all slices, or null if not found */
  public Replica getReplica(String coreNodeName) {
    for (Slice slice : slices.values()) {
      Replica replica = slice.getReplica(coreNodeName);
      if (replica != null) return replica;
    }
    return null;
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.deprecation;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.daemon.JavaErrorMessages;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightMessageUtil;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiImplUtil;
import com.intellij.psi.impl.compiled.ClsMethodImpl;
import com.intellij.psi.infos.MethodCandidateInfo;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.util.*;
import com.intellij.refactoring.util.RefactoringChangeUtil;
import com.intellij.util.ObjectUtils;
import com.siyeh.ig.psiutils.ExpectedTypeUtils;
import com.siyeh.ig.psiutils.ExpressionUtils;
import one.util.streamex.MoreCollectors;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.Objects;
import java.util.stream.Stream;
/**
 * Base class for deprecation inspections: detects references to deprecated (and
 * "marked for removal") members, registers problems and, where the member's Javadoc
 * names a non-deprecated replacement via {@code @link}/{@code @see}, offers a quick fix.
 */
public abstract class DeprecationInspectionBase extends LocalInspectionTool {
  // Serialized inspection option (upper-case by convention for settings persistence):
  // skip usages that live in the same outermost class as the deprecated member.
  public boolean IGNORE_IN_SAME_OUTERMOST_CLASS = true;

  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  /**
   * Checks whether {@code element} is deprecated and, if so, registers a problem
   * on {@code elementToHighlight} in {@code holder}.
   *
   * @param forRemoval when true, report only members marked {@code forRemoval};
   *                   when false, only plain deprecations
   */
  public static void checkDeprecated(@NotNull PsiElement element,
                                     @NotNull PsiElement elementToHighlight,
                                     @Nullable TextRange rangeInElement,
                                     boolean ignoreInsideDeprecated,
                                     boolean ignoreImportStatements,
                                     boolean ignoreMethodsOfDeprecated,
                                     boolean ignoreInSameOutermostClass,
                                     @NotNull ProblemsHolder holder,
                                     boolean forRemoval,
                                     @NotNull ProblemHighlightType highlightType) {
    if (!(element instanceof PsiModifierListOwner)) {
      return;
    }
    PsiModifierListOwner modifierListOwner = (PsiModifierListOwner)element;
    if (PsiImplUtil.isDeprecated(modifierListOwner)) {
      // Only report the flavor this invocation asks for (deprecated vs. for-removal).
      if (forRemoval != isForRemovalAttributeSet(modifierListOwner)) {
        return;
      }
    }
    else {
      // The element itself is not deprecated; optionally check whether its
      // containing class is, recursing once with ignoreMethodsOfDeprecated=false.
      if (!ignoreMethodsOfDeprecated) {
        PsiClass containingClass = element instanceof PsiMember ? ((PsiMember)element).getContainingClass() : null;
        if (containingClass != null) {
          checkDeprecated(containingClass, elementToHighlight, rangeInElement, ignoreInsideDeprecated, ignoreImportStatements,
                          false, ignoreInSameOutermostClass, holder, forRemoval, highlightType);
        }
      }
      return;
    }
    if (ignoreInSameOutermostClass && areElementsInSameOutermostClass(element, elementToHighlight)) return;
    if (ignoreInsideDeprecated && isElementInsideDeprecated(elementToHighlight)) return;
    if (ignoreImportStatements && isElementInsideImportStatement(elementToHighlight)) return;
    String description = JavaErrorMessages.message(forRemoval ? "marked.for.removal.symbol" : "deprecated.symbol",
                                                   getPresentableName(element));
    LocalQuickFix replacementQuickFix = getReplacementQuickFix(element, elementToHighlight);
    holder.registerProblem(elementToHighlight, getDescription(description, forRemoval, highlightType), highlightType, rangeInElement,
                           replacementQuickFix);
  }

  private static boolean isElementInsideImportStatement(@NotNull PsiElement elementToHighlight) {
    return PsiTreeUtil.getParentOfType(elementToHighlight, PsiImportStatement.class) != null;
  }

  /** @return true if any enclosing modifier-list owner (method, class, field...) is itself deprecated */
  public static boolean isElementInsideDeprecated(@NotNull PsiElement element) {
    PsiElement parent = element;
    while ((parent = PsiTreeUtil.getParentOfType(parent, PsiModifierListOwner.class, true)) != null) {
      if (PsiImplUtil.isDeprecated(parent)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Builds a quick fix that replaces the deprecated method call or field reference
   * with the replacement named in the member's Javadoc, if one can be found.
   */
  @Nullable
  private static LocalQuickFix getReplacementQuickFix(@NotNull PsiElement refElement, @NotNull PsiElement elementToHighlight) {
    PsiMethodCallExpression methodCall = getMethodCall(elementToHighlight);
    if (refElement instanceof PsiMethod && methodCall != null) {
      PsiMethod replacement = findReplacementInJavaDoc((PsiMethod)refElement, methodCall);
      if (replacement != null) {
        return new ReplaceMethodCallFix((PsiMethodCallExpression)elementToHighlight.getParent().getParent(), replacement);
      }
    }
    if (refElement instanceof PsiField) {
      PsiReferenceExpression referenceExpression = ObjectUtils.tryCast(elementToHighlight.getParent(), PsiReferenceExpression.class);
      if (referenceExpression != null) {
        PsiField replacement = findReplacementInJavaDoc((PsiField)refElement, referenceExpression);
        if (replacement != null) {
          return new ReplaceFieldReferenceFix(referenceExpression, replacement);
        }
      }
    }
    return null;
  }

  public static String getPresentableName(@NotNull PsiElement psiElement) {
    //Annotation attribute methods don't have parameters.
    if (psiElement instanceof PsiMethod && PsiUtil.isAnnotationMethod(psiElement)) {
      return ((PsiMethod)psiElement).getName();
    }
    return HighlightMessageUtil.getSymbolName(psiElement);
  }

  /** @return true if the element carries {@code @Deprecated(forRemoval = true)} */
  protected static boolean isForRemovalAttributeSet(@NotNull PsiModifierListOwner element) {
    PsiAnnotation annotation = AnnotationUtil.findAnnotation(element, CommonClassNames.JAVA_LANG_DEPRECATED);
    if (annotation != null) {
      return isForRemovalAttributeSet(annotation);
    }
    return false;
  }

  /**
   * Returns value of {@link Deprecated#forRemoval} attribute, which is available since Java 9.
   *
   * @param deprecatedAnnotation annotation instance to extract value of
   * @return {@code true} if the {@code forRemoval} attribute is set to true,
   * {@code false} if it isn't set or is set to {@code false}.
   */
  protected static boolean isForRemovalAttributeSet(@NotNull PsiAnnotation deprecatedAnnotation) {
    return Boolean.TRUE == AnnotationUtil.getBooleanAttributeValue(deprecatedAnnotation, "forRemoval");
  }

  private static boolean areElementsInSameOutermostClass(PsiElement refElement, PsiElement elementToHighlight) {
    // The top-level class of refElement is cached and invalidated on Java structure changes.
    PsiClass outermostClass = CachedValuesManager.getCachedValue(
      refElement,
      () -> new CachedValueProvider.Result<>(PsiUtil.getTopLevelClass(refElement), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT)
    );
    return outermostClass != null && outermostClass == PsiUtil.getTopLevelClass(elementToHighlight);
  }

  static void addSameOutermostClassCheckBox(MultipleCheckboxOptionsPanel panel) {
    panel.addCheckbox("Ignore in the same outermost class", "IGNORE_IN_SAME_OUTERMOST_CLASS");
  }

  protected static String getDescription(@NotNull String description, boolean forRemoval, ProblemHighlightType highlightType) {
    // In tests, encode a non-default highlight type into the message so it can be asserted on.
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      ProblemHighlightType defaultType = forRemoval ? ProblemHighlightType.LIKE_MARKED_FOR_REMOVAL : ProblemHighlightType.LIKE_DEPRECATED;
      if (highlightType != defaultType) {
        return description + "(" + highlightType + ")";
      }
    }
    return description;
  }

  /** Finds the unique non-deprecated field named in the Javadoc that can replace this reference. */
  private static PsiField findReplacementInJavaDoc(@NotNull PsiField field, @NotNull PsiReferenceExpression referenceExpression) {
    return getReplacementCandidatesFromJavadoc(field, PsiField.class, field, getQualifierClass(referenceExpression))
      .filter(tagField -> areReplaceable(tagField, referenceExpression))
      .collect(MoreCollectors.onlyOne())
      .orElse(null);
  }

  /** Finds the unique non-deprecated method named in the Javadoc that can replace this call. */
  private static PsiMethod findReplacementInJavaDoc(@NotNull PsiMethod method, @NotNull PsiMethodCallExpression call) {
    if (method instanceof PsiConstructorCall) return null;
    // For compiled methods, the doc lives on the source mirror; delegate to it.
    if (method instanceof ClsMethodImpl) {
      PsiMethod sourceMethod = ((ClsMethodImpl)method).getSourceMirrorMethod();
      return sourceMethod == null ? null : findReplacementInJavaDoc(sourceMethod, call);
    }
    return getReplacementCandidatesFromJavadoc(method, PsiMethod.class, call, getQualifierClass(call.getMethodExpression()))
      .filter(tagMethod -> areReplaceable(method, tagMethod, call))
      .collect(MoreCollectors.onlyOne())
      .orElse(null);
  }

  /**
   * Collects members of type {@code clazz} referenced by {@code @link}/{@code @see} tags
   * in {@code member}'s Javadoc that are not deprecated, are accessible from
   * {@code context}, and are not {@code member} itself.
   */
  @NotNull
  private static <T extends PsiDocCommentOwner> Stream<? extends T> getReplacementCandidatesFromJavadoc(T member, Class<T> clazz, PsiElement context, PsiClass qualifierClass) {
    PsiDocComment doc = member.getDocComment();
    if (doc == null) return Stream.empty();
    Collection<PsiDocTag> docTags = PsiTreeUtil.findChildrenOfType(doc, PsiDocTag.class);
    if (docTags.isEmpty()) return Stream.empty();
    return docTags
      .stream()
      .filter(t -> {
        String name = t.getName();
        return "link".equals(name) || "see".equals(name);
      })
      .map(tag -> tag.getValueElement())
      .filter(Objects::nonNull)
      .map(value -> value.getReference())
      .filter(Objects::nonNull)
      .map(reference -> reference.resolve())
      .distinct()
      .map(resolved -> ObjectUtils.tryCast(resolved, clazz))
      .filter(Objects::nonNull)
      .filter(tagMethod -> !tagMethod.isDeprecated()) // not deprecated
      .filter(tagMethod -> PsiResolveHelper.SERVICE.getInstance(context.getProject()).isAccessible(tagMethod, context, qualifierClass)) // accessible
      .filter(tagMethod -> !member.getManager().areElementsEquivalent(tagMethod, member)); // not the same
  }

  /** @return true when the suggested field's type is assignable to the expected type at the usage site */
  private static boolean areReplaceable(PsiField suggested, PsiReferenceExpression expression) {
    if (ExpressionUtils.isVoidContext(expression)) return true;
    PsiType expectedType = ExpectedTypeUtils.findExpectedType(expression, true);
    if (expectedType == null) return true;
    PsiType suggestedType = suggested.getType();
    return TypeConversionUtil.isAssignable(expectedType, suggestedType);
  }

  /**
   * Checks whether rewriting {@code call} to invoke {@code suggestedReplacement}
   * (with the original argument list) would resolve, be applicable, and keep an
   * assignable result type.
   */
  private static boolean areReplaceable(@NotNull PsiMethod initial,
                                        @NotNull PsiMethod suggestedReplacement,
                                        @NotNull PsiMethodCallExpression call) {
    boolean isInitialStatic = initial.hasModifierProperty(PsiModifier.STATIC);

    // Static calls are re-qualified with the replacement's class; instance calls keep their qualifier.
    String qualifierText;
    if (isInitialStatic) {
      qualifierText = ObjectUtils.notNull(suggestedReplacement.getContainingClass()).getQualifiedName() + ".";
    }
    else {
      PsiExpression qualifierExpression = call.getMethodExpression().getQualifierExpression();
      qualifierText = qualifierExpression == null ? "" : qualifierExpression.getText() + ".";
    }

    // Build a synthetic call expression to test resolution and applicability.
    PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(initial.getProject());
    PsiExpressionList arguments = call.getArgumentList();
    PsiMethodCallExpression suggestedCall = (PsiMethodCallExpression)elementFactory
      .createExpressionFromText(qualifierText + suggestedReplacement.getName() + arguments.getText(), call);

    PsiType type = ExpectedTypeUtils.findExpectedType(call, true);
    if (type != null && !type.equals(PsiType.VOID)) {
      PsiType suggestedCallType = suggestedCall.getType();
      if (!ExpressionUtils.isVoidContext(call) && suggestedCallType != null && !TypeConversionUtil.isAssignable(type, suggestedCallType)) {
        return false;
      }
    }

    MethodCandidateInfo result = ObjectUtils.tryCast(suggestedCall.resolveMethodGenerics(), MethodCandidateInfo.class);
    return result != null && result.isApplicable();
  }

  /** @return the class of the reference's qualifier, or the enclosing class for unqualified references */
  @Nullable
  private static PsiClass getQualifierClass(@NotNull PsiReferenceExpression referenceExpression) {
    PsiExpression expression = referenceExpression.getQualifierExpression();
    if (expression == null) {
      return RefactoringChangeUtil.getThisClass(referenceExpression);
    }
    return PsiUtil.resolveClassInType(expression.getType());
  }

  /** @return the enclosing method call when {@code element} is the identifier of a reference expression */
  @Nullable
  private static PsiMethodCallExpression getMethodCall(@NotNull PsiElement element) {
    if (!(element instanceof PsiIdentifier)) return null;
    PsiElement parent = element.getParent();
    if (!(parent instanceof PsiReferenceExpression)) return null;
    return ObjectUtils.tryCast(parent.getParent(), PsiMethodCallExpression.class);
  }
}
| |
/* Copyright (C) 2016 New York University
This file is part of Data Polygamy which is released under the Revised BSD License
See file LICENSE for full license details. */
package edu.nyu.vida.data_polygamy.exp;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.ints.IntSet;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import edu.nyu.vida.data_polygamy.ct.MergeTrees.TreeType;
import edu.nyu.vida.data_polygamy.ctdata.SpatioTemporalVal;
import edu.nyu.vida.data_polygamy.ctdata.TopologicalIndex;
import edu.nyu.vida.data_polygamy.ctdata.TopologicalIndex.Attribute;
import edu.nyu.vida.data_polygamy.utils.FrameworkUtils;
import edu.nyu.vida.data_polygamy.utils.SpatialGraph;
import edu.nyu.vida.data_polygamy.utils.Utilities;
public class StandaloneExp {
static String[] dataAttributes = {"count-db_idx"};
HashSet<String> dataAttributesHashSet = new HashSet<String>(Arrays.asList(dataAttributes));
HashMap<String, Attribute> attributes = new HashMap<String, Attribute>();
HashMap<String, ArrayList<Float>> values = new HashMap<String, ArrayList<Float>>();
TreeType [] types = {TreeType.JoinTree, TreeType.SplitTree};
static long indexTimes = 0L;
static long queryTimes = 0L;
/**
 * Parses a 1D (city-level) aggregates file into {@link #attributes} and {@link #values}.
 * All parsed values are attached to the single spatial node 0.
 *
 * Expected layout (assumed from the parsing below — TODO confirm against the file writer):
 * an attribute header line ("...: name"), an optional "spatial..." line, then per-month
 * sections ("...: month") followed by "time,value" pairs.
 *
 * Parse/IO failures are reported via printStackTrace and leave whatever was loaded so far
 * (kept as best-effort, matching the original behavior).
 */
void load1DData(String aggregatesFile) {
    String[] s = null;
    // try-with-resources guarantees the reader is closed even if parsing throws.
    try (BufferedReader buf = new BufferedReader(new FileReader(aggregatesFile))) {
        s = Utilities.getLine(buf, ",");
        while (s != null) {
            String attr = Utilities.splitString(s[0], ":")[1].trim();
            Attribute a = new Attribute();
            // 1D data lives on a single synthetic spatial node, id 0.
            a.nodeSet.add(0);
            s = Utilities.getLine(buf, ",");
            // Skip the optional "spatial..." header line.
            if (s != null && s.length > 0 && s[0].toLowerCase().startsWith("spatial")) {
                s = Utilities.getLine(buf, ",");
            }
            if (s == null || s.length == 0) {
                System.out.println("Empty: ---------------------- " + attr);
            }
            while (s != null && s.length > 0) {
                int month = Integer.parseInt(Utilities.splitString(s[0], ":")[1].trim());
                s = Utilities.getLine(buf, ",");
                HashSet<SpatioTemporalVal> set = new HashSet<SpatioTemporalVal>();
                while (s != null && s.length == 2) {
                    int time = Integer.parseInt(s[0]);
                    float value = Float.parseFloat(s[1]);
                    // Fix: the original added the same value to the set twice; once suffices.
                    set.add(new SpatioTemporalVal(0, time, value));
                    ArrayList<Float> vals = values.get(attr);
                    if (vals == null) {
                        vals = new ArrayList<Float>();
                        values.put(attr, vals);
                    }
                    vals.add(value);
                    s = Utilities.getLine(buf, ",");
                }
                if (!set.isEmpty()) {
                    ArrayList<SpatioTemporalVal> arr = new ArrayList<SpatioTemporalVal>(set);
                    Collections.sort(arr);
                    a.data.put(month, arr);
                }
            }
            // Only keep attributes we were asked to load.
            if (dataAttributesHashSet.contains(attr)) {
                attributes.put(attr, a);
            }
            s = Utilities.getLine(buf, ",");
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Parses a 2D (per-spatial-node) aggregates file into {@link #attributes} and
 * {@link #values}. Unlike {@link #load1DData}, each attribute section carries an
 * explicit spatial id, and monthly lists are merged across sections and sorted at
 * the end. All attributes end up sharing the full set of spatial ids seen.
 *
 * Format assumptions (inferred from the parsing below — TODO confirm against the
 * file writer): attribute header ("...: name"), a ":"-separated spatial-id line,
 * then per-month sections ("...: month") followed by "time,value" pairs.
 */
void load2DData(String aggregateFile) {
    String[] s = null;
    // Accumulates every spatial id seen in the file; assigned to all attributes below.
    IntOpenHashSet nodeSet = new IntOpenHashSet();
    try {
        BufferedReader buf = new BufferedReader(new FileReader(aggregateFile));
        s = Utilities.getLine(buf, ",");
        //System.out.println(s[0]);
        while (true) {
            if (s == null) {
                break;
            }
            String attr = Utilities.splitString(s[0], ":")[1].trim();
            // The same attribute can appear in multiple sections (one per spatial id);
            // reuse the existing Attribute so months merge instead of being replaced.
            Attribute a = attributes.get(attr);
            if(a == null) {
                a = new Attribute();
                attributes.put(attr, a);
            }
            s = Utilities.getLine(buf, ":");
            int sid = Integer.parseInt(s[1].trim());
            nodeSet.add(sid);
            s = Utilities.getLine(buf, ",");
            while (s != null && s.length > 0) {
                int month = Integer.parseInt(Utilities.splitString(s[0], ":")[1].trim());
                s = Utilities.getLine(buf, ",");
                HashSet<SpatioTemporalVal> set = new HashSet<SpatioTemporalVal>();
                while (s != null && s.length == 2) {
                    int time = Integer.parseInt(s[0]);
                    float value = Float.parseFloat(s[1]);
                    SpatioTemporalVal val = new SpatioTemporalVal(sid, time, value);
                    ArrayList<Float> vals = (values.get(attr) == null) ? new ArrayList<Float>() : values.get(attr);
                    vals.add(value);
                    values.put(attr, vals);
                    set.add(val);
                    s = Utilities.getLine(buf, ",");
                }
                if (set.size() > 0) {
                    // Merge this section's values into the month's list (sorted after the whole file is read).
                    ArrayList<SpatioTemporalVal> monthlyArr = a.data.get(month);
                    if(monthlyArr == null) {
                        monthlyArr = new ArrayList<>();
                        a.data.put(month, monthlyArr);
                    }
                    monthlyArr.addAll(set);
                }
            }
            if (dataAttributesHashSet.contains(attr)) {
                attributes.put(attr, a);
            }
            s = Utilities.getLine(buf, ",");
        }
        buf.close();
        // Final pass: sort each month's values and share the complete node set.
        for(Attribute a: attributes.values()) {
            for(ArrayList<SpatioTemporalVal> arr: a.data.values()) {
                Collections.sort(arr);
            }
            a.nodeSet = nodeSet;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Collapses the first {@code noMonths} months of the named attribute into a single
 * sorted series stored under month key 0, reusing the source attribute's node set.
 * Calls Utilities.er (presumably fatal — TODO confirm) if fewer months are available.
 */
public Attribute createNewAttribute(HashMap<String, Attribute> attributes,
        String attribute, int noMonths) {
    Attribute source = attributes.get(attribute);
    ArrayList<SpatioTemporalVal> merged = new ArrayList<SpatioTemporalVal>();
    IntIterator months = source.data.keySet().iterator();
    for (int consumed = 0; consumed < noMonths; consumed++) {
        if (!months.hasNext()) {
            Utilities.er("no. of months is greater than what is present");
        }
        merged.addAll(source.data.get(months.nextInt()));
    }
    Collections.sort(merged);
    Attribute collapsed = new Attribute();
    collapsed.data.put(0, merged);
    collapsed.nodeSet = source.nodeSet;
    return collapsed;
}
public TopologicalIndex createIndex(Attribute na, int spatialRes, int nv, int[][] edges) {
    // Build a topological index over the attribute, recording the wall-clock
    // build time in the indexTimes field for later reporting.
    TopologicalIndex topoIndex = new TopologicalIndex(spatialRes, FrameworkUtils.HOUR, nv);
    long begin = System.nanoTime();
    topoIndex.createIndex(na, edges);
    indexTimes = System.nanoTime() - begin;
    return topoIndex;
}
public void test1d(int noMonths, String dataFile) {
    // 1D experiment: load the series, then build the index and run one query
    // three times (only the last run's timings survive in the fields).
    load1DData(dataFile);
    final int[][] noEdges = new int[0][0];
    final int spatialRes = FrameworkUtils.CITY;
    final int nv = 1;
    int runs = 3;
    while (runs-- > 0) {
        Attribute attr = createNewAttribute(attributes, dataAttributes[0], noMonths);
        TopologicalIndex index = createIndex(attr, spatialRes, nv, noEdges);
        long begin = System.nanoTime();
        ArrayList<byte[]> events = index.queryEvents(0.4f, false, attr, "");
        queryTimes = System.nanoTime() - begin;
        events.clear();
    }
    System.out.println(noMonths + "\t" + indexTimes + "\t" + queryTimes);
}
/**
 * 2D experiment: loads the data set and the spatial graph's edge list, then
 * builds the index and runs one query three times, printing the timings of
 * the final run.
 *
 * @param noMonths     number of months of data to merge into the test attribute
 * @param dataFile     data set file
 * @param graphFile    graph file; first line "nv ne", then one "v1 v2" edge per line
 * @param polygonsFile polygons file used to initialize the spatial graph
 * @throws IOException if the graph file cannot be read
 */
public void test2d(int noMonths, String dataFile, String graphFile, String polygonsFile) throws IOException {
    load2DData(dataFile);
    int spatialRes = FrameworkUtils.NBHD;
    SpatialGraph spatialGraph = new SpatialGraph();
    try {
        spatialGraph.init(polygonsFile, graphFile);
    } catch (IOException e) {
        e.printStackTrace();
        return;
    }
    int nv;
    int[][] edges;
    // Fix: the original leaked the reader when any parse threw; try-with-resources
    // guarantees it is closed on all paths.
    try (BufferedReader reader = new BufferedReader(new FileReader(graphFile))) {
        String[] s = Utilities.splitString(reader.readLine().trim());
        nv = Integer.parseInt(s[0].trim());
        int ne = Integer.parseInt(s[1].trim());
        edges = new int[ne][2];
        for (int i = 0; i < ne; i++) {
            s = Utilities.splitString(reader.readLine().trim());
            int v1 = Integer.parseInt(s[0].trim());
            int v2 = Integer.parseInt(s[1].trim());
            if (v1 == v2) {
                continue; // self-loop: row i stays {0, 0}, matching prior behavior
            }
            edges[i][0] = v1;
            edges[i][1] = v2;
        }
    }
    for (int cc = 0; cc < 3; cc++) {
        Attribute na = createNewAttribute(attributes, dataAttributes[0], noMonths);
        TopologicalIndex index = createIndex(na, spatialRes, nv, edges);
        long st = System.nanoTime();
        ArrayList<byte[]> e1 = index.queryEvents(0.4f, false, na, "");
        queryTimes = System.nanoTime() - st;
        e1.clear();
    }
    System.out.println(noMonths + "\t" + indexTimes + "\t" + queryTimes);
}
public void run(int noMonths, String dataFile, String graphFile, String polygonsFile, boolean is2D) throws IOException {
    // Dispatch to the 1D or 2D variant of the experiment.
    if (is2D) {
        test2d(noMonths, dataFile, graphFile, polygonsFile);
    } else {
        test1d(noMonths, dataFile);
    }
}
public static void main(String[] args) throws IOException {
    // Usage: <noMonths> <dataFile> <graphFile> <polygonsFile> <is2D>
    int noMonths = Integer.parseInt(args[0]);
    String dataFile = args[1];     // data set file
    String graphFile = args[2];    // 2D graph file
    String polygonsFile = args[3]; // 2D polygons
    boolean is2D = Boolean.parseBoolean(args[4]);
    new StandaloneExp().run(noMonths, dataFile, graphFile, polygonsFile, is2D);
}
}
| |
/* $This file is distributed under the terms of the license in LICENSE$ */
package edu.cornell.mannlib.vitro.webapp.utils.jena;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntModelSpec;
import org.apache.jena.rdf.model.AnonId;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.ModelMaker;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.shared.Lock;
import org.apache.jena.util.ResourceUtils;
import org.apache.jena.util.iterator.ClosableIterator;
import org.apache.jena.vocabulary.OWL;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
public class JenaIngestUtils {
private static final Log log = LogFactory.getLog(JenaIngestUtils.class.getName());
private Random random = new Random(System.currentTimeMillis());
/**
* Returns a new copy of the input model with blank nodes renamed with namespaceEtc plus a random int.
* @param inModel input Jena Model
* @param namespaceEtc Namespace
*/
public Model renameBNodes(Model inModel, String namespaceEtc) {
    // Convenience overload: no external dedup model to guard against URI reuse.
    return renameBNodes(inModel, namespaceEtc, null);
}
/**
* Returns a new copy of the input model with blank nodes renamed with namespaceEtc plus a random int.
* Will prevent URI collisions with supplied dedupModel
* @param inModel input Jena Model
* @param namespaceEtc Namespace
*/
public Model renameBNodes(Model inModel, String namespaceEtc, Model dedupModel) {
    Model outModel = ModelFactory.createDefaultModel();
    OntModel dedupUnionModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); // we're not using OWL here, just the OntModel submodel infrastructure
    dedupUnionModel.addSubModel(outModel);
    if (dedupModel != null) {
        dedupUnionModel.addSubModel(dedupModel);
    }
    // the dedupUnionModel is so we can guard against reusing a URI in an
    // existing model, as well as in the course of running this process
    inModel.enterCriticalSection(Lock.READ);
    // Tracks blank-node IDs already renamed, so a node seen both as a
    // subject and as an object is only renamed once.
    Set<AnonId> doneSet = new HashSet<AnonId>();
    try {
        // Copy the input; all renaming happens on the copy.
        outModel.add(inModel);
        // Pass 1: blank nodes appearing as subjects.
        ClosableIterator closeIt = inModel.listSubjects();
        try {
            for (Iterator it = closeIt; it.hasNext();) {
                Resource res = (Resource) it.next();
                if (res.isAnon() && !(doneSet.contains(res.getId()))) {
                    // now we do something hacky to get the same resource in the outModel, since there's no getResourceById();
                    // Any one statement with this subject is enough to reach
                    // the corresponding node in outModel.
                    ClosableIterator closfIt = outModel.listStatements(res,(Property)null,(RDFNode)null);
                    Statement stmt = null;
                    try {
                        if (closfIt.hasNext()) {
                            stmt = (Statement) closfIt.next();
                        }
                    } finally {
                        closfIt.close();
                    }
                    if (stmt != null) {
                        Resource outRes = stmt.getSubject();
                        ResourceUtils.renameResource(outRes,getNextURI(namespaceEtc,dedupUnionModel));
                        doneSet.add(res.getId());
                    }
                }
            }
        } finally {
            closeIt.close();
        }
        // Pass 2: blank nodes appearing only as objects.
        closeIt = inModel.listObjects();
        try {
            for (Iterator it = closeIt; it.hasNext();) {
                RDFNode rdfn = (RDFNode) it.next();
                if (rdfn.isResource()) {
                    Resource res = (Resource) rdfn;
                    if (res.isAnon() && !(doneSet.contains(res.getId()))) {
                        // now we do something hacky to get the same resource in the outModel, since there's no getResourceById();
                        ClosableIterator closfIt = outModel.listStatements((Resource)null,(Property)null,res);
                        Statement stmt = null;
                        try {
                            if (closfIt.hasNext()) {
                                stmt = (Statement) closfIt.next();
                            }
                        } finally {
                            closfIt.close();
                        }
                        if (stmt != null) {
                            // NOTE(review): renames the statement's subject, as in
                            // pass 1; presumably reaches the object node via the
                            // shared statement — confirm against Jena semantics.
                            Resource outRes = stmt.getSubject();
                            ResourceUtils.renameResource(outRes,getNextURI(namespaceEtc, dedupUnionModel));
                            doneSet.add(res.getId());
                        }
                    }
                }
            }
        } finally {
            closeIt.close();
        }
    } finally {
        inModel.leaveCriticalSection();
    }
    return outModel;
}
/**
 * Returns a copy of the input model with blank-node subjects renamed using a
 * pattern applied to the value of the supplied property: where the pattern
 * contains "$$$" that token is replaced by the property's literal value;
 * otherwise the value is appended to the pattern. The result is prefixed with
 * namespaceEtc to form the new URI.
 *
 * @param inModel      input Jena model
 * @param namespaceEtc namespace prefix for generated URIs
 * @param dedupModel   extra model to union in (guards URI reuse); may be null
 * @param pattern      URI local-name pattern, optionally containing "$$$"
 * @param property     URI of the property whose value feeds the pattern
 */
public Model renameBNodesByPattern(Model inModel, String namespaceEtc, Model dedupModel, String pattern, String property){
    Model outModel = ModelFactory.createDefaultModel();
    Property propertyRes = ResourceFactory.createProperty(property);
    OntModel dedupUnionModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); // we're not using OWL here, just the OntModel submodel infrastructure
    dedupUnionModel.addSubModel(outModel);
    if (dedupModel != null) {
        dedupUnionModel.addSubModel(dedupModel);
    }
    // the dedupUnionModel is so we can guard against reusing a URI in an
    // existing model, as well as in the course of running this process
    inModel.enterCriticalSection(Lock.READ);
    Set<AnonId> doneSet = new HashSet<AnonId>();
    try {
        outModel.add(inModel);
        ClosableIterator closeIt = inModel.listSubjects();
        try {
            for (Iterator it = closeIt; it.hasNext();) {
                Resource res = (Resource) it.next();
                if (res.isAnon() && !(doneSet.contains(res.getId()))) {
                    // now we do something hacky to get the same resource in the outModel, since there's no getResourceById();
                    // Take the first statement with the pattern property as
                    // both the handle into outModel and the value source.
                    ClosableIterator closfIt = outModel.listStatements(res,propertyRes,(RDFNode)null);
                    Statement stmt = null;
                    try {
                        if (closfIt.hasNext()) {
                            stmt = (Statement) closfIt.next();
                        }
                    } finally {
                        closfIt.close();
                    }
                    if (stmt != null) {
                        Resource outRes = stmt.getSubject();
                        if(stmt.getObject().isLiteral()){
                            String value = ((Literal) stmt.getObject()).getLexicalForm();
                            String suffix = (pattern.contains("$$$"))
                                    ? pattern.replace("$$$", value)
                                    : pattern + value;
                            ResourceUtils.renameResource(outRes, namespaceEtc + suffix);
                        }
                        // Marked done even when the value was not a literal,
                        // so the node is not revisited.
                        doneSet.add(res.getId());
                    }
                }
            }
        } finally {
            closeIt.close();
        }
    } finally {
        inModel.leaveCriticalSection();
    }
    return outModel;
}
/**
 * Builds a map from predicate URI to the list of literal (non-resource) object
 * values found on blank nodes across the supplied models. Blank nodes are
 * considered whether they appear as subjects or as objects; each is processed
 * at most once across all models.
 *
 * @param sourceModels models to scan
 * @param maker        unused; retained for interface compatibility
 * @return synchronized map of predicate URI to literal lexical values
 */
public Map<String, LinkedList<String>> generatePropertyMap(List<Model> sourceModels, ModelMaker maker){
    Map<String, LinkedList<String>> propertyMap =
            Collections.synchronizedMap(new HashMap<String, LinkedList<String>>());
    Set<AnonId> doneSet = new HashSet<AnonId>();
    for (Model model : sourceModels) {
        // Blank nodes appearing as subjects.
        ClosableIterator subjIt = model.listSubjects();
        try {
            while (subjIt.hasNext()) {
                Resource res = (Resource) subjIt.next();
                if (res.isAnon() && !doneSet.contains(res.getId())) {
                    doneSet.add(res.getId());
                    collectLiteralValues(model, res, propertyMap);
                }
            }
        } finally {
            subjIt.close(); // fix: the original never closed this iterator
        }
        // Blank nodes appearing as objects.
        ClosableIterator objIt = model.listObjects();
        try {
            while (objIt.hasNext()) {
                RDFNode rdfn = (RDFNode) objIt.next();
                if (rdfn.isResource()) {
                    Resource res = (Resource) rdfn;
                    if (res.isAnon() && !doneSet.contains(res.getId())) {
                        doneSet.add(res.getId());
                        collectLiteralValues(model, res, propertyMap);
                    }
                }
            }
        } finally {
            objIt.close();
        }
    }
    return propertyMap;
}

/**
 * Records the string form of every non-resource object of statements about
 * res, keyed by predicate URI. Extracted from duplicated inline code; the
 * statement iterator is now closed (the original leaked it).
 */
private void collectLiteralValues(Model model, Resource res,
        Map<String, LinkedList<String>> propertyMap) {
    StmtIterator stmtItr = model.listStatements(res, (Property) null, (RDFNode) null);
    try {
        while (stmtItr.hasNext()) {
            Statement stmt = stmtItr.next();
            if (!stmt.getObject().isResource()) {
                LinkedList<String> values = propertyMap.get(stmt.getPredicate().getURI());
                if (values == null) {
                    values = new LinkedList<String>();
                    propertyMap.put(stmt.getPredicate().getURI(), values);
                }
                values.add(stmt.getObject().toString());
            }
        }
    } finally {
        stmtItr.close();
    }
}
private String getNextURI(String namespaceEtc, Model model) {
    // Draw random candidate URIs until one appears in the model neither as a
    // subject nor as an object of any statement, then return it.
    while (true) {
        String candidate = namespaceEtc + random.nextInt(9999999);
        Resource res = ResourceFactory.createResource(candidate);
        boolean inUse;
        ClosableIterator subjStmts = model.listStatements(res, (Property) null, (RDFNode) null);
        try {
            inUse = subjStmts.hasNext();
        } finally {
            subjStmts.close();
        }
        if (!inUse) {
            ClosableIterator objStmts = model.listStatements((Resource) null, (Property) null, res);
            try {
                inUse = objStmts.hasNext();
            } finally {
                objStmts.close();
            }
        }
        if (!inUse) {
            return candidate;
        }
    }
}
/**
 * Applies a reflectively-loaded String-to-String processor to every literal
 * value of originalPropertyURI in source. Values whose processed form differs
 * from the original are re-asserted under newPropertyURI; the old statements
 * are collected as retractions. Changes are then applied to destination and
 * mirrored into the optional additions/retractions models.
 *
 * @param source              model to read statements from
 * @param destination         model to apply additions/retractions to; may be null
 * @param additions           receives the added statements; may be null
 * @param retractions         receives the retracted statements; may be null
 * @param processorClass      fully-qualified class with a no-arg constructor
 * @param processorMethod     name of a public method taking and returning String
 * @param originalPropertyURI property whose literal values are processed
 * @param newPropertyURI      property under which processed values are asserted
 */
public void processPropertyValueStrings(Model source, Model destination, Model additions, Model retractions,
        String processorClass, String processorMethod, String originalPropertyURI, String newPropertyURI) {
    Model additionsModel = ModelFactory.createDefaultModel();
    Model retractionsModel = ModelFactory.createDefaultModel();
    Class stringProcessorClass = null;
    Object processor = null;
    Class[] methArgs = {String.class};
    Method meth = null;
    try {
        stringProcessorClass = Class.forName(processorClass);
        processor = stringProcessorClass.newInstance();
        meth = stringProcessorClass.getMethod(processorMethod, methArgs);
    } catch (Exception e) {
        // Bad class/method name: log and bail out without touching any model.
        log.error(e, e);
        return;
    }
    Property prop = ResourceFactory.createProperty(originalPropertyURI);
    Property newProp = ResourceFactory.createProperty(newPropertyURI);
    source.enterCriticalSection(Lock.READ);
    try {
        StmtIterator stmtIt = source.listStatements((Resource) null, prop, (RDFNode) null);
        try {
            while (stmtIt.hasNext()) {
                Statement stmt = stmtIt.nextStatement();
                if (!stmt.getObject().isLiteral()) {
                    continue;
                }
                Literal lit = (Literal) stmt.getObject();
                String lex = lit.getLexicalForm();
                Object[] args = {lex};
                String newLex = null;
                try {
                    if (log.isDebugEnabled()) {
                        log.debug("invoking string processor method on ["
                                + lex.substring(0, lex.length() > 50 ? 50 : lex.length()) + "...");
                    }
                    newLex = (String) meth.invoke(processor, args);
                } catch (Exception e) {
                    log.error(e, e);
                    return;
                }
                if (!newLex.equals(lex)) {
                    retractionsModel.add(stmt);
                    // Preserve language tag or datatype on the replacement literal.
                    Literal newLit = null;
                    if (lit.getLanguage() != null && lit.getLanguage().length() > 0) {
                        newLit = additionsModel.createLiteral(newLex, lit.getLanguage());
                    } else if (lit.getDatatype() != null) {
                        newLit = additionsModel.createTypedLiteral(newLex, lit.getDatatype());
                    } else {
                        newLit = additionsModel.createLiteral(newLex);
                    }
                    additionsModel.add(stmt.getSubject(), newProp, newLit);
                }
            }
        } finally {
            stmtIt.close(); // fix: the original never closed this iterator
        }
        if (destination != null) {
            destination.enterCriticalSection(Lock.WRITE);
            try {
                destination.add(additionsModel);
                destination.remove(retractionsModel);
            } finally {
                destination.leaveCriticalSection();
            }
        }
        if (additions != null) {
            additions.enterCriticalSection(Lock.WRITE);
            try {
                additions.add(additionsModel);
            } finally {
                additions.leaveCriticalSection();
            }
        }
        if (retractions != null) {
            retractions.enterCriticalSection(Lock.WRITE);
            try {
                retractions.add(retractionsModel);
            } finally {
                retractions.leaveCriticalSection();
            }
        }
    } finally {
        source.leaveCriticalSection();
    }
}
/**
* Splits values for a given data property URI on a supplied regex and
* asserts each value using newPropertyURI. New statements returned in
* a Jena Model. Split values may be optionally trim()ed.
* @param inModel Input Jena model
* @param propertyURI URI for property
* @param splitRegex Regex for split
* @param newPropertyURI URI for new property
* @param trim Flag to trim property
* @return outModel
*/
/**
 * Splits the literal values of propertyURI on splitRegex and asserts each
 * non-empty piece under newPropertyURI, preserving the source literal's
 * datatype or language tag. Returns only the new statements.
 *
 * @param inModel        input Jena model
 * @param propertyURI    property whose literal values are split
 * @param splitRegex     regular expression used as the delimiter
 * @param newPropertyURI property under which the pieces are asserted
 * @param trim           whether to trim() each piece before asserting it
 * @return model containing only the newly created statements
 */
public Model splitPropertyValues(Model inModel, String propertyURI, String splitRegex, String newPropertyURI, boolean trim) {
    Model outModel = ModelFactory.createDefaultModel();
    Pattern delimiter = Pattern.compile(splitRegex);
    Property sourceProp = ResourceFactory.createProperty(propertyURI);
    Property targetProp = ResourceFactory.createProperty(newPropertyURI);
    inModel.enterCriticalSection(Lock.READ);
    try {
        StmtIterator stmts = inModel.listStatements((Resource) null, sourceProp, (RDFNode) null);
        try {
            while (stmts.hasNext()) {
                Statement stmt = stmts.nextStatement();
                RDFNode object = stmt.getObject();
                if (!object.isLiteral()) {
                    continue; // resource-valued statements are left alone
                }
                Literal lit = (Literal) object;
                for (String piece : delimiter.split(lit.getLexicalForm())) {
                    String lex = trim ? piece.trim() : piece;
                    if (lex.length() == 0) {
                        continue;
                    }
                    Literal newLiteral;
                    if (lit.getDatatype() != null) {
                        newLiteral = outModel.createTypedLiteral(lex, lit.getDatatype());
                    } else if (lit.getLanguage() != null) {
                        newLiteral = outModel.createLiteral(lex, lit.getLanguage());
                    } else {
                        newLiteral = outModel.createLiteral(lex);
                    }
                    outModel.add(stmt.getSubject(), targetProp, newLiteral);
                }
            }
        } finally {
            stmts.close();
        }
    } finally {
        inModel.leaveCriticalSection();
    }
    return outModel;
}
/**
* A simple resource smusher based on a supplied inverse-functional property.
* A new model containing only resources about the smushed statements is returned.
* @param inModel Input Jena model
* @param prop Property
*/
/**
 * A simple resource smusher based on a supplied inverse-functional property:
 * for each value of prop, all subjects sharing that value are collapsed onto
 * the first subject encountered, rewriting both their outgoing and incoming
 * statements. A new model containing the smushed statements is returned; the
 * input model is not modified.
 *
 * @param inModel input Jena model
 * @param prop    inverse-functional property to smush on
 * @return new model with duplicate resources collapsed
 */
public Model smushResources(Model inModel, Property prop) {
    Model outModel = ModelFactory.createDefaultModel();
    outModel.add(inModel);
    inModel.enterCriticalSection(Lock.READ);
    try {
        ClosableIterator closeIt = inModel.listObjectsOfProperty(prop);
        try {
            for (Iterator objIt = closeIt; objIt.hasNext();) {
                RDFNode rdfn = (RDFNode) objIt.next();
                ClosableIterator closfIt = inModel.listSubjectsWithProperty(prop, rdfn);
                try {
                    boolean first = true;
                    Resource smushToThisResource = null;
                    // Fix: the original tested closfIt.hasNext() while advancing
                    // subjIt — the two alias the same iterator today, but the
                    // mismatch is a latent infinite-loop hazard.
                    for (Iterator subjIt = closfIt; subjIt.hasNext();) {
                        Resource subj = (Resource) subjIt.next();
                        if (first) {
                            // First subject wins; all later ones merge into it.
                            smushToThisResource = subj;
                            first = false;
                            continue;
                        }
                        // Rewrite outgoing statements of the duplicate subject.
                        ClosableIterator closgIt = inModel.listStatements(subj,(Property)null,(RDFNode)null);
                        try {
                            for (Iterator stmtIt = closgIt; stmtIt.hasNext();) {
                                Statement stmt = (Statement) stmtIt.next();
                                outModel.remove(stmt.getSubject(), stmt.getPredicate(), stmt.getObject());
                                outModel.add(smushToThisResource, stmt.getPredicate(), stmt.getObject());
                            }
                        } finally {
                            closgIt.close();
                        }
                        // Rewrite incoming statements pointing at the duplicate.
                        closgIt = inModel.listStatements((Resource) null, (Property)null, subj);
                        try {
                            for (Iterator stmtIt = closgIt; stmtIt.hasNext();) {
                                Statement stmt = (Statement) stmtIt.next();
                                outModel.remove(stmt.getSubject(), stmt.getPredicate(), stmt.getObject());
                                outModel.add(stmt.getSubject(), stmt.getPredicate(), smushToThisResource);
                            }
                        } finally {
                            closgIt.close();
                        }
                    }
                } finally {
                    closfIt.close();
                }
            }
        } finally {
            closeIt.close();
        }
    } finally {
        inModel.leaveCriticalSection();
    }
    return outModel;
}
/**
* Returns a model where redundant individuals that are sameAs one another are smushed
* using URIs in preferred namespaces where possible.
* @param model Jena Model
* @param preferredNamespace Preferred Namespace
*/
/**
 * Returns a model where individuals related by owl:sameAs are collapsed onto a
 * single URI, preferring URIs in preferredNamespace where available (last match
 * wins), otherwise the first non-anonymous sameAs target.
 *
 * @param model              Jena model (assumed to include sameAs reasoning)
 * @param preferredNamespace namespace whose URIs are preferred; may be null
 * @return new model with sameAs-equivalent URIs rewritten
 */
public Model dedupAndExtract( Model model, String preferredNamespace ) {
    Model extractsModel = ModelFactory.createDefaultModel();
    HashMap<String, String> rewriteURIUsing = new HashMap<String, String>();
    // Phase 1: build the URI rewrite map from sameAs clusters.
    Iterator haveSameAsIt = model.listSubjectsWithProperty(OWL.sameAs);
    while (haveSameAsIt.hasNext()) {
        Resource hasSameAs = (Resource) haveSameAsIt.next();
        List<Statement> sameAsList = hasSameAs.listProperties(OWL.sameAs).toList();
        if (sameAsList.size() > 1) { // sameAs something other than the same URI (we assume reasoning model)
            String preferredURI = null;
            List<String> sameAsURIs = new LinkedList<String>();
            for (Statement sameAsStmt : sameAsList) {
                if (!sameAsStmt.getObject().isResource()) {
                    throw new RuntimeException( sameAsStmt.getResource().getURI() + " is sameAs() a literal!" );
                }
                Resource sameAsRes = (Resource) sameAsStmt.getObject();
                if (!sameAsRes.isAnon()) {
                    sameAsURIs.add(sameAsRes.getURI());
                    if (preferredNamespace != null && preferredNamespace.equals(sameAsRes.getNameSpace())) {
                        preferredURI = sameAsRes.getURI();
                    }
                }
            }
            // Fix: the original called sameAsURIs.get(0) inside the collection
            // loop and threw IndexOutOfBoundsException when the first sameAs
            // target was a blank node. Choose the preferred URI only after all
            // targets are collected, and skip clusters with no named targets.
            if (sameAsURIs.isEmpty()) {
                continue;
            }
            if (preferredURI == null) {
                preferredURI = sameAsURIs.get(0);
            }
            for (String s : sameAsURIs) {
                rewriteURIUsing.put(s, preferredURI);
            }
        }
    }
    // Phase 2: copy every statement, substituting rewritten URIs.
    StmtIterator modelStmtIt = model.listStatements();
    while (modelStmtIt.hasNext()) {
        Statement origStmt = modelStmtIt.nextStatement();
        Resource newSubj = null;
        RDFNode newObj = null;
        if (!origStmt.getSubject().isAnon()) {
            String rewriteURI = rewriteURIUsing.get(origStmt.getSubject().getURI());
            if (rewriteURI != null) {
                newSubj = extractsModel.getResource(rewriteURI);
            }
        }
        if (origStmt.getObject().isResource() && !origStmt.getResource().isAnon()) {
            String rewriteURI = rewriteURIUsing.get(((Resource) origStmt.getObject()).getURI());
            if (rewriteURI != null) {
                newObj = extractsModel.getResource(rewriteURI);
            }
        }
        if (newSubj == null) {
            newSubj = origStmt.getSubject();
        }
        if (newObj == null) {
            newObj = origStmt.getObject();
        }
        extractsModel.add(newSubj, origStmt.getPredicate(), newObj);
    }
    return extractsModel;
}
/**
 * Infers a minimal TBox from ABox usage: objects of rdf:type statements become
 * classes, resource-valued predicates become object properties, and
 * literal-valued predicates become datatype properties.
 */
public OntModel generateTBox(Model abox) {
    OntModel tboxOntModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    StmtIterator sit = abox.listStatements();
    while (sit.hasNext()) {
        Statement stmt = sit.nextStatement();
        RDFNode obj = stmt.getObject();
        if (RDF.type.equals(stmt.getPredicate())) {
            makeClass(obj, tboxOntModel);
        } else if (obj.isResource()) {
            makeObjectProperty(stmt.getPredicate(), tboxOntModel);
        } else if (obj.isLiteral()) {
            makeDatatypeProperty(stmt.getPredicate(), tboxOntModel);
        }
    }
    return tboxOntModel;
}
private void makeClass(RDFNode node, OntModel tboxOntModel) {
    // Only named resources can become classes; skip literals and blank nodes.
    if (node.isResource() && !node.isAnon()) {
        Resource typeRes = (Resource) node;
        if (tboxOntModel.getOntClass(typeRes.getURI()) == null) {
            tboxOntModel.createClass(typeRes.getURI());
        }
    }
}
private void makeObjectProperty(Property property, OntModel tboxOntModel) {
    // Declare the object property only if it is not already present.
    String uri = property.getURI();
    if (tboxOntModel.getObjectProperty(uri) == null) {
        tboxOntModel.createObjectProperty(uri);
    }
}
private void makeDatatypeProperty(Property property, OntModel tboxOntModel) {
    // Declare the datatype property only if it is not already present.
    String uri = property.getURI();
    if (tboxOntModel.getDatatypeProperty(uri) == null) {
        tboxOntModel.createDatatypeProperty(uri);
    }
}
/** Outcome of a doMerge call: a status message plus any statements that could not be merged. */
public class MergeResult {
    private String resultText;
    private Model leftoverModel;

    public MergeResult() {}

    public MergeResult(String resultText, Model leftoverModel) {
        this.resultText = resultText;
        this.leftoverModel = leftoverModel;
    }

    /** Human-readable summary of the merge. */
    public String getResultText() {
        return resultText;
    }

    public void setResultText(String resultText) {
        this.resultText = resultText;
    }

    /** Statements left unmerged (functional properties, discarded labels), or null. */
    public Model getLeftoverModel() {
        return leftoverModel;
    }

    public void setLeftoverModel(Model leftoverModel) {
        this.leftoverModel = leftoverModel;
    }
}
/**
* Merges statements about resource uri2 into resource uri1 and delete uri2.
* @param uri1 The resource to merge to
* @param uri2 The resource to merge from
* @param baseOntModel The model containing the relevant statements
* @param tboxOntModel The model containing class and property data
* @param usePrimaryLabelOnly If true, discard rdfs:labels from uri2. Otherwise retain.
*/
public MergeResult doMerge(String uri1, String uri2, OntModel baseOntModel,
        OntModel tboxOntModel, boolean usePrimaryLabelOnly){
    boolean functionalPresent = false;
    Resource res1 = baseOntModel.getResource(uri1); // primary resource
    Model res1Model = ModelFactory.createDefaultModel();
    Resource res2 = baseOntModel.getResource(uri2); // secondary resource
    Model res2Model = ModelFactory.createDefaultModel();
    // Snapshot the statements of both resources under a read lock; res2Model
    // also captures statements where the secondary resource is the object.
    baseOntModel.enterCriticalSection(Lock.READ);
    try {
        res1Model.add(
                baseOntModel.listStatements(res1, (Property)null, (RDFNode)null));
        res2Model.add(
                baseOntModel.listStatements(res2, (Property)null, (RDFNode)null));
        res2Model.add(
                baseOntModel.listStatements(
                        (Resource)null, (Property)null, (RDFNode)res2));
    } finally {
        baseOntModel.leaveCriticalSection();
    }
    // Bail out early if either resource has no statements.
    if (res1Model.isEmpty()){
        return new MergeResult("resource 1 not present", null);
    } else if(res2Model.isEmpty()){
        return new MergeResult("resource 2 not present", null);
    }
    int counter = 0; // number of statements copied onto the primary resource
    Model leftoverModel = ModelFactory.createDefaultModel();
    // Copy the secondary resource's outgoing statements onto the primary one.
    StmtIterator stmtItr2 = res2Model.listStatements(
            res2, (Property) null, (RDFNode) null);
    while(stmtItr2.hasNext()){
        Statement stmt = stmtItr2.nextStatement();
        if(isFunctional(stmt.getPredicate(), tboxOntModel)) {
            // Functional properties may hold only one value, so the secondary
            // value cannot be merged: stash it in the leftover model instead.
            leftoverModel.add(res2, stmt.getPredicate(), stmt.getObject());
            functionalPresent = true;
        } else if (stmt.getPredicate().equals(RDFS.label) && usePrimaryLabelOnly) {
            // Caller asked to keep only the primary resource's rdfs:labels;
            // secondary labels go to the leftover model.
            leftoverModel.add(res2, stmt.getPredicate(), stmt.getObject());
            functionalPresent = true;
        } else {
            baseOntModel.enterCriticalSection(Lock.WRITE);
            try {
                baseOntModel.add(res1, stmt.getPredicate(), stmt.getObject());
                counter++;
            } finally {
                baseOntModel.leaveCriticalSection();
            }
        }
    }
    // Re-point incoming references: wherever the secondary resource appears as
    // an object, assert the same statement with the primary resource instead.
    StmtIterator stmtItr3 = res2Model.listStatements(
            (Resource) null, (Property) null, res2);
    while (stmtItr3.hasNext()){
        Statement stmt = stmtItr3.nextStatement();
        Resource sRes = stmt.getSubject();
        Property sProp = stmt.getPredicate();
        baseOntModel.enterCriticalSection(Lock.WRITE);
        try {
            baseOntModel.add(sRes, sProp, res1);
            counter++;
        } finally {
            baseOntModel.leaveCriticalSection();
        }
    }
    // Finally, remove all the original statements involving the secondary resource.
    baseOntModel.enterCriticalSection(Lock.WRITE);
    try {
        baseOntModel.remove(res2Model);
    } finally {
        baseOntModel.leaveCriticalSection();
    }
    MergeResult result = new MergeResult();
    if (!leftoverModel.isEmpty()) {
        result.setLeftoverModel(leftoverModel);
    }
    if (counter > 0 && functionalPresent) {
        result.setResultText("merged " + counter +
                " statements. Some statements could not be merged.");
    } else if(counter>0 && !functionalPresent) {
        result.setResultText("merged " + counter + " statements.");
    } else if (counter==0) {
        result.setResultText("No statements merged");
    }
    return result;
}
private boolean isFunctional(Property property, OntModel tboxOntModel) {
    // Checks the TBox, under a read lock, for an owl:FunctionalProperty typing.
    tboxOntModel.enterCriticalSection(Lock.READ);
    try {
        boolean functional = tboxOntModel.contains(
                property, RDF.type, OWL.FunctionalProperty);
        return functional;
    } finally {
        tboxOntModel.leaveCriticalSection();
    }
}
/**
 * Renames every resource in oldNamespace to a freshly minted, unused URI in
 * newNamespace. When newModel differs from oldModel, the work is done on a
 * copy and the statements involving renamed resources are saved into newModel;
 * otherwise oldModel is rewritten in place.
 *
 * @param oldModel     name of the model to read from
 * @param newModel     name of the model to write to (may equal oldModel)
 * @param oldNamespace namespace of the resources to rename
 * @param newNamespace namespace for the newly minted URIs; must be non-empty
 * @param maker        model maker used to resolve the model names
 * @param vreq         request, used to obtain the unfiltered DAO factory
 */
public void doPermanentURI(String oldModel, String newModel, String oldNamespace,
                           String newNamespace, ModelMaker maker,
                           VitroRequest vreq) {
    if (newNamespace.isEmpty()) {
        throw new RuntimeException("new namespace must be specified");
    }
    WebappDaoFactory wdf = vreq.getUnfilteredWebappDaoFactory();
    Model m = maker.getModel(oldModel);
    Model saveModel = maker.getModel(newModel);
    Model tempModel = ModelFactory.createDefaultModel();
    ResIterator rsItr = null;
    ArrayList<String> urlCheck = new ArrayList<String>();
    // Rename on a scratch copy when writing to a different model.
    if (!oldModel.equals(newModel)) {
        tempModel.add(m);
        rsItr = tempModel.listResourcesWithProperty((Property) null);
    } else {
        rsItr = m.listResourcesWithProperty((Property) null);
    }
    while (rsItr.hasNext()) {
        Resource res = rsItr.next();
        // Fix: null-safe namespace comparison — anonymous resources have a
        // null namespace, which previously risked a NullPointerException.
        if (oldNamespace.equals(res.getNameSpace())) {
            // Mint URIs until we get one not already handed out in this run.
            String uri;
            do {
                uri = getUnusedURI(newNamespace, wdf);
            } while (urlCheck.contains(uri));
            urlCheck.add(uri);
            ResourceUtils.renameResource(res, uri);
        }
    }
    if (!oldModel.equals(newModel)) {
        // Save every statement whose subject or object now carries the new
        // namespace. Fix: the original blind-cast the object to Resource and
        // used the resulting ClassCastException as control flow for literals.
        StmtIterator stmtItr = tempModel.listStatements();
        while (stmtItr.hasNext()) {
            Statement stmt = stmtItr.nextStatement();
            if (newNamespace.equals(stmt.getSubject().getNameSpace())) {
                saveModel.add(stmt);
            } else if (stmt.getObject().isResource()
                    && newNamespace.equals(((Resource) stmt.getObject()).getNameSpace())) {
                saveModel.add(stmt);
            }
        }
    }
}
public String getUnusedURI(String newNamespace, WebappDaoFactory wdf) {
    // Try up to 30 random candidates; the random bound doubles each attempt
    // (2^13, 2^14, ...) so early URIs stay short while collisions get rarer.
    // Returns null if no acceptable URI is found.
    Random rng = new Random();
    for (int attempt = 0; attempt < 30; attempt++) {
        int bound = Math.min(Integer.MAX_VALUE, (int) Math.pow(2, attempt + 13));
        String candidate = newNamespace + "n" + rng.nextInt(bound);
        if (wdf.checkURI(candidate) == null) {
            return candidate;
        }
    }
    return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec.language.bm;
import org.apache.commons.codec.language.bm.Languages.LanguageSet;
import org.apache.commons.codec.language.bm.Rule.Phoneme;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
/**
* Converts words into potential phonetic representations.
* <p>
* This is a two-stage process. Firstly, the word is converted into a phonetic representation that takes
* into account the likely source language. Next, this phonetic representation is converted into a
* pan-European 'average' representation, allowing comparison between different versions of essentially
* the same word from different languages.
* <p>
* This class is intentionally immutable and thread-safe.
* If you wish to alter the settings for a PhoneticEngine, you
* must make a new one with the updated settings.
* <p>
* Ported from phoneticengine.php
*
* @since 1.6
* @version $Id: PhoneticEngine.java 1634418 2014-10-27 00:43:25Z ggregory $
*/
public class PhoneticEngine {
/**
* Utility for manipulating a set of phonemes as they are being built up. Not intended for use outside
* this package, and probably not outside the {@link PhoneticEngine} class.
*
* @since 1.6
*/
/**
 * Utility for building up a set of phonemes incrementally. Not intended for use
 * outside this package, and probably not outside the {@link PhoneticEngine} class.
 *
 * @since 1.6
 */
static final class PhonemeBuilder {
    /**
     * Creates an empty builder whose phonemes must come from the given set of
     * languages. It contains a single zero-character phoneme that can then be
     * appended to; this is the only way to start a phoneme from scratch.
     *
     * @param languages the set of languages
     * @return a new, empty phoneme builder
     */
    public static PhonemeBuilder empty(final LanguageSet languages) {
        return new PhonemeBuilder(new Phoneme("", languages));
    }

    private final Set<Phoneme> phonemes;

    private PhonemeBuilder(final Phoneme phoneme) {
        this.phonemes = new LinkedHashSet<Phoneme>();
        this.phonemes.add(phoneme);
    }

    private PhonemeBuilder(final Set<Phoneme> phonemes) {
        this.phonemes = phonemes;
    }

    /**
     * Extends every phoneme currently held by this builder with {@code str}.
     *
     * @param str the characters to append to the phonemes
     */
    public void append(final CharSequence str) {
        for (final Phoneme existing : this.phonemes) {
            existing.append(str);
        }
    }

    /**
     * Applies the given phoneme expression to all phonemes in this builder:
     * phonemes with language sets compatible with the expression are extended,
     * incompatible combinations are dropped, and the total is capped at
     * {@code maxPhonemes}.
     *
     * @param phonemeExpr the expression to apply
     * @param maxPhonemes the maximum number of phonemes to build up
     */
    public void apply(final Rule.PhonemeExpr phonemeExpr, final int maxPhonemes) {
        final Set<Phoneme> merged = new LinkedHashSet<Phoneme>(maxPhonemes);
        boolean capped = false;
        for (final Phoneme existing : this.phonemes) {
            if (capped) {
                break;
            }
            for (final Phoneme candidate : phonemeExpr.getPhonemes()) {
                final LanguageSet shared =
                        existing.getLanguages().restrictTo(candidate.getLanguages());
                if (shared.isEmpty()) {
                    continue; // incompatible language sets: drop this combination
                }
                if (merged.size() < maxPhonemes) {
                    merged.add(new Phoneme(existing, candidate, shared));
                    if (merged.size() >= maxPhonemes) {
                        capped = true; // stop both loops once the cap is reached
                        break;
                    }
                }
            }
        }
        this.phonemes.clear();
        this.phonemes.addAll(merged);
    }

    /**
     * Gets the underlying phoneme set. Please don't mutate.
     *
     * @return the phoneme set
     */
    public Set<Phoneme> getPhonemes() {
        return this.phonemes;
    }

    /**
     * Joins the text of each phoneme with a pipe. Provided in place of toString
     * because it is potentially expensive and should be avoided when debugging.
     *
     * @return the stringified phoneme set
     */
    public String makeString() {
        final StringBuilder joined = new StringBuilder();
        for (final Phoneme ph : this.phonemes) {
            if (joined.length() > 0) {
                joined.append("|");
            }
            joined.append(ph.getPhonemeText());
        }
        return joined.toString();
    }
}
/**
 * A function closure capturing the application of a list of rules to an input sequence at a particular
 * offset. After invocation, the values <code>i</code> and <code>found</code> are updated. <code>i</code>
 * points to the index of the next char in <code>input</code> that must be processed next (the input up to
 * that index having been processed already), and <code>found</code> indicates if a matching rule was found
 * or not. In the case where a matching rule was found, <code>phonemeBuilder</code> is replaced with a new
 * builder containing the phonemes updated by the matching rule.
 *
 * Although this class is not thread-safe (it has mutable unprotected fields), it is not shared between
 * threads as it is constructed as needed by the calling methods.
 * @since 1.6
 */
private static final class RulesApplication {
    // Rules indexed by the first character of their pattern.
    private final Map<String, List<Rule>> finalRules;
    // The text being processed.
    private final CharSequence input;
    // Phonemes accumulated so far; replaced/updated by invoke().
    private PhonemeBuilder phonemeBuilder;
    // Index of the next character of input to process.
    private int i;
    // Upper bound on the number of phonemes to build up.
    private final int maxPhonemes;
    // Whether the last invoke() matched a rule.
    private boolean found;
    public RulesApplication(final Map<String, List<Rule>> finalRules, final CharSequence input,
                            final PhonemeBuilder phonemeBuilder, final int i, final int maxPhonemes) {
        if (finalRules == null) {
            throw new NullPointerException("The finalRules argument must not be null");
        }
        this.finalRules = finalRules;
        this.phonemeBuilder = phonemeBuilder;
        this.input = input;
        this.i = i;
        this.maxPhonemes = maxPhonemes;
    }
    public int getI() {
        return this.i;
    }
    public PhonemeBuilder getPhonemeBuilder() {
        return this.phonemeBuilder;
    }
    /**
     * Invokes the rules. Loops over the rules list, stopping at the first one that has a matching context
     * and pattern. Then applies this rule to the phoneme builder to produce updated phonemes. If there was
     * no match, <code>i</code> is advanced one and the character is silently dropped from the phonetic
     * spelling.
     *
     * @return <code>this</code>
     */
    public RulesApplication invoke() {
        this.found = false;
        int patternLength = 1;
        // Candidate rules are keyed by their pattern's first character. Convert the one-char
        // window to a String explicitly: a general CharSequence's subSequence() need not be a
        // String, and only String instances compare equal to the map's String keys. For the
        // String inputs the callers currently pass this is a no-op.
        final List<Rule> rules = this.finalRules.get(this.input.subSequence(this.i, this.i + patternLength).toString());
        if (rules != null) {
            for (final Rule rule : rules) {
                final String pattern = rule.getPattern();
                patternLength = pattern.length();
                if (rule.patternAndContextMatches(this.input, this.i)) {
                    this.phonemeBuilder.apply(rule.getPhoneme(), this.maxPhonemes);
                    this.found = true;
                    break;
                }
            }
        }
        if (!this.found) {
            // no rule matched: consume exactly one character and drop it
            patternLength = 1;
        }
        this.i += patternLength;
        return this;
    }
    public boolean isFound() {
        return this.found;
    }
}
// Per-name-type surname prefixes ("de", "van", ...) that are stripped and/or re-combined
// during encoding; consulted by encode(String, LanguageSet). Each set is immutable.
private static final Map<NameType, Set<String>> NAME_PREFIXES = new EnumMap<NameType, Set<String>>(NameType.class);
static {
    NAME_PREFIXES.put(NameType.ASHKENAZI,
            Collections.unmodifiableSet(
                    new HashSet<String>(Arrays.asList("bar", "ben", "da", "de", "van", "von"))));
    NAME_PREFIXES.put(NameType.SEPHARDIC,
            Collections.unmodifiableSet(
                    new HashSet<String>(Arrays.asList("al", "el", "da", "dal", "de", "del", "dela", "de la",
                            "della", "des", "di", "do", "dos", "du", "van", "von"))));
    NAME_PREFIXES.put(NameType.GENERIC,
            Collections.unmodifiableSet(
                    new HashSet<String>(Arrays.asList("da", "dal", "de", "del", "dela", "de la", "della",
                            "des", "di", "do", "dos", "du", "van", "von"))));
}
/**
 * Joins some strings with an internal separator.
 *
 * @param strings Strings to join
 * @param sep String to separate them with
 * @return a single String consisting of each element of <code>strings</code> interleaved by <code>sep</code>
 */
private static String join(final Iterable<String> strings, final String sep) {
    final StringBuilder joined = new StringBuilder();
    String delimiter = "";
    for (final String s : strings) {
        joined.append(delimiter).append(s);
        delimiter = sep;
    }
    return joined.toString();
}
// Default cap on the number of phonemes built up per input.
private static final int DEFAULT_MAX_PHONEMES = 20;
// Language-guessing rules for the configured name type.
private final Lang lang;
// The type of names being encoded.
private final NameType nameType;
// The type of rules applied; RULES itself is rejected by the constructor.
private final RuleType ruleType;
// Whether multiple phonetic encodings are concatenated into one result.
private final boolean concat;
// Maximum number of phonemes the engine will build for a given input.
private final int maxPhonemes;
/**
 * Generates a new, fully-configured phonetic engine.
 *
 * @param nameType
 *            the type of names it will use
 * @param ruleType
 *            the type of rules it will apply
 * @param concat
 *            if it will concatenate multiple encodings
 */
public PhoneticEngine(final NameType nameType, final RuleType ruleType, final boolean concat) {
    // delegate with the default phoneme cap
    this(nameType, ruleType, concat, DEFAULT_MAX_PHONEMES);
}
/**
 * Generates a new, fully-configured phonetic engine.
 *
 * @param nameType
 *            the type of names it will use
 * @param ruleType
 *            the type of rules it will apply
 * @param concat
 *            if it will concatenate multiple encodings
 * @param maxPhonemes
 *            the maximum number of phonemes that will be handled
 * @throws IllegalArgumentException if {@code ruleType} is {@link RuleType#RULES}
 * @since 1.7
 */
public PhoneticEngine(final NameType nameType, final RuleType ruleType, final boolean concat,
                      final int maxPhonemes) {
    if (ruleType == RuleType.RULES) {
        throw new IllegalArgumentException("ruleType must not be " + RuleType.RULES);
    }
    this.nameType = nameType;
    this.ruleType = ruleType;
    this.concat = concat;
    this.maxPhonemes = maxPhonemes;
    this.lang = Lang.instance(nameType);
}
/**
 * Applies the final rules to convert from a language-specific phonetic representation to a
 * language-independent representation.
 *
 * @param phonemeBuilder the current phonemes
 * @param finalRules the final rules to apply; must not be null, may be empty (no-op)
 * @return the resulting phonemes
 * @throws NullPointerException if {@code finalRules} is null
 */
private PhonemeBuilder applyFinalRules(final PhonemeBuilder phonemeBuilder,
                                       final Map<String, List<Rule>> finalRules) {
    if (finalRules == null) {
        throw new NullPointerException("finalRules can not be null");
    }
    if (finalRules.isEmpty()) {
        return phonemeBuilder;
    }
    // Sorted by Phoneme.COMPARATOR; used as an identity map so equal-text phonemes can be
    // found and merged below.
    final Map<Phoneme, Phoneme> phonemes =
            new TreeMap<Phoneme, Phoneme>(Phoneme.COMPARATOR);
    for (final Phoneme phoneme : phonemeBuilder.getPhonemes()) {
        // Re-encode each phoneme's text from scratch through the final rules.
        PhonemeBuilder subBuilder = PhonemeBuilder.empty(phoneme.getLanguages());
        final String phonemeText = phoneme.getPhonemeText().toString();
        // manual increment: RulesApplication decides how many chars each step consumes
        for (int i = 0; i < phonemeText.length();) {
            final RulesApplication rulesApplication =
                    new RulesApplication(finalRules, phonemeText, subBuilder, i, maxPhonemes).invoke();
            final boolean found = rulesApplication.isFound();
            subBuilder = rulesApplication.getPhonemeBuilder();
            if (!found) {
                // not found, appending as-is
                subBuilder.append(phonemeText.subSequence(i, i + 1));
            }
            i = rulesApplication.getI();
        }
        // the phonemes map orders the phonemes only based on their text, but ignores the language set
        // when adding new phonemes, check for equal phonemes and merge their language set, otherwise
        // phonemes with the same text but different language set get lost
        for (final Phoneme newPhoneme : subBuilder.getPhonemes()) {
            if (phonemes.containsKey(newPhoneme)) {
                final Phoneme oldPhoneme = phonemes.remove(newPhoneme);
                final Phoneme mergedPhoneme = oldPhoneme.mergeWithLanguage(newPhoneme.getLanguages());
                phonemes.put(mergedPhoneme, mergedPhoneme);
            } else {
                phonemes.put(newPhoneme, newPhoneme);
            }
        }
    }
    return new PhonemeBuilder(phonemes.keySet());
}
/**
 * Encodes a string to its phonetic representation, first guessing the origin languages.
 *
 * @param input
 *            the String to encode
 * @return the encoding of the input
 */
public String encode(final String input) {
    return encode(input, this.lang.guessLanguages(input));
}
/**
 * Encodes an input string into an output phonetic representation, given a set of possible origin languages.
 *
 * @param input
 *            String to phoneticise; a String with dashes or spaces separating each word
 * @param languageSet
 *            set of possible origin languages
 * @return a phonetic representation of the input; a String containing '-'-separated phonetic representations of the
 *         input
 */
public String encode(String input, final LanguageSet languageSet) {
    final Map<String, List<Rule>> rules = Rule.getInstanceMap(this.nameType, RuleType.RULES, languageSet);
    // rules common across many (all) languages
    final Map<String, List<Rule>> finalRules1 = Rule.getInstanceMap(this.nameType, this.ruleType, "common");
    // rules that apply to a specific language that may be ambiguous or wrong if applied to other languages
    final Map<String, List<Rule>> finalRules2 = Rule.getInstanceMap(this.nameType, this.ruleType, languageSet);
    // tidy the input
    // lower case is a locale-dependent operation
    input = input.toLowerCase(Locale.ENGLISH).replace('-', ' ').trim();
    if (this.nameType == NameType.GENERIC) {
        if (input.length() >= 2 && input.substring(0, 2).equals("d'")) { // check for d'
            // encode both "abc" and "dabc" for input "d'abc"; recursion re-guesses languages
            final String remainder = input.substring(2);
            final String combined = "d" + remainder;
            return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
        }
        for (final String l : NAME_PREFIXES.get(this.nameType)) {
            // handle generic prefixes
            if (input.startsWith(l + " ")) {
                // check for any prefix in the words list
                final String remainder = input.substring(l.length() + 1); // input without the prefix
                final String combined = l + remainder; // input with prefix without space
                return "(" + encode(remainder) + ")-(" + encode(combined) + ")";
            }
        }
    }
    final List<String> words = Arrays.asList(input.split("\\s+"));
    final List<String> words2 = new ArrayList<String>();
    // special-case handling of word prefixes based upon the name type
    switch (this.nameType) {
    case SEPHARDIC:
        // keep only the part after the last apostrophe of each word, then drop prefixes
        for (final String aWord : words) {
            final String[] parts = aWord.split("'");
            final String lastPart = parts[parts.length - 1];
            words2.add(lastPart);
        }
        words2.removeAll(NAME_PREFIXES.get(this.nameType));
        break;
    case ASHKENAZI:
        words2.addAll(words);
        words2.removeAll(NAME_PREFIXES.get(this.nameType));
        break;
    case GENERIC:
        words2.addAll(words);
        break;
    default:
        throw new IllegalStateException("Unreachable case: " + this.nameType);
    }
    if (this.concat) {
        // concat mode enabled
        input = join(words2, " ");
    } else if (words2.size() == 1) {
        // not a multi-word name
        // NOTE(review): reads 'words', not 'words2' — if prefix stripping reduced a multi-word
        // name to a single word, this picks the first ORIGINAL word (possibly the stripped
        // prefix). Looks suspicious; confirm intent before changing.
        input = words.iterator().next();
    } else {
        // encode each word in a multi-word name separately (normally used for approx matches)
        final StringBuilder result = new StringBuilder();
        for (final String word : words2) {
            result.append("-").append(encode(word));
        }
        // return the result without the leading "-"
        return result.substring(1);
    }
    PhonemeBuilder phonemeBuilder = PhonemeBuilder.empty(languageSet);
    // loop over each char in the input - we will handle the increment manually
    for (int i = 0; i < input.length();) {
        final RulesApplication rulesApplication =
                new RulesApplication(rules, input, phonemeBuilder, i, maxPhonemes).invoke();
        i = rulesApplication.getI();
        phonemeBuilder = rulesApplication.getPhonemeBuilder();
    }
    // Apply the general rules
    phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules1);
    // Apply the language-specific rules
    phonemeBuilder = applyFinalRules(phonemeBuilder, finalRules2);
    return phonemeBuilder.makeString();
}
/**
 * Returns the Lang language-guessing rules in use.
 *
 * @return the configured {@code Lang}
 */
public Lang getLang() {
    return lang;
}
/**
 * Returns the NameType in use.
 *
 * @return the configured {@code NameType}
 */
public NameType getNameType() {
    return nameType;
}
/**
 * Returns the RuleType in use.
 *
 * @return the configured {@code RuleType}
 */
public RuleType getRuleType() {
    return ruleType;
}
/**
 * Reports whether multiple phonetic encodings are concatenated, or only the first is kept.
 *
 * @return true if multiple phonetic encodings are returned, false if just the first is
 */
public boolean isConcat() {
    return concat;
}
/**
 * Returns the maximum number of phonemes the engine will calculate for a given input.
 *
 * @return the maximum number of phonemes
 * @since 1.7
 */
public int getMaxPhonemes() {
    return maxPhonemes;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport.netty;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.ReleasablePagedBytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.math.MathUtils;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.netty.NettyUtils;
import org.elasticsearch.common.netty.OpenChannelsHandler;
import org.elasticsearch.common.netty.ReleaseChannelFutureListener;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.PortsRange;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.KeyedLock;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BindTransportException;
import org.elasticsearch.transport.BytesTransportRequest;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.NodeNotConnectedException;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportServiceAdapter;
import org.elasticsearch.transport.support.TransportStatus;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.FixedReceiveBufferSizePredictorFactory;
import org.jboss.netty.channel.ReceiveBufferSizePredictorFactory;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioWorkerPool;
import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory;
import org.jboss.netty.util.HashedWheelTimer;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.UnknownHostException;
import java.nio.channels.CancelledKeyException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.elasticsearch.common.network.NetworkService.TcpSettings.*;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.transport.NetworkExceptionHelper.isCloseConnectionException;
import static org.elasticsearch.common.transport.NetworkExceptionHelper.isConnectException;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
/**
 * There are 4 types of connections per node: low, med, high, and ping. Low is for batch-oriented
 * APIs (like recovery or bulk) whose large payloads would otherwise cause regular requests (like
 * search or a single-document index) to take longer. Med is for the typical search / single-doc
 * index. High is for things like cluster state. Ping is reserved for sending out ping requests
 * to other nodes.
 */
public class NettyTransport extends AbstractLifecycleComponent<Transport> implements Transport {
static {
    // install ES-specific netty integration (logging, etc.) before any channel is created
    NettyUtils.setup();
}
// Thread-name prefixes used when naming netty boss/worker threads.
public static final String HTTP_SERVER_WORKER_THREAD_NAME_PREFIX = "http_server_worker";
public static final String HTTP_SERVER_BOSS_THREAD_NAME_PREFIX = "http_server_boss";
public static final String TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX = "transport_client_worker";
public static final String TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX = "transport_client_boss";
// Setting keys for worker-thread count and per-node connection counts per channel type.
public static final String WORKER_COUNT = "transport.netty.worker_count";
public static final String CONNECTIONS_PER_NODE_RECOVERY = "transport.connections_per_node.recovery";
public static final String CONNECTIONS_PER_NODE_BULK = "transport.connections_per_node.bulk";
public static final String CONNECTIONS_PER_NODE_REG = "transport.connections_per_node.reg";
public static final String CONNECTIONS_PER_NODE_STATE = "transport.connections_per_node.state";
public static final String CONNECTIONS_PER_NODE_PING = "transport.connections_per_node.ping";
public static final String PING_SCHEDULE = "transport.ping_schedule"; // the scheduled internal ping interval setting
public static final TimeValue DEFAULT_PING_SCHEDULE = TimeValue.timeValueMillis(-1); // the default ping schedule, defaults to disabled (-1)
public static final String DEFAULT_PORT_RANGE = "9300-9400";
public static final String DEFAULT_PROFILE = "default";
// Immutable configuration resolved once in the constructor.
protected final NetworkService networkService;
protected final Version version;
protected final boolean blockingClient;
protected final TimeValue connectTimeout;
protected final ByteSizeValue maxCumulationBufferCapacity;
protected final int maxCompositeBufferComponents;
protected final boolean compress;
protected final ReceiveBufferSizePredictorFactory receiveBufferSizePredictorFactory;
protected final int workerCount;
protected final ByteSizeValue receivePredictorMin;
protected final ByteSizeValue receivePredictorMax;
protected final int connectionsPerNodeRecovery;
protected final int connectionsPerNodeBulk;
protected final int connectionsPerNodeReg;
protected final int connectionsPerNodeState;
protected final int connectionsPerNodePing;
private final TimeValue pingSchedule;
protected final BigArrays bigArrays;
protected final ThreadPool threadPool;
// Mutable state initialized in doStart().
protected volatile OpenChannelsHandler serverOpenChannels;
protected volatile ClientBootstrap clientBootstrap;
// node id to actual channel
protected final ConcurrentMap<DiscoveryNode, NodeChannels> connectedNodes = newConcurrentMap();
protected final Map<String, ServerBootstrap> serverBootstraps = newConcurrentMap();
protected final Map<String, List<Channel>> serverChannels = newConcurrentMap();
protected final ConcurrentMap<String, BoundTransportAddress> profileBoundAddresses = newConcurrentMap();
protected volatile TransportServiceAdapter transportServiceAdapter;
protected volatile BoundTransportAddress boundAddress;
protected final KeyedLock<String> connectionLock = new KeyedLock<>();
protected final NamedWriteableRegistry namedWriteableRegistry;
// this lock is here to make sure we close this transport and disconnect all the client nodes
// connections while no connect operations is going on... (this might help with 100% CPU when stopping the transport?)
private final ReadWriteLock globalLock = new ReentrantReadWriteLock();
// package visibility for tests
final ScheduledPing scheduledPing;
@Inject
public NettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, Version version, NamedWriteableRegistry namedWriteableRegistry) {
    super(settings);
    this.threadPool = threadPool;
    this.networkService = networkService;
    this.bigArrays = bigArrays;
    this.version = version;
    if (settings.getAsBoolean("netty.epollBugWorkaround", false)) {
        System.setProperty("org.jboss.netty.epollBugWorkaround", "true");
    }
    // netty-specific settings take precedence over the generic tcp/transport fallbacks
    this.workerCount = settings.getAsInt(WORKER_COUNT, EsExecutors.boundedNumberOfProcessors(settings) * 2);
    this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", settings.getAsBoolean(TCP_BLOCKING_CLIENT, settings.getAsBoolean(TCP_BLOCKING, false)));
    this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT)));
    this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null);
    this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1);
    this.compress = settings.getAsBoolean(TransportSettings.TRANSPORT_TCP_COMPRESS, false);
    this.connectionsPerNodeRecovery = this.settings.getAsInt("transport.netty.connections_per_node.recovery", settings.getAsInt(CONNECTIONS_PER_NODE_RECOVERY, 2));
    this.connectionsPerNodeBulk = this.settings.getAsInt("transport.netty.connections_per_node.bulk", settings.getAsInt(CONNECTIONS_PER_NODE_BULK, 3));
    this.connectionsPerNodeReg = this.settings.getAsInt("transport.netty.connections_per_node.reg", settings.getAsInt(CONNECTIONS_PER_NODE_REG, 6));
    this.connectionsPerNodeState = this.settings.getAsInt("transport.netty.connections_per_node.high", settings.getAsInt(CONNECTIONS_PER_NODE_STATE, 1));
    this.connectionsPerNodePing = this.settings.getAsInt("transport.netty.connections_per_node.ping", settings.getAsInt(CONNECTIONS_PER_NODE_PING, 1));
    // we want to have at least 1 for reg/state/ping
    if (this.connectionsPerNodeReg == 0) {
        throw new IllegalArgumentException("can't set [connection_per_node.reg] to 0");
    }
    if (this.connectionsPerNodePing == 0) {
        throw new IllegalArgumentException("can't set [connection_per_node.ping] to 0");
    }
    if (this.connectionsPerNodeState == 0) {
        throw new IllegalArgumentException("can't set [connection_per_node.state] to 0");
    }
    long defaultReceiverPredictor = 512 * 1024;
    if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) {
        // we can guess a better default...
        long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / workerCount);
        defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024));
    }
    // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one
    this.receivePredictorMin = this.settings.getAsBytesSize("transport.netty.receive_predictor_min", this.settings.getAsBytesSize("transport.netty.receive_predictor_size", new ByteSizeValue(defaultReceiverPredictor)));
    this.receivePredictorMax = this.settings.getAsBytesSize("transport.netty.receive_predictor_max", this.settings.getAsBytesSize("transport.netty.receive_predictor_size", new ByteSizeValue(defaultReceiverPredictor)));
    if (receivePredictorMax.bytes() == receivePredictorMin.bytes()) {
        // min == max: a fixed-size predictor is cheaper than an adaptive one
        receiveBufferSizePredictorFactory = new FixedReceiveBufferSizePredictorFactory((int) receivePredictorMax.bytes());
    } else {
        receiveBufferSizePredictorFactory = new AdaptiveReceiveBufferSizePredictorFactory((int) receivePredictorMin.bytes(), (int) receivePredictorMin.bytes(), (int) receivePredictorMax.bytes());
    }
    this.scheduledPing = new ScheduledPing();
    this.pingSchedule = settings.getAsTime(PING_SCHEDULE, DEFAULT_PING_SCHEDULE);
    // a non-positive schedule (the default, -1) disables the internal keep-alive ping
    if (pingSchedule.millis() > 0) {
        threadPool.schedule(pingSchedule, ThreadPool.Names.GENERIC, scheduledPing);
    }
    this.namedWriteableRegistry = namedWriteableRegistry;
}
/**
 * Returns the node settings this transport was constructed with.
 *
 * @return the transport's settings
 */
public Settings settings() {
    return settings;
}
/**
 * Wires in the adapter that dispatches received messages to the transport service.
 */
@Override
public void transportServiceAdapter(TransportServiceAdapter service) {
    transportServiceAdapter = service;
}
// Package-private accessor for the wired-in service adapter (used by channel handlers).
TransportServiceAdapter transportServiceAdapter() {
    return this.transportServiceAdapter;
}
// Package-private accessor for the node's thread pool.
ThreadPool threadPool() {
    return this.threadPool;
}
@Override
protected void doStart() {
    // If anything below throws, the finally block tears down whatever was started.
    boolean success = false;
    try {
        clientBootstrap = createClientBootstrap();
        // "network.server" == false means client-only mode: no server-side bind at all
        if (settings.getAsBoolean("network.server", true)) {
            final OpenChannelsHandler openChannels = new OpenChannelsHandler(logger);
            this.serverOpenChannels = openChannels;
            // extract default profile first and create standard bootstrap
            Map<String, Settings> profiles = settings.getGroups("transport.profiles", true);
            if (!profiles.containsKey(DEFAULT_PROFILE)) {
                profiles = Maps.newHashMap(profiles);
                profiles.put(DEFAULT_PROFILE, Settings.EMPTY);
            }
            Settings fallbackSettings = createFallbackSettings();
            Settings defaultSettings = profiles.get(DEFAULT_PROFILE);
            // loop through all profiles and start them up, special handling for default one
            for (Map.Entry<String, Settings> entry : profiles.entrySet()) {
                Settings profileSettings = entry.getValue();
                String name = entry.getKey();
                if (!Strings.hasLength(name)) {
                    logger.info("transport profile configured without a name. skipping profile with settings [{}]", profileSettings.toDelimitedString(','));
                    continue;
                } else if (DEFAULT_PROFILE.equals(name)) {
                    // the default profile falls back to transport.tcp.port / the default port range
                    profileSettings = settingsBuilder()
                            .put(profileSettings)
                            .put("port", profileSettings.get("port", this.settings.get("transport.tcp.port", DEFAULT_PORT_RANGE)))
                            .build();
                } else if (profileSettings.get("port") == null) {
                    // if profile does not have a port, skip it
                    logger.info("No port configured for profile [{}], not binding", name);
                    continue;
                }
                // merge fallback settings with default settings with profile settings so we have complete settings with default values
                Settings mergedSettings = settingsBuilder()
                        .put(fallbackSettings)
                        .put(defaultSettings)
                        .put(profileSettings)
                        .build();
                createServerBootstrap(name, mergedSettings);
                bindServerBootstrap(name, mergedSettings);
            }
            // publish address is derived from the first channel bound for the default profile
            InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(DEFAULT_PROFILE).get(0).getLocalAddress();
            int publishPort = settings.getAsInt("transport.netty.publish_port", settings.getAsInt("transport.publish_port", boundAddress.getPort()));
            String publishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
            InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
            this.boundAddress = new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress));
        }
        success = true;
    } finally {
        if (success == false) {
            doStop();
        }
    }
}
/**
 * Returns an immutable snapshot of the per-profile bound transport addresses.
 *
 * @return map of profile name to its bound address
 */
@Override
public Map<String, BoundTransportAddress> profileBoundAddresses() {
    final Map<String, BoundTransportAddress> snapshot = ImmutableMap.copyOf(profileBoundAddresses);
    return snapshot;
}
/**
 * Resolves the configured publish host and pairs it with the publish port.
 *
 * @throws BindTransportException if the publish host cannot be resolved
 */
private InetSocketAddress createPublishAddress(String publishHost, int publishPort) {
    try {
        final InetAddress resolved = networkService.resolvePublishHostAddress(publishHost);
        return new InetSocketAddress(resolved, publishPort);
    } catch (Exception e) {
        throw new BindTransportException("Failed to resolve publish address", e);
    }
}
// Builds and configures the netty client bootstrap used for all outbound connections.
// Chooses blocking (OIO) vs non-blocking (NIO) factories based on 'blockingClient', then
// applies the tcp_* options, preferring transport.netty.* keys over the generic tcp fallbacks.
private ClientBootstrap createClientBootstrap() {
    if (blockingClient) {
        clientBootstrap = new ClientBootstrap(new OioClientSocketChannelFactory(Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX))));
    } else {
        int bossCount = settings.getAsInt("transport.netty.boss_count", 1);
        clientBootstrap = new ClientBootstrap(new NioClientSocketChannelFactory(
                Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX)),
                bossCount,
                new NioWorkerPool(Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX)), workerCount),
                new HashedWheelTimer(daemonThreadFactory(settings, "transport_client_timer"))));
    }
    clientBootstrap.setPipelineFactory(configureClientChannelPipelineFactory());
    clientBootstrap.setOption("connectTimeoutMillis", connectTimeout.millis());
    // "default" means: leave the option unset and use the OS/netty default
    String tcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
    if (!"default".equals(tcpNoDelay)) {
        clientBootstrap.setOption("tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null));
    }
    String tcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
    if (!"default".equals(tcpKeepAlive)) {
        clientBootstrap.setOption("keepAlive", Booleans.parseBoolean(tcpKeepAlive, null));
    }
    // buffer sizes are only applied when explicitly positive
    ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
    if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) {
        clientBootstrap.setOption("sendBufferSize", tcpSendBufferSize.bytes());
    }
    ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
    if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) {
        clientBootstrap.setOption("receiveBufferSize", tcpReceiveBufferSize.bytes());
    }
    clientBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
    boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
    clientBootstrap.setOption("reuseAddress", reuseAddress);
    return clientBootstrap;
}
// Collects transport-wide fallback values (bind/publish host, tcp options) into a Settings
// object; doStart() layers these under each profile's own settings so every profile has
// complete values.
private Settings createFallbackSettings() {
    Settings.Builder fallbackSettingsBuilder = settingsBuilder();
    String fallbackBindHost = settings.get("transport.netty.bind_host", settings.get("transport.bind_host", settings.get("transport.host")));
    if (fallbackBindHost != null) {
        fallbackSettingsBuilder.put("bind_host", fallbackBindHost);
    }
    String fallbackPublishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
    if (fallbackPublishHost != null) {
        fallbackSettingsBuilder.put("publish_host", fallbackPublishHost);
    }
    String fallbackTcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
    if (fallbackTcpNoDelay != null) {
        fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay);
    }
    String fallbackTcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
    if (fallbackTcpKeepAlive != null) {
        fallbackSettingsBuilder.put("tcp_keep_alive", fallbackTcpKeepAlive);
    }
    boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
    fallbackSettingsBuilder.put("reuse_address", fallbackReuseAddress);
    ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
    if (fallbackTcpSendBufferSize != null) {
        fallbackSettingsBuilder.put("tcp_send_buffer_size", fallbackTcpSendBufferSize);
    }
    ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
    if (fallbackTcpBufferSize != null) {
        fallbackSettingsBuilder.put("tcp_receive_buffer_size", fallbackTcpBufferSize);
    }
    return fallbackSettingsBuilder.build();
}
/**
 * Resolves the configured bind host(s) for the given profile and binds the
 * profile's server bootstrap to every resolved address.
 *
 * @param name     profile name, used to look up the matching server bootstrap
 * @param settings profile-scoped settings; reads "bind_host"
 * @throws BindTransportException if the bind host cannot be resolved
 */
private void bindServerBootstrap(final String name, final Settings settings) {
// Bind and start to accept incoming connections.
InetAddress hostAddresses[];
String bindHost = settings.get("bind_host");
try {
hostAddresses = networkService.resolveBindHostAddress(bindHost);
} catch (IOException e) {
throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e);
}
if (logger.isDebugEnabled()) {
// format the addresses only when debug logging is actually enabled
String[] addresses = new String[hostAddresses.length];
for (int i = 0; i < hostAddresses.length; i++) {
addresses[i] = NetworkAddress.format(hostAddresses[i]);
}
logger.debug("binding server bootstrap to: {}", addresses);
}
// bind once per resolved address (a host name may resolve to several)
for (InetAddress hostAddress : hostAddresses) {
bindServerBootstrap(name, hostAddress, settings);
}
}
/**
 * Binds the named profile's server bootstrap to the given address, walking the
 * configured port range until one port binds successfully. The bound channel is
 * registered under the profile name so {@code doStop()} can close it, and for
 * non-default profiles the bound/publish address pair is recorded.
 *
 * @throws BindTransportException if no port in the range could be bound
 */
private void bindServerBootstrap(final String name, final InetAddress hostAddress, Settings settings) {
String port = settings.get("port");
PortsRange portsRange = new PortsRange(port);
final AtomicReference<Exception> lastException = new AtomicReference<>();
final AtomicReference<InetSocketAddress> boundSocket = new AtomicReference<>();
boolean success = portsRange.iterate(new PortsRange.PortCallback() {
@Override
public boolean onPortNumber(int portNumber) {
try {
Channel channel = serverBootstraps.get(name).bind(new InetSocketAddress(hostAddress, portNumber));
// record the channel under the profile name so doStop() can close it later
synchronized (serverChannels) {
List<Channel> list = serverChannels.get(name);
if (list == null) {
list = new ArrayList<>();
serverChannels.put(name, list);
}
list.add(channel);
boundSocket.set((InetSocketAddress)channel.getLocalAddress());
}
} catch (Exception e) {
// remember the failure and try the next port in the range
lastException.set(e);
return false;
}
return true;
}
});
if (!success) {
throw new BindTransportException("Failed to bind to [" + port + "]", lastException.get());
}
// for custom profiles, additionally record the address pair that should be published
if (!DEFAULT_PROFILE.equals(name)) {
InetSocketAddress boundAddress = boundSocket.get();
// publish_port/publish_host default to whatever was actually bound
int publishPort = settings.getAsInt("publish_port", boundAddress.getPort());
String publishHost = settings.get("publish_host", boundAddress.getHostString());
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
// TODO: support real multihoming with publishing. Today we use putIfAbsent so only the prioritized address is published
profileBoundAddresses.putIfAbsent(name, new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress)));
}
logger.info("Bound profile [{}] to address {{}}", name, NetworkAddress.format(boundSocket.get()));
}
/**
 * Creates and configures (but does not bind) the Netty ServerBootstrap for the
 * given transport profile, choosing blocking (OIO) or non-blocking (NIO) socket
 * factories based on configuration, and stores it in {@code serverBootstraps}.
 *
 * @param name     profile name (also used in thread names and the pipeline)
 * @param settings profile-scoped settings
 */
private void createServerBootstrap(String name, Settings settings) {
boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", this.settings.getAsBoolean(TCP_BLOCKING_SERVER, this.settings.getAsBoolean(TCP_BLOCKING, false)));
String port = settings.get("port");
String bindHost = settings.get("bind_host");
String publishHost = settings.get("publish_host");
// kept as strings so the sentinel value "default" can mean "leave the OS default alone"
String tcpNoDelay = settings.get("tcp_no_delay");
String tcpKeepAlive = settings.get("tcp_keep_alive");
boolean reuseAddress = settings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress());
ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", TCP_DEFAULT_SEND_BUFFER_SIZE);
ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_DEFAULT_RECEIVE_BUFFER_SIZE);
logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]",
name, workerCount, port, bindHost, publishHost, compress, connectTimeout, connectionsPerNodeRecovery, connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, connectionsPerNodePing, receivePredictorMin, receivePredictorMax);
final ThreadFactory bossFactory = daemonThreadFactory(this.settings, HTTP_SERVER_BOSS_THREAD_NAME_PREFIX, name);
final ThreadFactory workerFactory = daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX, name);
ServerBootstrap serverBootstrap;
if (blockingServer) {
// blocking (old) IO: one thread per connection
serverBootstrap = new ServerBootstrap(new OioServerSocketChannelFactory(
Executors.newCachedThreadPool(bossFactory),
Executors.newCachedThreadPool(workerFactory)
));
} else {
// non-blocking IO with a bounded worker pool
serverBootstrap = new ServerBootstrap(new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(bossFactory),
Executors.newCachedThreadPool(workerFactory),
workerCount));
}
serverBootstrap.setPipelineFactory(configureServerChannelPipelineFactory(name, settings));
// "child.*" options apply to accepted (per-connection) channels
if (!"default".equals(tcpNoDelay)) {
serverBootstrap.setOption("child.tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null));
}
if (!"default".equals(tcpKeepAlive)) {
serverBootstrap.setOption("child.keepAlive", Booleans.parseBoolean(tcpKeepAlive, null));
}
if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) {
serverBootstrap.setOption("child.sendBufferSize", tcpSendBufferSize.bytes());
}
if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) {
serverBootstrap.setOption("child.receiveBufferSize", tcpReceiveBufferSize.bytes());
}
serverBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
serverBootstrap.setOption("child.receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
serverBootstrap.setOption("reuseAddress", reuseAddress);
serverBootstrap.setOption("child.reuseAddress", reuseAddress);
serverBootstraps.put(name, serverBootstrap);
}
/**
 * Stops the transport: closes all node channels, all bound server channels and
 * bootstraps, and releases the client bootstrap. The work is handed off to the
 * generic thread pool so it never runs on an IO handler thread, and the caller
 * waits (bounded) for it to complete.
 */
@Override
protected void doStop() {
final CountDownLatch latch = new CountDownLatch(1);
// make sure we run it on another thread than a possible IO handler thread
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
// the write lock excludes concurrent connectToNode() calls during teardown
globalLock.writeLock().lock();
try {
for (Iterator<NodeChannels> it = connectedNodes.values().iterator(); it.hasNext(); ) {
NodeChannels nodeChannels = it.next();
it.remove();
nodeChannels.close();
}
Iterator<Map.Entry<String, List<Channel>>> serverChannelIterator = serverChannels.entrySet().iterator();
while (serverChannelIterator.hasNext()) {
Map.Entry<String, List<Channel>> serverChannelEntry = serverChannelIterator.next();
String name = serverChannelEntry.getKey();
List<Channel> serverChannels = serverChannelEntry.getValue();
for (Channel serverChannel : serverChannels) {
try {
serverChannel.close().awaitUninterruptibly();
} catch (Throwable t) {
logger.debug("Error closing serverChannel for profile [{}]", t, name);
}
}
serverChannelIterator.remove();
}
if (serverOpenChannels != null) {
serverOpenChannels.close();
serverOpenChannels = null;
}
Iterator<Map.Entry<String, ServerBootstrap>> serverBootstrapIterator = serverBootstraps.entrySet().iterator();
while (serverBootstrapIterator.hasNext()) {
Map.Entry<String, ServerBootstrap> serverBootstrapEntry = serverBootstrapIterator.next();
String name = serverBootstrapEntry.getKey();
ServerBootstrap serverBootstrap = serverBootstrapEntry.getValue();
try {
serverBootstrap.releaseExternalResources();
} catch (Throwable t) {
logger.debug("Error closing serverBootstrap for profile [{}]", t, name);
}
serverBootstrapIterator.remove();
}
// second sweep over connectedNodes: catches channels that were added
// between the first sweep above and the server shutdown just performed
for (Iterator<NodeChannels> it = connectedNodes.values().iterator(); it.hasNext(); ) {
NodeChannels nodeChannels = it.next();
it.remove();
nodeChannels.close();
}
if (clientBootstrap != null) {
clientBootstrap.releaseExternalResources();
clientBootstrap = null;
}
} finally {
globalLock.writeLock().unlock();
latch.countDown();
}
}
});
try {
// bounded wait — do not hang shutdown forever if teardown stalls
latch.await(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
// ignore
}
}
@Override
protected void doClose() {
// nothing to release here — channels and bootstraps are torn down in doStop()
}
/**
 * Parses a host/port-range string into concrete transport addresses, using the
 * most specific configured transport port setting as the default port range.
 */
@Override
public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws Exception {
    // Resolve the default port range: profile setting first, then the legacy keys.
    String defaultPortRange = settings.get("transport.profiles.default.port",
            settings.get("transport.netty.port",
                    settings.get("transport.tcp.port", DEFAULT_PORT_RANGE)));
    return parse(address, defaultPortRange, perAddressLimit);
}
// this code is a take on guava's HostAndPort, like a HostAndPortRange
// pattern for validating ipv6 bracketed addresses, e.g. "[::1]" or "[::1]:9300-9400".
// not perfect, but PortsRange should take care of any port range validation, not a regex
private static final Pattern BRACKET_PATTERN = Pattern.compile("^\\[(.*:.*)\\](?::([\\d\\-]*))?$");
/**
 * Parses a hostname plus optional port-range spec into its equivalent transport
 * addresses. Accepts "host", "host:ports", "[ipv6]" and "[ipv6]:ports"; when no
 * port part is present the supplied default range is used. At most
 * {@code perAddressLimit} ports are expanded.
 *
 * @throws UnknownHostException     if the host cannot be resolved
 * @throws IllegalArgumentException on malformed bracketed specs or unbracketed IPv6
 */
static TransportAddress[] parse(String hostPortString, String defaultPortRange, int perAddressLimit) throws UnknownHostException {
    Objects.requireNonNull(hostPortString);
    final String host;
    String portString = null;
    if (hostPortString.startsWith("[")) {
        // Bracketed form, typically an IPv6 literal: "[host]" or "[host]:ports".
        Matcher matcher = BRACKET_PATTERN.matcher(hostPortString);
        if (!matcher.matches()) {
            throw new IllegalArgumentException("Invalid bracketed host/port range: " + hostPortString);
        }
        host = matcher.group(1);
        portString = matcher.group(2); // null when no port part was given
    } else {
        int firstColon = hostPortString.indexOf(':');
        if (firstColon < 0) {
            // no colon at all: bare hostname, no port spec
            host = hostPortString;
        } else if (hostPortString.indexOf(':', firstColon + 1) < 0) {
            // exactly one colon: split into host and port range
            host = hostPortString.substring(0, firstColon);
            portString = hostPortString.substring(firstColon + 1);
        } else {
            // two or more colons without brackets: an ambiguous IPv6 literal
            throw new IllegalArgumentException("IPv6 addresses must be bracketed: " + hostPortString);
        }
    }
    // fall back to the default range when no port was specified
    if (portString == null || portString.isEmpty()) {
        portString = defaultPortRange;
    }
    // resolve the host once, then pair every resolved address with each port
    Set<InetAddress> resolvedAddresses = new HashSet<>(Arrays.asList(InetAddress.getAllByName(host)));
    int[] ports = new PortsRange(portString).ports();
    int portCount = Math.min(ports.length, perAddressLimit);
    List<TransportAddress> result = new ArrayList<>();
    for (int i = 0; i < portCount; i++) {
        for (InetAddress resolved : resolvedAddresses) {
            result.add(new InetSocketTransportAddress(resolved, ports[i]));
        }
    }
    return result.toArray(new TransportAddress[result.size()]);
}
/**
 * Only the InetSocket flavour of transport addresses is handled by this transport.
 */
@Override
public boolean addressSupported(Class<? extends TransportAddress> address) {
    return address.equals(InetSocketTransportAddress.class);
}
/** Returns the bound/publish address pair of the default profile. */
@Override
public BoundTransportAddress boundAddress() {
    return boundAddress;
}
/**
 * Central exception handler for transport channels. Classifies the failure,
 * closes the affected channel, and (except for the HTTP-on-transport case)
 * disconnects the node that owned it. Log level reflects severity: expected
 * close/connect failures are traced, everything else is warned.
 */
protected void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
if (!lifecycle.started()) {
// ignore
return;
}
if (isCloseConnectionException(e.getCause())) {
logger.trace("close connection exception caught on transport layer [{}], disconnecting from relevant node", e.getCause(), ctx.getChannel());
// close the channel, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
} else if (isConnectException(e.getCause())) {
logger.trace("connect exception caught on transport layer [{}]", e.getCause(), ctx.getChannel());
// close the channel as safe measure, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
} else if (e.getCause() instanceof CancelledKeyException) {
logger.trace("cancelled key exception caught on transport layer [{}], disconnecting from relevant node", e.getCause(), ctx.getChannel());
// close the channel as safe measure, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
} else if (e.getCause() instanceof SizeHeaderFrameDecoder.HttpOnTransportException) {
// somebody spoke HTTP to the transport port:
// in case we are able to return data, serialize the exception content and sent it back to the client
if (ctx.getChannel().isOpen()) {
ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(e.getCause().getMessage().getBytes(Charsets.UTF_8));
ChannelFuture channelFuture = ctx.getChannel().write(buffer);
// close only after the explanatory message has been flushed
channelFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
future.getChannel().close();
}
});
}
} else {
logger.warn("exception caught on transport layer [{}], closing connection", e.getCause(), ctx.getChannel());
// close the channel, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
}
}
/** Wraps a raw socket address into the transport-address abstraction. */
TransportAddress wrapAddress(SocketAddress socketAddress) {
    // This transport only ever produces inet socket addresses, so the cast is safe.
    InetSocketAddress inetAddress = (InetSocketAddress) socketAddress;
    return new InetSocketTransportAddress(inetAddress);
}
/** Returns the number of currently open server-side channels, or 0 after stop. */
@Override
public long serverOpen() {
    // Snapshot the handler reference — doStop() may null the field concurrently.
    final OpenChannelsHandler handler = serverOpenChannels;
    if (handler == null) {
        return 0;
    }
    return handler.numberOfOpenChannels();
}
/** Returns the loopback addresses this transport can be reached on locally. */
@Override
public List<String> getLocalAddresses() {
    final List<String> addresses = new ArrayList<>();
    addresses.add("127.0.0.1");
    // check if v6 is supported, if so, v4 will also work via mapped addresses.
    if (NetworkUtils.SUPPORTS_V6) {
        addresses.add("[::1]"); // may get ports appended!
    }
    return addresses;
}
/**
 * Serializes and sends a transport request to the given node over one of its
 * typed channels. The wire format is: fixed-size header, action name, then the
 * (optionally compressed) request body. The backing byte pages are released by a
 * listener once the write completes; if the listener could not be attached, they
 * are released here to avoid a leak.
 *
 * @throws IOException        on serialization failure
 * @throws TransportException if the node is not connected
 */
@Override
public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
Channel targetChannel = nodeChannel(node, options);
if (compress) {
options.withCompress(true);
}
byte status = 0;
status = TransportStatus.setRequest(status);
ReleasableBytesStreamOutput bStream = new ReleasableBytesStreamOutput(bigArrays);
boolean addedReleaseListener = false;
try {
// reserve room for the header; it is written last, once the size is known
bStream.skip(NettyHeader.HEADER_SIZE);
StreamOutput stream = bStream;
// only compress if asked, and, the request is not bytes, since then only
// the header part is compressed, and the "body" can't be extracted as compressed
if (options.compress() && (!(request instanceof BytesTransportRequest))) {
status = TransportStatus.setCompress(status);
stream = CompressorFactory.defaultCompressor().streamOutput(stream);
}
// we pick the smallest of the 2, to support both backward and forward compatibility
// note, this is the only place we need to do this, since from here on, we use the serialized version
// as the version to use also when the node receiving this request will send the response with
Version version = Version.smallest(this.version, node.version());
stream.setVersion(version);
stream.writeString(action);
ReleasablePagedBytesReference bytes;
ChannelBuffer buffer;
// it might be nice to somehow generalize this optimization, maybe a smart "paged" bytes output
// that create paged channel buffers, but its tricky to know when to do it (where this option is
// more explicit).
if (request instanceof BytesTransportRequest) {
// raw-bytes fast path: write a thin header and wrap the existing payload
// buffer instead of copying it
BytesTransportRequest bRequest = (BytesTransportRequest) request;
assert node.version().equals(bRequest.version());
bRequest.writeThin(stream);
stream.close();
bytes = bStream.bytes();
ChannelBuffer headerBuffer = bytes.toChannelBuffer();
ChannelBuffer contentBuffer = bRequest.bytes().toChannelBuffer();
buffer = ChannelBuffers.wrappedBuffer(NettyUtils.DEFAULT_GATHERING, headerBuffer, contentBuffer);
} else {
// regular path: fully serialize the request into the paged stream
request.writeTo(stream);
stream.close();
bytes = bStream.bytes();
buffer = bytes.toChannelBuffer();
}
// back-fill the reserved header with request id, status flags and version
NettyHeader.writeHeader(buffer, requestId, status, version);
ChannelFuture future = targetChannel.write(buffer);
// the listener releases the paged bytes once the write has completed
ReleaseChannelFutureListener listener = new ReleaseChannelFutureListener(bytes);
future.addListener(listener);
addedReleaseListener = true;
transportServiceAdapter.onRequestSent(node, requestId, action, request, options);
} finally {
if (!addedReleaseListener) {
// serialization failed before the listener was attached — release here
Releasables.close(bStream.bytes());
}
}
}
/** Returns true once a channel set has been registered for the node. */
@Override
public boolean nodeConnected(DiscoveryNode node) {
    return connectedNodes.get(node) != null;
}
/** Opens a "light" connection: a single channel shared across all channel types. */
@Override
public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException {
    connectToNode(node, true);
}
/** Opens a full connection: the complete set of typed channels for the node. */
@Override
public void connectToNode(DiscoveryNode node) {
    connectToNode(node, false);
}
/**
 * Connects to a node, either "light" (single shared channel) or full (one channel
 * array per request type). A global read lock excludes a concurrent doStop(),
 * and a per-node lock serializes connection attempts to the same node; the
 * lifecycle state is re-checked inside the lock. Already-connected nodes are a
 * no-op. Any failure is surfaced as a ConnectTransportException.
 */
public void connectToNode(DiscoveryNode node, boolean light) {
if (!lifecycle.started()) {
throw new IllegalStateException("can't add nodes to a stopped transport");
}
if (node == null) {
throw new ConnectTransportException(null, "can't connect to a null node");
}
// read lock: many connects may proceed concurrently, but not across doStop()
globalLock.readLock().lock();
try {
connectionLock.acquire(node.id());
try {
// re-check now that we hold the locks — doStop() may have raced us
if (!lifecycle.started()) {
throw new IllegalStateException("can't add nodes to a stopped transport");
}
NodeChannels nodeChannels = connectedNodes.get(node);
if (nodeChannels != null) {
// already connected — nothing to do
return;
}
try {
if (light) {
nodeChannels = connectToChannelsLight(node);
} else {
nodeChannels = new NodeChannels(new Channel[connectionsPerNodeRecovery], new Channel[connectionsPerNodeBulk], new Channel[connectionsPerNodeReg], new Channel[connectionsPerNodeState], new Channel[connectionsPerNodePing]);
try {
connectToChannels(nodeChannels, node);
} catch (Throwable e) {
// close whatever channels did get opened before rethrowing
logger.trace("failed to connect to [{}], cleaning dangling connections", e, node);
nodeChannels.close();
throw e;
}
}
// we acquire a connection lock, so no way there is an existing connection
nodeChannels.start();
connectedNodes.put(node, nodeChannels);
if (logger.isDebugEnabled()) {
logger.debug("connected to node [{}]", node);
}
transportServiceAdapter.raiseNodeConnected(node);
} catch (ConnectTransportException e) {
throw e;
} catch (Exception e) {
throw new ConnectTransportException(node, "general node connection failure", e);
}
} finally {
connectionLock.release(node.id());
}
} finally {
globalLock.readLock().unlock();
}
}
/**
 * Opens a single channel to the node and uses it for every channel type —
 * the cheap connection used for e.g. pinging during discovery.
 *
 * @throws ConnectTransportException if the connect does not succeed in time
 */
protected NodeChannels connectToChannelsLight(DiscoveryNode node) {
    InetSocketAddress targetAddress = ((InetSocketTransportAddress) node.address()).address();
    ChannelFuture connectFuture = clientBootstrap.connect(targetAddress);
    // allow 50% headroom over the configured connect timeout before giving up
    connectFuture.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
    if (!connectFuture.isSuccess()) {
        throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectFuture.getCause());
    }
    Channel channel = connectFuture.getChannel();
    channel.getCloseFuture().addListener(new ChannelCloseListener(node));
    Channel[] single = new Channel[] { channel };
    // the same single channel backs recovery, bulk, reg, state and ping
    return new NodeChannels(single, single, single, single, single);
}
/**
 * Opens the full set of typed channels (recovery/bulk/reg/state/ping) to a node.
 * All connects are initiated first so they run in parallel, then each future is
 * awaited in turn. Channel types configured with zero connections borrow the
 * next "heavier" type's channels. On failure every pending future is cancelled
 * and any channel that did open is closed.
 */
protected void connectToChannels(NodeChannels nodeChannels, DiscoveryNode node) {
ChannelFuture[] connectRecovery = new ChannelFuture[nodeChannels.recovery.length];
ChannelFuture[] connectBulk = new ChannelFuture[nodeChannels.bulk.length];
ChannelFuture[] connectReg = new ChannelFuture[nodeChannels.reg.length];
ChannelFuture[] connectState = new ChannelFuture[nodeChannels.state.length];
ChannelFuture[] connectPing = new ChannelFuture[nodeChannels.ping.length];
InetSocketAddress address = ((InetSocketTransportAddress) node.address()).address();
// fire off all connects up front so they proceed concurrently
for (int i = 0; i < connectRecovery.length; i++) {
connectRecovery[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectBulk.length; i++) {
connectBulk[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectReg.length; i++) {
connectReg[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectState.length; i++) {
connectState[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectPing.length; i++) {
connectPing[i] = clientBootstrap.connect(address);
}
try {
// await each future with 50% headroom over the connect timeout; register a
// close listener on every successfully opened channel
for (int i = 0; i < connectRecovery.length; i++) {
connectRecovery[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectRecovery[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectRecovery[i].getCause());
}
nodeChannels.recovery[i] = connectRecovery[i].getChannel();
nodeChannels.recovery[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectBulk.length; i++) {
connectBulk[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectBulk[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectBulk[i].getCause());
}
nodeChannels.bulk[i] = connectBulk[i].getChannel();
nodeChannels.bulk[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectReg.length; i++) {
connectReg[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectReg[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectReg[i].getCause());
}
nodeChannels.reg[i] = connectReg[i].getChannel();
nodeChannels.reg[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectState.length; i++) {
connectState[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectState[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectState[i].getCause());
}
nodeChannels.state[i] = connectState[i].getChannel();
nodeChannels.state[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectPing.length; i++) {
connectPing[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectPing[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectPing[i].getCause());
}
nodeChannels.ping[i] = connectPing[i].getChannel();
nodeChannels.ping[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
// zero-sized types fall back to sharing another type's channels
if (nodeChannels.recovery.length == 0) {
if (nodeChannels.bulk.length > 0) {
nodeChannels.recovery = nodeChannels.bulk;
} else {
nodeChannels.recovery = nodeChannels.reg;
}
}
if (nodeChannels.bulk.length == 0) {
nodeChannels.bulk = nodeChannels.reg;
}
} catch (RuntimeException e) {
// NOTE(review): only RuntimeExceptions trigger this cleanup; an Error would
// leak the in-flight connects — confirm whether that is intended.
// clean the futures
List<ChannelFuture> futures = new ArrayList<>();
futures.addAll(Arrays.asList(connectRecovery));
futures.addAll(Arrays.asList(connectBulk));
futures.addAll(Arrays.asList(connectReg));
futures.addAll(Arrays.asList(connectState));
futures.addAll(Arrays.asList(connectPing));
for (ChannelFuture future : Collections.unmodifiableList(futures)) {
future.cancel();
if (future.getChannel() != null && future.getChannel().isOpen()) {
try {
future.getChannel().close();
} catch (Exception e1) {
// ignore
}
}
}
throw e;
}
}
/**
 * Explicitly disconnects from a node, closing its channels and notifying
 * listeners. A no-op when the node is not connected.
 */
@Override
public void disconnectFromNode(DiscoveryNode node) {
    connectionLock.acquire(node.id());
    try {
        NodeChannels channels = connectedNodes.remove(node);
        if (channels == null) {
            return; // not connected — nothing to tear down
        }
        try {
            logger.debug("disconnecting from [{}] due to explicit disconnect call", node);
            channels.close();
        } finally {
            // notify listeners even if closing the channels failed
            logger.trace("disconnected from [{}] due to explicit disconnect call", node);
            transportServiceAdapter.raiseNodeDisconnected(node);
        }
    } finally {
        connectionLock.release(node.id());
    }
}
/**
 * Disconnects from a node, only if the relevant channel is found to be part of the node channels.
 *
 * @return true if this call performed the disconnect, false if the channel did
 *         not belong to the node (or someone else already disconnected it)
 */
protected boolean disconnectFromNode(DiscoveryNode node, Channel channel, String reason) {
// this might be called multiple times from all the node channels, so do a lightweight
// check outside of the lock
NodeChannels nodeChannels = connectedNodes.get(node);
if (nodeChannels != null && nodeChannels.hasChannel(channel)) {
connectionLock.acquire(node.id());
try {
nodeChannels = connectedNodes.get(node);
// check again within the connection lock, if its still applicable to remove it
if (nodeChannels != null && nodeChannels.hasChannel(channel)) {
connectedNodes.remove(node);
try {
logger.debug("disconnecting from [{}], {}", node, reason);
nodeChannels.close();
} finally {
// listeners are notified even if closing the channels failed
logger.trace("disconnected from [{}], {}", node, reason);
transportServiceAdapter.raiseNodeDisconnected(node);
}
return true;
}
} finally {
connectionLock.release(node.id());
}
}
return false;
}
/**
 * Disconnects from whichever node owns the given channel, if any. Runs on the
 * generic thread pool since it may be triggered from an IO handler thread.
 */
protected void disconnectFromNodeChannel(final Channel channel, final Throwable failure) {
    threadPool().generic().execute(new Runnable() {
        @Override
        public void run() {
            // scan all known nodes and stop at the first one that owned the channel
            for (DiscoveryNode node : connectedNodes.keySet()) {
                boolean disconnected = disconnectFromNode(node, channel, ExceptionsHelper.detailedMessage(failure));
                if (disconnected) {
                    break;
                }
            }
        }
    });
}
/**
 * Picks a channel to the node matching the request type.
 *
 * @throws ConnectTransportException (NodeNotConnectedException) if not connected
 */
protected Channel nodeChannel(DiscoveryNode node, TransportRequestOptions options) throws ConnectTransportException {
    final NodeChannels channels = connectedNodes.get(node);
    if (channels == null) {
        throw new NodeNotConnectedException(node, "Node not connected");
    }
    return channels.channel(options.type());
}
/** Creates the pipeline factory used for all outbound (client) channels. */
public ChannelPipelineFactory configureClientChannelPipelineFactory() {
    return new ClientChannelPipelineFactory(this);
}
/**
 * Builds the Netty pipeline for client-side channels: a size-prefixed frame
 * decoder followed by the message dispatcher.
 */
protected static class ClientChannelPipelineFactory implements ChannelPipelineFactory {
    protected final NettyTransport nettyTransport;

    public ClientChannelPipelineFactory(NettyTransport nettyTransport) {
        this.nettyTransport = nettyTransport;
    }

    @Override
    public ChannelPipeline getPipeline() throws Exception {
        SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder();
        if (nettyTransport.maxCumulationBufferCapacity != null) {
            long capacityBytes = nettyTransport.maxCumulationBufferCapacity.bytes();
            // the decoder API only takes an int; clamp larger configured values
            if (capacityBytes > Integer.MAX_VALUE) {
                sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE);
            } else {
                sizeHeader.setMaxCumulationBufferCapacity((int) capacityBytes);
            }
        }
        if (nettyTransport.maxCompositeBufferComponents != -1) {
            sizeHeader.setMaxCumulationBufferComponents(nettyTransport.maxCompositeBufferComponents);
        }
        ChannelPipeline pipeline = Channels.pipeline();
        pipeline.addLast("size", sizeHeader);
        // using a dot as a prefix means, this cannot come from any settings parsed
        pipeline.addLast("dispatcher", new MessageChannelHandler(nettyTransport, nettyTransport.logger, ".client"));
        return pipeline;
    }
}
/** Creates the pipeline factory used for the named profile's server channels. */
public ChannelPipelineFactory configureServerChannelPipelineFactory(String name, Settings settings) {
    return new ServerChannelPipelineFactory(this, name, settings);
}
/**
 * Builds the Netty pipeline for server-side channels of one profile: an
 * open-channels tracker, a size-prefixed frame decoder, and the dispatcher.
 */
protected static class ServerChannelPipelineFactory implements ChannelPipelineFactory {
    protected final NettyTransport nettyTransport;
    protected final String name;
    protected final Settings settings;

    public ServerChannelPipelineFactory(NettyTransport nettyTransport, String name, Settings settings) {
        this.nettyTransport = nettyTransport;
        this.name = name;
        this.settings = settings;
    }

    @Override
    public ChannelPipeline getPipeline() throws Exception {
        SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder();
        if (nettyTransport.maxCumulationBufferCapacity != null) {
            long capacityBytes = nettyTransport.maxCumulationBufferCapacity.bytes();
            // the decoder API only takes an int; clamp larger configured values
            if (capacityBytes > Integer.MAX_VALUE) {
                sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE);
            } else {
                sizeHeader.setMaxCumulationBufferCapacity((int) capacityBytes);
            }
        }
        if (nettyTransport.maxCompositeBufferComponents != -1) {
            sizeHeader.setMaxCumulationBufferComponents(nettyTransport.maxCompositeBufferComponents);
        }
        ChannelPipeline pipeline = Channels.pipeline();
        // track open server-side channels so they can be force-closed on stop
        pipeline.addLast("openChannels", nettyTransport.serverOpenChannels);
        pipeline.addLast("size", sizeHeader);
        pipeline.addLast("dispatcher", new MessageChannelHandler(nettyTransport, nettyTransport.logger, name));
        return pipeline;
    }
}
/**
 * Close-future listener attached to every node channel; when a channel closes
 * it disconnects the owning node (off the IO thread).
 */
protected class ChannelCloseListener implements ChannelFutureListener {
    private final DiscoveryNode node;

    private ChannelCloseListener(DiscoveryNode node) {
        this.node = node;
    }

    @Override
    public void operationComplete(final ChannelFuture future) throws Exception {
        final NodeChannels channels = connectedNodes.get(node);
        boolean ownsChannel = channels != null && channels.hasChannel(future.getChannel());
        if (ownsChannel) {
            // run on the generic pool — close listeners fire on an IO thread
            threadPool().generic().execute(new Runnable() {
                @Override
                public void run() {
                    disconnectFromNode(node, future.getChannel(), "channel closed event");
                }
            });
        }
    }
}
/**
 * The set of channels open to one node, grouped by request type, with a
 * round-robin counter per group for channel selection.
 */
public static class NodeChannels {
    // flat view over all groups, frozen by start(); empty until then
    List<Channel> allChannels = Collections.emptyList();
    private Channel[] recovery;
    private final AtomicInteger recoveryCounter = new AtomicInteger();
    private Channel[] bulk;
    private final AtomicInteger bulkCounter = new AtomicInteger();
    private Channel[] reg;
    private final AtomicInteger regCounter = new AtomicInteger();
    private Channel[] state;
    private final AtomicInteger stateCounter = new AtomicInteger();
    private Channel[] ping;
    private final AtomicInteger pingCounter = new AtomicInteger();

    public NodeChannels(Channel[] recovery, Channel[] bulk, Channel[] reg, Channel[] state, Channel[] ping) {
        this.recovery = recovery;
        this.bulk = bulk;
        this.reg = reg;
        this.state = state;
        this.ping = ping;
    }

    /** Freezes the flat channel list once all groups have been populated. */
    public void start() {
        List<Channel> combined = new ArrayList<>();
        Collections.addAll(combined, recovery);
        Collections.addAll(combined, bulk);
        Collections.addAll(combined, reg);
        Collections.addAll(combined, state);
        Collections.addAll(combined, ping);
        this.allChannels = Collections.unmodifiableList(combined);
    }

    /** Returns true when the given channel belongs to this node. */
    public boolean hasChannel(Channel channel) {
        return allChannels.contains(channel);
    }

    /** Round-robin picks a channel of the requested type. */
    public Channel channel(TransportRequestOptions.Type type) {
        switch (type) {
            case REG:
                return reg[MathUtils.mod(regCounter.incrementAndGet(), reg.length)];
            case STATE:
                return state[MathUtils.mod(stateCounter.incrementAndGet(), state.length)];
            case PING:
                return ping[MathUtils.mod(pingCounter.incrementAndGet(), ping.length)];
            case BULK:
                return bulk[MathUtils.mod(bulkCounter.incrementAndGet(), bulk.length)];
            case RECOVERY:
                return recovery[MathUtils.mod(recoveryCounter.incrementAndGet(), recovery.length)];
            default:
                throw new IllegalArgumentException("no type channel for [" + type + "]");
        }
    }

    /** Closes all channels, initiating every close first, then awaiting each. */
    public synchronized void close() {
        List<ChannelFuture> pendingCloses = new ArrayList<>();
        for (Channel channel : allChannels) {
            try {
                if (channel != null && channel.isOpen()) {
                    pendingCloses.add(channel.close());
                }
            } catch (Exception e) {
                //ignore
            }
        }
        for (ChannelFuture pending : pendingCloses) {
            pending.awaitUninterruptibly();
        }
    }
}
/**
 * Periodically writes a ping header to every open channel of every connected
 * node and reschedules itself. Tracks successful and failed pings in counters.
 */
class ScheduledPing extends AbstractRunnable {
    final CounterMetric successfulPings = new CounterMetric();
    final CounterMetric failedPings = new CounterMetric();

    @Override
    protected void doRun() throws Exception {
        if (lifecycle.stoppedOrClosed()) {
            return;
        }
        for (Map.Entry<DiscoveryNode, NodeChannels> entry : connectedNodes.entrySet()) {
            final DiscoveryNode node = entry.getKey();
            NodeChannels channels = entry.getValue();
            for (final Channel channel : channels.allChannels) {
                try {
                    ChannelFuture future = channel.write(NettyHeader.pingHeader());
                    future.addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) throws Exception {
                            // BUG FIX: Netty invokes this listener for failed writes as
                            // well, so the success counter must only be bumped when the
                            // write actually succeeded.
                            if (future.isSuccess()) {
                                successfulPings.inc();
                            } else if (channel.isOpen()) {
                                logger.debug("[{}] failed to send ping transport message", future.getCause(), node);
                                failedPings.inc();
                            } else {
                                logger.trace("[{}] failed to send ping transport message (channel closed)", future.getCause(), node);
                            }
                        }
                    });
                } catch (Throwable t) {
                    // the write itself threw (e.g. channel already closed)
                    if (channel.isOpen()) {
                        logger.debug("[{}] failed to send ping transport message", t, node);
                        failedPings.inc();
                    } else {
                        logger.trace("[{}] failed to send ping transport message (channel closed)", t, node);
                    }
                }
            }
        }
        // reschedule the next ping round
        threadPool.schedule(pingSchedule, ThreadPool.Names.GENERIC, this);
    }

    @Override
    public void onFailure(Throwable t) {
        // BUG FIX: the message previously contained a stray "[{}]" placeholder
        // with no corresponding argument.
        if (lifecycle.stoppedOrClosed()) {
            logger.trace("failed to send ping transport message", t);
        } else {
            logger.warn("failed to send ping transport message", t);
        }
    }
}
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can
* obtain a copy of the License at
* https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
* or packager/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at packager/legal/LICENSE.txt.
*
* GPL Classpath Exception:
* Oracle designates this particular file as subject to the "Classpath"
* exception as provided by Oracle in the GPL Version 2 section of the License
* file that accompanied this code.
*
* Modifications:
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
* "Portions Copyright [year] [name of copyright owner]"
*
* Contributor(s):
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package com.sun.org.apache.xerces.internal.utils;
import com.sun.org.apache.xerces.internal.impl.Constants;
import com.sun.org.apache.xerces.internal.utils.XMLSecurityManager.Limit;
import java.util.Formatter;
import java.util.HashMap;
import java.util.Map;
/**
* A helper for analyzing entity expansion limits
*
* @author Joe Wang Oracle Corp.
*
*/
public final class XMLLimitAnalyzer {

    /**
     * Maps old (deprecated) property names to the new ones.
     */
    public static enum NameMap {

        ENTITY_EXPANSION_LIMIT(Constants.SP_ENTITY_EXPANSION_LIMIT, Constants.ENTITY_EXPANSION_LIMIT),
        MAX_OCCUR_NODE_LIMIT(Constants.SP_MAX_OCCUR_LIMIT, Constants.MAX_OCCUR_LIMIT),
        ELEMENT_ATTRIBUTE_LIMIT(Constants.SP_ELEMENT_ATTRIBUTE_LIMIT, Constants.ELEMENT_ATTRIBUTE_LIMIT);

        final String newName;
        final String oldName;

        NameMap(String newName, String oldName) {
            this.newName = newName;
            this.oldName = oldName;
        }

        /**
         * Returns the old property name if {@code newName} matches this entry's
         * new name, or {@code null} otherwise.
         *
         * @param newName the new property name to look up
         * @return the corresponding old name, or {@code null} if no match
         */
        String getOldName(String newName) {
            if (newName.equals(this.newName)) {
                return oldName;
            }
            return null;
        }
    }

    /**
     * Max value (count or length) accumulated for each property, indexed by
     * {@code Limit.ordinal()}.
     */
    private final int[] values;

    /**
     * Names of the entities corresponding to their max values, indexed by
     * {@code Limit.ordinal()}.
     */
    private final String[] names;

    /**
     * Total value accumulated for each property, indexed by
     * {@code Limit.ordinal()}.
     */
    private final int[] totalValue;

    /**
     * Per-property caches of accumulated values keyed by entity name, used
     * while tracking individual entities during parsing. Entries are created
     * lazily in {@link #addValue(int, String, int)}.
     */
    private final Map<String, Integer>[] caches;

    // Name of the entity currently being tracked (see startEntity/isTracking);
    // entityEnd is retained for parity but is not read in this class.
    private String entityStart, entityEnd;

    /**
     * Default constructor. Sizes all per-property arrays by the number of
     * known security limits.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public XMLLimitAnalyzer() {
        values = new int[Limit.values().length];
        totalValue = new int[Limit.values().length];
        names = new String[Limit.values().length];
        // Generic array creation is not allowed in Java; the raw array is only
        // ever populated with Map<String, Integer> instances below, so the
        // unchecked conversion is safe.
        caches = new Map[Limit.values().length];
    }

    /**
     * Add the value to the current max count for the specified property.
     * To find the max value of all entities, set no limit.
     *
     * @param limit the type of the property
     * @param entityName the name of the entity
     * @param value the value of the entity
     */
    public void addValue(Limit limit, String entityName, int value) {
        addValue(limit.ordinal(), entityName, value);
    }

    /**
     * Add the value to the current count by the index of the property.
     *
     * <p>For the simple counting limits (entity expansion, max occurs, element
     * attributes) the value is only accumulated into the running total. For
     * the remaining limits the per-entity accumulated value is tracked and the
     * maximum (with the entity responsible for it) is recorded.
     *
     * @param index the index of the property
     * @param entityName the name of the entity
     * @param value the value of the entity
     */
    public void addValue(int index, String entityName, int value) {
        // These three limits are pure counters: no per-entity tracking needed.
        if (index == Limit.ENTITY_EXPANSION_LIMIT.ordinal() ||
                index == Limit.MAX_OCCUR_NODE_LIMIT.ordinal() ||
                index == Limit.ELEMENT_ATTRIBUTE_LIMIT.ordinal()) {
            totalValue[index] += value;
            return;
        }

        // Lazily create the per-property cache on first use.
        Map<String, Integer> cache;
        if (caches[index] == null) {
            cache = new HashMap<String, Integer>(10);
            caches[index] = cache;
        } else {
            cache = caches[index];
        }

        // Accumulate the value for this entity.
        int accumulatedValue = value;
        if (cache.containsKey(entityName)) {
            accumulatedValue += cache.get(entityName).intValue();
            cache.put(entityName, Integer.valueOf(accumulatedValue));
        } else {
            cache.put(entityName, Integer.valueOf(value));
        }

        // Track the maximum accumulated value and the entity responsible.
        if (accumulatedValue > values[index]) {
            values[index] = accumulatedValue;
            names[index] = entityName;
        }

        // Entity sizes also count toward the combined total-entity-size limit.
        if (index == Limit.GENERAL_ENTITY_SIZE_LIMIT.ordinal() ||
                index == Limit.PARAMETER_ENTITY_SIZE_LIMIT.ordinal()) {
            totalValue[Limit.TOTAL_ENTITY_SIZE_LIMIT.ordinal()] += value;
        }
    }

    /**
     * Return the value of the current max count for the specified property.
     *
     * @param limit the property
     * @return the value of the property
     */
    public int getValue(Limit limit) {
        return values[limit.ordinal()];
    }

    /**
     * Return the value of the current max count by property index.
     *
     * @param index the index of the property
     * @return the value of the property
     */
    public int getValue(int index) {
        return values[index];
    }

    /**
     * Return the total value accumulated so far.
     *
     * @param limit the property
     * @return the accumulated value of the property
     */
    public int getTotalValue(Limit limit) {
        return totalValue[limit.ordinal()];
    }

    /**
     * Return the total value accumulated so far by property index.
     *
     * @param index the index of the property
     * @return the accumulated value of the property
     */
    public int getTotalValue(int index) {
        return totalValue[index];
    }

    /**
     * Return the current max value (count or length) by the index of a property.
     *
     * @param index the index of a property
     * @return count of a property
     */
    public int getValueByIndex(int index) {
        return values[index];
    }

    /**
     * Start tracking the named entity.
     *
     * @param name the name of the entity
     */
    public void startEntity(String name) {
        entityStart = name;
    }

    /**
     * Report whether the named entity is the one currently being tracked.
     *
     * @param name the name of an entity
     * @return true if the entity is currently being tracked
     */
    public boolean isTracking(String name) {
        if (entityStart == null) {
            return false;
        }
        return entityStart.equals(name);
    }

    /**
     * Stop tracking the entity and discard its cached accumulated value.
     *
     * @param limit the limit property
     * @param name the name of an entity
     */
    public void endEntity(Limit limit, String name) {
        entityStart = "";
        Map<String, Integer> cache = caches[limit.ordinal()];
        if (cache != null) {
            cache.remove(name);
        }
    }

    /**
     * Print the configured limits and the current accumulated state for each
     * property, for debugging.
     *
     * @param securityManager the security manager holding the configured limits
     */
    public void debugPrint(XMLSecurityManager securityManager) {
        // Use String.format rather than per-row Formatter instances so no
        // Closeable resource is created and left unclosed; output is identical.
        System.out.println(String.format("%30s %15s %15s %15s %30s",
                "Property", "Limit", "Total size", "Size", "Entity Name"));

        for (Limit limit : Limit.values()) {
            System.out.println(String.format("%30s %15d %15d %15d %30s",
                    limit.name(),
                    securityManager.getLimit(limit),
                    totalValue[limit.ordinal()],
                    values[limit.ordinal()],
                    names[limit.ordinal()]));
        }
    }
}
| |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.operation;
import io.crate.core.collections.RowN;
import io.crate.metadata.*;
import io.crate.operation.aggregation.FunctionExpression;
import io.crate.operation.aggregation.impl.AggregationImplModule;
import io.crate.operation.aggregation.impl.AverageAggregation;
import io.crate.operation.aggregation.impl.CountAggregation;
import io.crate.operation.collect.CollectExpression;
import io.crate.planner.RowGranularity;
import io.crate.planner.symbol.Aggregation;
import io.crate.planner.symbol.Function;
import io.crate.planner.symbol.InputColumn;
import io.crate.planner.symbol.Symbol;
import io.crate.test.integration.CrateUnitTest;
import io.crate.types.DataType;
import io.crate.types.DataTypes;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.MapBinder;
import org.junit.Before;
import org.junit.Test;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
/**
 * Tests for {@code ImplementationSymbolVisitor}: input reuse across
 * aggregations, group-by key extraction, and scalar compilation.
 */
public class ImplementationSymbolVisitorTest extends CrateUnitTest {

    private ImplementationSymbolVisitor visitor;

    /**
     * Test scalar that doubles its single long argument and records whether
     * {@link #compile(List)} was invoked.
     */
    static class MultiplyFunction extends Scalar<Long, Object> {
        public final static String NAME = "dummy_multiply";

        // final: this is a shared constant; it must never be reassigned.
        public static final FunctionInfo INFO = new FunctionInfo(
                new FunctionIdent(NAME, Arrays.<DataType>asList(DataTypes.LONG)),
                DataTypes.LONG
        );

        // Flipped to true by compile(); inspected reflectively in testCompiled().
        private final AtomicBoolean compiled = new AtomicBoolean(false);

        @Override
        public Long evaluate(Input<Object>... args) {
            return (Long) args[0].value() * 2L;
        }

        @Override
        public FunctionInfo info() {
            return INFO;
        }

        @Override
        public Symbol normalizeSymbol(Function symbol) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Scalar<Long, Object> compile(List<Symbol> arguments) {
            compiled.set(true);
            return this;
        }
    }

    /**
     * Binds {@link MultiplyFunction} into the function registry for the test
     * injector.
     */
    class TestScalarFunctionModule extends AbstractModule {
        @Override
        protected void configure() {
            MapBinder<FunctionIdent, FunctionImplementation> functionBinder =
                    MapBinder.newMapBinder(binder(), FunctionIdent.class, FunctionImplementation.class);
            functionBinder.addBinding(MultiplyFunction.INFO.ident()).to(MultiplyFunction.class);
        }
    }

    @Before
    public void setupVisitor() {
        Injector injector = new ModulesBuilder().add(
                new AggregationImplModule(),
                new TestScalarFunctionModule()
        ).createInjector();
        visitor = new ImplementationSymbolVisitor(
                null,
                injector.getInstance(Functions.class),
                RowGranularity.DOC
        );
    }

    @Test
    public void testAggregationSymbolsInputReuse() throws Exception {
        // Two aggregations over the same input column must share one Input.
        FunctionInfo countInfo = new FunctionInfo(
                new FunctionIdent(CountAggregation.NAME, Arrays.<DataType>asList(DataTypes.STRING)), DataTypes.LONG);
        FunctionInfo avgInfo = new FunctionInfo(
                new FunctionIdent(AverageAggregation.NAME, Arrays.<DataType>asList(DataTypes.INTEGER)), DataTypes.DOUBLE);
        List<Symbol> aggregations = Arrays.<Symbol>asList(
                Aggregation.finalAggregation(avgInfo, Arrays.<Symbol>asList(new InputColumn(0)), Aggregation.Step.ITER),
                Aggregation.finalAggregation(countInfo, Arrays.<Symbol>asList(new InputColumn(0)), Aggregation.Step.ITER)
        );
        ImplementationSymbolVisitor.Context context = visitor.extractImplementations(aggregations);
        Input<?> inputCount = context.aggregations.get(0).inputs()[0];
        Input<?> inputAverage = context.aggregations.get(1).inputs()[0];
        assertSame(inputCount, inputAverage);
    }

    @Test
    public void testProcessGroupByProjectionSymbols() throws Exception {
        // select x, y * 2 ... group by x, y * 2
        // keys: [ in(0), multiply(in(1)) ]
        Function multiply = new Function(
                MultiplyFunction.INFO, Arrays.<Symbol>asList(new InputColumn(1))
        );
        List<Symbol> keys = Arrays.asList(new InputColumn(0, DataTypes.LONG), multiply);

        ImplementationSymbolVisitor.Context context = visitor.extractImplementations(keys);
        assertThat(context.collectExpressions().size(), is(2));

        // keyExpressions: [ in0, in1 ]
        CollectExpression[] keyExpressions = context.collectExpressions().toArray(new CollectExpression[2]);

        RowN row = new RowN(new Object[]{1L, 2L});
        keyExpressions[0].setNextRow(row);
        keyExpressions[1].setNextRow(row);
        assertThat((Long) keyExpressions[0].value(), is(1L));
        assertThat((Long) keyExpressions[1].value(), is(2L)); // raw input value

        // inputs: [ x, multiply ]
        List<Input<?>> inputs = context.topLevelInputs();
        assertThat(inputs.size(), is(2));
        assertThat((Long) inputs.get(0).value(), is(1L));
        assertThat((Long) inputs.get(1).value(), is(4L)); // multiplied value
    }

    @Test
    public void testProcessGroupByProjectionSymbolsAggregation() throws Exception {
        // select count(x), x, y * 2 ... group by x, y * 2
        // keys: [ in(0), multiply(in(1)) ]
        Function multiply = new Function(
                MultiplyFunction.INFO, Arrays.<Symbol>asList(new InputColumn(1))
        );
        List<Symbol> keys = Arrays.asList(new InputColumn(0, DataTypes.LONG), multiply);

        // values: [ count(in(0)) ]
        List<Aggregation> values = Arrays.asList(Aggregation.partialAggregation(
                new FunctionInfo(new FunctionIdent(CountAggregation.NAME, Arrays.<DataType>asList(DataTypes.LONG)), DataTypes.LONG),
                DataTypes.LONG,
                Arrays.<Symbol>asList(new InputColumn(0))
        ));

        ImplementationSymbolVisitor.Context context = visitor.extractImplementations(keys);
        // inputs: [ x, multiply ]
        List<Input<?>> keyInputs = context.topLevelInputs();

        for (Aggregation value : values) {
            visitor.process(value, context);
        }
        AggregationContext[] aggregations = context.aggregations();
        assertThat(aggregations.length, is(1));

        // collectExpressions: [ in0, in1 ]
        assertThat(context.collectExpressions().size(), is(2));

        List<Input<?>> allInputs = context.topLevelInputs();
        assertThat(allInputs.size(), is(2)); // only 2 because count is no input

        CollectExpression[] collectExpressions = context.collectExpressions().toArray(new CollectExpression[2]);
        RowN row = new RowN(new Object[]{1L, 2L});
        collectExpressions[0].setNextRow(row);
        collectExpressions[1].setNextRow(row);

        assertThat((Long) collectExpressions[0].value(), is(1L));
        assertThat((Long) collectExpressions[1].value(), is(2L)); // raw input value

        assertThat(keyInputs.size(), is(2));
        assertThat((Long) keyInputs.get(0).value(), is(1L));
        assertThat((Long) keyInputs.get(1).value(), is(4L)); // multiplied value
    }

    @Test
    public void testCompiled() throws Exception {
        Function multiply = new Function(
                MultiplyFunction.INFO, Arrays.<Symbol>asList(new InputColumn(0))
        );
        ImplementationSymbolVisitor.Context context = visitor.extractImplementations(Arrays.asList(multiply));
        assertThat(context.topLevelInputs().get(0), is(instanceOf(FunctionExpression.class)));

        // Peek at the private field to verify compile() replaced the
        // implementation with the compiled instance.
        FunctionExpression expression = (FunctionExpression) context.topLevelInputs().get(0);
        Field f = expression.getClass().getDeclaredField("functionImplementation");
        f.setAccessible(true);
        FunctionImplementation impl = (FunctionImplementation) f.get(expression);
        assertThat(impl, is(instanceOf(MultiplyFunction.class)));
        assertThat(((MultiplyFunction) impl).compiled.get(), is(true));
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.pubsub.spi.v1;
import static com.google.cloud.pubsub.spi.v1.StatusUtil.isRetryable;
import com.google.api.gax.grpc.FlowController;
import com.google.api.stats.Distribution;
import com.google.auth.Credentials;
import com.google.cloud.Clock;
import com.google.cloud.pubsub.spi.v1.MessageDispatcher.AckProcessor;
import com.google.cloud.pubsub.spi.v1.MessageDispatcher.PendingModifyAckDeadline;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.AbstractService;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.pubsub.v1.AcknowledgeRequest;
import com.google.pubsub.v1.GetSubscriptionRequest;
import com.google.pubsub.v1.ModifyAckDeadlineRequest;
import com.google.pubsub.v1.PullRequest;
import com.google.pubsub.v1.PullResponse;
import com.google.pubsub.v1.SubscriberGrpc;
import com.google.pubsub.v1.SubscriberGrpc.SubscriberFutureStub;
import com.google.pubsub.v1.Subscription;
import io.grpc.Channel;
import io.grpc.StatusRuntimeException;
import io.grpc.auth.MoreCallCredentials;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.joda.time.Duration;
/**
* Implementation of {@link AbstractSubscriberConnection} based on Cloud Pub/Sub pull and
* acknowledge operations.
*/
final class PollingSubscriberConnection extends AbstractService implements AckProcessor {
  // Max number of ack ids / deadline modifications per outgoing request.
  private static final int MAX_PER_REQUEST_CHANGES = 1000;
  private static final Duration DEFAULT_TIMEOUT = Duration.standardSeconds(10);
  private static final int DEFAULT_MAX_MESSAGES = 1000;
  private static final Duration INITIAL_BACKOFF = Duration.millis(100); // 100ms
  private static final Duration MAX_BACKOFF = Duration.standardSeconds(10); // 10s

  private static final Logger logger =
      Logger.getLogger(PollingSubscriberConnection.class.getName());

  private final String subscription;
  private final ScheduledExecutorService executor;
  private final SubscriberFutureStub stub;
  private final MessageDispatcher messageDispatcher;

  /**
   * Creates a polling connection for the given subscription.
   *
   * @param subscription name of the Cloud Pub/Sub subscription to pull from
   * @param credentials credentials used for the gRPC call credentials
   * @param receiver user callback that processes received messages
   * @param ackExpirationPadding padding before ack deadline expiry
   * @param ackLatencyDistribution distribution used to tune ack deadlines
   * @param channel gRPC channel to the Pub/Sub service
   * @param flowController flow control for outstanding messages
   * @param executor executor used for pull scheduling and dispatch
   * @param clock clock used by the dispatcher (injectable for tests)
   */
  public PollingSubscriberConnection(
      String subscription,
      Credentials credentials,
      MessageReceiver receiver,
      Duration ackExpirationPadding,
      Distribution ackLatencyDistribution,
      Channel channel,
      FlowController flowController,
      ScheduledExecutorService executor,
      Clock clock) {
    this.subscription = subscription;
    this.executor = executor;
    stub =
        SubscriberGrpc.newFutureStub(channel)
            .withCallCredentials(MoreCallCredentials.from(credentials));
    messageDispatcher =
        new MessageDispatcher(
            receiver,
            this,
            ackExpirationPadding,
            ackLatencyDistribution,
            flowController,
            executor,
            clock);
    // Start with the minimum deadline; updated from the real subscription
    // configuration once initialize() fetches it.
    messageDispatcher.setMessageDeadlineSeconds(Subscriber.MIN_ACK_DEADLINE_SECONDS);
  }

  @Override
  protected void doStart() {
    logger.log(Level.INFO, "Starting subscriber.");
    initialize();
    notifyStarted();
  }

  /**
   * Fetches the subscription's configured ack deadline, then kicks off the
   * pull loop. Failure to fetch the subscription fails the service.
   */
  private void initialize() {
    ListenableFuture<Subscription> subscriptionInfo =
        stub.withDeadlineAfter(DEFAULT_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)
            .getSubscription(
                GetSubscriptionRequest.newBuilder().setSubscription(subscription).build());

    Futures.addCallback(
        subscriptionInfo,
        new FutureCallback<Subscription>() {
          @Override
          public void onSuccess(Subscription result) {
            messageDispatcher.setMessageDeadlineSeconds(result.getAckDeadlineSeconds());
            pullMessages(INITIAL_BACKOFF);
          }

          @Override
          public void onFailure(Throwable cause) {
            notifyFailed(cause);
          }
        });
  }

  @Override
  protected void doStop() {
    messageDispatcher.stop();
    notifyStopped();
  }

  /**
   * Schedules the next pull after {@code backoff}, doubling the backoff for
   * the scheduled attempt (capped at {@link #MAX_BACKOFF}).
   */
  private void schedulePullWithBackoff(final Duration backoff) {
    executor.schedule(
        new Runnable() {
          @Override
          public void run() {
            Duration newBackoff = backoff.multipliedBy(2);
            if (newBackoff.isLongerThan(MAX_BACKOFF)) {
              newBackoff = MAX_BACKOFF;
            }
            pullMessages(newBackoff);
          }
        },
        backoff.getMillis(),
        TimeUnit.MILLISECONDS);
  }

  /**
   * Issues one pull request. On messages received, pulls again immediately
   * with the initial backoff; on an empty response or a retryable failure,
   * retries with exponential backoff; on a non-retryable failure, fails the
   * service.
   */
  private void pullMessages(final Duration backoff) {
    ListenableFuture<PullResponse> pullResult =
        stub.withDeadlineAfter(DEFAULT_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)
            .pull(
                PullRequest.newBuilder()
                    .setSubscription(subscription)
                    .setMaxMessages(DEFAULT_MAX_MESSAGES)
                    .setReturnImmediately(true)
                    .build());

    Futures.addCallback(
        pullResult,
        new FutureCallback<PullResponse>() {
          @Override
          public void onSuccess(PullResponse pullResponse) {
            messageDispatcher.processReceivedMessages(pullResponse.getReceivedMessagesList());
            if (pullResponse.getReceivedMessagesCount() == 0) {
              // No messages in response, possibly caught up in backlog, we backoff to avoid
              // slamming the server.
              schedulePullWithBackoff(backoff);
              return;
            }
            pullMessages(INITIAL_BACKOFF);
          }

          @Override
          public void onFailure(Throwable cause) {
            if (!(cause instanceof StatusRuntimeException)
                || isRetryable(((StatusRuntimeException) cause).getStatus())) {
              logger.log(Level.SEVERE, "Failed to pull messages (recoverable): ", cause);
              schedulePullWithBackoff(backoff);
              return;
            }
            notifyFailed(cause);
          }
        });
  }

  @Override
  public void sendAckOperations(
      List<String> acksToSend, List<PendingModifyAckDeadline> ackDeadlineExtensions) {
    // Send the modify ack deadlines in bundles as not to exceed the max request
    // size.
    List<List<PendingModifyAckDeadline>> modifyAckDeadlineChunks =
        Lists.partition(ackDeadlineExtensions, MAX_PER_REQUEST_CHANGES);
    for (List<PendingModifyAckDeadline> modAckChunk : modifyAckDeadlineChunks) {
      for (PendingModifyAckDeadline modifyAckDeadline : modAckChunk) {
        stub.withDeadlineAfter(DEFAULT_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)
            .modifyAckDeadline(
                ModifyAckDeadlineRequest.newBuilder()
                    .setSubscription(subscription)
                    .addAllAckIds(modifyAckDeadline.ackIds)
                    .setAckDeadlineSeconds(modifyAckDeadline.deadlineExtensionSeconds)
                    .build());
      }
    }

    // Acks are likewise chunked to stay under the max request size.
    for (List<String> ackChunk : Lists.partition(acksToSend, MAX_PER_REQUEST_CHANGES)) {
      stub.withDeadlineAfter(DEFAULT_TIMEOUT.getMillis(), TimeUnit.MILLISECONDS)
          .acknowledge(
              AcknowledgeRequest.newBuilder()
                  .setSubscription(subscription)
                  .addAllAckIds(ackChunk)
                  .build());
    }
  }
}
| |
/*******************************************************************************
*
* Copyright (c) 2004-2009 Oracle Corporation.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
*
* Kohsuke Kawaguchi
*
*
*******************************************************************************/
package hudson.maven;
import hudson.FilePath;
import hudson.Launcher;
import hudson.EnvVars;
import hudson.maven.agent.Maven21Interceptor;
import hudson.model.BuildListener;
import hudson.model.Hudson;
import hudson.model.JDK;
import hudson.model.Run.RunnerAbortedException;
import hudson.model.TaskListener;
import hudson.remoting.Callable;
import hudson.remoting.Which;
import hudson.tasks.Maven.MavenInstallation;
import hudson.util.ArgumentListBuilder;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.logging.Logger;
import hudson.maven.agent.Main;
/**
* Launches the maven process.
*
* @author Kohsuke Kawaguchi
*/
final class MavenProcessFactory extends AbstractMavenProcessFactory implements ProcessCache.Factory {
    MavenProcessFactory(MavenModuleSet mms, Launcher launcher, EnvVars envVars, FilePath workDir) {
        super( mms, launcher, envVars, workDir );
    }
    /**
     * Builds the command line argument list to launch the maven process.
     *
     * <p>Aborts the build (via {@link RunnerAbortedException}) when the Maven
     * installation is missing or misconfigured, or when required jars cannot
     * be located.
     *
     * UGLY.
     */
    protected ArgumentListBuilder buildMavenAgentCmdLine(BuildListener listener,int tcpPort) throws IOException, InterruptedException {
        MavenInstallation mvn = getMavenInstallation(listener);
        if(mvn==null) {
            listener.error("Maven version is not configured for this project. Can't determine which Maven to run");
            throw new RunnerAbortedException();
        }
        if(mvn.getHome()==null) {
            listener.error("Maven '%s' doesn't have its home set",mvn.getName());
            throw new RunnerAbortedException();
        }
        // find classworlds.jar
        // Executed on the node that runs the build, since the Maven home is a
        // path on that node.
        String classWorldsJar = getLauncher().getChannel().call(new GetClassWorldsJar(mvn.getHome(),listener));
        boolean isMaster = getCurrentNode()== Hudson.getInstance();
        FilePath slaveRoot=null;
        if(!isMaster)
            slaveRoot = getCurrentNode().getRootPath();
        ArgumentListBuilder args = new ArgumentListBuilder();
        JDK jdk = getJava(listener);
        if(jdk==null) {
            // No JDK configured: fall back to whatever "java" is on PATH.
            args.add("java");
        } else {
            args.add(jdk.getHome()+"/bin/java"); // use JDK.getExecutable() here ?
        }
        // Optional debugger attach port (see the static initializer below).
        if(debugPort!=0)
            args.add("-Xrunjdwp:transport=dt_socket,server=y,address="+debugPort);
        // Optional YourKit profiler agent.
        if(yjp)
            args.add("-agentlib:yjpagent=tracing");
        args.addTokenized(getMavenOpts());
        args.add( "-cp" );
        // Classpath: maven-agent.jar plus classworlds.jar. On the master the
        // jars are resolved from the running Hudson; on a slave they live in
        // the slave root. The path separator depends on the node's OS.
        String classPath =
            ( isMaster ? Which.jarFile( Main.class ).getAbsolutePath()
                       : slaveRoot.child( "maven-agent.jar" ).getRemote() )
                + ( getLauncher().isUnix() ? ":" : ";" )
                + ( isMaster ? classWorldsJar : slaveRoot.child( "classworlds.jar" ).getRemote() );
        args.add( classPath );
        //+classWorldsJar);
        args.add(Main.class.getName());
        // M2_HOME
        args.add(mvn.getHome());
        // remoting.jar
        String remotingJar = getLauncher().getChannel().call(new GetRemotingJar());
        if(remotingJar==null) {// this shouldn't be possible, but there are still reports indicating this, so adding a probe here.
            listener.error("Failed to determine the location of slave.jar");
            throw new RunnerAbortedException();
        }
        args.add(remotingJar);
        // interceptor.jar
        args.add(isMaster?
            Which.jarFile(hudson.maven.agent.AbortException.class).getAbsolutePath():
            slaveRoot.child("maven-interceptor.jar").getRemote());
        // TCP/IP port to establish the remoting infrastructure
        args.add(tcpPort);
        // if this is Maven 2.1, interceptor override
        if(mvn.isMaven2_1(getLauncher())) {
            args.add(isMaster?
                Which.jarFile(Maven21Interceptor.class).getAbsolutePath():
                slaveRoot.child("maven2.1-interceptor.jar").getRemote());
        }
        return args;
    }
    /**
     * Finds classworlds.jar
     *
     * <p>Runs remotely on the build node, since the Maven home path refers to
     * that node's filesystem.
     */
    private static final class GetClassWorldsJar implements Callable<String,IOException> {
        // Maven home directory on the node where this callable executes.
        private final String mvnHome;
        private final TaskListener listener;
        private GetClassWorldsJar(String mvnHome, TaskListener listener) {
            this.mvnHome = mvnHome;
            this.listener = listener;
        }
        public String call() throws IOException {
            File home = new File(mvnHome);
            File bootDir = new File(home, "core/boot");
            File[] classworlds = bootDir.listFiles(CLASSWORLDS_FILTER);
            if(classworlds==null || classworlds.length==0) {
                // Maven 2.0.6 puts it to a different place
                bootDir = new File(home, "boot");
                classworlds = bootDir.listFiles(CLASSWORLDS_FILTER);
                if(classworlds==null || classworlds.length==0) {
                    listener.error(Messages.MavenProcessFactory_ClassWorldsNotFound(home));
                    throw new RunnerAbortedException();
                }
            }
            // If multiple matches exist, the first one wins.
            return classworlds[0].getAbsolutePath();
        }
    }
    /**
     * Locates classworlds jar file.
     *
     * Note that Maven 3.0 changed the name to plexus-classworlds
     *
     * <pre>
     * $ find tools/ -name "*classworlds*.jar"
     * tools/maven/boot/classworlds-1.1.jar
     * tools/maven-2.2.1/boot/classworlds-1.1.jar
     * tools/maven-3.0-alpha-2/boot/plexus-classworlds-1.3.jar
     * tools/maven-3.0-alpha-3/boot/plexus-classworlds-2.2.2.jar
     * tools/maven-3.0-alpha-4/boot/plexus-classworlds-2.2.2.jar
     * tools/maven-3.0-alpha-5/boot/plexus-classworlds-2.2.2.jar
     * tools/maven-3.0-alpha-6/boot/plexus-classworlds-2.2.2.jar
     * </pre>
     */
    private static final FilenameFilter CLASSWORLDS_FILTER = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.contains("classworlds") && name.endsWith(".jar");
        }
    };
    //-------------------------------------------------
    // Some of those fields are used for maven 3 too
    //-------------------------------------------------
    /**
     * Set true to produce debug output.
     */
    public static boolean debug = false;
    /**
     * If not 0, launch Maven with a debugger port.
     */
    public static int debugPort;
    // Enables profiling of the launched Maven process; read from the
    // "hudson.maven.profile" system property at class-load time.
    public static boolean profile = Boolean.getBoolean("hudson.maven.profile");
    /**
     * If true, launch Maven with YJP offline profiler agent.
     */
    public static boolean yjp = Boolean.getBoolean("hudson.maven.yjp");
    static {
        // Allow the debugger port to be configured via a system property.
        // NOTE(review): a non-numeric value will throw NumberFormatException
        // during class initialization — presumably acceptable for a debug knob.
        String port = System.getProperty("hudson.maven.debugPort");
        if(port!=null)
            debugPort = Integer.parseInt(port);
    }
    // Socket timeout (ms) for talking to the launched Maven process;
    // defaults to 30s, overridable via "hudson.maven.socketTimeOut".
    public static int socketTimeOut = Integer.parseInt( System.getProperty( "hudson.maven.socketTimeOut", Integer.toString( 30*1000 ) ) );
    private static final Logger LOGGER = Logger.getLogger(MavenProcessFactory.class.getName());
}
| |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.mapreduce.testlib;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import java.util.Map;
import java.util.Random;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.kiji.mapreduce.HFileLoader;
import org.kiji.mapreduce.KijiMRTestLayouts;
import org.kiji.mapreduce.KijiMapReduceJob;
import org.kiji.mapreduce.gather.KijiGatherJobBuilder;
import org.kiji.mapreduce.output.MapReduceJobOutputs;
import org.kiji.schema.Kiji;
import org.kiji.schema.KijiDataRequest;
import org.kiji.schema.KijiDataRequestBuilder;
import org.kiji.schema.KijiRowData;
import org.kiji.schema.KijiRowScanner;
import org.kiji.schema.KijiTable;
import org.kiji.schema.KijiTableReader;
import org.kiji.schema.KijiTableWriter;
import org.kiji.schema.testutil.AbstractKijiIntegrationTest;
/** Tests bulk-importers. */
public class IntegrationTestTableMapper extends AbstractKijiIntegrationTest {
private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestTableMapper.class);
private Configuration mConf = null;
private FileSystem mFS = null;
private Kiji mKiji = null;
private KijiTable mInputTable = null;
private KijiTable mOutputTable = null;
/**
* Generates a random HDFS path.
*
* @param prefix Prefix for the random file name.
* @return a random HDFS path.
* @throws Exception on error.
*/
private Path makeRandomPath(String prefix) throws Exception {
Preconditions.checkNotNull(mFS);
final Path base = new Path(FileSystem.getDefaultUri(mConf));
final Random random = new Random(System.nanoTime());
return new Path(base, String.format("/%s-%s", prefix, random.nextLong()));
}
/**
* Reads a table into a map from Kiji row keys to KijiRowData.
*
* @param table Kiji table to read from.
* @param kdr Kiji data request.
* @return a map of the rows.
* @throws Exception on error.
*/
private static Map<String, KijiRowData> toRowMap(KijiTable table, KijiDataRequest kdr)
throws Exception {
final KijiTableReader reader = table.openTableReader();
try {
final KijiRowScanner scanner = reader.getScanner(kdr);
try {
final Map<String, KijiRowData> rows = Maps.newHashMap();
for (KijiRowData row : scanner) {
rows.put(Bytes.toString((byte[]) row.getEntityId().getComponentByIndex(0)), row);
}
return rows;
} finally {
scanner.close();
}
} finally {
reader.close();
}
}
private void populateInputTable() throws Exception {
final KijiTable table = mInputTable;
final KijiTableWriter writer = table.openTableWriter();
writer.put(table.getEntityId("1"), "info", "first_name", "Marsellus");
writer.put(table.getEntityId("1"), "info", "last_name", "Wallace");
writer.put(table.getEntityId("1"), "info", "zip_code", 94110);
writer.put(table.getEntityId("2"), "info", "first_name", "Vincent");
writer.put(table.getEntityId("2"), "info", "last_name", "Vega");
writer.put(table.getEntityId("2"), "info", "zip_code", 94110);
writer.put(table.getEntityId("3"), "info", "first_name", "Jules");
writer.put(table.getEntityId("3"), "info", "last_name", "Winnfield");
writer.put(table.getEntityId("3"), "info", "zip_code", 93221);
writer.close();
}
@Before
public final void setupIntegrationTestTableMapper() throws Exception {
mConf = getConf();
mFS = FileSystem.get(mConf);
mKiji = Kiji.Factory.open(getKijiURI(), mConf);
final String inputTableName = "input";
final String outputTableName = "output";
mKiji.createTable(KijiMRTestLayouts.getTestLayout(inputTableName));
mKiji.createTable(KijiMRTestLayouts.getTestLayout(outputTableName));
mInputTable = mKiji.openTable(inputTableName);
mOutputTable = mKiji.openTable(outputTableName);
populateInputTable();
}
@After
public final void teardownIntegrationTestTableMapper() throws Exception {
mInputTable.release();
mOutputTable.release();
mKiji.release();
// NOTE: fs should get closed here, but doesn't because of a bug with FileSystem that
// causes it to close other thread's filesystem objects. For more information
// see: https://issues.apache.org/jira/browse/HADOOP-7973
mInputTable = null;
mOutputTable = null;
mKiji = null;
mFS = null;
mConf = null;
}
@Test
public void testSimpleTableMapperDirect() throws Exception {
final KijiMapReduceJob mrjob = KijiGatherJobBuilder.create()
.withConf(mConf)
.withGatherer(SimpleTableMapperAsGatherer.class)
.withInputTable(mInputTable.getURI())
.withOutput(MapReduceJobOutputs.newDirectKijiTableMapReduceJobOutput(mOutputTable.getURI()))
.build();
assertTrue(mrjob.run());
validateOutputTable();
}
@Test
public void testSimpleTableMapperHFiles() throws Exception {
    // Same gatherer as the direct test, but the job emits HFiles that are
    // bulk-loaded into the output table afterwards.
    final Path hfileDir = this.makeRandomPath("hfile-output");
    try {
        final KijiMapReduceJob job = KijiGatherJobBuilder.create()
            .withConf(mConf)
            .withGatherer(SimpleTableMapperAsGatherer.class)
            .withInputTable(mInputTable.getURI())
            .withOutput(MapReduceJobOutputs.newHFileMapReduceJobOutput(
                mOutputTable.getURI(), hfileDir, 1))
            .build();
        assertTrue(job.run());
        // A single reducer produces exactly one HFile shard.
        final Path shard = new Path(hfileDir, "part-r-00000.hfile");
        HFileLoader.create(mConf).load(shard, mOutputTable);
        validateOutputTable();
    } finally {
        // Always clean up the HFile staging directory, even on failure.
        mFS.delete(hfileDir, true);
    }
}
private void validateOutputTable() throws Exception {
    // Request up to 3 versions of every column in the "primitives" family.
    final KijiDataRequestBuilder requestBuilder = KijiDataRequest.builder();
    requestBuilder.newColumnsDef().withMaxVersions(3).addFamily("primitives");
    final Map<String, KijiRowData> rowsByKey = toRowMap(mOutputTable, requestBuilder.build());
    assertEquals(2, rowsByKey.size());
    // Two people were gathered into zip code 94110.
    final Collection<CharSequence> names94110 =
        rowsByKey.get("94110").<CharSequence>getValues("primitives", "string").values();
    assertEquals(2, names94110.size());
    // Exactly one person — Jules Winnfield — was gathered into zip code 93221.
    final Collection<CharSequence> names93221 =
        rowsByKey.get("93221").<CharSequence>getValues("primitives", "string").values();
    assertEquals(1, names93221.size());
    assertEquals("Jules Winnfield", names93221.iterator().next().toString());
}
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.enhancedbookmarks;
import android.test.UiThreadTest;
import android.test.suitebuilder.annotation.SmallTest;
import org.chromium.base.ThreadUtils;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.browser.ChromeSwitches;
import org.chromium.chrome.browser.bookmark.BookmarksBridge.BookmarkItem;
import org.chromium.chrome.browser.enhancedbookmarks.EnhancedBookmarksModel.AddBookmarkCallback;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.components.bookmarks.BookmarkId;
import org.chromium.content.browser.test.NativeLibraryTestBase;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
* Tests for {@link EnhancedBookmarksModel}, the data layer of Enhanced Bookmarks.
*/
public class EnhancedBookmarksModelTest extends NativeLibraryTestBase {
    /** Maximum time (ms) to wait for the asynchronous bookmark-add callback. */
    private static final int TIMEOUT_MS = 5000;

    // Model under test; constructed on the UI thread in setUp().
    private EnhancedBookmarksModel mBookmarksModel;
    // The three permanent top-level bookmark folders, resolved in setUp().
    private BookmarkId mMobileNode;
    private BookmarkId mOtherNode;
    private BookmarkId mDesktopNode;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // The bookmark model is backed by native code, so the browser process
        // must be initialized before the model can be constructed.
        loadNativeLibraryAndInitBrowserProcess();
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                Profile profile = Profile.getLastUsedProfile();
                mBookmarksModel = new EnhancedBookmarksModel(profile);
                // Partner bookmarks must be "loaded" (even if empty) before the
                // model reports itself as fully loaded.
                mBookmarksModel.loadEmptyPartnerBookmarkShimForTesting();
            }
        });
        // Block until the native bookmark model finishes loading.
        CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return mBookmarksModel.isBookmarkModelLoaded();
            }
        });
        // Resolve the permanent folders once the model is ready.
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                mMobileNode = mBookmarksModel.getMobileFolderId();
                mDesktopNode = mBookmarksModel.getDesktopFolderId();
                mOtherNode = mBookmarksModel.getOtherFolderId();
            }
        });
    }

    /**
     * Verifies that bookmarks added across several folders come back ordered
     * newest-first from {@code getAllBookmarkIDsOrderedByCreationDate()}.
     */
    @UiThreadTest
    @SmallTest
    @Feature({"Bookmark"})
    public void testGetAllBookmarkIDsOrderedByCreationDate() throws InterruptedException {
        BookmarkId folderA = mBookmarksModel.addFolder(mMobileNode, 0, "a");
        BookmarkId folderB = mBookmarksModel.addFolder(mDesktopNode, 0, "b");
        // The stack pops in reverse insertion order, matching the expected
        // newest-first ordering of the returned list.
        Stack<BookmarkId> stack = new Stack<BookmarkId>();
        stack.push(addBookmark(folderA, 0, "a", "http://www.medium.com"));
        // If add bookmarks too fast, eventually some bookmarks will have the same timestamp, which
        // confuses the bookmark model.
        Thread.sleep(20);
        stack.push(addBookmark(folderB, 0, "b", "http://aurimas.com"));
        Thread.sleep(20);
        stack.push(addBookmark(mMobileNode, 0, "c", "http://www.aurimas.com"));
        Thread.sleep(20);
        stack.push(addBookmark(mDesktopNode, 0, "d", "http://www.aurimas.org"));
        Thread.sleep(20);
        stack.push(addBookmark(mOtherNode, 0, "e", "http://www.google.com"));
        Thread.sleep(20);
        stack.push(addBookmark(folderA, 0, "f", "http://www.newt.com"));
        Thread.sleep(20);
        stack.push(addBookmark(folderB, 0, "g", "http://kkimlabs.com"));
        List<BookmarkId> bookmarks = mBookmarksModel.getAllBookmarkIDsOrderedByCreationDate();
        assertEquals(stack.size(), bookmarks.size());
        for (BookmarkId returnedBookmark : bookmarks) {
            assertEquals(stack.pop(), returnedBookmark);
        }
    }

    /**
     * Verifies that title and URL setters are reflected by the corresponding
     * getters, for bookmarks under each permanent folder and a user folder.
     */
    @UiThreadTest
    @SmallTest
    @Feature({"Bookmark"})
    public void testBookmarkPropertySetters() {
        BookmarkId folderA = mBookmarksModel.addFolder(mMobileNode, 0, "a");
        BookmarkId bookmarkA = addBookmark(mDesktopNode, 0, "a", "http://a.com");
        BookmarkId bookmarkB = addBookmark(mMobileNode, 0, "a", "http://a.com");
        BookmarkId bookmarkC = addBookmark(mOtherNode, 0, "a", "http://a.com");
        BookmarkId bookmarkD = addBookmark(folderA, 0, "a", "http://a.com");
        mBookmarksModel.setBookmarkTitle(folderA, "hauri");
        assertEquals("hauri", mBookmarksModel.getBookmarkTitle(folderA));
        mBookmarksModel.setBookmarkTitle(bookmarkA, "auri");
        mBookmarksModel.setBookmarkUrl(bookmarkA, "http://auri.org/");
        verifyBookmark(bookmarkA, "auri", "http://auri.org/", false, mDesktopNode);
        mBookmarksModel.setBookmarkTitle(bookmarkB, "lauri");
        mBookmarksModel.setBookmarkUrl(bookmarkB, "http://lauri.org/");
        verifyBookmark(bookmarkB, "lauri", "http://lauri.org/", false, mMobileNode);
        mBookmarksModel.setBookmarkTitle(bookmarkC, "mauri");
        mBookmarksModel.setBookmarkUrl(bookmarkC, "http://mauri.org/");
        verifyBookmark(bookmarkC, "mauri", "http://mauri.org/", false, mOtherNode);
        mBookmarksModel.setBookmarkTitle(bookmarkD, "kauri");
        mBookmarksModel.setBookmarkUrl(bookmarkD, "http://kauri.org/");
        verifyBookmark(bookmarkD, "kauri", "http://kauri.org/", false, folderA);
    }

    /**
     * Verifies that a mixed batch of bookmarks and folders can be moved into a
     * nested folder in a single {@code moveBookmarks} call.
     */
    @UiThreadTest
    @SmallTest
    @Feature({"Bookmark"})
    @SuppressFBWarnings("DLS_DEAD_LOCAL_STORE")
    public void testMoveBookmarks() {
        BookmarkId bookmarkA = addBookmark(mDesktopNode, 0, "a", "http://a.com");
        BookmarkId bookmarkB = addBookmark(mOtherNode, 0, "b", "http://b.com");
        BookmarkId bookmarkC = addBookmark(mMobileNode, 0, "c", "http://c.com");
        BookmarkId folderA = mBookmarksModel.addFolder(mOtherNode, 0, "fa");
        BookmarkId folderB = mBookmarksModel.addFolder(mDesktopNode, 0, "fb");
        BookmarkId folderC = mBookmarksModel.addFolder(mMobileNode, 0, "fc");
        BookmarkId bookmarkAA = addBookmark(folderA, 0, "aa", "http://aa.com");
        BookmarkId bookmarkCA = addBookmark(folderC, 0, "ca", "http://ca.com");
        BookmarkId folderAA = mBookmarksModel.addFolder(folderA, 0, "faa");
        HashSet<BookmarkId> movedBookmarks = new HashSet<BookmarkId>(6);
        movedBookmarks.add(bookmarkA);
        movedBookmarks.add(bookmarkB);
        movedBookmarks.add(bookmarkC);
        movedBookmarks.add(folderC);
        movedBookmarks.add(folderB);
        movedBookmarks.add(bookmarkAA);
        mBookmarksModel.moveBookmarks(new ArrayList<BookmarkId>(movedBookmarks), folderAA);
        // Order of the moved bookmarks is not tested.
        verifyBookmarkListNoOrder(mBookmarksModel.getChildIDs(folderAA, true, true),
                movedBookmarks);
    }

    /**
     * Verifies the folder/url filters of {@code getChildIDs}: urls only,
     * folders only, and both together.
     */
    @UiThreadTest
    @SmallTest
    @Feature({"Bookmark"})
    public void testGetChildIDs() {
        BookmarkId folderA = mBookmarksModel.addFolder(mMobileNode, 0, "fa");
        HashSet<BookmarkId> expectedChildren = new HashSet<>();
        expectedChildren.add(addBookmark(folderA, 0, "a", "http://a.com"));
        expectedChildren.add(addBookmark(folderA, 0, "a", "http://a.com"));
        expectedChildren.add(addBookmark(folderA, 0, "a", "http://a.com"));
        expectedChildren.add(addBookmark(folderA, 0, "a", "http://a.com"));
        BookmarkId folderAA = mBookmarksModel.addFolder(folderA, 0, "faa");
        // urls only
        verifyBookmarkListNoOrder(mBookmarksModel.getChildIDs(folderA, false, true),
                expectedChildren);
        // folders only
        verifyBookmarkListNoOrder(mBookmarksModel.getChildIDs(folderA, true, false),
                new HashSet<BookmarkId>(Arrays.asList(folderAA)));
        // folders and urls
        expectedChildren.add(folderAA);
        verifyBookmarkListNoOrder(mBookmarksModel.getChildIDs(folderA, true, true),
                expectedChildren);
    }

    // Moved from BookmarksBridgeTest
    /**
     * Verifies that bookmarks and folders can be added under every permanent
     * folder and under user-created folders, with correct resulting state.
     */
    @UiThreadTest
    @SmallTest
    @Feature({"Bookmark"})
    public void testAddBookmarksAndFolders() {
        BookmarkId bookmarkA = addBookmark(mDesktopNode, 0, "a", "http://a.com");
        verifyBookmark(bookmarkA, "a", "http://a.com/", false, mDesktopNode);
        BookmarkId bookmarkB = addBookmark(mOtherNode, 0, "b", "http://b.com");
        verifyBookmark(bookmarkB, "b", "http://b.com/", false, mOtherNode);
        BookmarkId bookmarkC = addBookmark(mMobileNode, 0, "c", "http://c.com");
        verifyBookmark(bookmarkC, "c", "http://c.com/", false, mMobileNode);
        BookmarkId folderA = mBookmarksModel.addFolder(mOtherNode, 0, "fa");
        verifyBookmark(folderA, "fa", null, true, mOtherNode);
        BookmarkId folderB = mBookmarksModel.addFolder(mDesktopNode, 0, "fb");
        verifyBookmark(folderB, "fb", null, true, mDesktopNode);
        BookmarkId folderC = mBookmarksModel.addFolder(mMobileNode, 0, "fc");
        verifyBookmark(folderC, "fc", null, true, mMobileNode);
        BookmarkId bookmarkAA = addBookmark(folderA, 0, "aa", "http://aa.com");
        verifyBookmark(bookmarkAA, "aa", "http://aa.com/", false, folderA);
        BookmarkId folderAA = mBookmarksModel.addFolder(folderA, 0, "faa");
        verifyBookmark(folderAA, "faa", null, true, folderA);
    }

    /**
     * Verifies that the offline-page bridge is created and loaded when the
     * offline-pages feature flag is enabled.
     */
    @UiThreadTest
    @SmallTest
    @CommandLineFlags.Add({ChromeSwitches.ENABLE_OFFLINE_PAGES})
    @Feature({"Bookmark"})
    public void testOfflineBridgeLoaded() {
        assertTrue(mBookmarksModel.getOfflinePageBridge() != null);
        assertTrue(mBookmarksModel.getOfflinePageBridge().isOfflinePageModelLoaded());
    }

    /**
     * Synchronously adds a bookmark via the async model API.
     *
     * Posts the add to the UI thread, then blocks on a semaphore until the
     * callback delivers the new id.
     *
     * @return the new bookmark's id, or null on timeout or interruption.
     *     NOTE(review): InterruptedException is swallowed without re-interrupting
     *     the thread, and a null return is indistinguishable from a timeout —
     *     callers rely on verifyBookmark's assertNotNull to catch this.
     */
    private BookmarkId addBookmark(final BookmarkId parent, final int index, final String title,
            final String url) {
        final AtomicReference<BookmarkId> result = new AtomicReference<BookmarkId>();
        final Semaphore semaphore = new Semaphore(0);
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                mBookmarksModel.addBookmarkAsync(
                        parent, index, title, url, null, new AddBookmarkCallback() {
                            @Override
                            public void onBookmarkAdded(
                                    final BookmarkId bookmarkId, int saveResult) {
                                result.set(bookmarkId);
                                semaphore.release();
                            }
                        });
            }
        });
        try {
            if (semaphore.tryAcquire(TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                return result.get();
            } else {
                return null;
            }
        } catch (InterruptedException e) {
            return null;
        }
    }

    /**
     * Asserts that the bookmark with the given id has the expected title,
     * URL (only checked for non-folders), folder-ness, and parent.
     */
    private void verifyBookmark(BookmarkId idToVerify, String expectedTitle,
            String expectedUrl, boolean isFolder, BookmarkId expectedParent) {
        assertNotNull(idToVerify);
        BookmarkItem item = mBookmarksModel.getBookmarkById(idToVerify);
        assertEquals(expectedTitle, item.getTitle());
        assertEquals(isFolder, item.isFolder());
        if (!isFolder) assertEquals(expectedUrl, item.getUrl());
        assertEquals(expectedParent, item.getParentId());
    }

    /**
     * Asserts that listToVerify contains exactly the ids in expectedIds,
     * ignoring order.
     *
     * Before using this helper method, always make sure @param listToVerify does not contain
     * duplicates.
     */
    private void verifyBookmarkListNoOrder(List<BookmarkId> listToVerify,
            HashSet<BookmarkId> expectedIds) {
        // Work on a copy so the caller's set is not consumed.
        HashSet<BookmarkId> expectedIdsCopy = new HashSet<>(expectedIds);
        assertEquals(expectedIdsCopy.size(), listToVerify.size());
        for (BookmarkId id : listToVerify) {
            assertNotNull(id);
            assertTrue("List contains wrong element: ", expectedIdsCopy.contains(id));
            expectedIdsCopy.remove(id);
        }
        assertTrue("List does not contain some expected bookmarks: ", expectedIdsCopy.isEmpty());
    }
}
| |
/**
* Copyright (c) 2006-2009, Redv.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Redv.com nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Created on 2008-5-29 17:08:36
*/
package cn.net.openid.jos.web;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openid4java.association.AssociationException;
import org.openid4java.message.AuthFailure;
import org.openid4java.message.AuthRequest;
import org.openid4java.message.AuthSuccess;
import org.openid4java.message.DirectError;
import org.openid4java.message.Message;
import org.openid4java.message.MessageException;
import org.openid4java.message.MessageExtension;
import org.openid4java.message.ax.AxMessage;
import org.openid4java.message.ax.FetchRequest;
import org.openid4java.message.ax.FetchResponse;
import org.openid4java.message.sreg.SRegMessage;
import org.openid4java.message.sreg.SRegRequest;
import org.openid4java.message.sreg.SRegResponse;
import org.openid4java.server.ServerException;
import org.openid4java.server.ServerManager;
import cn.net.openid.jos.domain.Persona;
import cn.net.openid.jos.domain.Site;
import cn.net.openid.jos.domain.User;
import cn.net.openid.jos.service.JosService;
/**
* Approving request processor.
*
* @author Sutra Zhou
*/
public class ApprovingRequestProcessor {
    /**
     * The logger.
     */
    private static final Log LOG = LogFactory
            .getLog(ApprovingRequestProcessor.class);
    /**
     * The value of allow auto.
     */
    public static final int ALLOW_AUTO = 0;
    /**
     * The value of allow once.
     */
    public static final int ALLOW_ONCE = 1;
    /**
     * The value of allow forever.
     */
    public static final int ALLOW_FOREVER = 2;
    /**
     * The value of deny.
     */
    public static final int DENY = -1;
    /**
     * The JOS service.
     */
    private final JosService josService;
    /**
     * The OpenID server manager.
     */
    private final ServerManager serverManager;
    /**
     * The HTTP Servlet request.
     */
    private final HttpServletRequest httpReq;
    /**
     * The HTTP Servlet response.
     */
    private final HttpServletResponse httpResp;
    /**
     * The user session.
     */
    private final UserSession userSession;
    /**
     * The user.
     */
    private final User user;
    /**
     * The approving request.
     */
    private final ApprovingRequest checkIdRequest;
    /**
     * The OpenID authentication request.
     */
    private final AuthRequest authRequest;
    /**
     * The realm.
     */
    private final String realm;

    /**
     * Construct a new {@link ApprovingRequestProcessor}.
     *
     * All remaining state (user session, user, auth request, realm) is
     * derived from the two request objects here, so the instance is
     * effectively immutable after construction.
     *
     * @param httpReq
     *            the http request
     * @param httpResp
     *            the http response
     * @param josService
     *            the jos service
     * @param serverManager
     *            the serverManager
     * @param checkIdRequest
     *            the approving request
     */
    public ApprovingRequestProcessor(final HttpServletRequest httpReq,
            final HttpServletResponse httpResp, final JosService josService,
            final ServerManager serverManager,
            final ApprovingRequest checkIdRequest) {
        this.httpReq = httpReq;
        this.httpResp = httpResp;
        this.josService = josService;
        this.serverManager = serverManager;
        this.userSession = WebUtils.getOrCreateUserSession(this.httpReq
                .getSession());
        this.user = userSession.getUser();
        this.checkIdRequest = checkIdRequest;
        this.authRequest = checkIdRequest.getAuthRequest();
        this.realm = this.authRequest.getRealm();
    }

    /**
     * Execute OpenID checkId.
     *
     * If the logged-in user owns the claimed id, proceed to the approval
     * check; otherwise park the request in the session and redirect to the
     * login page with a token to resume it afterwards.
     *
     * @throws IOException
     *             if HTTP error
     */
    public void checkId() throws IOException {
        if (this.isLoggedInUserOwnClaimedId()) {
            this.checkApproval();
        } else {
            // redirect to login page.
            String url = "login?token="
                    + userSession.addApprovingRequest(checkIdRequest);
            httpResp.sendRedirect(url);
        }
    }

    /**
     * If user logged in, do check site, otherwise redirect to login page.
     *
     * NOTE(review): ALLOW_AUTO (0) is not a switch case below, so it falls
     * into the default branch and is treated like DENY — confirm intended.
     *
     * @param allowType
     *            the allow type: once, forever or deny
     * @param persona
     *            the persona selected
     * @throws IOException
     *             if HTTP error
     */
    public void checkId(final int allowType, final Persona persona)
            throws IOException {
        if (this.isLoggedInUserOwnClaimedId()) {
            switch (allowType) {
            case ALLOW_ONCE:
                // Record a one-time approval (alwaysApprove = false).
                josService.allow(this.user, this.realm, persona, false);
                this.redirectToReturnToPage(true, persona);
                break;
            case ALLOW_FOREVER:
                // Record a persistent approval (alwaysApprove = true).
                josService.allow(this.user, this.realm, persona, true);
                this.redirectToReturnToPage(true, persona);
                break;
            case DENY:
            default:
                this.redirectToReturnToPage(false, null);
                break;
            }
        } else {
            // redirect to login page.
            String url = "login?token="
                    + userSession.addApprovingRequest(checkIdRequest);
            httpResp.sendRedirect(url);
        }
    }

    /**
     * Check the logged in user own the claimed ID.
     *
     * NOTE(review): the actual identity comparison is commented out, so this
     * currently only checks that the session is logged in — any logged-in
     * user "owns" any claimed id. Confirm that this is intentional.
     *
     * @return true if own, otherwise false.
     */
    private boolean isLoggedInUserOwnClaimedId() {
        boolean ret;
        if (userSession.isLoggedIn() /*
                                      && this.authRequest.getIdentity().equals(
                                      userSession.getUser().getIdentifier()) */) {
            ret = true;
        } else {
            ret = false;
        }
        return ret;
    }

    /**
     * If this site is always approve, redirect to return_to page, otherwise
     * redirect to approving page.
     *
     * Auto-approval additionally requires that the stored persona state is
     * consistent with whether the request carries an SReg extension: sreg
     * requests need a stored persona, non-sreg requests need none.
     *
     * @throws IOException if HTTP error
     */
    private void checkApproval() throws IOException {
        boolean approved;
        Site site = josService.getSite(user, authRequest.getRealm());
        if (site != null && site.isAlwaysApprove()) {
            boolean sreg = authRequest.hasExtension(SRegMessage.OPENID_NS_SREG);
            if ((sreg && site.getPersona() != null)
                    || (!sreg && site.getPersona() == null)) {
                approved = true;
            } else {
                approved = false;
            }
        } else {
            approved = false;
        }
        /*
        if (site.getRealm().getUrl().equals("http://localhost:8080/openid/response")) {
            approved = true;
        } else {
            approved = false;
        }
        */
        if (approved) {
            // Bump the approval bookkeeping for this realm before returning.
            josService.updateApproval(user, authRequest.getRealm());
            // return to `return_to' page.
            redirectToReturnToPage(true, site.getPersona());
        } else {
            // redirect to approving page.
            String url = "approving?token="
                    + userSession.addApprovingRequest(checkIdRequest);
            //String url = "/";
            httpResp.sendRedirect(url);
        }
    }

    /**
     * Redirect to return_to page.
     *
     * Builds the OpenID authentication response via the server manager,
     * attaches AX/SReg extension data for approved responses, signs it, and
     * redirects the browser to the relying party's return_to URL. The pending
     * approving request is removed from the session in every outcome.
     *
     * @param approved is approved
     * @param persona the persona to send to the request realm
     * @throws IOException if HTTP error
     */
    private void redirectToReturnToPage(final boolean approved,
            final Persona persona) throws IOException {
        Message response;
        // interact with the user and obtain data needed to continue
        String userSelectedClaimedId = null;
        Boolean authenticatedAndApproved = approved;
        // NOTE(review): the OP-local id is the request identity concatenated
        // with the username — confirm this matches the identifier scheme used
        // elsewhere in JOS.
        String opLocalId = authRequest.getIdentity() + user.getUsername();
        // if the user chose a different claimed_id than the one in request
        // NOTE(review): userSelectedClaimedId is always null here, so this
        // branch is currently dead code pending the TODO below.
        if (userSelectedClaimedId != null
                && userSelectedClaimedId.equals(authRequest.getClaimed())) {
            // TODO
            // opLocalId = lookupLocalId(userSelectedClaimedId);
        }
        boolean signNow = false;
        response = this.serverManager.authResponse(authRequest, opLocalId,
                userSelectedClaimedId, authenticatedAndApproved.booleanValue(),
                signNow);
        if (response instanceof DirectError) {
            directResponse(response.keyValueFormEncoding());
        } else if (response instanceof AuthFailure) {
            httpResp.sendRedirect(response.getDestinationUrl(true));
        } else {
            if (authenticatedAndApproved) {
                try {
                    // Attach AX and SReg data; extensions must be added before
                    // the response is signed.
                    addExtension(response, persona);
                    addSRegExtension(response, persona);
                } catch (MessageException e) {
                    LOG.error("", e);
                }
            }
            try {
                serverManager.sign((AuthSuccess) response);
            } catch (ServerException e) {
                LOG.error("", e);
            } catch (AssociationException e) {
                LOG.error("", e);
            }
            // caller will need to decide which of the following to use:
            // option1: GET HTTP-redirect to the return_to URL
            String destUrl = response.getDestinationUrl(true);
            httpResp.sendRedirect(destUrl);
            // option2: HTML FORM Redirection
            // RequestDispatcher dispatcher =
            // getServletContext().getRequestDispatcher("formredirection.jsp");
            // httpReq.setAttribute("prameterMap",
            // response.getParameterMap());
            // httpReq.setAttribute("destinationUrl",
            // response.getDestinationUrl(false));
            // dispatcher.forward(request, response);
            // return null;
        }
        userSession.removeApprovingRequest(checkIdRequest.getToken());
    }

    /**
     * Write string to the HTTP response out stream.
     *
     * @param response
     *            the response string to write to
     * @return always returns null
     * @throws IOException
     *             if write failed
     */
    private String directResponse(final String response) throws IOException {
        WebUtils.writeResponse(httpResp, response);
        return null;
    }

    /**
     * Add Simple Register extension message to the response message.
     *
     * Only acts when the auth request carries an SReg extension and a persona
     * is available; the persona's fields become the SReg attribute values.
     *
     * @param response
     *            the response message to add to
     * @param persona
     *            the persona
     * @throws MessageException
     *             if add extension failed
     */
    private void addSRegExtension(final Message response, final Persona persona)
            throws MessageException {
        if (authRequest.hasExtension(SRegMessage.OPENID_NS_SREG)) {
            MessageExtension ext = authRequest
                    .getExtension(SRegMessage.OPENID_NS_SREG);
            if (ext instanceof SRegRequest) {
                SRegRequest sregReq = (SRegRequest) ext;
                // data released by the user
                if (persona != null) {
                    Map<String, String> userDataSReg = persona.toMap();
                    SRegResponse sregResp = SRegResponse.createSRegResponse(
                            sregReq, userDataSReg);
                    // (alternatively) manually add attribute values
                    // sregResp.addAttribute("email", email);
                    response.addExtension(sregResp);
                }
            } else {
                // Only fetch-style SReg requests are supported.
                throw new UnsupportedOperationException("TODO");
            }
        }
    }

    /**
     * Add extension to the response message.
     *
     * Answers an Attribute Exchange (AX) fetch request by mapping well-known
     * axschema.org attribute URIs onto persona fields. Only the required
     * attribute list is answered; optional attributes are fetched but unused.
     *
     * NOTE(review): the name is split on the first space —
     * persona.getFullname().indexOf(' ') returns -1 for a single-word name,
     * which would make the substring calls throw. Confirm fullname always
     * contains a space, or guard these two branches.
     *
     * @param response
     *            the response message to add to
     * @param persona
     *            the persona whose fields supply the attribute values
     * @throws MessageException
     *             if add failed
     */
    @SuppressWarnings("unchecked")
    private void addExtension(final Message response, final Persona persona) throws MessageException {
        if (authRequest.hasExtension(AxMessage.OPENID_NS_AX) && persona != null) {
            MessageExtension ext = authRequest
                    .getExtension(AxMessage.OPENID_NS_AX);
            if (ext instanceof FetchRequest) {
                FetchRequest fetchReq = (FetchRequest) ext;
                Map<String, String> required = fetchReq.getAttributes(true);
                Map<String, String> optional = fetchReq.getAttributes(false);
                // Raw Map is required by the openid4java createFetchResponse API.
                Map userDataExt = new HashMap();
                FetchResponse fetchResp = FetchResponse
                        .createFetchResponse(fetchReq, userDataExt);
                for (String entry : required.values()) {
                    if (entry.equals("http://axschema.org/contact/email")) {
                        fetchResp.addAttribute("email", "http://axschema.org/contact/email",
                                persona.getEmail());
                    } else if (entry.equals("http://axschema.org/namePerson")) {
                        fetchResp.addAttribute("fullname", "http://axschema.org/namePerson",
                                persona.getFullname());
                    } else if (entry.equals("http://axschema.org/namePerson/first")) {
                        fetchResp.addAttribute("first", "http://axschema.org/namePerson/first",
                                persona.getFullname().substring(0, persona.getFullname().indexOf(' ')));
                    } else if (entry.equals("http://axschema.org/namePerson/last")) {
                        fetchResp.addAttribute("last", "http://axschema.org/namePerson/last",
                                persona.getFullname().substring(persona.getFullname().indexOf(' ') + 1));
                    } else if (entry.equals("http://axschema.org/namePerson/friendly")) {
                        fetchResp.addAttribute("nickname", "http://axschema.org/namePerson/friendly",
                                persona.getNickname());
                    }
                }
                response.addExtension(fetchResp);
            } else { // if (ext instanceof StoreRequest)
                // Store-style AX requests are not supported.
                throw new UnsupportedOperationException("TODO");
            }
        }
    }
}
| |
package amai.org.conventions.updates;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import androidx.recyclerview.widget.DefaultItemAnimator;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.swiperefreshlayout.widget.SwipeRefreshLayout;
import com.google.firebase.analytics.FirebaseAnalytics;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import amai.org.conventions.R;
import amai.org.conventions.ThemeAttributes;
import amai.org.conventions.model.Update;
import amai.org.conventions.model.conventions.Convention;
import amai.org.conventions.navigation.NavigationActivity;
import amai.org.conventions.networking.UpdatesRefresher;
import amai.org.conventions.notifications.PushNotification;
/**
 * Screen showing the convention's updates feed with pull-to-refresh support.
 * Refreshes from the server on entry and on user swipe, and scrolls to a
 * specific update when opened from a push notification.
 */
public class UpdatesActivity extends NavigationActivity implements SwipeRefreshLayout.OnRefreshListener {
    private SwipeRefreshLayout swipeRefreshLayout;
    private RecyclerView recyclerView;
    private UpdatesAdapter updatesAdapter;
    // Placeholder shown when the updates list is empty.
    private View noUpdates;
    // Container of the updates list; also used as the animation background.
    private View updatesLayout;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setToolbarAndContentContainerBackground(ThemeAttributes.getDrawable(this, R.attr.updatesBackgroundDrawable));
        setToolbarTitle(getResources().getString(R.string.updates));
        setContentInContentContainer(R.layout.activity_updates, false);
        resolveUiElements();
        // Initialize the recycler view.
        updatesAdapter = new UpdatesAdapter();
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        recyclerView.setAdapter(updatesAdapter);
        // Enable animations and listen to when they start and finish to ensure the recycler view
        // has a background during the animation
        recyclerView.setItemAnimator(new DefaultItemAnimator() {
            @Override
            public void onAddStarting(RecyclerView.ViewHolder item) {
                setUpdatesBackground();
                super.onAddStarting(item);
            }
            @Override
            public void onChangeStarting(RecyclerView.ViewHolder item, boolean oldItem) {
                setUpdatesBackground();
                super.onChangeStarting(item, oldItem);
            }
            @Override
            public void onMoveStarting(RecyclerView.ViewHolder item) {
                setUpdatesBackground();
                super.onMoveStarting(item);
            }
            @Override
            public void onRemoveStarting(RecyclerView.ViewHolder item) {
                setUpdatesBackground();
                super.onRemoveStarting(item);
            }
            @Override
            public void onAnimationFinished(RecyclerView.ViewHolder viewHolder) {
                super.onAnimationFinished(viewHolder);
                removeUpdatesBackground();
            }
        });
        // Initialize the updates list based on the model cache.
        List<Update> updates = Convention.getInstance().getUpdates();
        initializeUpdatesList(updates, updates.size()); // All items are new in this list
        setUpdatesVisibility();
        // Silent background refresh on entry (no error toast).
        retrieveUpdatesList(false);
    }

    @Override
    public void onRefresh() {
        // User-initiated pull-to-refresh: log it, clear "new" badges, redraw,
        // and force a refresh that is allowed to show an error.
        FirebaseAnalytics
                .getInstance(this)
                .logEvent("pull_to_refresh", null);
        Convention.getInstance().clearNewFlagFromAllUpdates();
        setUpdatesBackground();
        updatesAdapter.notifyItemRangeChanged(0, Convention.getInstance().getUpdates().size());
        retrieveUpdatesList(true);
    }

    /** Looks up and wires the views this activity interacts with. */
    private void resolveUiElements() {
        swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.updates_swipe_layout);
        swipeRefreshLayout.setOnRefreshListener(this);
        swipeRefreshLayout.setColorSchemeColors(ThemeAttributes.getColor(this, R.attr.swipeToRefreshColor));
        swipeRefreshLayout.setProgressBackgroundColorSchemeColor(ThemeAttributes.getColor(this, R.attr.swipeToRefreshBackgroundColor));
        noUpdates = findViewById(R.id.no_updates);
        recyclerView = (RecyclerView) findViewById(R.id.updates_list);
        updatesLayout = findViewById(R.id.updates_layout);
    }

    /** Toggles between the updates list and the "no updates" placeholder. */
    private void setUpdatesVisibility() {
        boolean showMessage = updatesAdapter.getItemCount() == 0;
        noUpdates.setVisibility(showMessage ? View.VISIBLE : View.GONE);
        updatesLayout.setVisibility(showMessage ? View.GONE : View.VISIBLE);
    }

    /**
     * Fetches the updates list from the server and refreshes the UI.
     *
     * @param showError whether a failure should surface a toast (true only
     *                  for user-initiated refreshes, which are also forced).
     */
    private void retrieveUpdatesList(final boolean showError) {
        final UpdatesRefresher refresher = UpdatesRefresher.getInstance(UpdatesActivity.this);
        // Workaround (Android issue #77712) - SwipeRefreshLayout indicator does not appear when the `setRefreshing(true)` is called before
        // the `SwipeRefreshLayout#onMeasure()`, so we post the setRefreshing call to the layout queue.
        refresher.setIsRefreshInProgress(true);
        swipeRefreshLayout.post(new Runnable() {
            @Override
            public void run() {
                swipeRefreshLayout.setRefreshing(refresher.isRefreshInProgress());
            }
        });
        // Refresh, and don't allow new updates notification to occur due to this refresh.
        // Only force refresh if it's due to user interaction (in that case we also show an error).
        refresher.refreshFromServer(false, showError, new UpdatesRefresher.OnUpdateFinishedListener() {
            @Override
            public void onSuccess(int newUpdatesNumber) {
                updateRefreshingFlag();
                initializeUpdatesList(Convention.getInstance().getUpdates(), newUpdatesNumber);
                setUpdatesVisibility();
                // If we don't do that, the recycler view will show the previous items and the user will have to scroll manually
                recyclerView.scrollToPosition(0);
            }
            @Override
            public void onError(Exception error) {
                updateRefreshingFlag();
                if (showError) {
                    Toast.makeText(UpdatesActivity.this, R.string.update_refresh_failed, Toast.LENGTH_LONG).show();
                }
            }
            private void updateRefreshingFlag() {
                // Hide the pull-to-refresh spinner in both outcomes.
                swipeRefreshLayout.setRefreshing(false);
            }
        });
    }

    /**
     * Sorts the updates newest-first and hands them to the adapter.
     *
     * @param updates the updates to display (sorted in place)
     * @param newItemsNumber how many of them should be marked as new
     */
    private void initializeUpdatesList(List<Update> updates, int newItemsNumber) {
        Collections.sort(updates, new Comparator<Update>() {
            @Override
            public int compare(Update lhs, Update rhs) {
                // Sort the updates so the latest message would appear first.
                return rhs.getDate().compareTo(lhs.getDate());
            }
        });
        setUpdatesBackground();
        updatesAdapter.setUpdates(updates, newItemsNumber);
    }

    /**
     * This method must be called before recycler view animation starts and adapter updates to prevent flickering
     */
    private void setUpdatesBackground() {
        updatesLayout.setBackgroundColor(ThemeAttributes.getColor(this, R.attr.updatesBackground));
    }

    /**
     * This method must be called after recycler view animation ends to remove overdraw
     */
    private void removeUpdatesBackground() {
        // Ensure we only remove the background after the last animation finished running
        recyclerView.getItemAnimator().isRunning(new RecyclerView.ItemAnimator.ItemAnimatorFinishedListener() {
            @Override
            public void onAnimationsFinished() {
                updatesLayout.setBackground(null);
            }
        });
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Remove new flag for viewed updates
        Convention.getInstance().clearNewFlagFromAllUpdates();
    }

    @Override
    protected void onPushNotificationReceived(PushNotification pushNotification) {
        // Try to focus the update referenced by the notification; fall back to
        // the default popup when it is not present in the list.
        String messageId = pushNotification.messageId;
        int updatePosition = UpdatesAdapter.UPDATE_NOT_FOUND;
        if (messageId != null) {
            updatePosition = updatesAdapter.focusOn(messageId);
        }
        if (updatePosition == UpdatesAdapter.UPDATE_NOT_FOUND) {
            super.onPushNotificationReceived(pushNotification); // Default implementation (popup)
        } else {
            // Scroll to update
            recyclerView.scrollToPosition(updatePosition);
        }
    }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.testutil;
import com.facebook.buck.io.DefaultProjectFilesystemDelegate;
import com.facebook.buck.io.MoreFiles;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.timing.FakeClock;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.util.sha1.Sha1HashCode;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteStreams;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.math.BigInteger;
import java.nio.file.CopyOption;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotLinkException;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.annotation.Nullable;
// TODO(natthu): Implement methods that throw UnsupportedOperationException.
public class FakeProjectFilesystem extends ProjectFilesystem {
  // Shared RNG used by createTempFile to generate unique temp-file names.
  private static final Random RANDOM = new Random();
  // Default project root: the current working directory, absolute and normalized.
  private static final Path DEFAULT_ROOT = Paths.get(".").toAbsolutePath().normalize();
  // Attributes handed to FileVisitors for regular files during tree walks:
  // all timestamps null, size 0. Only the isRegularFile/isDirectory flags matter.
  private static final BasicFileAttributes DEFAULT_FILE_ATTRIBUTES =
      new BasicFileAttributes() {
        @Override
        @Nullable
        public FileTime lastModifiedTime() {
          return null;
        }
        @Override
        @Nullable
        public FileTime lastAccessTime() {
          return null;
        }
        @Override
        @Nullable
        public FileTime creationTime() {
          return null;
        }
        @Override
        public boolean isRegularFile() {
          return true;
        }
        @Override
        public boolean isDirectory() {
          return false;
        }
        @Override
        public boolean isSymbolicLink() {
          return false;
        }
        @Override
        public boolean isOther() {
          return false;
        }
        @Override
        public long size() {
          return 0;
        }
        @Override
        @Nullable
        public Object fileKey() {
          return null;
        }
      };
  // Same as DEFAULT_FILE_ATTRIBUTES but flagged as a directory; used for preVisitDirectory.
  private static final BasicFileAttributes DEFAULT_DIR_ATTRIBUTES =
      new BasicFileAttributes() {
        @Override
        @Nullable
        public FileTime lastModifiedTime() {
          return null;
        }
        @Override
        @Nullable
        public FileTime lastAccessTime() {
          return null;
        }
        @Override
        @Nullable
        public FileTime creationTime() {
          return null;
        }
        @Override
        public boolean isRegularFile() {
          return false;
        }
        @Override
        public boolean isDirectory() {
          return true;
        }
        @Override
        public boolean isSymbolicLink() {
          return false;
        }
        @Override
        public boolean isOther() {
          return false;
        }
        @Override
        public long size() {
          return 0;
        }
        @Override
        @Nullable
        public Object fileKey() {
          return null;
        }
      };
  // Contents of every regular file, keyed by normalized project-relative path.
  private final Map<Path, byte[]> fileContents;
  // Attributes supplied when each file was written, keyed by normalized path.
  private final Map<Path, ImmutableSet<FileAttribute<?>>> fileAttributes;
  // Last-modified timestamps for files and directories.
  private final Map<Path, FileTime> fileLastModifiedTimes;
  // Symlink -> target mapping. Note: createSymLink stores keys without normalizing.
  private final Map<Path, Path> symLinks;
  // Every known directory, including all ancestors of written files.
  private final Set<Path> directories;
  // Source of timestamps for writes/touches.
  private final Clock clock;
  /**
   * @return A project filesystem in a temp directory that will be deleted recursively on jvm exit.
   */
  public static ProjectFilesystem createRealTempFilesystem() {
    final Path tempDir;
    try {
      tempDir = Files.createTempDirectory("pfs");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    // Best-effort cleanup: remove the temp directory when the JVM shuts down.
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
      try {
        MoreFiles.deleteRecursively(tempDir);
      } catch (IOException e) { // NOPMD
        // Swallow. At least we tried, right?
      }
    }));
    return new FakeProjectFilesystem(tempDir);
  }
  /** Creates an in-memory (Jimfs-backed) filesystem rooted at a fixed default path. */
  public static ProjectFilesystem createJavaOnlyFilesystem() {
    return createJavaOnlyFilesystem("/opt/src/buck");
  }
  /**
   * Creates an in-memory (Jimfs-backed) filesystem rooted at {@code rootPath}. On Windows the
   * root is prefixed with "C:" and Windows path semantics are used; otherwise Unix semantics.
   */
  public static ProjectFilesystem createJavaOnlyFilesystem(String rootPath) {
    boolean isWindows = Platform.detect() == Platform.WINDOWS;
    Configuration configuration = isWindows ? Configuration.windows() : Configuration.unix();
    rootPath = isWindows ? "C:" + rootPath : rootPath;
    FileSystem vfs = Jimfs.newFileSystem(configuration);
    Path root = vfs.getPath(rootPath);
    try {
      Files.createDirectories(root);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return new ProjectFilesystem(root) {
      @Override
      public Path resolve(Path path) {
        // Avoid resolving paths from different Java FileSystems.
        return super.resolve(path.toString());
      }
    };
  }
  /** Creates a fake filesystem rooted at the current working directory, clock fixed at 0. */
  public FakeProjectFilesystem() {
    this(DEFAULT_ROOT);
  }
  /** Creates a fake filesystem rooted at {@code root}, clock fixed at 0. */
  public FakeProjectFilesystem(Path root) {
    this(new FakeClock(0), root, ImmutableSet.of());
  }
  /** Creates a fake filesystem at the default root using the given clock. */
  public FakeProjectFilesystem(Clock clock) {
    this(clock, DEFAULT_ROOT, ImmutableSet.of());
  }
  /** Creates a fake filesystem pre-seeded with the given paths as empty files. */
  public FakeProjectFilesystem(Set<Path> files) {
    this(new FakeClock(0), DEFAULT_ROOT, files);
  }
  /**
   * @param clock source of modification timestamps
   * @param root project root (need not exist on disk)
   * @param files paths seeded as empty files; every ancestor directory is registered too.
   *     NOTE(review): seeded paths are stored as given (not normalized), unlike paths written
   *     via writeBytesToPath — callers should pass already-normalized paths.
   */
  public FakeProjectFilesystem(Clock clock, Path root, Set<Path> files) {
    // For testing, we always use a DefaultProjectFilesystemDelegate so that the logic being
    // exercised is always the same, even if a test using FakeProjectFilesystem is used on EdenFS.
    super(root, new DefaultProjectFilesystemDelegate(root));
    // We use LinkedHashMap to preserve insertion order, so the
    // behavior of this test is consistent across versions. (It also lets
    // us write tests which explicitly test iterating over entries in
    // different orders.)
    fileContents = new LinkedHashMap<>();
    fileLastModifiedTimes = new LinkedHashMap<>();
    FileTime modifiedTime = FileTime.fromMillis(clock.currentTimeMillis());
    for (Path file : files) {
      fileContents.put(file, new byte[0]);
      fileLastModifiedTimes.put(file, modifiedTime);
    }
    fileAttributes = new LinkedHashMap<>();
    symLinks = new LinkedHashMap<>();
    directories = new LinkedHashSet<>();
    directories.add(Paths.get(""));
    // Register every ancestor directory of each seeded file.
    for (Path file : files) {
      Path dir = file.getParent();
      while (dir != null) {
        directories.add(dir);
        dir = dir.getParent();
      }
    }
    this.clock = Preconditions.checkNotNull(clock);
    // Generally, tests don't care whether files exist.
    ignoreValidityOfPaths = true;
  }
  @Override
  protected boolean shouldVerifyConstructorArguments() {
    // The fake's root need not exist on disk, so skip superclass validation.
    return false;
  }
  /** Toggles path-validity checking; returns {@code this} for chaining. */
  public FakeProjectFilesystem setIgnoreValidityOfPaths(boolean shouldIgnore) {
    this.ignoreValidityOfPaths = shouldIgnore;
    return this;
  }
  // Returns the raw contents of a file; throws NPE if the normalized path is not a file.
  private byte[] getFileBytes(Path path) {
    return Preconditions.checkNotNull(fileContents.get(MorePaths.normalize(path)));
  }
  // Removes a file's contents, attributes and mtime; directory entries are untouched.
  private void rmFile(Path path) {
    fileContents.remove(MorePaths.normalize(path));
    fileAttributes.remove(MorePaths.normalize(path));
    fileLastModifiedTimes.remove(MorePaths.normalize(path));
  }
public ImmutableSet<FileAttribute<?>> getFileAttributesAtPath(Path path) {
return Preconditions.checkNotNull(fileAttributes.get(path));
}
  /**
   * Resets the fake to a completely empty state (no files, directories or links).
   * Note: this also drops the root ("") directory entry that the constructor registers.
   */
  public void clear() {
    fileContents.clear();
    fileAttributes.clear();
    fileLastModifiedTimes.clear();
    symLinks.clear();
    directories.clear();
  }
  /**
   * Returns fake basic attributes (size 0, no timestamps) for an existing file or directory.
   *
   * @throws NoSuchFileException if nothing exists at the path
   */
  public BasicFileAttributes readBasicAttributes(
      Path pathRelativeToProjectRoot) throws IOException {
    if (!exists(pathRelativeToProjectRoot)) {
      throw new NoSuchFileException(pathRelativeToProjectRoot.toString());
    }
    return isFile(pathRelativeToProjectRoot)
        ? FakeFileAttributes.forFileWithSize(pathRelativeToProjectRoot, 0)
        : FakeFileAttributes.forDirectory(pathRelativeToProjectRoot);
  }
  /** Only {@link BasicFileAttributes} is supported by this fake. */
  @Override
  public <A extends BasicFileAttributes> A readAttributes(
      Path pathRelativeToProjectRoot,
      Class<A> type,
      LinkOption... options) throws IOException {
    if (type == BasicFileAttributes.class) {
      return type.cast(readBasicAttributes(pathRelativeToProjectRoot));
    }
    throw new UnsupportedOperationException("cannot mock instance of: " + type);
  }
  // A path exists when it is a known file or a known directory; symlinks are not consulted.
  @Override
  public boolean exists(Path path, LinkOption... options) {
    return isFile(path) || isDirectory(path);
  }
  /**
   * @throws NoSuchFileException if nothing exists at {@code path}
   */
  @Override
  public long getFileSize(Path path) throws IOException {
    if (!exists(path)) {
      throw new NoSuchFileException(path.toString());
    }
    return getFileBytes(path).length;
  }
  /**
   * @throws NoSuchFileException if nothing exists at {@code path}
   */
  @Override
  public void deleteFileAtPath(Path path) throws IOException {
    if (exists(path)) {
      rmFile(path);
    } else {
      throw new NoSuchFileException(path.toString());
    }
  }
  /** @return true if something existed at {@code path} and its file entries were removed. */
  @Override
  public boolean deleteFileAtPathIfExists(Path path) throws IOException {
    if (exists(path)) {
      rmFile(path);
      return true;
    } else {
      return false;
    }
  }
  @Override
  public boolean isFile(Path path) {
    return fileContents.containsKey(MorePaths.normalize(path));
  }
  // Note: only *files* whose name starts with '.' are reported hidden; directories never are.
  @Override
  public boolean isHidden(Path path) throws IOException {
    return isFile(path) && path.getFileName().toString().startsWith(".");
  }
  @Override
  public boolean isDirectory(Path path, LinkOption... linkOptions) {
    return directories.contains(MorePaths.normalize(path));
  }
  // This fake never marks anything executable.
  @Override
  public boolean isExecutable(Path child) {
    return false;
  }
  /**
   * Does not support symlinks.
   *
   * <p>Returns the files and directories directly under {@code pathRelativeToProjectRoot},
   * sorted in natural order. The root entry ("") is never listed as its own child.
   */
  @Override
  public final ImmutableCollection<Path> getDirectoryContents(final Path pathRelativeToProjectRoot)
      throws IOException {
    Preconditions.checkState(isDirectory(pathRelativeToProjectRoot));
    return FluentIterable
        .from(fileContents.keySet())
        .append(directories)
        .filter(
            input -> {
              if (input.equals(Paths.get(""))) {
                return false;
              }
              // Keep only direct children of the requested directory.
              return MorePaths.getParentOrEmpty(input).equals(pathRelativeToProjectRoot);
            })
        .toSortedList(Comparator.naturalOrder());
  }
  /** Lists the entry names of a zip archive stored in this fake filesystem. */
  @Override
  public ImmutableCollection<Path> getZipMembers(Path archivePath) throws IOException {
    // We can't use ZipFile here because it won't work with streams. We don't use ZipInputStream
    // in the real ProjectFilesystem because it reads the entire zip file to list entries, whereas
    // ZipFile just looks at the directory if it exists.
    try (ZipInputStream zipInputStream = new ZipInputStream(newFileInputStream(archivePath))) {
      ImmutableList.Builder<Path> resultBuilder = ImmutableList.builder();
      ZipEntry zipEntry;
      while ((zipEntry = zipInputStream.getNextEntry()) != null) {
        resultBuilder.add(Paths.get(zipEntry.getName()));
      }
      return resultBuilder.build();
    }
  }
@Override
public ImmutableSortedSet<Path> getMtimeSortedMatchingDirectoryContents(
final Path pathRelativeToProjectRoot,
String globPattern)
throws IOException {
Preconditions.checkState(isDirectory(pathRelativeToProjectRoot));
final PathMatcher pathMatcher = FileSystems.getDefault().getPathMatcher("glob:" + globPattern);
return FluentIterable.from(fileContents.keySet()).filter(
input -> input.getParent().equals(pathRelativeToProjectRoot) &&
pathMatcher.matches(input.getFileName()))
.toSortedSet(
Ordering
.natural()
.onResultOf(
new Function<Path, FileTime>() {
@Override
public FileTime apply(Path path) {
try {
return getLastModifiedTimeFetcher().getLastModifiedTime(path);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
})
.compound(Ordering.natural())
.reverse());
}
  // Delegates to walkRelativeFileTree; searchRoot is treated as project-relative.
  @Override
  public void walkFileTree(Path searchRoot, FileVisitor<Path> fileVisitor) throws IOException {
    walkRelativeFileTree(searchRoot, fileVisitor);
  }
  /**
   * @return the recorded mtime in epoch millis
   * @throws NoSuchFileException if nothing exists at {@code path}
   */
  @Override
  public long getLastModifiedTime(Path path) throws IOException {
    Path normalizedPath = MorePaths.normalize(path);
    if (!exists(normalizedPath)) {
      throw new NoSuchFileException(path.toString());
    }
    return Preconditions.checkNotNull(fileLastModifiedTimes.get(normalizedPath)).toMillis();
  }
  /**
   * @return the normalized path whose mtime was updated
   * @throws NoSuchFileException if nothing exists at {@code path}
   */
  @Override
  public Path setLastModifiedTime(Path path, FileTime time) throws IOException {
    Path normalizedPath = MorePaths.normalize(path);
    if (!exists(normalizedPath)) {
      throw new NoSuchFileException(path.toString());
    }
    fileLastModifiedTimes.put(normalizedPath, time);
    return normalizedPath;
  }
  /**
   * Deletes {@code path} and everything beneath it.
   *
   * @throws NoSuchFileException if nothing exists at {@code path}
   */
  @Override
  public void deleteRecursively(Path path) throws IOException {
    if (!exists(path)) {
      throw new NoSuchFileException(path.toString());
    }
    deleteRecursivelyIfExists(path);
  }
@Override
public void deleteRecursivelyIfExists(Path path) throws IOException {
Path normalizedPath = MorePaths.normalize(path);
for (Iterator<Path> iterator = fileContents.keySet().iterator(); iterator.hasNext();) {
Path subPath = iterator.next();
if (subPath.startsWith(normalizedPath)) {
fileAttributes.remove(MorePaths.normalize(subPath));
fileLastModifiedTimes.remove(MorePaths.normalize(subPath));
iterator.remove();
}
}
for (Iterator<Path> iterator = symLinks.keySet().iterator(); iterator.hasNext();) {
Path subPath = iterator.next();
if (subPath.startsWith(normalizedPath)) {
iterator.remove();
}
}
fileLastModifiedTimes.remove(path);
directories.remove(path);
}
  /** Registers {@code path} and all of its ancestors as directories, stamping their mtimes. */
  @Override
  public void mkdirs(Path path) throws IOException {
    for (Path parent = path; parent != null; parent = parent.getParent()) {
      directories.add(parent);
      fileLastModifiedTimes.put(parent, FileTime.fromMillis(clock.currentTimeMillis()));
    }
  }
  /** Creates an empty file at {@code path}, overwriting any existing contents. */
  @Override
  public Path createNewFile(Path path) throws IOException {
    writeBytesToPath(new byte[0], path);
    return path;
  }
  /**
   * Joins {@code lines} with '\n' (appending a trailing newline when non-empty) and writes the
   * result to {@code path}.
   */
  @Override
  public void writeLinesToPath(
      Iterable<String> lines,
      Path path,
      FileAttribute<?>... attrs) throws IOException {
    StringBuilder builder = new StringBuilder();
    if (!Iterables.isEmpty(lines)) {
      Joiner.on('\n').appendTo(builder, lines);
      builder.append('\n');
    }
    writeContentsToPath(builder.toString(), path, attrs);
  }
  /** Writes {@code contents} to {@code path}, UTF-8 encoded. */
  @Override
  public void writeContentsToPath(
      String contents,
      Path path,
      FileAttribute<?>... attrs) throws IOException {
    writeBytesToPath(contents.getBytes(Charsets.UTF_8), path, attrs);
  }
  /**
   * Stores {@code bytes} under the normalized {@code path}, records {@code attrs} and the
   * current clock time, and registers every ancestor as a directory.
   */
  @Override
  public void writeBytesToPath(
      byte[] bytes,
      Path path,
      FileAttribute<?>... attrs) throws IOException {
    Path normalizedPath = MorePaths.normalize(path);
    fileContents.put(normalizedPath, Preconditions.checkNotNull(bytes));
    fileAttributes.put(normalizedPath, ImmutableSet.copyOf(attrs));
    // Ensure every ancestor directory of the file is known.
    Path directory = normalizedPath.getParent();
    while (directory != null) {
      directories.add(directory);
      directory = directory.getParent();
    }
    fileLastModifiedTimes.put(normalizedPath, FileTime.fromMillis(clock.currentTimeMillis()));
  }
  /**
   * Returns a stream that buffers writes in memory and commits the buffered bytes to
   * {@code pathRelativeToProjectRoot} on every {@code flush()} and again on {@code close()}.
   */
  @Override
  public OutputStream newFileOutputStream(
      final Path pathRelativeToProjectRoot,
      final FileAttribute<?>... attrs) throws IOException {
    return new ByteArrayOutputStream() {
      @Override
      public void close() throws IOException {
        super.close();
        writeToMap();
      }
      @Override
      public void flush() throws IOException {
        super.flush();
        writeToMap();
      }
      // Persist the current buffer contents into the fake's file map.
      private void writeToMap() throws IOException {
        writeBytesToPath(toByteArray(), pathRelativeToProjectRoot, attrs);
      }
    };
  }
/**
* Does not support symlinks.
*/
@Override
public InputStream newFileInputStream(Path pathRelativeToProjectRoot)
throws IOException {
byte[] contents = fileContents.get(normalizePathToProjectRoot(pathRelativeToProjectRoot));
return new ByteArrayInputStream(contents);
}
private Path normalizePathToProjectRoot(Path pathRelativeToProjectRoot)
throws NoSuchFileException {
if (!exists(pathRelativeToProjectRoot)) {
throw new NoSuchFileException(pathRelativeToProjectRoot.toString());
}
return MorePaths.normalize(pathRelativeToProjectRoot);
}
  /**
   * Copies the entire stream into the file at {@code path}.
   *
   * <p>NOTE(review): the {@code options} argument is currently ignored.
   */
  @Override
  public void copyToPath(final InputStream inputStream, Path path, CopyOption... options)
      throws IOException {
    writeBytesToPath(ByteStreams.toByteArray(inputStream), path);
  }
/**
* Does not support symlinks.
*/
@Override
public Optional<String> readFileIfItExists(Path path) {
if (!exists(path)) {
return Optional.empty();
}
return Optional.of(new String(getFileBytes(path), Charsets.UTF_8));
}
  /**
   * Does not support symlinks.
   */
  @Override
  public Optional<Reader> getReaderIfFileExists(Path path) {
    Optional<String> content = readFileIfItExists(path);
    if (!content.isPresent()) {
      return Optional.empty();
    }
    return Optional.of((Reader) new StringReader(content.get()));
  }
  /**
   * Does not support symlinks.
   *
   * <p>Returns empty when the file is missing, unreadable, or has no lines.
   */
  @Override
  public Optional<String> readFirstLine(Path path) {
    List<String> lines;
    try {
      lines = readLines(path);
    } catch (IOException e) {
      return Optional.empty();
    }
    return Optional.ofNullable(Iterables.get(lines, 0, null));
  }
  /**
   * Does not support symlinks.
   */
  @Override
  public List<String> readLines(Path path) throws IOException {
    Optional<String> contents = readFileIfItExists(path);
    if (!contents.isPresent() || contents.get().isEmpty()) {
      return ImmutableList.of();
    }
    String content = contents.get();
    // Strip a single trailing newline so the final empty "line" is not returned.
    content = content.endsWith("\n") ? content.substring(0, content.length() - 1) : content;
    return Splitter.on('\n').splitToList(content);
  }
  /**
   * Returns the manifest of the jar stored at {@code path}, searching all entries rather than
   * just the first, or {@code null} when the jar has no manifest entry.
   */
  @Override
  public Manifest getJarManifest(Path path) throws IOException {
    try (JarInputStream jar = new JarInputStream(newFileInputStream(path))) {
      Manifest result = jar.getManifest();
      if (result != null) {
        return result;
      }
      // JarInputStream will only find the manifest if it's the first entry, but we have code that
      // puts it elsewhere. We must search. Fortunately, this is test code! So we can be slow!
      JarEntry entry;
      while ((entry = jar.getNextJarEntry()) != null) {
        if (JarFile.MANIFEST_NAME.equals(entry.getName())) {
          result = new Manifest();
          result.read(jar);
          return result;
        }
      }
    }
    return null;
  }
/**
* Does not support symlinks.
*/
@Override
public Sha1HashCode computeSha1(Path pathRelativeToProjectRootOrJustAbsolute) throws IOException {
if (!exists(pathRelativeToProjectRootOrJustAbsolute)) {
throw new NoSuchFileException(pathRelativeToProjectRootOrJustAbsolute.toString());
}
// Because this class is a fake, the file contents may not be available as a stream, so we load
// all of the contents into memory as a byte[] and then hash them.
byte[] fileContents = getFileBytes(pathRelativeToProjectRootOrJustAbsolute);
HashCode hashCode = Hashing.sha1().newHasher().putBytes(fileContents).hash();
return Sha1HashCode.fromHashCode(hashCode);
}
  /**
   * Copies a single file (with its recorded attributes) from {@code source} to {@code target}.
   * Directory modes are not implemented.
   *
   * <p>NOTE(review): if {@code source} has no entry in {@code fileAttributes} (e.g. it does not
   * exist), {@code attrs} is null and the FILE branch NPEs — confirm whether callers can hit
   * this.
   */
  @Override
  public void copy(Path source, Path target, CopySourceMode sourceMode) throws IOException {
    Path normalizedSourcePath = MorePaths.normalize(source);
    Path normalizedTargetPath = MorePaths.normalize(target);
    switch (sourceMode) {
      case FILE:
        ImmutableSet<FileAttribute<?>> attrs = fileAttributes.get(normalizedSourcePath);
        writeBytesToPath(
            fileContents.get(normalizedSourcePath),
            normalizedTargetPath,
            attrs.toArray(new FileAttribute[attrs.size()]));
        break;
      case DIRECTORY_CONTENTS_ONLY:
      case DIRECTORY_AND_CONTENTS:
        throw new UnsupportedOperationException();
    }
  }
  /**
   * TODO(natthu): (1) Also traverse the directories. (2) Do not ignore return value of
   * {@code fileVisitor}.
   */
  @Override
  public final void walkRelativeFileTree(
      Path path,
      EnumSet<FileVisitOption> visitOptions,
      FileVisitor<Path> fileVisitor) throws IOException {
    // A non-directory root is visited as a single file and the walk ends.
    if (!isDirectory(path)) {
      fileVisitor.visitFile(path, DEFAULT_FILE_ATTRIBUTES);
      return;
    }
    ImmutableCollection<Path> ents = getDirectoryContents(path);
    for (Path ent : ents) {
      if (!isDirectory(ent)) {
        FileVisitResult result = fileVisitor.visitFile(ent, DEFAULT_FILE_ATTRIBUTES);
        if (result == FileVisitResult.SKIP_SIBLINGS) {
          return;
        }
      } else {
        FileVisitResult result = fileVisitor.preVisitDirectory(ent, DEFAULT_DIR_ATTRIBUTES);
        if (result == FileVisitResult.SKIP_SIBLINGS) {
          return;
        }
        if (result != FileVisitResult.SKIP_SUBTREE) {
          // Recurse into the subdirectory, then signal that it is finished.
          walkRelativeFileTree(ent, fileVisitor);
          fileVisitor.postVisitDirectory(ent, null);
        }
      }
    }
  }
  /** Not implemented by this fake. */
  @Override
  public void copyFolder(Path source, Path target) throws IOException {
    throw new UnsupportedOperationException();
  }
  // Reads source as UTF-8 text and rewrites it at target; a missing source surfaces as a
  // NoSuchElementException from Optional.get().
  @Override
  public void copyFile(Path source, Path target) throws IOException {
    writeContentsToPath(readFileIfItExists(source).get(), target);
  }
  /**
   * Records a symlink from {@code symLink} to {@code realFile}.
   *
   * <p>NOTE(review): the key is stored without normalization, whereas files and directories are
   * keyed by normalized paths elsewhere — callers should pass already-normalized paths.
   *
   * @throws FileAlreadyExistsException if {@code force} is false and a file or directory
   *     already occupies {@code symLink}
   */
  @Override
  public void createSymLink(Path symLink, Path realFile, boolean force) throws IOException {
    if (!force) {
      if (fileContents.containsKey(symLink) || directories.contains(symLink)) {
        throw new FileAlreadyExistsException(symLink.toString());
      }
    } else {
      // Force: clobber whatever currently occupies the link location.
      rmFile(symLink);
      deleteRecursivelyIfExists(symLink);
    }
    symLinks.put(symLink, realFile);
  }
  /** Always reports rw-r--r-- regardless of the path. */
  @Override
  public Set<PosixFilePermission> getPosixFilePermissions(Path path) throws IOException {
    return ImmutableSet.of(
        PosixFilePermission.OWNER_READ,
        PosixFilePermission.OWNER_WRITE,
        PosixFilePermission.GROUP_READ,
        PosixFilePermission.OTHERS_READ);
  }
  // Lookup uses the exact (non-normalized) key stored by createSymLink.
  @Override
  public boolean isSymLink(Path path) {
    return symLinks.containsKey(path);
  }
  /**
   * @throws NotLinkException if no symlink was recorded at {@code path}
   */
  @Override
  public Path readSymLink(Path path) throws IOException {
    Path target = symLinks.get(path);
    if (target == null) {
      throw new NotLinkException(path.toString());
    }
    return target;
  }
  /** Updates the mtime of an existing path, or creates an empty file if nothing exists. */
  @Override
  public void touch(Path fileToTouch) throws IOException {
    if (exists(fileToTouch)) {
      setLastModifiedTime(fileToTouch, FileTime.fromMillis(clock.currentTimeMillis()));
    } else {
      createNewFile(fileToTouch);
    }
  }
@Override
public Path createTempFile(
Path directory,
String prefix,
String suffix,
FileAttribute<?>... attrs) throws IOException {
Path path;
do {
String str = new BigInteger(130, RANDOM).toString(32);
path = directory.resolve(prefix + str + suffix);
} while (exists(path));
touch(path);
return path;
}
@Override
public void move(Path source, Path target, CopyOption... options) throws IOException {
fileContents.put(MorePaths.normalize(target), fileContents.remove(MorePaths.normalize(source)));
fileAttributes.put(MorePaths.normalize(target),
fileAttributes.remove(MorePaths.normalize(source)));
fileLastModifiedTimes.put(MorePaths.normalize(target),
fileLastModifiedTimes.remove(MorePaths.normalize(source)));
}
}
| |
/*
* Copyright (c) 2010-2013 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.provisioning.api;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.SchemaDebugUtil;
import com.evolveum.midpoint.util.DebugDumpable;
import com.evolveum.midpoint.util.DebugUtil;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
/**
* Describes an attempt to apply a change to a specific resource object.
*
* @author Radovan Semancik
*/
public class ResourceOperationDescription implements DebugDumpable {
    private ObjectDelta<? extends ShadowType> objectDelta;      // the attempted change
    private PrismObject<? extends ShadowType> currentShadow;    // target shadow (may be temporary)
    private String sourceChannel;                               // channel that initiated the operation
    private PrismObject<ResourceType> resource;                 // resource the operation ran against
    private OperationResult result;                             // outcome of the operation
    private boolean asynchronous = false;                       // true if no direct return value exists
    private int attemptNumber = 0;                              // retry attempt counter
    /**
     * The operation that was about to execute and that has failed.
     */
    public ObjectDelta<? extends ShadowType> getObjectDelta() {
        return objectDelta;
    }
    public void setObjectDelta(ObjectDelta<? extends ShadowType> objectDelta) {
        this.objectDelta = objectDelta;
    }
    /**
     * Shadow describing the object that was the target of the operation. It may a "temporary" shadow that
     * is not yet bound to a specific resource object (e.g. in case of add operation).
     */
    public PrismObject<? extends ShadowType> getCurrentShadow() {
        return currentShadow;
    }
    public void setCurrentShadow(PrismObject<? extends ShadowType> currentShadow) {
        this.currentShadow = currentShadow;
    }
    /** Channel through which the operation was initiated. */
    public String getSourceChannel() {
        return sourceChannel;
    }
    public void setSourceChannel(String sourceChannel) {
        this.sourceChannel = sourceChannel;
    }
    /** Resource against which the operation was executed. */
    public PrismObject<ResourceType> getResource() {
        return resource;
    }
    public void setResource(PrismObject<ResourceType> resource) {
        this.resource = resource;
    }
    /**
     * Result of the failed operation.
     */
    public OperationResult getResult() {
        return result;
    }
    public void setResult(OperationResult result) {
        this.result = result;
    }
    /**
     * True if the operation is asynchronous. I.e. true if the operation
     * cannot provide direct return value and therefore the invocation of
     * the listener is the only way how to pass operation return value to
     * the upper layers.
     *
     * This may be useful e.g. for decided whether log the message and what
     * log level to use (it can be assumed that the error gets logged at least
     * once for synchronous operations, but this may be the only chance to
     * properly log it for asynchronous operations).
     */
    public boolean isAsynchronous() {
        return asynchronous;
    }
    public void setAsynchronous(boolean asynchronous) {
        this.asynchronous = asynchronous;
    }
    /** Retry attempt number; 0 for the first attempt. */
    public int getAttemptNumber() {
        return attemptNumber;
    }
    public void setAttemptNumber(int attemptNumber) {
        this.attemptNumber = attemptNumber;
    }
    /**
     * Validates internal consistency of this description.
     *
     * @throws IllegalArgumentException if a required component is missing or inconsistent
     */
    public void checkConsistence() {
        if (resource == null) {
            throw new IllegalArgumentException("No resource in "+this.getClass().getSimpleName());
        }
        resource.checkConsistence();
        //FIXME: have not to be set always
//        if (sourceChannel == null) {
//            throw new IllegalArgumentException("No sourceChannel in "+this.getClass().getSimpleName());
//        }
        // At least one of delta and shadow must describe the target object.
        if (objectDelta == null && currentShadow == null) {
            throw new IllegalArgumentException("Either objectDelta or currentShadow must be set in "+this.getClass().getSimpleName());
        }
        // Add deltas legitimately have no OID yet; all other deltas must carry one.
        if (objectDelta != null && !objectDelta.isAdd() && objectDelta.getOid() == null) {
            throw new IllegalArgumentException("Delta OID not set in "+this.getClass().getSimpleName());
        }
        if (objectDelta != null) {
            objectDelta.checkConsistence();
        }
        //shadow does not have oid set, for example the shadow should be added, but it wasn't because of some error
        if (currentShadow != null && currentShadow.getOid() == null && objectDelta != null && !objectDelta.isAdd()) {
            throw new IllegalArgumentException("Current shadow OID not set in "+this.getClass().getSimpleName());
        }
        if (currentShadow != null) {
            currentShadow.checkConsistence();
        }
    }
@Override
public String toString() {
return "ResourceOperationDescription(objectDelta=" + objectDelta + ", currentShadow="
+ SchemaDebugUtil.prettyPrint(currentShadow) + ", sourceChannel=" + sourceChannel
+ ", resource=" + resource +
(asynchronous ? ", ASYNC" : "") +
(attemptNumber != 0 ? ", attemptNumber="+attemptNumber : "") +
", result=" + result + ")";
}
    /**
     * Dumps this description with no indentation.
     *
     * @see com.evolveum.midpoint.util.DebugDumpable#debugDump()
     */
    @Override
    public String debugDump() {
        return debugDump(0);
    }
/* (non-Javadoc)
* @see com.evolveum.midpoint.util.DebugDumpable#debugDump(int)
*/
@Override
public String debugDump(int indent) {
StringBuilder sb = new StringBuilder();
SchemaDebugUtil.indentDebugDump(sb, indent);
sb.append("ResourceOperationDescription(");
sb.append(sourceChannel);
sb.append(")\n");
SchemaDebugUtil.indentDebugDump(sb, indent+1);
sb.append("resource:");
if (resource == null) {
sb.append(" null");
} else {
sb.append(resource);
}
sb.append("\n");
SchemaDebugUtil.indentDebugDump(sb, indent+1);
sb.append("objectDelta:");
if (objectDelta == null) {
sb.append(" null");
} else {
sb.append(objectDelta.debugDump(indent+2));
}
sb.append("\n");
SchemaDebugUtil.indentDebugDump(sb, indent+1);
sb.append("currentShadow:");
if (currentShadow == null) {
sb.append(" null\n");
} else {
sb.append("\n");
sb.append(currentShadow.debugDump(indent+2));
}
sb.append("\n");
DebugUtil.debugDumpLabel(sb, "Asynchronous", indent+1);
sb.append(asynchronous);
sb.append("\n");
DebugUtil.debugDumpLabel(sb, "Attempt number", indent+1);
sb.append(attemptNumber);
sb.append("\n");
SchemaDebugUtil.indentDebugDump(sb, indent+1);
sb.append("result:");
if (result == null) {
sb.append(" null\n");
} else {
sb.append("\n");
sb.append(result.debugDump(indent+2));
}
return sb.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.ha;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.base.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature;
import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
import org.apache.hadoop.hdfs.server.namenode.NNStorage;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
public class TestBootstrapStandby {
  private static final Logger LOG =
      LoggerFactory.getLogger(TestBootstrapStandby.class);
  // Total NNs in the nameservice: one active plus the standby candidates under test.
  private static final int maxNNCount = 3;
  // Base HTTP port; each NN gets STARTING_PORT + index + 1.
  private static final int STARTING_PORT = 20000;
  private MiniDFSCluster cluster;
  // The NameNode that is transitioned to active in setup.
  private NameNode nn0;
  /**
   * Starts an HA mini cluster with {@code maxNNCount} NameNodes and no DataNodes, makes nn0
   * active, then shuts down the remaining NNs so each test can bootstrap them from scratch.
   */
  @Before
  public void setupCluster() throws IOException {
    Configuration conf = new Configuration();
    // duplicate code with MiniQJMHACluster#createDefaultTopology, but don't want to cross
    // dependencies or munge too much code to support it all correctly
    MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf("ns1");
    for (int i = 0; i < maxNNCount; i++) {
      nameservice.addNN(new MiniDFSNNTopology.NNConf("nn" + i).setHttpPort(STARTING_PORT + i + 1));
    }
    MiniDFSNNTopology topology = new MiniDFSNNTopology().addNameservice(nameservice);
    cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(topology)
        .numDataNodes(0)
        .build();
    cluster.waitActive();
    nn0 = cluster.getNameNode(0);
    cluster.transitionToActive(0);
    // shutdown the other NNs
    for (int i = 1; i < maxNNCount; i++) {
      cluster.shutdownNameNode(i);
    }
  }
@After
public void shutdownCluster() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
  /**
   * Test for the base success case. The primary NN
   * hasn't made any checkpoints, and we copy the fsimage_0
   * file over and start up.
   */
  @Test
  public void testSuccessfulBaseCase() throws Exception {
    removeStandbyNameDirs();
    // skip the first NN, its up
    for (int index = 1; index < maxNNCount; index++) {
      // With its name dirs removed, a bare restart must fail...
      try {
        cluster.restartNameNode(index);
        fail("Did not throw");
      } catch (IOException ioe) {
        GenericTestUtils.assertExceptionContains(
            "storage directory does not exist or is not accessible", ioe);
      }
      // ...until bootstrapStandby copies the namespace from the active NN.
      int expectedCheckpointTxId = (int)NameNodeAdapter.getNamesystem(nn0)
          .getFSImage().getMostRecentCheckpointTxId();
      int rc = BootstrapStandby.run(new String[] { "-nonInteractive" },
          cluster.getConfiguration(index));
      assertEquals(0, rc);
      // Should have copied over the namespace from the active
      FSImageTestUtil.assertNNHasCheckpoints(cluster, index,
          ImmutableList.of(expectedCheckpointTxId));
    }
    // We should now be able to start the standbys successfully.
    restartNameNodesFromIndex(1);
  }
  /**
   * Test for downloading a checkpoint made at a later checkpoint
   * from the active.
   */
  @Test
  public void testDownloadingLaterCheckpoint() throws Exception {
    // Roll edit logs a few times to inflate txid
    nn0.getRpcServer().rollEditLog();
    nn0.getRpcServer().rollEditLog();
    // Make checkpoint
    NameNodeAdapter.enterSafeMode(nn0, false);
    NameNodeAdapter.saveNamespace(nn0);
    NameNodeAdapter.leaveSafeMode(nn0);
    long expectedCheckpointTxId = NameNodeAdapter.getNamesystem(nn0)
        .getFSImage().getMostRecentCheckpointTxId();
    assertEquals(6, expectedCheckpointTxId);
    // advance the current txid
    cluster.getFileSystem(0).create(new Path("/test_txid"), (short)1).close();
    // obtain the content of seen_txid
    URI editsUri = cluster.getSharedEditsDir(0, maxNNCount - 1);
    long seen_txid_shared = FSImageTestUtil.getStorageTxId(nn0, editsUri);
    // Bootstrap every standby and verify it picked up the later checkpoint.
    for (int i = 1; i < maxNNCount; i++) {
      assertEquals(0, forceBootstrap(i));
      // Should have copied over the namespace from the active
      LOG.info("Checking namenode: " + i);
      FSImageTestUtil.assertNNHasCheckpoints(cluster, i,
          ImmutableList.of((int) expectedCheckpointTxId));
    }
    FSImageTestUtil.assertNNFilesMatch(cluster);
    // Make sure the seen_txid was not modified by the standby
    assertEquals(seen_txid_shared,
        FSImageTestUtil.getStorageTxId(nn0, editsUri));
    // We should now be able to start the standby successfully.
    restartNameNodesFromIndex(1);
  }
/**
 * Bootstrapping must fail with ERR_CODE_LOGS_UNAVAILABLE when the shared
 * edits dir is missing an edit-log segment the standby would need.
 */
@Test
public void testSharedEditsMissingLogs() throws Exception {
  removeStandbyNameDirs();
  CheckpointSignature sig = nn0.getRpcServer().rollEditLog();
  assertEquals(3, sig.getCurSegmentTxId());
  // Should have created edits_1-2 in shared edits dir.
  URI editsUri = cluster.getSharedEditsDir(0, maxNNCount - 1);
  File editsDir = new File(editsUri);
  File currentDir = new File(editsDir, "current");
  File editsSegment = new File(currentDir,
      NNStorage.getFinalizedEditsFileName(1, 2));
  GenericTestUtils.assertExists(editsSegment);
  GenericTestUtils.assertExists(currentDir);
  // Delete the segment so transactions 1-2 become unreadable.
  assertTrue(editsSegment.delete());
  // Trying to bootstrap standby should now fail since the edit
  // logs aren't available in the shared dir. Capture the tool's log
  // output to verify the reason for the failure.
  LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(
      LoggerFactory.getLogger(BootstrapStandby.class));
  try {
    assertEquals(BootstrapStandby.ERR_CODE_LOGS_UNAVAILABLE, forceBootstrap(1));
  } finally {
    // Always stop capturing, even if the assertion above fails.
    logs.stopCapturing();
  }
  // The failure must be attributed to the missing transactions.
  assertTrue(logs.getOutput().contains(
      "Unable to read transaction ids 1-3 from the configured shared"));
}
/**
 * Bootstrapping must refuse to overwrite existing standby directories
 * unless -force is supplied. Only one NN is exercised here, since the
 * check concerns purely node-local state.
 * @throws Exception on unexpected failure
 */
@Test
public void testStandbyDirsAlreadyExist() throws Exception {
  // Without -force the tool has to bail out: the standby's storage
  // directories are already formatted.
  final String[] nonInteractiveArgs = {"-nonInteractive"};
  final int exitCode =
      BootstrapStandby.run(nonInteractiveArgs, cluster.getConfiguration(1));
  assertEquals(BootstrapStandby.ERR_CODE_ALREADY_FORMATTED, exitCode);
  // Retrying with -force must succeed.
  assertEquals(0, forceBootstrap(1));
}
/**
 * Test that, even if the other node is not active, we are able
 * to bootstrap the standbys from it.
 */
@Test(timeout=30000)
public void testOtherNodeNotActive() throws Exception {
  // Demote NN0 so no node in the cluster is active, then bootstrap.
  cluster.transitionToStandby(0);
  assertSuccessfulBootstrapFromIndex(1);
}
/**
 * Test that bootstrapping standby NN is not limited by
 * {@link DFSConfigKeys#DFS_IMAGE_TRANSFER_RATE_KEY}, but is limited by
 * {@link DFSConfigKeys#DFS_IMAGE_TRANSFER_BOOTSTRAP_STANDBY_RATE_KEY}
 * created by HDFS-8808.
 */
@Test(timeout=30000)
public void testRateThrottling() throws Exception {
  // Phase 1: throttle ordinary image transfers to 1 byte/ms worth of
  // config; this must NOT slow down bootstrapStandby, which has its
  // own rate key.
  cluster.getConfiguration(0).setLong(
      DFSConfigKeys.DFS_IMAGE_TRANSFER_RATE_KEY, 1);
  cluster.restartNameNode(0);
  cluster.waitActive();
  nn0 = cluster.getNameNode(0);
  cluster.transitionToActive(0);
  // Any reasonable test machine should be able to transfer 1 byte per MS
  // (which is ~1K/s)
  final int minXferRatePerMS = 1;
  int imageXferBufferSize = DFSUtilClient.getIoFileBufferSize(
      new Configuration());
  File imageFile = null;
  int dirIdx = 0;
  // Grow the namespace until the checkpoint image is at least one
  // transfer buffer long, so throttling (if wrongly applied to
  // bootstrap) would actually bite.
  while (imageFile == null || imageFile.length() < imageXferBufferSize) {
    for (int i = 0; i < 5; i++) {
      cluster.getFileSystem(0).mkdirs(new Path("/foo" + dirIdx++));
    }
    nn0.getRpcServer().rollEditLog();
    NameNodeAdapter.enterSafeMode(nn0, false);
    NameNodeAdapter.saveNamespace(nn0);
    NameNodeAdapter.leaveSafeMode(nn0);
    imageFile = FSImageTestUtil.findLatestImageFile(FSImageTestUtil
        .getFSImage(nn0).getStorage().getStorageDir(0));
  }
  // Deadline derived from the minimum acceptable transfer rate.
  final int timeOut = (int)(imageFile.length() / minXferRatePerMS) + 1;
  // A very low DFS_IMAGE_TRANSFER_RATE_KEY value won't affect bootstrapping:
  // run the bootstrap in a background thread and expect it to finish
  // within the deadline.
  final AtomicBoolean bootStrapped = new AtomicBoolean(false);
  new Thread(
      new Runnable() {
        @Override
        public void run() {
          try {
            testSuccessfulBaseCase();
            bootStrapped.set(true);
          } catch (Exception e) {
            fail(e.getMessage());
          }
        }
      }
  ).start();
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return bootStrapped.get();
    }
  }, 50, timeOut);
  // Phase 2: rebuild the cluster and throttle the dedicated
  // bootstrap-standby key instead; now bootstrapping must be slowed
  // down enough to miss the same deadline.
  shutdownCluster();
  setupCluster();
  cluster.getConfiguration(0).setLong(
      DFSConfigKeys.DFS_IMAGE_TRANSFER_BOOTSTRAP_STANDBY_RATE_KEY, 1);
  cluster.restartNameNode(0);
  cluster.waitActive();
  nn0 = cluster.getNameNode(0);
  cluster.transitionToActive(0);
  // A very low DFS_IMAGE_TRANSFER_BOOTSTRAP_STANDBY_RATE_KEY value should
  // cause timeout.
  bootStrapped.set(false);
  new Thread(
      new Runnable() {
        @Override
        public void run() {
          try {
            testSuccessfulBaseCase();
            bootStrapped.set(true);
          } catch (Exception e) {
            // Expected here: the throttled bootstrap may be interrupted
            // by cluster teardown; only log it.
            LOG.info(e.getMessage());
          }
        }
      }
  ).start();
  try {
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      public Boolean get() {
        return bootStrapped.get();
      }
    }, 50, timeOut);
    fail("Did not timeout");
  } catch (TimeoutException e) {
    LOG.info("Encountered expected timeout.");
  }
}
/**
 * Wipes the local name directories of every standby NameNode so that
 * bootstrapping starts from empty storage.
 */
private void removeStandbyNameDirs() {
  for (int nnIndex = 1; nnIndex < maxNNCount; nnIndex++) {
    for (URI nameDirUri : cluster.getNameDirs(nnIndex)) {
      // This test setup only ever uses local file:// storage.
      assertTrue(nameDirUri.getScheme().equals("file"));
      final File nameDir = new File(nameDirUri.getPath());
      LOG.info("Removing standby dir " + nameDir);
      assertTrue(FileUtil.fullyDelete(nameDir));
    }
  }
}
/**
 * Restarts NameNodes {@code start} .. {@code maxNNCount - 1} and then
 * waits for the whole cluster to come back up and become active.
 * @param start index of the first NameNode to restart
 * @throws IOException if a restart fails
 */
private void restartNameNodesFromIndex(int start) throws IOException {
  for (int nn = start; nn < maxNNCount; nn++) {
    // Don't wait per-node here; the cluster-wide waits below cover
    // all nodes at once.
    cluster.restartNameNode(nn, false);
  }
  cluster.waitClusterUp();
  cluster.waitActive();
}
/**
 * Runs the BootstrapStandby tool with -force against one namenode.
 * @param i index of the namenode to attempt
 * @return the tool's exit code (0 on success)
 * @throws Exception on unexpected failure
 */
private int forceBootstrap(int i) throws Exception {
  final String[] args = { "-force" };
  return BootstrapStandby.run(args, cluster.getConfiguration(i));
}
/**
 * Asserts that force-bootstrapping succeeds (exit code 0) for every
 * NameNode from {@code start} through the last one.
 */
private void assertSuccessfulBootstrapFromIndex(int start) throws Exception {
  for (int nn = start; nn < maxNNCount; nn++) {
    assertEquals(0, forceBootstrap(nn));
  }
}
}
| |
package hu.relek.solve2048.logic;
import hu.relek.numbertable.ArrayNumberTable;
import hu.relek.numbertable.Coordinates;
import hu.relek.numbertable.NumberTable;
import java.util.ArrayList;
import java.util.List;
/**
 * Game state for a 4x4 board of 2048: holds the number table and the
 * running score, and implements sliding/merging in all four directions.
 * Equality (and therefore {@link #canSlide}) compares the table contents
 * only; the score is not part of equality.
 */
public class Game2048Table implements PlayerInterface, MachineInterface {

    // 4x4 grid; empty cells are represented by null values.
    private NumberTable table = new ArrayNumberTable(4, 4);
    // Total score; grows by the value of every merged tile.
    private int score = 0;

    /** Creates an empty table with score 0. */
    public Game2048Table() {
        //empty
    }

    /** Copy constructor: copies both the grid and the score. */
    public Game2048Table(Game2048Table other) {
        table = new ArrayNumberTable(other.table);
        score = other.score;
    }

    /** Places {@code value} at {@code cell} (machine-player interface). */
    @Override
    public void put(int value, Coordinates cell) {
        table.put(value, cell);
    }

    /**
     * Performs one slide in the given direction: first merges adjacent
     * equal tiles (scanning from the edge the tiles move toward), then
     * compacts all tiles against that edge.
     */
    @Override
    public void slide(Direction dir) {
        //Check for adjacent equal values from the given direction, disregarding empty cells.
        //If found, multiply the former by 2, and set the latter to null.
        for (int i = 0; i < 4; i++) {
            // The label marks the inner loop: "break next" abandons the
            // current row/column and moves on to the next value of i.
            next:
            for (int j = 0; j < 4; j++) {
                Coordinates prevCell = null, curCell = null;
                switch (dir) {
                    case UP:
                        prevCell = table.getFirstFilledInCol(i, j);
                        if (prevCell == null) {
                            break next;
                        }
                        curCell = table.getFirstFilledInCol(i, prevCell.getY() + 1);
                        if (curCell == null) {
                            break next;
                        }
                        // After handling this pair, resume scanning at
                        // curCell (j is incremented by the loop), so a tile
                        // produced by a merge is never merged again within
                        // the same slide.
                        j = curCell.getY() - 1;
                        break;
                    case LEFT:
                        // Same scheme as UP, but along the row (x axis).
                        prevCell = table.getFirstFilledInRow(i, j);
                        if (prevCell == null) {
                            break next;
                        }
                        curCell = table.getFirstFilledInRow(i, prevCell.getX() + 1);
                        if (curCell == null) {
                            break next;
                        }
                        j = curCell.getX() - 1;
                        break;
                    case DOWN:
                        // Mirrored scan: j counts inward from the bottom edge.
                        prevCell = table.getLastFilledInCol(i, 3 - j);
                        if (prevCell == null) {
                            break next;
                        }
                        curCell = table.getLastFilledInCol(i, prevCell.getY() - 1);
                        if (curCell == null) {
                            break next;
                        }
                        j = 3 - curCell.getY() - 1;
                        break;
                    case RIGHT:
                        // Mirrored scan: j counts inward from the right edge.
                        prevCell = table.getLastFilledInRow(i, 3 - j);
                        if (prevCell == null) {
                            break next;
                        }
                        curCell = table.getLastFilledInRow(i, prevCell.getX() - 1);
                        if (curCell == null) {
                            break next;
                        }
                        j = 3 - curCell.getX() - 1;
                        break;
                    default:
                        throw new IllegalArgumentException();
                }
                Integer prev = table.get(prevCell);
                Integer cur = table.get(curCell);
                if (prev.equals(cur)) {
                    // Merge: the tile nearer the target edge doubles, the
                    // other empties; the merged value is added to the score.
                    table.put(prev * 2, prevCell);
                    table.put(null, curCell);
                    score += prev * 2;
                }
            }
        }
        //Slide all current numbers to the given direction
        compact(dir);
    }

    /**
     * Packs the non-empty cells of each row/column flush against the edge
     * the tiles slide toward, preserving their relative order.
     */
    private void compact(Direction dir) {
        for (int i = 0; i < 4; i++) {
            List<Integer> filledOnly = new ArrayList<Integer>();
            // Collect the non-null values of line i in order, clearing
            // each cell as it is collected.
            for (int j = 0; j < 4; j++) {
                Coordinates cell;
                switch (dir) {
                    case LEFT:
                    case RIGHT:
                        cell = new Coordinates(j, i);
                        break;
                    case UP:
                    case DOWN:
                        cell = new Coordinates(i, j);
                        break;
                    default:
                        throw new IllegalArgumentException();
                }
                Integer value = table.get(cell);
                if (value != null) {
                    filledOnly.add(value);
                    table.put(null, cell);
                }
            }
            // Re-place the collected values against the target edge; for
            // RIGHT/DOWN the first value lands at index 4 - size, the last
            // at index 3.
            for (int j = 0; j < filledOnly.size(); j++) {
                switch (dir) {
                    case LEFT:
                        table.put(filledOnly.get(j), new Coordinates(j, i));
                        break;
                    case RIGHT:
                        table.put(filledOnly.get(j), new Coordinates(3 - filledOnly.size() + 1 + j, i));
                        break;
                    case UP:
                        table.put(filledOnly.get(j), new Coordinates(i, j));
                        break;
                    case DOWN:
                        table.put(filledOnly.get(j), new Coordinates(i, 3 - filledOnly.size() + 1 + j));
                        break;
                    default:
                        throw new IllegalArgumentException();
                }
            }
        }
    }

    /**
     * Returns true if sliding in {@code dir} would change the board.
     * Works on a throwaway copy, so the real state is untouched.
     */
    public boolean canSlide(Direction dir) {
        Game2048Table copy = new Game2048Table(this);
        copy.slide(dir);
        return !this.equals(copy);
    }

    /** The game is over when no direction can change the board. */
    public boolean isGameOver() {
        for (Direction dir : Direction.values()) {
            if (canSlide(dir)) {
                return false;
            }
        }
        return true;
    }

    /** Coordinates of all empty cells (candidate spots for a new tile). */
    @Override
    public List<Coordinates> getFreeCells() {
        return table.getFreeCells();
    }

    @Override
    public String toString() {
        return table.toString();
    }

    /**
     * Equality compares the table contents only; the score is ignored,
     * which canSlide() relies on when comparing board states.
     */
    @Override
    public boolean equals(Object obj) {
        if (! (obj instanceof Game2048Table)) {
            return false;
        }
        Game2048Table gameTable = (Game2048Table) obj;
        return table.equals(gameTable.table);
    }

    // Consistent with equals: delegates to the table, ignores the score.
    @Override
    public int hashCode() {
        return table.hashCode();
    }

    /** Returns an independent copy of the current game state. */
    @Override
    public Game2048Table takeSnapshot() {
        return new Game2048Table(this);
    }

    /** Current score accumulated by merges. */
    public int getScore() {
        return score;
    }
}
| |
package zendesk.belvedere;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatDialogFragment;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.FragmentActivity;
import java.util.ArrayList;
import java.util.List;
import zendesk.belvedere.ui.R;
/**
 * This is a {@link DialogFragment} that allows the user to select an image source.
 * <p>
 * Based on the available permissions, this dialog allows the user to select images from a gallery or
 * from a camera app.
 * </p>
 */
public class BelvedereDialog extends AppCompatDialogFragment {

    // Request code used to correlate the permission callback with our request.
    private static final int REQUEST_ID = 1212;
    // Saved-state key for the intent whose permission request is in flight.
    private final static String STATE_WAITING_FOR_PERMISSION = "waiting_for_permission";

    private ListView listView;
    // The MediaIntent we requested a runtime permission for; survives
    // configuration changes via onSaveInstanceState.
    private MediaIntent waitingForPermission;
    private List<MediaIntent> mediaIntents;
    // Records permissions the user permanently denied ("don't ask again").
    private PermissionStorage preferences;

    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.belvedere_dialog, container, false);
        listView = view.findViewById(R.id.belvedere_dialog_listview);
        return view;
    }

    @Override
    public void onCreate(@Nullable final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        preferences = new PermissionStorage(requireContext());
        if (savedInstanceState != null) {
            // Restore an in-flight permission request after recreation.
            waitingForPermission = savedInstanceState.getParcelable(STATE_WAITING_FOR_PERMISSION);
        }
        setStyle(STYLE_NO_TITLE, getTheme());
    }

    @Override
    public void onStart() {
        super.onStart();
        // Recompute the selectable sources each time the dialog becomes
        // visible; permission state may have changed while hidden.
        mediaIntents = getMediaIntents();
        fillList(mediaIntents);
    }

    // Remembers which intent triggered the request, then asks for its
    // runtime permission; the result arrives in onRequestPermissionsResult.
    private void askForPermission(MediaIntent mediaIntent) {
        this.waitingForPermission = mediaIntent;
        requestPermissions(new String[]{mediaIntent.getPermission()}, REQUEST_ID);
    }

    @Override
    public void onRequestPermissionsResult(final int requestCode, @NonNull final String[] permissions, @NonNull final int[] grantResults) {
        // Only handle results for our own request while one is pending.
        if (requestCode == REQUEST_ID && waitingForPermission != null && !TextUtils.isEmpty(waitingForPermission.getPermission())) {
            if (permissions.length > 0 && permissions[0].equals(waitingForPermission.getPermission())) {
                if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                    // Granted: launch the pending intent from the parent
                    // fragment if attached to one, otherwise the activity.
                    if (getParentFragment() != null) {
                        waitingForPermission.open(getParentFragment());
                    } else if (getActivity() != null) {
                        waitingForPermission.open(getActivity());
                    }
                    // State-loss-tolerant dismiss: this runs from an async
                    // callback, possibly after onSaveInstanceState.
                    dismissAllowingStateLoss();
                } else if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_DENIED) {
                    boolean showRationale = shouldShowRequestPermissionRationale(waitingForPermission.getPermission());
                    if (!showRationale) {
                        // "Don't ask again" was selected: persist that fact
                        // and rebuild the list without that source.
                        preferences.neverEverAskForThatPermissionAgain(waitingForPermission.getPermission());
                        mediaIntents = getMediaIntents();
                        fillList(mediaIntents);
                    }
                }
                waitingForPermission = null;
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }

    @Override
    public void onSaveInstanceState(@NonNull final Bundle outState) {
        super.onSaveInstanceState(outState);
        // Keep the pending permission request across recreation.
        outState.putParcelable(STATE_WAITING_FOR_PERMISSION, waitingForPermission);
    }

    // Populates the list using whichever host (parent fragment or activity)
    // is available to launch the chosen intent; dismisses if neither exists.
    private void fillList(final List<MediaIntent> intents) {
        if (getParentFragment() != null) {
            final Fragment parentFragment = getParentFragment();
            fillListView(new StartActivity() {
                @Override
                public void startActivity(final MediaIntent mediaIntent) {
                    mediaIntent.open(parentFragment);
                }
                @Override
                public Context getContext() {
                    return parentFragment.getContext();
                }
            }, intents);
        } else if (getActivity() != null) {
            final FragmentActivity activity = getActivity();
            fillListView(new StartActivity() {
                @Override
                public void startActivity(final MediaIntent mediaIntent) {
                    mediaIntent.open(activity);
                }
                @Override
                public Context getContext() {
                    return activity;
                }
            }, intents);
        } else {
            // No host to launch from; close the dialog if we still can.
            if (isAdded()) {
                dismiss();
            }
        }
    }

    // Binds the intents to the ListView. With no sources the dialog closes
    // itself; with exactly one source it skips the chooser and opens it.
    private void fillListView(final StartActivity activity, final List<MediaIntent> intents) {
        listView.setAdapter(new Adapter(activity.getContext(), R.layout.belvedere_dialog_row, intents));
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(@NonNull final AdapterView<?> parent, @NonNull final View view, final int position, final long id) {
                if (view.getTag() instanceof MediaIntent) {
                    openBelvedereIntent((MediaIntent) view.getTag(), activity);
                }
            }
        });
        if (intents.size() == 0) {
            dismissAllowingStateLoss();
        } else if (intents.size() == 1) {
            openBelvedereIntent(intents.get(0), activity);
        }
    }

    // Launches the intent immediately if it needs no permission; otherwise
    // starts the permission flow and keeps the dialog open until the result.
    private void openBelvedereIntent(MediaIntent belvedereIntent, StartActivity startActivity) {
        if (TextUtils.isEmpty(belvedereIntent.getPermission())) {
            startActivity.startActivity(belvedereIntent);
            dismiss();
        } else {
            askForPermission(belvedereIntent);
        }
    }

    // Filters the configured intents: keeps one if it needs no permission,
    // or the user has not permanently denied its permission, or it reports
    // itself as available.
    private List<MediaIntent> getMediaIntents() {
        List<MediaIntent> intents = BelvedereUi.getUiConfig(requireArguments()).getIntents();
        List<MediaIntent> filter = new ArrayList<>();
        for (MediaIntent belvedereIntent : intents) {
            if (TextUtils.isEmpty(belvedereIntent.getPermission())
                    || !preferences.shouldINeverEverAskForThatPermissionAgain(belvedereIntent.getPermission())
                    || belvedereIntent.isAvailable()) {
                filter.add(belvedereIntent);
            }
        }
        return filter;
    }

    // Renders one row (icon + label) per MediaIntent and stores the intent
    // as the row's tag for the click handler.
    private static class Adapter extends ArrayAdapter<MediaIntent> {

        private Context context;

        Adapter(Context context, int resource, List<MediaIntent> objects) {
            super(context, resource, objects);
            this.context = context;
        }

        @NonNull
        @Override
        public View getView(int position, View convertView, @NonNull ViewGroup parent) {
            View row = convertView;
            if (convertView == null) {
                row = LayoutInflater.from(context).inflate(R.layout.belvedere_dialog_row, parent, false);
            }
            final MediaIntent intent = getItem(position);
            final AttachmentSource item = AttachmentSource.from(intent, context);
            ((ImageView) row.findViewById(R.id.belvedere_dialog_row_image)).setImageDrawable(ContextCompat.getDrawable(context, item.getDrawable()));
            ((TextView) row.findViewById(R.id.belvedere_dialog_row_text)).setText(item.getText());
            // The click listener reads this tag to know which intent to open.
            row.setTag(intent);
            return row;
        }
    }

    // Maps a MediaIntent's target (camera/document) to its icon and label.
    private static class AttachmentSource {

        private final int drawable;
        private final String text;

        public static AttachmentSource from(MediaIntent belvedereIntent, Context context) {
            if (belvedereIntent.getTarget() == MediaIntent.TARGET_CAMERA) {
                return new AttachmentSource(R.drawable.belvedere_ic_camera, context.getString(R.string.belvedere_dialog_camera));
            } else if (belvedereIntent.getTarget() == MediaIntent.TARGET_DOCUMENT) {
                return new AttachmentSource(R.drawable.belvedere_ic_image, context.getString(R.string.belvedere_dialog_gallery));
            } else {
                // Unknown target: sentinel drawable id and empty label.
                return new AttachmentSource(-1, "");
            }
        }

        private AttachmentSource(int drawable, String text) {
            this.drawable = drawable;
            this.text = text;
        }

        public int getDrawable() {
            return drawable;
        }

        public String getText() {
            return text;
        }
    }

    // Abstracts over "who launches the intent": parent fragment or activity.
    private interface StartActivity {
        void startActivity(MediaIntent mediaIntent);
        Context getContext();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.oozie.process;
import org.apache.commons.lang.StringUtils;
import org.apache.falcon.FalconException;
import org.apache.falcon.LifeCycle;
import org.apache.falcon.Tag;
import org.apache.falcon.entity.CatalogStorage;
import org.apache.falcon.entity.EntityUtil;
import org.apache.falcon.entity.FeedHelper;
import org.apache.falcon.entity.ProcessHelper;
import org.apache.falcon.entity.Storage;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.SchemaHelper;
import org.apache.falcon.entity.v0.cluster.Cluster;
import org.apache.falcon.entity.v0.feed.Feed;
import org.apache.falcon.entity.v0.feed.LocationType;
import org.apache.falcon.entity.v0.process.Input;
import org.apache.falcon.entity.v0.process.Output;
import org.apache.falcon.entity.v0.process.Process;
import org.apache.falcon.entity.v0.process.Workflow;
import org.apache.falcon.expression.ExpressionHelper;
import org.apache.falcon.oozie.OozieCoordinatorBuilder;
import org.apache.falcon.oozie.OozieEntityBuilder;
import org.apache.falcon.oozie.OozieOrchestrationWorkflowBuilder;
import org.apache.falcon.oozie.coordinator.CONTROLS;
import org.apache.falcon.oozie.coordinator.COORDINATORAPP;
import org.apache.falcon.oozie.coordinator.DATAIN;
import org.apache.falcon.oozie.coordinator.DATAOUT;
import org.apache.falcon.oozie.coordinator.DATASETS;
import org.apache.falcon.oozie.coordinator.INPUTEVENTS;
import org.apache.falcon.oozie.coordinator.OUTPUTEVENTS;
import org.apache.falcon.oozie.coordinator.SYNCDATASET;
import org.apache.falcon.oozie.coordinator.WORKFLOW;
import org.apache.falcon.workflow.WorkflowExecutionArgs;
import org.apache.falcon.workflow.WorkflowExecutionContext;
import org.apache.hadoop.fs.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
/**
 * Builds the Oozie execution coordinator for a Falcon process: coordinator
 * attributes and controls, input/output dataset and event wiring, and the
 * parent workflow action, marshalled to the coordinator app path.
 */
public class ProcessExecutionCoordinatorBuilder extends OozieCoordinatorBuilder<Process> {

    // Floor for the coordinator timeout when derived from the frequency.
    private static final int THIRTY_MINUTES = 30 * 60 * 1000;

    public ProcessExecutionCoordinatorBuilder(Process entity) {
        super(entity, LifeCycle.EXECUTION);
    }

    /**
     * Assembles the COORDINATORAPP for the given cluster, builds the parent
     * workflow underneath the coordinator path, and marshals the result.
     * Returns the properties describing the marshalled coordinator.
     */
    @Override public List<Properties> buildCoords(Cluster cluster, Path buildPath) throws FalconException {
        String coordName = getEntityName();
        Path coordPath = getBuildPath(buildPath);
        copySharedLibs(cluster, new Path(coordPath, "lib"));
        COORDINATORAPP coord = new COORDINATORAPP();
        // coord attributes
        initializeCoordAttributes(cluster, coord, coordName);
        CONTROLS controls = initializeControls(); // controls
        coord.setControls(controls);
        // Configuration
        Properties props = createCoordDefaultConfiguration(cluster, coordName);
        initializeInputPaths(cluster, coord, props); // inputs
        initializeOutputPaths(cluster, coord, props); // outputs
        Workflow processWorkflow = entity.getWorkflow();
        propagateUserWorkflowProperties(processWorkflow, props);
        // create parent wf
        Properties wfProps = OozieOrchestrationWorkflowBuilder.get(entity, cluster, Tag.DEFAULT).build(cluster,
            coordPath);
        WORKFLOW wf = new WORKFLOW();
        wf.setAppPath(getStoragePath(wfProps.getProperty(OozieEntityBuilder.ENTITY_PATH)));
        // The coordinator's workflow configuration carries both the
        // coordinator props and the workflow-builder props.
        props.putAll(wfProps);
        wf.setConfiguration(getConfig(props));
        // set coord action to parent wf
        org.apache.falcon.oozie.coordinator.ACTION action = new org.apache.falcon.oozie.coordinator.ACTION();
        action.setWorkflow(wf);
        coord.setAction(action);
        Path marshalPath = marshal(cluster, coord, coordPath);
        return Arrays.asList(getProperties(marshalPath, coordName));
    }

    @Override
    protected WorkflowExecutionContext.EntityOperations getOperation() {
        return WorkflowExecutionContext.EntityOperations.GENERATE;
    }

    // Sets name, validity window (from the process's cluster entry),
    // timezone and frequency on the coordinator.
    private void initializeCoordAttributes(Cluster cluster, COORDINATORAPP coord, String coordName) {
        coord.setName(coordName);
        org.apache.falcon.entity.v0.process.Cluster processCluster = ProcessHelper.getCluster(entity,
            cluster.getName());
        coord.setStart(SchemaHelper.formatDateUTC(processCluster.getValidity().getStart()));
        coord.setEnd(SchemaHelper.formatDateUTC(processCluster.getValidity().getEnd()));
        coord.setTimezone(entity.getTimezone().getID());
        coord.setFrequency("${coord:" + entity.getFrequency().toString() + "}");
    }

    /**
     * Builds the coordinator CONTROLS block: concurrency and execution
     * order from the process; timeout from the process timeout if set,
     * otherwise 6x the frequency floored at 30 minutes; throttle is
     * (timeout / frequency) * 2 when that value is positive.
     */
    private CONTROLS initializeControls()
        throws FalconException {
        CONTROLS controls = new CONTROLS();
        controls.setConcurrency(String.valueOf(entity.getParallel()));
        controls.setExecution(entity.getOrder().name());
        Frequency timeout = entity.getTimeout();
        long frequencyInMillis = ExpressionHelper.get().evaluate(entity.getFrequency().toString(), Long.class);
        long timeoutInMillis;
        if (timeout != null) {
            timeoutInMillis = ExpressionHelper.get().
                evaluate(entity.getTimeout().toString(), Long.class);
        } else {
            // Default: six times the frequency, but never below 30 minutes.
            timeoutInMillis = frequencyInMillis * 6;
            if (timeoutInMillis < THIRTY_MINUTES) {
                timeoutInMillis = THIRTY_MINUTES;
            }
        }
        // Oozie expects the timeout in minutes.
        controls.setTimeout(String.valueOf(timeoutInMillis / (1000 * 60)));
        if (timeoutInMillis / frequencyInMillis * 2 > 0) {
            controls.setThrottle(String.valueOf(timeoutInMillis / frequencyInMillis * 2));
        }
        return controls;
    }

    /**
     * Wires the process inputs into the coordinator: a SYNCDATASET plus
     * DATAIN event per non-optional input, per-input EL expressions into
     * props, and the aggregate late-data properties.
     */
    private void initializeInputPaths(Cluster cluster, COORDINATORAPP coord, Properties props) throws FalconException {
        if (entity.getInputs() == null) {
            // No inputs: publish sentinel values so the workflow args exist.
            props.put(WorkflowExecutionArgs.INPUT_FEED_NAMES.getName(), "NONE");
            props.put(WorkflowExecutionArgs.INPUT_FEED_PATHS.getName(), IGNORE);
            props.put(WorkflowExecutionArgs.INPUT_NAMES.getName(), IGNORE);
            return;
        }
        List<String> inputFeeds = new ArrayList<String>();
        List<String> inputNames = new ArrayList<String>();
        List<String> inputPaths = new ArrayList<String>();
        List<String> inputFeedStorageTypes = new ArrayList<String>();
        for (Input input : entity.getInputs().getInputs()) {
            Feed feed = EntityUtil.getEntity(EntityType.FEED, input.getFeed());
            Storage storage = FeedHelper.createStorage(cluster, feed);
            if (!input.isOptional()) {
                if (coord.getDatasets() == null) {
                    coord.setDatasets(new DATASETS());
                }
                if (coord.getInputEvents() == null) {
                    coord.setInputEvents(new INPUTEVENTS());
                }
                SYNCDATASET syncdataset = createDataSet(feed, cluster, storage, input.getName(), LocationType.DATA);
                if (syncdataset == null) {
                    // NOTE(review): this returns from the whole method, so
                    // remaining inputs and the late-data properties are
                    // skipped for this cluster — confirm intended.
                    return;
                }
                coord.getDatasets().getDatasetOrAsyncDataset().add(syncdataset);
                DATAIN datain = createDataIn(input);
                coord.getInputEvents().getDataIn().add(datain);
            }
            String inputExpr = null;
            if (storage.getType() == Storage.TYPE.FILESYSTEM) {
                inputExpr = getELExpression("dataIn('" + input.getName() + "', '" + input.getPartition() + "')");
                props.put(input.getName(), inputExpr);
            } else if (storage.getType() == Storage.TYPE.TABLE) {
                inputExpr = "${coord:dataIn('" + input.getName() + "')}";
                propagateCatalogTableProperties(input, (CatalogStorage) storage, props);
            }
            inputFeeds.add(feed.getName());
            inputPaths.add(inputExpr);
            inputNames.add(input.getName());
            inputFeedStorageTypes.add(storage.getType().name());
        }
        propagateLateDataProperties(inputFeeds, inputNames, inputPaths, inputFeedStorageTypes, props);
    }

    // Publishes '#'-joined feed names/paths/storage types for the late-data
    // handler (the should-record action).
    private void propagateLateDataProperties(List<String> inputFeeds, List<String> inputNames, List<String> inputPaths,
        List<String> inputFeedStorageTypes, Properties props) {
        // populate late data handler - should-record action
        props.put(WorkflowExecutionArgs.INPUT_FEED_NAMES.getName(), StringUtils.join(inputFeeds, '#'));
        props.put(WorkflowExecutionArgs.INPUT_NAMES.getName(), StringUtils.join(inputNames, '#'));
        props.put(WorkflowExecutionArgs.INPUT_FEED_PATHS.getName(), StringUtils.join(inputPaths, '#'));
        // storage type for each corresponding feed sent as a param to LateDataHandler
        // needed to compute usage based on storage type in LateDataHandler
        props.put(WorkflowExecutionArgs.INPUT_STORAGE_TYPES.getName(), StringUtils.join(inputFeedStorageTypes, '#'));
    }

    /**
     * Builds a SYNCDATASET for a feed/location pair; returns null when the
     * storage has no URI template for the requested location type.
     */
    private SYNCDATASET createDataSet(Feed feed, Cluster cluster, Storage storage,
        String datasetName, LocationType locationType) throws FalconException {
        SYNCDATASET syncdataset = new SYNCDATASET();
        syncdataset.setName(datasetName);
        syncdataset.setFrequency("${coord:" + feed.getFrequency().toString() + "}");
        String uriTemplate = storage.getUriTemplate(locationType);
        if (uriTemplate == null) {
            return null;
        }
        if (storage.getType() == Storage.TYPE.TABLE) {
            uriTemplate = uriTemplate.replace("thrift", "hcat"); // Oozie requires this!!!
        }
        syncdataset.setUriTemplate(uriTemplate);
        org.apache.falcon.entity.v0.feed.Cluster feedCluster = FeedHelper.getCluster(feed, cluster.getName());
        syncdataset.setInitialInstance(SchemaHelper.formatDateUTC(feedCluster.getValidity().getStart()));
        syncdataset.setTimezone(feed.getTimezone().getID());
        // An empty done-flag tells Oozie the directory itself signals
        // availability; otherwise use the feed's availability flag file.
        if (feed.getAvailabilityFlag() == null) {
            syncdataset.setDoneFlag("");
        } else {
            syncdataset.setDoneFlag(feed.getAvailabilityFlag());
        }
        return syncdataset;
    }

    // DATAIN event for an input: dataset name matches the input name, with
    // the input's start/end instances as EL expressions.
    private DATAIN createDataIn(Input input) {
        DATAIN datain = new DATAIN();
        datain.setName(input.getName());
        datain.setDataset(input.getName());
        datain.setStartInstance(getELExpression(input.getStart()));
        datain.setEndInstance(getELExpression(input.getEnd()));
        return datain;
    }

    // Wraps an expression as "${expr}"; passes null through unchanged.
    private String getELExpression(String expr) {
        if (expr != null) {
            expr = "${" + expr + "}";
        }
        return expr;
    }

    /**
     * Wires the process outputs into the coordinator: a SYNCDATASET plus
     * DATAOUT event per output, per-output EL expressions into props, and
     * the aggregate output feed name/path properties.
     */
    private void initializeOutputPaths(Cluster cluster, COORDINATORAPP coord, Properties props) throws FalconException {
        if (entity.getOutputs() == null) {
            // No outputs: publish sentinel values so the workflow args exist.
            props.put(WorkflowExecutionArgs.OUTPUT_FEED_NAMES.getName(), "NONE");
            props.put(WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName(), IGNORE);
            return;
        }
        if (coord.getDatasets() == null) {
            coord.setDatasets(new DATASETS());
        }
        if (coord.getOutputEvents() == null) {
            coord.setOutputEvents(new OUTPUTEVENTS());
        }
        List<String> outputFeeds = new ArrayList<String>();
        List<String> outputPaths = new ArrayList<String>();
        for (Output output : entity.getOutputs().getOutputs()) {
            Feed feed = EntityUtil.getEntity(EntityType.FEED, output.getFeed());
            Storage storage = FeedHelper.createStorage(cluster, feed);
            SYNCDATASET syncdataset = createDataSet(feed, cluster, storage, output.getName(), LocationType.DATA);
            if (syncdataset == null) {
                // NOTE(review): as with inputs, this abandons the remaining
                // outputs and the aggregate properties — confirm intended.
                return;
            }
            coord.getDatasets().getDatasetOrAsyncDataset().add(syncdataset);
            DATAOUT dataout = createDataOut(output);
            coord.getOutputEvents().getDataOut().add(dataout);
            String outputExpr = "${coord:dataOut('" + output.getName() + "')}";
            outputFeeds.add(feed.getName());
            outputPaths.add(outputExpr);
            if (storage.getType() == Storage.TYPE.FILESYSTEM) {
                props.put(output.getName(), outputExpr);
                propagateFileSystemProperties(output, feed, cluster, coord, storage, props);
            } else if (storage.getType() == Storage.TYPE.TABLE) {
                propagateCatalogTableProperties(output, (CatalogStorage) storage, props);
            }
        }
        // Output feed name and path for parent workflow
        props.put(WorkflowExecutionArgs.OUTPUT_FEED_NAMES.getName(), StringUtils.join(outputFeeds, ','));
        props.put(WorkflowExecutionArgs.OUTPUT_FEED_PATHS.getName(), StringUtils.join(outputPaths, ','));
    }

    // DATAOUT event for an output: dataset name matches the output name.
    private DATAOUT createDataOut(Output output) {
        DATAOUT dataout = new DATAOUT();
        dataout.setName(output.getName());
        dataout.setDataset(output.getName());
        dataout.setInstance(getELExpression(output.getInstance()));
        return dataout;
    }

    // Adds the auxiliary stats/meta/tmp output events for filesystem feeds.
    private void propagateFileSystemProperties(Output output, Feed feed, Cluster cluster, COORDINATORAPP coord,
        Storage storage, Properties props) throws FalconException {
        // stats and meta paths
        createOutputEvent(output, feed, cluster, LocationType.STATS, coord, props, storage);
        createOutputEvent(output, feed, cluster, LocationType.META, coord, props, storage);
        createOutputEvent(output, feed, cluster, LocationType.TMP, coord, props, storage);
    }

    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
    /**
     * Adds one auxiliary dataset + DATAOUT event named "{output}{loctype}"
     * and publishes its EL expression as prop "{output}.{loctype}".
     * Quietly skips location types the storage has no URI template for.
     */
    private void createOutputEvent(Output output, Feed feed, Cluster cluster, LocationType locType,
        COORDINATORAPP coord, Properties props, Storage storage) throws FalconException {
        String name = output.getName();
        String type = locType.name().toLowerCase();
        SYNCDATASET dataset = createDataSet(feed, cluster, storage, name + type, locType);
        if (dataset == null) {
            return;
        }
        coord.getDatasets().getDatasetOrAsyncDataset().add(dataset);
        DATAOUT dataout = new DATAOUT();
        dataout.setName(name + type);
        dataout.setDataset(name + type);
        dataout.setInstance(getELExpression(output.getInstance()));
        OUTPUTEVENTS outputEvents = coord.getOutputEvents();
        if (outputEvents == null) {
            outputEvents = new OUTPUTEVENTS();
            coord.setOutputEvents(outputEvents);
        }
        outputEvents.getDataOut().add(dataout);
        String outputExpr = "${coord:dataOut('" + name + type + "')}";
        props.put(name + "." + type, outputExpr);
    }
    //RESUME CHECKSTYLE CHECK ParameterNumberCheck

    // Publishes the user workflow's name/version/engine for lineage.
    private void propagateUserWorkflowProperties(Workflow processWorkflow, Properties props) {
        props.put("userWorkflowName", ProcessHelper.getProcessWorkflowName(
            processWorkflow.getName(), entity.getName()));
        props.put("userWorkflowVersion", processWorkflow.getVersion());
        props.put("userWorkflowEngine", processWorkflow.getEngine().value());
    }

    // Adds per-engine partition-filter EL expressions for a catalog input,
    // on top of the common catalog table properties.
    protected void propagateCatalogTableProperties(Input input, CatalogStorage tableStorage, Properties props) {
        String prefix = "falcon_" + input.getName();
        propagateCommonCatalogTableProperties(tableStorage, props, prefix);
        props.put(prefix + "_partition_filter_pig",
            "${coord:dataInPartitionFilter('" + input.getName() + "', 'pig')}");
        props.put(prefix + "_partition_filter_hive",
            "${coord:dataInPartitionFilter('" + input.getName() + "', 'hive')}");
        props.put(prefix + "_partition_filter_java",
            "${coord:dataInPartitionFilter('" + input.getName() + "', 'java')}");
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.