gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package co.jackdalton.cleanchat;
import org.bukkit.ChatColor;
import org.bukkit.Bukkit;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.event.player.PlayerChatEvent;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.event.Listener;
import org.bukkit.entity.Player;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.Random;
import java.util.List;
import java.util.ArrayList;
/**
 * CleanChat — a Bukkit plugin that censors profanity in player chat.
 *
 * Profane messages (matched case-insensitively against a configurable regex) are either
 * cancelled outright or replaced with a random canned message, and a configurable list of
 * staff players is notified. Messages consisting of exactly "lag" can also be hidden.
 *
 * Fixes vs. previous revision:
 *  - addWord inserted new words with substring(4), silently deleting the first character
 *    of the existing word list; it now splices after the 3-character "\b(" prefix.
 *  - onEnable used to call updateConfig() when keys already existed, which overwrote the
 *    persisted settings with the compiled-in defaults on every startup; it now LOADS the
 *    persisted values into memory instead.
 *  - toggle() saved the config without first writing the toggled values into it, so
 *    toggles were lost on restart; it now persists via updateConfig().
 *  - Console use of /cctoggle and /removerecipient no longer throws ClassCastException.
 */
public class cleanchat extends JavaPlugin implements Listener {
    /** Plugin version reported by the /cleanchat command. */
    private String version = "0.2.6";
    /** Message shown to a player whose chat message was filtered. */
    String customMessage = "Do not use profanity.";
    /** Colored prefix prepended to every plugin message. */
    String ccPrefix = ChatColor.GREEN + "[CleanChat]" + ChatColor.WHITE + " : " + ChatColor.BLUE;
    /** When true, profane messages are replaced with a canned message instead of cancelled. */
    Boolean useCustomMessages = false;
    /** When true, messages consisting of exactly "lag" are hidden from global chat. */
    Boolean hideLag = true;
    /** Canned replacement messages; lazily populated by populateReplacements(). */
    String[] replacements = new String[6];
    Boolean replacementsPopulated = false;
    /** Names of players to notify when a message is filtered; mirrored in config under "toNotify". */
    List<String> toNotify = new ArrayList<>();
    /** Shared RNG for picking replacement messages (avoids re-seeding a new Random per call). */
    private final Random random = new Random();
    /** Default profanity pattern written to config on first run; config key "regex" wins afterwards. */
    String baseRegex = "\\b(gay|retard|pussy|vagina|penis|(mother|sister)?fuck(er|ing|ed|s)?|shit(er|ing|ed|s|head)?|(dumb|smart)?ass(hole|hat)?|hell|damn(ed|ing)?|cunt|bitch|fag(got)?|(stank|stink)?dick(head|butt)?|cock(head|sucker)?|nigger|nigga|niglet|bastard|sex(y|ier|iest|ing|ed)?|taint|tit(ty|ties|s)?|kabo|h3ll|dam)\\b";

    /** Historical hard-coded notification list; superseded by the "toNotify" config list. */
    public void initNotifications() {
        //toNotify.add("Stale_Muffins");
        //toNotify.add("Phoenix_TimeLord");
        //toNotify.add("kylerhagler");
    }

    /** Fills the canned replacement messages exactly once. */
    public void populateReplacements() {
        if (!replacementsPopulated) {
            replacements[0] = "I love DHU so much!";
            replacements[1] = "Guys, let's all go vote for DHU with /vote!";
            replacements[2] = "Stale_Muffins is great at making plugins!";
            replacements[3] = "Aren't kylerhagler and Sparkkles great owners?";
            replacements[4] = "Profanity. Never again.";
            replacements[5] = "CleanChat is my favorite plugin.";
            replacementsPopulated = true;
        }
    }

    /**
     * Adds a word to the blacklist regex stored in config.
     * The stored pattern has the shape "\b(word|word|...)\b"; the new word is spliced in
     * immediately after the 3-character "\b(" prefix, keeping the rest of the pattern intact.
     */
    public void addWord(String word) {
        String og = this.getConfig().getString("regex");
        String out = og.substring(0, 3) + word + "|" + og.substring(3);
        this.getConfig().set("regex", out);
        this.saveConfig();
    }

    /** Returns a random canned replacement message. */
    public String getRandomReplacement() {
        populateReplacements();
        return replacements[random.nextInt(replacements.length)];
    }

    /** Returns the active profanity pattern from config. */
    public String getPattern() {
        return this.getConfig().getString("regex");
    }

    /** Returns true when a player with exactly this name is currently online. */
    public boolean checkOnline(String userName) {
        return Bukkit.getPlayerExact(userName) != null;
    }

    /** Re-reads the notification recipient list from config into memory. */
    public void reloadNotifiers() {
        toNotify = this.getConfig().getStringList("toNotify");
    }

    /**
     * Writes a list to the given config path, saves, and refreshes the in-memory notifier list.
     * Parameter stays a raw List for backward compatibility with existing callers.
     */
    public void saveCfg(String pos, List l) {
        this.getConfig().set(pos, l);
        this.saveConfig();
        reloadNotifiers();
    }

    @Override
    public void onEnable() {
        // ChatListener presumably registers the chat event handler and calls back into
        // messageSent(); registration details live in that class (not visible here).
        new ChatListener(this);
        // Write the bundled default config first so the loads below see a real file.
        this.saveDefaultConfig();
        reloadNotifiers();
        // Ensure every key exists, then LOAD the persisted settings into memory.
        // (Previously this branch overwrote saved settings with compiled-in defaults.)
        if (!this.getConfig().contains("useCustomMessages")) {
            this.getConfig().set("useCustomMessages", useCustomMessages);
        }
        if (!this.getConfig().contains("hideLag")) {
            this.getConfig().set("hideLag", hideLag);
        }
        if (!this.getConfig().contains("regex")) {
            this.getConfig().set("regex", baseRegex);
        }
        this.saveConfig();
        useCustomMessages = this.getConfig().getBoolean("useCustomMessages");
        hideLag = this.getConfig().getBoolean("hideLag");
    }

    /** Sends a message to every currently-online notification recipient. */
    public void notifyAll(String message) {
        reloadNotifiers();
        for (String name : toNotify) {
            if (checkOnline(name)) {
                Player recipient = Bukkit.getPlayerExact(name);
                recipient.sendMessage(message);
            }
        }
    }

    /**
     * Chat filter entry point, invoked by ChatListener for each chat message.
     * NOTE(review): PlayerChatEvent is deprecated in modern Bukkit in favor of
     * AsyncPlayerChatEvent; signature kept for compatibility with ChatListener.
     */
    public void messageSent(PlayerChatEvent event) {
        String m = event.getMessage();
        String pattern = getPattern();
        Pattern p = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE);
        Matcher matcher = p.matcher(m);
        boolean found = matcher.find();
        if (found) {
            notifyAll(ccPrefix + event.getPlayer().getDisplayName() + " said: " + m);
            getLogger().info(ChatColor.RED + "Player " + event.getPlayer().getDisplayName() + " said: " + m);
            if (useCustomMessages) {
                // Replace the outgoing message rather than cancelling it.
                event.setMessage(getRandomReplacement());
            } else {
                event.setCancelled(true);
            }
            event.getPlayer().sendMessage(ChatColor.GREEN + "[CleanChat]" + ChatColor.WHITE + " : " + ChatColor.DARK_RED + customMessage);
        }
        if (hideLag) {
            if (m.equalsIgnoreCase("lag")) {
                // Hide "lag" from global chat but echo it back to the sender so the
                // message appears (to them) to have been sent normally.
                event.setCancelled(true);
                event.getPlayer().sendMessage("[" + event.getPlayer().getWorld().getName() + "]" + event.getPlayer().getDisplayName() + ChatColor.GRAY + ": " + m);
                notifyAll(ccPrefix + event.getPlayer().getDisplayName() + " said: " + m);
            }
        }
    }

    @Override
    public void onDisable() {
        // Persist the in-memory recipient list on shutdown.
        saveCfg("toNotify", toNotify);
    }

    /** Appends a notification recipient and persists the list. */
    public void addRecipient(String rec) {
        List<String> og = this.getConfig().getStringList("toNotify");
        og.add(rec);
        saveCfg("toNotify", og);
    }

    /** Removes a notification recipient, or tells the sender the name was not found. */
    public void removeRecipient(String rec, Player sender) {
        List<String> og = this.getConfig().getStringList("toNotify");
        if (og.remove(rec)) {
            saveCfg("toNotify", og);
        } else {
            sender.sendMessage(ccPrefix + "Error: " + rec + " is not a valid notification recipient.");
        }
    }

    /** Persists the current in-memory settings to the config file. */
    public void updateConfig() {
        this.getConfig().set("useCustomMessages", useCustomMessages);
        this.getConfig().set("hideLag", hideLag);
        this.saveConfig();
    }

    /** Toggles a named boolean setting and persists the change. */
    public void toggle(String cfg, Player p) {
        boolean doSave = false;
        if (cfg.equalsIgnoreCase("usecustommessages")) {
            useCustomMessages = !useCustomMessages;
            p.sendMessage(ccPrefix + "useCustomMessages has been toggled from " + !useCustomMessages + " to " + useCustomMessages + ".");
            doSave = true;
        } else if (cfg.equalsIgnoreCase("hidelag")) {
            hideLag = !hideLag;
            p.sendMessage(ccPrefix + "hideLag has been toggled from " + !hideLag + " to " + hideLag + ".");
            doSave = true;
        } else {
            p.sendMessage(ccPrefix + "Error: " + cfg + " is not a recognized setting.");
        }
        if (doSave) {
            // Write the new values into the config before saving; previously only
            // saveConfig() was called, so toggles were lost on restart.
            updateConfig();
        }
    }

    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        if (command.getName().equalsIgnoreCase("cleanchat")) {
            sender.sendMessage(ccPrefix + "CleanChat v" + version + " | By Jack Dalton // Stale_Muffins");
            sender.sendMessage(ccPrefix + "useCustomMessages is currently set to " + Boolean.toString(useCustomMessages));
            sender.sendMessage(ccPrefix + "hideLag is currently set to " + Boolean.toString(hideLag));
            return true;
        }
        if (command.getName().equalsIgnoreCase("addrecipient")) {
            if (args.length == 1) {
                addRecipient(args[0]);
                sender.sendMessage(ccPrefix + "Recipient \"" + args[0] + "\" added.");
            } else {
                sender.sendMessage(ccPrefix + "Error: 1 argument required; " + args.length + " given.");
            }
            return true;
        }
        if (command.getName().equalsIgnoreCase("addword")) {
            if (args.length == 1) {
                addWord(args[0]);
                sender.sendMessage(ccPrefix + args[0] + " added to blacklist.");
            } else {
                sender.sendMessage(ccPrefix + "Error: 1 argument required; " + args.length + " given.");
            }
            return true;
        }
        if (command.getName().equalsIgnoreCase("removeword")) {
            if (args.length == 1) {
                sender.sendMessage(ccPrefix + "This feature hasn't been implemented yet. Go complain to jack@jackdalton.co");
                //sender.sendMessage(ccPrefix + args[0] + " removed from blacklist.");
            } else {
                sender.sendMessage(ccPrefix + "Error: 1 argument required; " + args.length + " given.");
            }
            return true;
        }
        if (command.getName().equalsIgnoreCase("cctoggle")) {
            // Guard the Player cast: console senders previously caused ClassCastException.
            if (!(sender instanceof Player)) {
                sender.sendMessage(ccPrefix + "Error: this command can only be used by a player.");
                return true;
            }
            if (args.length == 1) {
                toggle(args[0], (Player) sender);
            } else {
                sender.sendMessage(ccPrefix + "Error: 1 argument required; " + args.length + " given.");
            }
            return true;
        }
        if (command.getName().equalsIgnoreCase("removerecipient")) {
            if (!(sender instanceof Player)) {
                sender.sendMessage(ccPrefix + "Error: this command can only be used by a player.");
                return true;
            }
            if (args.length == 1) {
                removeRecipient(args[0], (Player) sender);
                sender.sendMessage(ccPrefix + "Recipient \"" + args[0] + "\" removed.");
            } else {
                sender.sendMessage(ccPrefix + "Error: 1 argument required; " + args.length + " given.");
            }
            return true;
        }
        if (command.getName().equalsIgnoreCase("listrecipients")) {
            List<String> recipients = getConfig().getStringList("toNotify");
            for (int i = 0; i < recipients.size(); i++) {
                sender.sendMessage(ccPrefix + "[" + i + "] : " + recipients.get(i));
            }
            return true;
        }
        return false;
    }
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.segment.data;
import com.google.common.primitives.Longs;
import com.metamx.common.guava.CloseQuietly;
import io.druid.segment.CompressedPools;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.LongBuffer;
import java.nio.channels.Channels;
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
@RunWith(Parameterized.class)
@RunWith(Parameterized.class)
public class CompressedLongsIndexedSupplierTest extends CompressionStrategyTest
{
  public CompressedLongsIndexedSupplierTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy)
  {
    super(compressionStrategy);
  }

  // Re-created by each setup* helper; closed before re-setup and in tearDown.
  private IndexedLongs indexed;
  private CompressedLongsIndexedSupplier supplier;
  private long[] vals;

  @Before
  public void setUp() throws Exception
  {
    CloseQuietly.close(indexed);
    indexed = null;
    supplier = null;
    vals = null;
  }

  @After
  public void tearDown() throws Exception
  {
    CloseQuietly.close(indexed);
  }

  /** Builds a supplier directly from a small fixed buffer (no serde round-trip). */
  private void setupSimple(final int chunkSize)
  {
    CloseQuietly.close(indexed);
    vals = new long[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16};
    supplier = CompressedLongsIndexedSupplier.fromLongBuffer(
        LongBuffer.wrap(vals),
        chunkSize,
        ByteOrder.nativeOrder(),
        compressionStrategy
    );
    indexed = supplier.get();
  }

  /** Same fixed buffer as setupSimple, but round-tripped through serialization. */
  private void setupSimpleWithSerde(final int chunkSize) throws IOException
  {
    vals = new long[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16};
    makeWithSerde(chunkSize);
  }

  /**
   * Serializes a supplier built from {@link #vals} and rebuilds it from the bytes,
   * also asserting that getSerializedSize() matches the actual written length.
   */
  private void makeWithSerde(final int chunkSize) throws IOException
  {
    CloseQuietly.close(indexed);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final CompressedLongsIndexedSupplier theSupplier = CompressedLongsIndexedSupplier.fromLongBuffer(
        LongBuffer.wrap(vals), chunkSize, ByteOrder.nativeOrder(), compressionStrategy
    );
    theSupplier.writeToChannel(Channels.newChannel(baos));
    final byte[] bytes = baos.toByteArray();
    Assert.assertEquals(theSupplier.getSerializedSize(), bytes.length);
    supplier = CompressedLongsIndexedSupplier.fromByteBuffer(ByteBuffer.wrap(bytes), ByteOrder.nativeOrder());
    indexed = supplier.get();
  }

  /** Fills {@link #vals} with seeded random longs and round-trips through serde. */
  private void setupLargeChunks(final int chunkSize, final int totalSize) throws IOException
  {
    vals = new long[totalSize];
    Random rand = new Random(0);
    for (int i = 0; i < vals.length; ++i) {
      vals[i] = rand.nextLong();
    }
    makeWithSerde(chunkSize);
  }

  @Test
  public void testSanity() throws Exception
  {
    setupSimple(5);
    Assert.assertEquals(4, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();

    // test powers of 2
    setupSimple(4);
    Assert.assertEquals(4, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();

    setupSimple(32);
    Assert.assertEquals(1, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();
  }

  @Test
  public void testLargeChunks() throws Exception
  {
    final int maxChunkSize = CompressedPools.BUFFER_SIZE / Longs.BYTES;

    setupLargeChunks(maxChunkSize, 10 * maxChunkSize);
    Assert.assertEquals(10, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();

    // one element past an exact multiple forces an extra (partial) chunk
    setupLargeChunks(maxChunkSize, 10 * maxChunkSize + 1);
    Assert.assertEquals(11, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();

    setupLargeChunks(maxChunkSize - 1, 10 * (maxChunkSize - 1) + 1);
    Assert.assertEquals(11, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();
  }

  @Test(expected = IllegalArgumentException.class)
  public void testChunkTooBig() throws Exception
  {
    final int maxChunkSize = CompressedPools.BUFFER_SIZE / Longs.BYTES;
    setupLargeChunks(maxChunkSize + 1, 10 * (maxChunkSize + 1));
  }

  @Test
  public void testBulkFill() throws Exception
  {
    setupSimple(5);

    tryFill(0, 15);
    tryFill(3, 6);
    tryFill(7, 7);
    tryFill(7, 9);
  }

  @Test(expected = IndexOutOfBoundsException.class)
  public void testBulkFillTooMuch() throws Exception
  {
    setupSimple(5);
    tryFill(7, 10);
  }

  @Test
  public void testSanityWithSerde() throws Exception
  {
    setupSimpleWithSerde(5);
    Assert.assertEquals(4, supplier.getBaseLongBuffers().size());
    assertIndexMatchesVals();
  }

  @Test
  public void testBulkFillWithSerde() throws Exception
  {
    setupSimpleWithSerde(5);

    tryFill(0, 15);
    tryFill(3, 6);
    tryFill(7, 7);
    tryFill(7, 9);
  }

  @Test(expected = IndexOutOfBoundsException.class)
  public void testBulkFillTooMuchWithSerde() throws Exception
  {
    setupSimpleWithSerde(5);
    tryFill(7, 10);
  }

  // This test attempts to cause a race condition with the DirectByteBuffers, it's non-deterministic in causing it,
  // which sucks but I can't think of a way to deterministically cause it...
  @Test
  public void testConcurrentThreadReads() throws Exception
  {
    setupSimple(5);

    final AtomicReference<String> reason = new AtomicReference<String>("none");

    final int numRuns = 1000;
    final CountDownLatch startLatch = new CountDownLatch(1);
    final CountDownLatch stopLatch = new CountDownLatch(2);
    final AtomicBoolean failureHappened = new AtomicBoolean(false);

    // Thread 1 reads the shared `indexed` forwards.
    new Thread(new Runnable()
    {
      @Override
      public void run()
      {
        try {
          startLatch.await();
        }
        catch (InterruptedException e) {
          failureHappened.set(true);
          reason.set("interrupt.");
          stopLatch.countDown();
          return;
        }

        try {
          for (int i = 0; i < numRuns; ++i) {
            for (int j = 0; j < indexed.size(); ++j) {
              final long val = vals[j];
              final long indexedVal = indexed.get(j);
              if (Longs.compare(val, indexedVal) != 0) {
                failureHappened.set(true);
                reason.set(String.format("Thread1[%d]: %d != %d", j, val, indexedVal));
                stopLatch.countDown();
                return;
              }
            }
          }
        }
        catch (Exception e) {
          e.printStackTrace();
          failureHappened.set(true);
          reason.set(e.getMessage());
        }

        stopLatch.countDown();
      }
    }).start();

    // Thread 2 reads a second IndexedLongs from the same supplier, backwards.
    final IndexedLongs indexed2 = supplier.get();
    try {
      new Thread(new Runnable()
      {
        @Override
        public void run()
        {
          try {
            startLatch.await();
          }
          catch (InterruptedException e) {
            stopLatch.countDown();
            return;
          }

          try {
            for (int i = 0; i < numRuns; ++i) {
              for (int j = indexed2.size() - 1; j >= 0; --j) {
                final long val = vals[j];
                final long indexedVal = indexed2.get(j);
                if (Longs.compare(val, indexedVal) != 0) {
                  failureHappened.set(true);
                  reason.set(String.format("Thread2[%d]: %d != %d", j, val, indexedVal));
                  stopLatch.countDown();
                  return;
                }
              }
            }
          }
          catch (Exception e) {
            e.printStackTrace();
            reason.set(e.getMessage());
            failureHappened.set(true);
          }

          stopLatch.countDown();
        }
      }).start();

      startLatch.countDown();
      stopLatch.await();
    }
    finally {
      CloseQuietly.close(indexed2);
    }

    if (failureHappened.get()) {
      Assert.fail("Failure happened. Reason: " + reason.get());
    }
  }

  /**
   * Bulk-fills {@code size} values starting at {@code startIndex} and verifies EVERY
   * filled slot against {@link #vals}. The verification loop previously started at
   * {@code startIndex} instead of 0, silently skipping the first {@code startIndex} slots.
   */
  private void tryFill(final int startIndex, final int size)
  {
    long[] filled = new long[size];
    indexed.fill(startIndex, filled);

    for (int i = 0; i < filled.length; i++) {
      Assert.assertEquals(vals[i + startIndex], filled[i]);
    }
  }

  private void assertIndexMatchesVals()
  {
    Assert.assertEquals(vals.length, indexed.size());

    // sequential access; compare as longs — the previous double-with-delta overload
    // loses precision above 2^53, which matters for the random longs in testLargeChunks
    Integer[] indices = new Integer[vals.length];
    for (int i = 0; i < indexed.size(); ++i) {
      Assert.assertEquals(vals[i], indexed.get(i));
      indices[i] = i;
    }

    // Shuffle a boxed array: Arrays.asList over an Integer[] is an element-wise view,
    // so the shuffle really permutes it. (Shuffling Arrays.asList(int[]) produced a
    // one-element List<int[]> and was a no-op.)
    Collections.shuffle(Arrays.asList(indices));

    // random access
    for (int i = 0; i < indexed.size(); ++i) {
      int k = indices[i];
      Assert.assertEquals(vals[k], indexed.get(k));
    }
  }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.actions;
import com.intellij.execution.Location;
import com.intellij.execution.PsiLocation;
import com.intellij.execution.RunManager;
import com.intellij.execution.RunnerAndConfigurationSettings;
import com.intellij.execution.configurations.ConfigurationFactory;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.impl.FakeConfigurationFactory;
import com.intellij.execution.impl.FakeRunConfiguration;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionPlaces;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.extensions.ExtensionPoint;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.testFramework.MapDataContext;
import com.intellij.testFramework.TestActionEvent;
import com.intellij.testFramework.fixtures.BasePlatformTestCase;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import java.util.List;
import java.util.Objects;
/**
 * Tests for {@link ConfigurationContext}: how run configurations are created from a
 * caret/PSI context, how existing configurations are found, and how producer preference
 * ordering affects which configuration wins.
 *
 * Uses a {@code FakeRunConfigurationProducer} registered dynamically on the
 * RunConfigurationProducer extension point, scoped to a disposable so each test cleans up.
 */
public class ConfigurationContextTest extends BasePlatformTestCase {
    public void testBasicExistingConfigurations() {
        myFixture.configureByText(FileTypes.PLAIN_TEXT, "qq<caret>q");
        // With no producers registered there is nothing to match against.
        ConfigurationContext context = ConfigurationContext.getFromContext(createDataContext(), ActionPlaces.UNKNOWN);
        Assert.assertNull(context.findExisting());
        Disposable disposable = Disposer.newDisposable();
        try {
            RunConfigurationProducer.EP_NAME.getPoint().registerExtension(new FakeRunConfigurationProducer(""), disposable);
            List<RunnerAndConfigurationSettings> configs = getConfigurationsFromContext();
            Assert.assertEquals(1, configs.size());
            for (RunnerAndConfigurationSettings config : configs) {
                addConfiguration(config);
            }
            // Once the configuration is registered in RunManager, the same context finds it.
            context = ConfigurationContext.getFromContext(createDataContext(), ActionPlaces.UNKNOWN);
            RunnerAndConfigurationSettings existing = context.findExisting();
            Assert.assertNotNull(existing);
            Assert.assertTrue(existing.getConfiguration() instanceof FakeRunConfiguration);
        }
        finally {
            Disposer.dispose(disposable);
        }
        // After the producer is disposed, no existing configuration should match anymore.
        context = ConfigurationContext.getFromContext(createDataContext(), ActionPlaces.UNKNOWN);
        Assert.assertNull(context.findExisting());
    }

    public void testPreferredExistingConfiguration() {
        myFixture.configureByText(FileTypes.PLAIN_TEXT, "hello,<caret>world");
        @SuppressWarnings("rawtypes")
        ExtensionPoint<RunConfigurationProducer> ep = RunConfigurationProducer.EP_NAME.getPoint();
        ep.registerExtension(new FakeRunConfigurationProducer("hello_"), getTestRootDisposable());
        ep.registerExtension(new FakeRunConfigurationProducer("world_"), getTestRootDisposable());
        // Register both configurations with no preference ordering in effect.
        FakeRunConfigurationProducer.SORTING = SortingMode.NONE;
        List<RunnerAndConfigurationSettings> configs = getConfigurationsFromContext();
        Assert.assertEquals(2, configs.size());
        for (RunnerAndConfigurationSettings config : configs) {
            addConfiguration(config);
        }
        // Ascending name order => "hello_" is preferred and wins findExisting().
        FakeRunConfigurationProducer.SORTING = SortingMode.NAME_ASC;
        ConfigurationContext context = ConfigurationContext.getFromContext(createDataContext(), ActionPlaces.UNKNOWN);
        RunnerAndConfigurationSettings existing = context.findExisting();
        Assert.assertNotNull(existing);
        Assert.assertTrue(existing.getConfiguration().getName().startsWith("hello_"));
        // Descending name order flips the preference to "world_".
        FakeRunConfigurationProducer.SORTING = SortingMode.NAME_DESC;
        context = ConfigurationContext.getFromContext(createDataContext(), ActionPlaces.UNKNOWN);
        existing = context.findExisting();
        Assert.assertNotNull(existing);
        Assert.assertTrue(existing.getConfiguration().getName().startsWith("world_"));
    }

    public void test2ApplicableConfigurationsInContext() {
        myFixture.configureByText(FileTypes.PLAIN_TEXT, "hello,<caret>world");
        @SuppressWarnings("rawtypes")
        ExtensionPoint<RunConfigurationProducer> ep = RunConfigurationProducer.EP_NAME.getPoint();
        // Both producers claim to be preferred; both configurations stay applicable.
        ep.registerExtension(new FakeRunConfigurationProducer("hello_") {
            @Override
            public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) {
                return true;
            }
        }, getTestRootDisposable());
        ep.registerExtension(new FakeRunConfigurationProducer("world_") {
            @Override
            public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) {
                return true;
            }
        }, getTestRootDisposable());
        List<RunnerAndConfigurationSettings> configs = getConfigurationsFromContext();
        Assert.assertEquals(2, configs.size());
        RunnerAndConfigurationSettings configuration = configs.get(0);
        assertEquals("hello_", configuration.getName());
        addConfiguration(configuration);
        // With "hello_" already registered, the "new configuration" action offers "world_".
        DataContext context = createDataContext();
        TestActionEvent event = new TestActionEvent(context);
        new RunNewConfigurationContextAction(DefaultRunExecutor.getRunExecutorInstance()).fullUpdate(event);
        assertTrue(event.getPresentation().isEnabledAndVisible());
        assertEquals("Run 'world_'", event.getPresentation().getText());
        RunnerAndConfigurationSettings configuration1 = configs.get(1);
        assertEquals("world_", configuration1.getName());
        addConfiguration(configuration1);
        // NOTE(review): even with both configurations registered the action still shows
        // "Run 'world_'" — presumably the last applicable one; confirm against the action's
        // selection logic before relying on this.
        DataContext context1 = createDataContext();
        TestActionEvent event1 = new TestActionEvent(context1);
        new RunNewConfigurationContextAction(DefaultRunExecutor.getRunExecutorInstance()).fullUpdate(event1);
        assertTrue(event1.getPresentation().isEnabledAndVisible());
        assertEquals("Run 'world_'", event1.getPresentation().getText());
    }

    // NOTE(review): empty and unused — likely leftover from debugging; consider removing.
    public static void main(String[] args) {
    }

    public void test2ApplicableConfigurationsInContextNestedScopes() {
        PsiFile file = myFixture.configureByText(FileTypes.PLAIN_TEXT, "hello,<caret>world");
        @SuppressWarnings("rawtypes")
        ExtensionPoint<RunConfigurationProducer> ep = RunConfigurationProducer.EP_NAME.getPoint();
        // The "hello_" producer widens its source element to the whole file, so its
        // configuration sits in a broader scope than the caret-element-scoped "world_".
        ep.registerExtension(new FakeRunConfigurationProducer("hello_") {
            @Override
            public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) {
                return true;
            }
            @Override
            protected boolean setupConfigurationFromContext(@NotNull FakeRunConfiguration configuration,
                                                            @NotNull ConfigurationContext context,
                                                            @NotNull Ref<PsiElement> sourceElement) {
                super.setupConfigurationFromContext(configuration, context, sourceElement);
                sourceElement.set(file);
                return true;
            }
        }, getTestRootDisposable());
        ep.registerExtension(new FakeRunConfigurationProducer("world_") {
            @Override
            public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) {
                return true;
            }
        }, getTestRootDisposable());
        List<RunnerAndConfigurationSettings> configs = getConfigurationsFromContext();
        Assert.assertEquals(2, configs.size());
        // The file-scoped "hello_" comes second here (narrower scopes sort first).
        RunnerAndConfigurationSettings configuration = configs.get(1);
        assertEquals("hello_", configuration.getName());
        addConfiguration(configuration);
        DataContext context = createDataContext();
        assertNull(ConfigurationContext.getFromContext(context, ActionPlaces.UNKNOWN).findExisting());
        // "New configuration" action is hidden: the remaining candidate is shadowed.
        TestActionEvent event = new TestActionEvent(context);
        new RunNewConfigurationContextAction(DefaultRunExecutor.getRunExecutorInstance()).fullUpdate(event);
        assertFalse(event.getPresentation().isEnabledAndVisible());
        // The plain context action still offers the narrower-scoped "world_".
        DataContext context1 = createDataContext();
        TestActionEvent event1 = new TestActionEvent(context1);
        new RunContextAction(DefaultRunExecutor.getRunExecutorInstance()).fullUpdate(event1);
        assertTrue(event1.getPresentation().isEnabledAndVisible());
        assertEquals("Run 'world_'", event1.getPresentation().getText());
    }

    /** Builds a DataContext exposing the project and a PSI Location at the caret offset. */
    private @NotNull DataContext createDataContext() {
        MapDataContext dataContext = new MapDataContext();
        dataContext.put(CommonDataKeys.PROJECT, getProject());
        int offset = myFixture.getEditor().getCaretModel().getOffset();
        PsiElement element = Objects.requireNonNull(myFixture.getFile().findElementAt(offset));
        dataContext.put(Location.DATA_KEY, PsiLocation.fromPsiElement(element));
        return dataContext;
    }

    /** Registers a configuration in RunManager and schedules its removal on test teardown. */
    private void addConfiguration(@NotNull RunnerAndConfigurationSettings configuration) {
        Assert.assertTrue(configuration.getConfiguration() instanceof FakeRunConfiguration);
        final RunManager runManager = RunManager.getInstance(getProject());
        runManager.addConfiguration(configuration);
        Disposer.register(getTestRootDisposable(), new Disposable() {
            @Override
            public void dispose() {
                runManager.removeConfiguration(configuration);
            }
        });
    }

    /** Collects the configurations the registered producers create for the caret context. */
    @NotNull
    private List<RunnerAndConfigurationSettings> getConfigurationsFromContext() {
        DataContext dataContext = createDataContext();
        List<ConfigurationFromContext> list = PreferredProducerFind.getConfigurationsFromContext(
            dataContext.getData(Location.DATA_KEY),
            ConfigurationContext.getFromContext(dataContext, ActionPlaces.UNKNOWN),
            false
        );
        return ContainerUtil.map(list, ConfigurationFromContext::getConfigurationSettings);
    }

    /**
     * Test producer that prefixes configuration names and (via the static SORTING knob)
     * prefers configurations by name order in isPreferredConfiguration.
     */
    private static class FakeRunConfigurationProducer extends LazyRunConfigurationProducer<FakeRunConfiguration> {
        // Mutable global knob read by isPreferredConfiguration; set per-test.
        private static SortingMode SORTING = SortingMode.NONE;
        private final String myNamePrefix;
        FakeRunConfigurationProducer(@NotNull String namePrefix) {
            myNamePrefix = namePrefix;
        }
        @Override
        protected boolean setupConfigurationFromContext(@NotNull FakeRunConfiguration configuration,
                                                        @NotNull ConfigurationContext context,
                                                        @NotNull Ref<PsiElement> sourceElement) {
            configuration.setName(myNamePrefix + configuration.getName());
            sourceElement.set(context.getPsiLocation());
            return true;
        }
        @Override
        public boolean isConfigurationFromContext(@NotNull FakeRunConfiguration configuration, @NotNull ConfigurationContext context) {
            // Ownership is determined purely by the name prefix.
            return configuration.getName().startsWith(myNamePrefix);
        }
        @Override
        public boolean isPreferredConfiguration(ConfigurationFromContext self, ConfigurationFromContext other) {
            FakeRunConfiguration selfConfig = ObjectUtils.tryCast(self.getConfiguration(), FakeRunConfiguration.class);
            FakeRunConfiguration otherConfig = ObjectUtils.tryCast(other.getConfiguration(), FakeRunConfiguration.class);
            if (selfConfig == null || otherConfig == null) {
                return false;
            }
            if (SORTING == SortingMode.NAME_ASC) {
                return selfConfig.getName().compareTo(otherConfig.getName()) < 0;
            }
            if (SORTING == SortingMode.NAME_DESC) {
                return selfConfig.getName().compareTo(otherConfig.getName()) > 0;
            }
            return false;
        }
        @NotNull
        @Override
        public ConfigurationFactory getConfigurationFactory() {
            return FakeConfigurationFactory.INSTANCE;
        }
    }

    /** Name-ordering preference applied by FakeRunConfigurationProducer. */
    private enum SortingMode { NAME_ASC, NAME_DESC, NONE }
}
| |
/*
* Copyright (C) 2015-2018 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.cube;
import io.datakernel.aggregation.*;
import io.datakernel.aggregation.AggregationState.ConsolidationDebugInfo;
import io.datakernel.aggregation.fieldtype.FieldType;
import io.datakernel.aggregation.measure.Measure;
import io.datakernel.aggregation.ot.AggregationDiff;
import io.datakernel.aggregation.ot.AggregationStructure;
import io.datakernel.async.function.AsyncSupplier;
import io.datakernel.async.process.AsyncCollector;
import io.datakernel.codegen.*;
import io.datakernel.common.Initializable;
import io.datakernel.common.ref.Ref;
import io.datakernel.cube.CubeQuery.Ordering;
import io.datakernel.cube.asm.MeasuresFunction;
import io.datakernel.cube.asm.RecordFunction;
import io.datakernel.cube.asm.TotalsFunction;
import io.datakernel.cube.attributes.AttributeResolver;
import io.datakernel.cube.ot.CubeDiff;
import io.datakernel.datastream.StreamConsumer;
import io.datakernel.datastream.StreamConsumerWithResult;
import io.datakernel.datastream.StreamDataAcceptor;
import io.datakernel.datastream.StreamSupplier;
import io.datakernel.datastream.processor.StreamFilter;
import io.datakernel.datastream.processor.StreamMapper;
import io.datakernel.datastream.processor.StreamReducer;
import io.datakernel.datastream.processor.StreamReducers.Reducer;
import io.datakernel.datastream.processor.StreamSplitter;
import io.datakernel.etl.LogDataConsumer;
import io.datakernel.eventloop.Eventloop;
import io.datakernel.eventloop.jmx.EventloopJmxBeanEx;
import io.datakernel.jmx.api.attribute.JmxAttribute;
import io.datakernel.jmx.stats.ValueStats;
import io.datakernel.ot.OTState;
import io.datakernel.promise.Promise;
import io.datakernel.promise.Promises;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Type;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.time.Duration;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.Executor;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static io.datakernel.aggregation.Utils.*;
import static io.datakernel.codegen.ExpressionComparator.leftProperty;
import static io.datakernel.codegen.ExpressionComparator.rightProperty;
import static io.datakernel.codegen.Expressions.*;
import static io.datakernel.codegen.utils.Primitives.isWrapperType;
import static io.datakernel.common.Preconditions.checkArgument;
import static io.datakernel.common.Preconditions.checkState;
import static io.datakernel.common.Utils.of;
import static io.datakernel.common.collection.CollectionUtils.entriesToMap;
import static io.datakernel.common.collection.CollectionUtils.keysToMap;
import static io.datakernel.cube.Utils.createResultClass;
import static java.lang.Math.min;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static java.util.Collections.sort;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
/**
* Represents an OLAP cube. Provides methods for loading and querying data.
* Also provides functionality for managing aggregations.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public final class Cube implements ICube, OTState<CubeDiff>, Initializable<Cube>, EventloopJmxBeanEx {
private static final Logger logger = LoggerFactory.getLogger(Cube.class);
public static final int DEFAULT_OVERLAPPING_CHUNKS_THRESHOLD = 300;
private final Eventloop eventloop;
private final Executor executor;
private final DefiningClassLoader classLoader;
private final AggregationChunkStorage aggregationChunkStorage;
private Path temporarySortDir;
private final Map<String, FieldType> fieldTypes = new LinkedHashMap<>();
private final Map<String, FieldType> dimensionTypes = new LinkedHashMap<>();
private final Map<String, Measure> measures = new LinkedHashMap<>();
private final Map<String, ComputedMeasure> computedMeasures = new LinkedHashMap<>();
/**
 * Pairs an {@link AttributeResolver} with the dimension names it resolves over.
 * The {@code attributes} list starts empty and is filled in by outer-class code
 * (not visible in this chunk) as attributes are registered against this resolver.
 */
private static final class AttributeResolverContainer {
    private final List<String> attributes = new ArrayList<>();
    private final List<String> dimensions;
    private final AttributeResolver resolver;

    private AttributeResolverContainer(List<String> dimensions, AttributeResolver resolver) {
        this.dimensions = dimensions;
        this.resolver = resolver;
    }
}
// Registered attribute resolvers and per-attribute lookup tables.
private final List<AttributeResolverContainer> attributeResolvers = new ArrayList<>();
private final Map<String, Class<?>> attributeTypes = new LinkedHashMap<>();
private final Map<String, AttributeResolverContainer> attributes = new LinkedHashMap<>();
// child dimension -> parent dimension; drives getAllParents().
private final Map<String, String> childParentRelations = new LinkedHashMap<>();
// settings
// Cube-wide defaults, taken from Aggregation; per-aggregation overrides come from AggregationConfig.
private int aggregationsChunkSize = Aggregation.DEFAULT_CHUNK_SIZE;
private int aggregationsReducerBufferSize = Aggregation.DEFAULT_REDUCER_BUFFER_SIZE;
private int aggregationsSorterItemsInMemory = Aggregation.DEFAULT_SORTER_ITEMS_IN_MEMORY;
private int aggregationsMaxChunksToConsolidate = Aggregation.DEFAULT_MAX_CHUNKS_TO_CONSOLIDATE;
private boolean aggregationsIgnoreChunkReadingExceptions = false;
private int maxOverlappingChunksToProcessLogs = Cube.DEFAULT_OVERLAPPING_CHUNKS_THRESHOLD;
private Duration maxIncrementalReloadPeriod = Aggregation.DEFAULT_MAX_INCREMENTAL_RELOAD_PERIOD;
/**
 * Binds an {@link Aggregation} to the measures it stores and the predicate
 * describing which slice of the data it covers.
 */
static final class AggregationContainer {
private final Aggregation aggregation;
private final List<String> measures;
private final AggregationPredicate predicate;
private AggregationContainer(Aggregation aggregation, List<String> measures, AggregationPredicate predicate) {
this.aggregation = aggregation;
this.measures = measures;
this.predicate = predicate;
}
@Override
public String toString() {
return aggregation.toString();
}
}
// state
// Aggregations by id; LinkedHashMap preserves registration order.
private final Map<String, AggregationContainer> aggregations = new LinkedHashMap<>();
// Optional cache of per-query class loaders; null means this.classLoader is always used.
private CubeClassLoaderCache classLoaderCache;
// JMX
private final AggregationStats aggregationStats = new AggregationStats();
private final ValueStats queryTimes = ValueStats.create(Duration.ofMinutes(10));
private long queryErrors;
private Throwable queryLastError;
// Package-private: instances are created via the create() static factory.
Cube(Eventloop eventloop, Executor executor, DefiningClassLoader classLoader,
AggregationChunkStorage aggregationChunkStorage) {
this.eventloop = eventloop;
this.executor = executor;
this.classLoader = classLoader;
this.aggregationChunkStorage = aggregationChunkStorage;
}
/**
 * Static factory for a {@link Cube} with no dimensions, measures or aggregations;
 * configure the instance further with the {@code with*} builder methods.
 */
public static Cube create(@NotNull Eventloop eventloop, @NotNull Executor executor, @NotNull DefiningClassLoader classLoader,
@NotNull AggregationChunkStorage aggregationChunkStorage) {
return new Cube(eventloop, executor, classLoader, aggregationChunkStorage);
}
/**
 * Registers an attribute resolved via the given {@link AttributeResolver}.
 * The identifier must have the form {@code "dimension.attributeName"}; the
 * resolver's key types must match the internal types of the dimension's
 * full parent chain.
 *
 * @param attribute identifier in "dimension.name" form
 * @param resolver  resolver that supplies values for this attribute
 * @return this cube, for chaining
 */
public Cube withAttribute(String attribute, AttributeResolver resolver) {
	checkArgument(!attributes.containsKey(attribute), "Attribute %s has already been defined", attribute);
	int dotIndex = attribute.indexOf('.');
	if (dotIndex == -1)
		throw new IllegalArgumentException("Attribute identifier is not split into name and dimension");
	String dimension = attribute.substring(0, dotIndex);
	String attributeName = attribute.substring(dotIndex + 1);
	checkArgument(resolver.getAttributeTypes().containsKey(attributeName), "Resolver does not support %s", attribute);
	checkArgument(!isWrapperType(resolver.getAttributeTypes().get(attributeName)), "Unsupported attribute type for %s", attribute);
	// The resolver is keyed by the dimension's full parent chain, root first.
	List<String> keyDimensions = getAllParents(dimension);
	checkArgument(keyDimensions.size() == resolver.getKeyTypes().length, "Parent dimensions: %s, key types: %s", keyDimensions, asList(resolver.getKeyTypes()));
	for (int i = 0; i < keyDimensions.size(); i++) {
		String keyDimension = keyDimensions.get(i);
		checkArgument(((Class<?>) dimensionTypes.get(keyDimension).getInternalDataType()).equals(resolver.getKeyTypes()[i]), "Dimension type mismatch for %s", keyDimension);
	}
	// Reuse the existing container when this resolver was registered before (identity match).
	AttributeResolverContainer container = attributeResolvers.stream()
			.filter(c -> c.resolver == resolver)
			.findFirst()
			.orElse(null);
	if (container == null) {
		container = new AttributeResolverContainer(keyDimensions, resolver);
		attributeResolvers.add(container);
	}
	container.attributes.add(attribute);
	attributes.put(attribute, container);
	attributeTypes.put(attribute, resolver.getAttributeTypes().get(attributeName));
	return this;
}
// Sets the cache used to reuse generated per-query class loaders (builder-style).
public Cube withClassLoaderCache(CubeClassLoaderCache classLoaderCache) {
this.classLoaderCache = classLoaderCache;
return this;
}
// Registers a dimension (builder-style); see addDimension for the constraints.
public Cube withDimension(String dimensionId, FieldType type) {
addDimension(dimensionId, type);
return this;
}
// Registers a stored measure (builder-style); see addMeasure for the constraints.
public Cube withMeasure(String measureId, Measure measure) {
addMeasure(measureId, measure);
return this;
}
// Registers a measure derived from stored measures (builder-style).
public Cube withComputedMeasure(String measureId, ComputedMeasure computedMeasure) {
addComputedMeasure(measureId, computedMeasure);
return this;
}
// Declares a child->parent dimension relation (builder-style).
public Cube withRelation(String child, String parent) {
addRelation(child, parent);
return this;
}
// Sets the directory used for external sorting during aggregation (builder-style).
public Cube withTemporarySortDir(Path temporarySortDir) {
this.temporarySortDir = temporarySortDir;
return this;
}
/**
 * Builder-style configuration for a single aggregation: its dimensions, stored
 * measures, covering predicate, partitioning key and tuning parameters.
 * Numeric tuning values left at 0 mean "unset"; {@code Cube.addAggregation}
 * falls back to the cube-wide defaults for those.
 */
public static final class AggregationConfig implements Initializable<AggregationConfig> {
private final String id;
private final List<String> dimensions = new ArrayList<>();
private final List<String> measures = new ArrayList<>();
private AggregationPredicate predicate = AggregationPredicates.alwaysTrue();
private final List<String> partitioningKey = new ArrayList<>();
// 0 means "unset" for each of the following four tuning parameters.
private int chunkSize;
private int reducerBufferSize;
private int sorterItemsInMemory;
private int maxChunksToConsolidate;
public AggregationConfig(String id) {
this.id = id;
}
public String getId() {
return id;
}
// Static-factory alias for the constructor, reads well in fluent configuration.
public static AggregationConfig id(String id) {
return new AggregationConfig(id);
}
public AggregationConfig withDimensions(Collection<String> dimensions) {
this.dimensions.addAll(dimensions);
return this;
}
public AggregationConfig withDimensions(String... dimensions) {
return withDimensions(asList(dimensions));
}
public AggregationConfig withMeasures(Collection<String> measures) {
this.measures.addAll(measures);
return this;
}
public AggregationConfig withMeasures(String... measures) {
return withMeasures(asList(measures));
}
public AggregationConfig withPredicate(AggregationPredicate predicate) {
this.predicate = predicate;
return this;
}
public AggregationConfig withPartitioningKey(List<String> partitioningKey) {
this.partitioningKey.addAll(partitioningKey);
return this;
}
public AggregationConfig withPartitioningKey(String... partitioningKey) {
this.partitioningKey.addAll(asList(partitioningKey));
return this;
}
public AggregationConfig withChunkSize(int chunkSize) {
this.chunkSize = chunkSize;
return this;
}
public AggregationConfig withReducerBufferSize(int reducerBufferSize) {
this.reducerBufferSize = reducerBufferSize;
return this;
}
public AggregationConfig withSorterItemsInMemory(int sorterItemsInMemory) {
this.sorterItemsInMemory = sorterItemsInMemory;
return this;
}
public AggregationConfig withMaxChunksToConsolidate(int maxChunksToConsolidate) {
this.maxChunksToConsolidate = maxChunksToConsolidate;
return this;
}
}
// Adds an aggregation from the given config (builder-style); see addAggregation.
public Cube withAggregation(AggregationConfig aggregationConfig) {
addAggregation(aggregationConfig);
return this;
}
// Keeps only the map entries whose key satisfies the given predicate.
private static <K, V> Stream<Entry<K, V>> filterEntryKeys(Stream<Entry<K, V>> stream, Predicate<K> predicate) {
return stream.filter(entry -> predicate.test(entry.getKey()));
}
/**
 * Registers a stored measure. Must be called before any aggregation is added,
 * since aggregations capture measure definitions at creation time.
 */
public void addMeasure(String measureId, Measure measure) {
checkState(aggregations.isEmpty(), "Cannot add measure while aggregations are present");
measures.put(measureId, measure);
fieldTypes.put(measureId, measure.getFieldType());
}
/**
 * Registers a measure computed from stored measures. Must be called before any
 * aggregation is added.
 */
public void addComputedMeasure(String measureId, ComputedMeasure computedMeasure) {
checkState(aggregations.isEmpty(), "Cannot add computed measure while aggregations are present");
computedMeasures.put(measureId, computedMeasure);
}
// Declares that 'child' is nested under 'parent'; used by getAllParents() to
// expand a dimension into its full chain.
public void addRelation(String child, String parent) {
childParentRelations.put(child, parent);
}
/**
 * Registers a dimension. Must be called before any aggregation is added,
 * since aggregations capture dimension types at creation time.
 */
public void addDimension(String dimensionId, FieldType type) {
checkState(aggregations.isEmpty(), "Cannot add dimension while aggregations are present");
dimensionTypes.put(dimensionId, type);
fieldTypes.put(dimensionId, type);
}
/**
 * Creates and registers an {@link Aggregation} from the given config.
 * Dimension and measure ids referenced by the config must already be registered.
 * Measures not stored by this aggregation are recorded as "ignored" so their
 * input fields can be skipped during consumption.
 *
 * @return this cube, for chaining
 */
public Cube addAggregation(AggregationConfig config) {
checkArgument(!aggregations.containsKey(config.id), "Aggregation '%s' is already defined", config.id);
AggregationStructure structure = AggregationStructure.create(ChunkIdCodec.ofLong())
.initialize(s -> config.dimensions.forEach(dimensionId ->
s.withKey(dimensionId, dimensionTypes.get(dimensionId))))
.initialize(s -> config.measures.forEach(measureId ->
s.withMeasure(measureId, measures.get(measureId))))
.initialize(s -> measures.forEach((measureId, measure) -> {
if (!config.measures.contains(measureId)) {
s.withIgnoredMeasure(measureId, measure.getFieldType());
}
}))
.withPartitioningKey(config.partitioningKey);
// Config values of 0 mean "unset": fall back to the cube-wide defaults.
Aggregation aggregation = Aggregation.create(eventloop, executor, classLoader, aggregationChunkStorage, structure)
.withTemporarySortDir(temporarySortDir)
.withChunkSize(config.chunkSize != 0 ? config.chunkSize : aggregationsChunkSize)
.withReducerBufferSize(config.reducerBufferSize != 0 ? config.reducerBufferSize : aggregationsReducerBufferSize)
.withSorterItemsInMemory(config.sorterItemsInMemory != 0 ? config.sorterItemsInMemory : aggregationsSorterItemsInMemory)
.withMaxChunksToConsolidate(config.maxChunksToConsolidate != 0 ? config.maxChunksToConsolidate : aggregationsMaxChunksToConsolidate)
.withIgnoreChunkReadingExceptions(aggregationsIgnoreChunkReadingExceptions)
.withStats(aggregationStats);
aggregations.put(config.id, new AggregationContainer(aggregation, config.measures, config.predicate));
logger.info("Added aggregation {} for id '{}'", aggregation, config.id);
return this;
}
/**
 * Returns the internal (storage) type of a dimension or attribute.
 *
 * @throws IllegalArgumentException if the name is neither a dimension nor an attribute
 */
@NotNull
public Class<?> getAttributeInternalType(String attribute) {
if (dimensionTypes.containsKey(attribute))
return dimensionTypes.get(attribute).getInternalDataType();
if (attributeTypes.containsKey(attribute))
return attributeTypes.get(attribute);
throw new IllegalArgumentException("No attribute: " + attribute);
}
/**
 * Returns the internal (storage) type of a stored or computed measure.
 *
 * @throws IllegalArgumentException if the field is not a known measure
 */
@NotNull
public Class<?> getMeasureInternalType(String field) {
if (measures.containsKey(field))
return measures.get(field).getFieldType().getInternalDataType();
if (computedMeasures.containsKey(field))
return computedMeasures.get(field).getType(measures);
throw new IllegalArgumentException("No measure: " + field);
}
/**
 * Returns the public (query-result) type of a dimension or attribute.
 *
 * @throws IllegalArgumentException if the name is neither a dimension nor an attribute
 */
@NotNull
public Type getAttributeType(String attribute) {
if (dimensionTypes.containsKey(attribute))
return dimensionTypes.get(attribute).getDataType();
if (attributeTypes.containsKey(attribute))
return attributeTypes.get(attribute);
throw new IllegalArgumentException("No attribute: " + attribute);
}
/**
 * Returns the public (query-result) type of a stored or computed measure.
 *
 * @throws IllegalArgumentException if the field is not a known measure
 */
@NotNull
public Type getMeasureType(String field) {
if (measures.containsKey(field))
return measures.get(field).getFieldType().getDataType();
if (computedMeasures.containsKey(field))
return computedMeasures.get(field).getType(measures);
throw new IllegalArgumentException("No measure: " + field);
}
/**
 * Returns the public types of all dimensions and attributes, in declaration
 * order (dimensions first, then resolver-backed attributes).
 *
 * @return map from dimension/attribute name to its data type
 */
@Override
public Map<String, Type> getAttributeTypes() {
	Map<String, Type> result = new LinkedHashMap<>();
	// Iterate entries directly instead of keySet()+get() to avoid a second lookup per key.
	for (Entry<String, FieldType> entry : dimensionTypes.entrySet()) {
		result.put(entry.getKey(), entry.getValue().getDataType());
	}
	// Class<?> implements Type, so attribute types can be copied as-is.
	result.putAll(attributeTypes);
	return result;
}
/**
 * Returns the public types of all measures, stored first and computed second,
 * each group in declaration order.
 *
 * @return map from measure name to its data type
 */
@Override
public Map<String, Type> getMeasureTypes() {
	Map<String, Type> result = new LinkedHashMap<>();
	// Iterate entries directly instead of keySet()+get() to avoid a second lookup per key.
	for (Entry<String, Measure> entry : measures.entrySet()) {
		result.put(entry.getKey(), entry.getValue().getFieldType().getDataType());
	}
	for (Entry<String, ComputedMeasure> entry : computedMeasures.entrySet()) {
		result.put(entry.getKey(), entry.getValue().getType(measures));
	}
	return result;
}
// Returns the aggregation registered under the given id.
// NOTE(review): an unknown id throws NullPointerException rather than a
// descriptive exception — callers appear to assume the id exists; confirm before changing.
public Aggregation getAggregation(String aggregationId) {
return aggregations.get(aggregationId).aggregation;
}
/**
 * Returns the ids of all registered aggregations, in registration order.
 * The set is an unmodifiable view: previously the live, mutable keySet of the
 * internal map was exposed, allowing callers to corrupt cube state by mutating it.
 */
public Set<String> getAggregationIds() {
	return Collections.unmodifiableSet(aggregations.keySet());
}
// OTState: resets every aggregation's state to its initial (empty) form.
@Override
public void init() {
for (AggregationContainer container : aggregations.values()) {
container.aggregation.getState().init();
}
}
// OTState: applies a cube diff by dispatching each per-aggregation diff to the
// corresponding aggregation's state.
@Override
public void apply(CubeDiff op) {
for (String aggregationId : op.keySet()) {
AggregationDiff aggregationDiff = op.get(aggregationId);
aggregations.get(aggregationId).aggregation.getState().apply(aggregationDiff);
}
}
// Log-stream consumer with no additional input filtering; fields are scanned
// from annotations on the input class.
public <T> LogDataConsumer<T, CubeDiff> logStreamConsumer(Class<T> inputClass) {
return logStreamConsumer(inputClass, AggregationPredicates.alwaysTrue());
}
// Log-stream consumer with an input-filtering predicate; fields are scanned
// from annotations on the input class.
public <T> LogDataConsumer<T, CubeDiff> logStreamConsumer(Class<T> inputClass,
AggregationPredicate predicate) {
return logStreamConsumer(inputClass, scanKeyFields(inputClass), scanMeasureFields(inputClass), predicate);
}
// Log-stream consumer with explicit field mappings and no additional filtering.
public <T> LogDataConsumer<T, CubeDiff> logStreamConsumer(Class<T> inputClass, Map<String, String> dimensionFields, Map<String, String> measureFields) {
return logStreamConsumer(inputClass, dimensionFields, measureFields, AggregationPredicates.alwaysTrue());
}
// Most general log-stream consumer: explicit field mappings plus a filtering
// predicate; wraps consume() and packs its single diff into a list.
public <T> LogDataConsumer<T, CubeDiff> logStreamConsumer(Class<T> inputClass, Map<String, String> dimensionFields, Map<String, String> measureFields,
AggregationPredicate predicate) {
return () -> consume(inputClass, dimensionFields, measureFields, predicate)
.transformResult(result -> result.map(Collections::singletonList));
}
// Consumer with no additional input filtering; fields scanned from the input class.
public <T> StreamConsumerWithResult<T, CubeDiff> consume(Class<T> inputClass) {
return consume(inputClass, AggregationPredicates.alwaysTrue());
}
// Consumer with a filtering predicate; fields scanned from the input class.
public <T> StreamConsumerWithResult<T, CubeDiff> consume(Class<T> inputClass, AggregationPredicate predicate) {
return consume(inputClass, scanKeyFields(inputClass), scanMeasureFields(inputClass), predicate);
}
/**
 * Provides a {@link StreamConsumer} for streaming data to this cube.
 * The returned {@link StreamConsumer} writes to {@link Aggregation}'s chosen using the specified dimensions, measures and input class.
 *
 * @param inputClass      class of input records
 * @param dimensionFields mapping from dimension name to input-record field name
 * @param measureFields   mapping from measure name to input-record field name
 * @param dataPredicate   predicate restricting which records the aggregations accept
 * @param <T>             data records type
 * @return consumer for streaming data to cube
 * @throws IllegalArgumentException if no aggregation is compatible with the given fields
 */
public <T> StreamConsumerWithResult<T, CubeDiff> consume(Class<T> inputClass, Map<String, String> dimensionFields, Map<String, String> measureFields,
		AggregationPredicate dataPredicate) {
	logger.info("Started consuming data. Dimensions: {}. Measures: {}", dimensionFields.keySet(), measureFields.keySet());
	// Broadcast every incoming item to all compatible aggregations.
	StreamSplitter<T, T> streamSplitter = StreamSplitter.create((item, acceptors) -> {
		for (StreamDataAcceptor<T> acceptor : acceptors) {
			acceptor.accept(item);
		}
	});
	AsyncCollector<Map<String, AggregationDiff>> diffsCollector = AsyncCollector.create(new HashMap<>());
	Map<String, AggregationPredicate> compatibleAggregations = getCompatibleAggregationsForDataInput(dimensionFields, measureFields, dataPredicate);
	if (compatibleAggregations.isEmpty()) {
		throw new IllegalArgumentException(format("No compatible aggregation for " +
				"dimensions fields: %s, measureFields: %s", dimensionFields, measureFields));
	}
	for (Entry<String, AggregationPredicate> aggregationToDataInputFilterPredicate : compatibleAggregations.entrySet()) {
		String aggregationId = aggregationToDataInputFilterPredicate.getKey();
		AggregationContainer aggregationContainer = aggregations.get(aggregationToDataInputFilterPredicate.getKey());
		Aggregation aggregation = aggregationContainer.aggregation;
		List<String> keys = aggregation.getKeys();
		// Restrict the field mappings to what this aggregation actually stores.
		Map<String, String> aggregationKeyFields = entriesToMap(filterEntryKeys(dimensionFields.entrySet().stream(), keys::contains));
		Map<String, String> aggregationMeasureFields = entriesToMap(filterEntryKeys(measureFields.entrySet().stream(), aggregationContainer.measures::contains));
		AggregationPredicate dataInputFilterPredicate = aggregationToDataInputFilterPredicate.getValue();
		StreamSupplier<T> output = streamSplitter.newOutput();
		// Only synthesize a filtering stage when this aggregation restricts its input.
		if (!dataInputFilterPredicate.equals(AggregationPredicates.alwaysTrue())) {
			Predicate<T> filterPredicate = createFilterPredicate(inputClass, dataInputFilterPredicate, classLoader, fieldTypes);
			output = output
					.transformWith(StreamFilter.create(filterPredicate));
		}
		Promise<AggregationDiff> consume = output.streamTo(aggregation.consume(inputClass, aggregationKeyFields, aggregationMeasureFields));
		diffsCollector.addPromise(consume, (accumulator, diff) -> accumulator.put(aggregationId, diff));
	}
	return StreamConsumerWithResult.of(streamSplitter.getInput(), diffsCollector.run().get().map(CubeDiff::of));
}
/**
 * For each aggregation able to ingest records with the given dimension and
 * measure fields, computes the predicate the input must be filtered with
 * before consumption; alwaysTrue() marks aggregations that accept the input as-is.
 */
Map<String, AggregationPredicate> getCompatibleAggregationsForDataInput(Map<String, String> dimensionFields,
Map<String, String> measureFields,
AggregationPredicate predicate) {
AggregationPredicate dataPredicate = predicate.simplify();
Map<String, AggregationPredicate> aggregationToDataInputFilterPredicate = new HashMap<>();
for (Entry<String, AggregationContainer> aggregationContainer : aggregations.entrySet()) {
AggregationContainer container = aggregationContainer.getValue();
Aggregation aggregation = container.aggregation;
Set<String> dimensions = dimensionFields.keySet();
// The input must supply every key of the aggregation.
if (!dimensions.containsAll(aggregation.getKeys())) continue;
// Skip aggregations that store none of the supplied measures.
Map<String, String> aggregationMeasureFields = entriesToMap(filterEntryKeys(measureFields.entrySet().stream(), container.measures::contains));
if (aggregationMeasureFields.isEmpty()) continue;
AggregationPredicate containerPredicate = container.predicate.simplify();
AggregationPredicate intersection = AggregationPredicates.and(containerPredicate, dataPredicate).simplify();
// The data's slice and the aggregation's slice do not overlap at all: skip.
if (AggregationPredicates.alwaysFalse().equals(intersection)) continue;
if (intersection.equals(containerPredicate)) {
// Everything matching dataPredicate already satisfies the aggregation's predicate.
aggregationToDataInputFilterPredicate.put(aggregationContainer.getKey(), AggregationPredicates.alwaysTrue());
continue;
}
// Otherwise the input must be narrowed to the aggregation's own slice.
aggregationToDataInputFilterPredicate.put(aggregationContainer.getKey(), containerPredicate);
}
return aggregationToDataInputFilterPredicate;
}
// Generates (via bytecode) a Predicate testing whether a record of inputClass
// satisfies the given aggregation predicate. Raw Predicate return type is
// deliberate: the generated class is only typed at the call site.
static Predicate createFilterPredicate(Class<?> inputClass,
AggregationPredicate predicate,
DefiningClassLoader classLoader,
Map<String, FieldType> keyTypes) {
return ClassBuilder.create(classLoader, Predicate.class)
.withClassKey(inputClass, predicate)
.withMethod("test", boolean.class, singletonList(Object.class),
predicate.createPredicate(cast(arg(0), inputClass), keyTypes))
.buildClassAndCreateNewInstance();
}
/**
 * Returns a {@link StreamSupplier} of the records retrieved from cube for the specified query.
 *
 * @param dimensions     dimensions to group the result by
 * @param storedMeasures stored measures to retrieve
 * @param where          predicate restricting the queried data
 * @param <T>            type of output objects
 * @param resultClass    class of output records
 * @return supplier that streams query results
 */
public <T> StreamSupplier<T> queryRawStream(List<String> dimensions, List<String> storedMeasures, AggregationPredicate where,
Class<T> resultClass) throws QueryException {
return queryRawStream(dimensions, storedMeasures, where, resultClass, classLoader);
}
// Same as the two-argument overload but generates result classes in the given
// class loader; selects compatible aggregations before delegating.
public <T> StreamSupplier<T> queryRawStream(List<String> dimensions, List<String> storedMeasures, AggregationPredicate where,
Class<T> resultClass, DefiningClassLoader queryClassLoader) throws QueryException {
List<AggregationContainer> compatibleAggregations = getCompatibleAggregationsForQuery(dimensions, storedMeasures, where);
return queryRawStream(dimensions, storedMeasures, where, resultClass, queryClassLoader, compatibleAggregations);
}
/**
 * Core query implementation: ranks the compatible aggregations by estimated
 * cost, then pulls each requested measure from the cheapest aggregation that
 * stores it, merging the per-aggregation streams with a key-ordered reducer.
 * When a single aggregation covers the whole query, a plain mapper is used
 * instead of the reducer.
 *
 * @param <K> generated key-record type used for merge ordering
 * @param <S> generated per-aggregation record type
 * @param <A> reducer accumulator type
 */
private <T, K extends Comparable, S, A> StreamSupplier<T> queryRawStream(List<String> dimensions, List<String> storedMeasures, AggregationPredicate where,
Class<T> resultClass, DefiningClassLoader queryClassLoader,
List<AggregationContainer> compatibleAggregations) {
// Score every candidate; sorting below puts the preferred aggregations first.
List<AggregationContainerWithScore> containerWithScores = new ArrayList<>();
for (AggregationContainer compatibleAggregation : compatibleAggregations) {
AggregationQuery aggregationQuery = AggregationQuery.create(dimensions, storedMeasures, where);
double score = compatibleAggregation.aggregation.estimateCost(aggregationQuery);
containerWithScores.add(new AggregationContainerWithScore(compatibleAggregation, score));
}
sort(containerWithScores);
Class<K> resultKeyClass = createKeyClass(
keysToMap(dimensions.stream(), dimensionTypes::get),
queryClassLoader);
StreamReducer<K, T, A> streamReducer = StreamReducer.create(Comparable::compareTo);
StreamSupplier<T> queryResultSupplier = streamReducer.getOutput();
// Copy before mutation: measures are removed as aggregations claim them.
storedMeasures = new ArrayList<>(storedMeasures);
for (AggregationContainerWithScore aggregationContainerWithScore : containerWithScores) {
AggregationContainer aggregationContainer = aggregationContainerWithScore.aggregationContainer;
List<String> compatibleMeasures = storedMeasures.stream().filter(aggregationContainer.measures::contains).collect(toList());
if (compatibleMeasures.isEmpty())
continue;
storedMeasures.removeAll(compatibleMeasures);
Class<S> aggregationClass = createRecordClass(
keysToMap(dimensions.stream(), dimensionTypes::get),
keysToMap(compatibleMeasures.stream(), m -> measures.get(m).getFieldType()),
queryClassLoader);
StreamSupplier<S> aggregationSupplier = aggregationContainer.aggregation.query(
AggregationQuery.create(dimensions, compatibleMeasures, where),
aggregationClass, queryClassLoader);
if (storedMeasures.isEmpty() && streamReducer.getInputs().isEmpty()) {
/*
If query is fulfilled from the single aggregation,
just use mapper instead of reducer to copy requested fields.
*/
Function<S, T> mapper = createMapper(aggregationClass, resultClass, dimensions,
compatibleMeasures, queryClassLoader);
queryResultSupplier = aggregationSupplier
.transformWith(StreamMapper.create(mapper));
break;
}
Function<S, K> keyFunction = io.datakernel.aggregation.Utils.createKeyFunction(aggregationClass, resultKeyClass, dimensions, queryClassLoader);
Reducer<K, S, T, A> reducer = aggregationContainer.aggregation.aggregationReducer(aggregationClass, resultClass,
dimensions, compatibleMeasures, queryClassLoader);
StreamConsumer<S> streamReducerInput = streamReducer.newInput(keyFunction, reducer);
aggregationSupplier.streamTo(streamReducerInput);
}
return queryResultSupplier;
}
/**
 * Selects the aggregations able to answer a query: each must contain every
 * requested (and filtered-on) dimension, store at least one of the requested
 * measures, and cover the whole data slice that the query's predicate selects.
 */
List<AggregationContainer> getCompatibleAggregationsForQuery(Collection<String> dimensions,
                                                             Collection<String> storedMeasures,
                                                             AggregationPredicate where) {
	AggregationPredicate queryPredicate = where.simplify();
	// Dimensions the aggregation must have: those requested plus those the predicate filters on.
	List<String> requiredDimensions = new ArrayList<>(dimensions);
	requiredDimensions.addAll(queryPredicate.getDimensions());
	List<AggregationContainer> result = new ArrayList<>();
	for (AggregationContainer container : aggregations.values()) {
		if (!container.aggregation.getKeys().containsAll(requiredDimensions)) {
			continue;
		}
		if (storedMeasures.stream().noneMatch(container.measures::contains)) {
			continue;
		}
		// The aggregation's slice must fully cover the query's slice.
		AggregationPredicate intersection = AggregationPredicates.and(queryPredicate, container.predicate).simplify();
		if (intersection.equals(queryPredicate)) {
			result.add(container);
		}
	}
	return result;
}
/**
 * Pairs an aggregation with its estimated query cost. Natural order prefers
 * aggregations with more measures, then lower cost, then fewer chunks, then
 * fewer keys.
 */
static class AggregationContainerWithScore implements Comparable<AggregationContainerWithScore> {
	final AggregationContainer aggregationContainer;
	final double score;
	private AggregationContainerWithScore(AggregationContainer aggregationContainer, double score) {
		this.score = score;
		this.aggregationContainer = aggregationContainer;
	}
	@Override
	public int compareTo(@NotNull AggregationContainerWithScore o) {
		// Descending by measure count: operands reversed instead of negating.
		int byMeasureCount = Integer.compare(o.aggregationContainer.measures.size(), aggregationContainer.measures.size());
		if (byMeasureCount != 0) return byMeasureCount;
		int byScore = Double.compare(score, o.score);
		if (byScore != 0) return byScore;
		int byChunkCount = Integer.compare(aggregationContainer.aggregation.getChunks(), o.aggregationContainer.aggregation.getChunks());
		if (byChunkCount != 0) return byChunkCount;
		return Integer.compare(aggregationContainer.aggregation.getKeys().size(), o.aggregationContainer.aggregation.getKeys().size());
	}
}
/**
 * Reports whether any aggregation holds more overlapping chunks than the
 * configured threshold; every offender is logged, not just the first one.
 */
public boolean containsExcessiveNumberOfOverlappingChunks() {
	boolean excessive = false;
	for (AggregationContainer container : aggregations.values()) {
		int overlappingChunks = container.aggregation.getNumberOfOverlappingChunks();
		if (overlappingChunks > maxOverlappingChunksToProcessLogs) {
			logger.info("Aggregation {} contains {} overlapping chunks", container.aggregation, overlappingChunks);
			excessive = true;
		}
	}
	return excessive;
}
/**
 * Runs the given consolidation strategy over every aggregation, one after
 * another (sequentially, not in parallel), and combines the non-empty
 * per-aggregation diffs into a single {@link CubeDiff}.
 */
public Promise<CubeDiff> consolidate(Function<Aggregation, Promise<AggregationDiff>> strategy) {
logger.info("Launching consolidation");
Map<String, AggregationDiff> map = new HashMap<>();
List<AsyncSupplier<Void>> runnables = new ArrayList<>();
for (Entry<String, AggregationContainer> entry : aggregations.entrySet()) {
String aggregationId = entry.getKey();
Aggregation aggregation = entry.getValue().aggregation;
runnables.add(() -> strategy.apply(aggregation)
.whenResult(aggregationDiff -> {
// Only record aggregations that actually changed.
if (!aggregationDiff.isEmpty()) {
map.put(aggregationId, aggregationDiff);
}
})
.toVoid());
}
return Promises.sequence(runnables).map($ -> CubeDiff.of(map));
}
/**
 * Expands a dimension into its full parent chain, ordered root-most parent
 * first and the dimension itself last.
 */
private List<String> getAllParents(String dimension) {
	List<String> chain = new ArrayList<>();
	chain.add(dimension);
	// Walk up the child -> parent relation, prepending each ancestor.
	for (String parent = childParentRelations.get(dimension); parent != null; parent = childParentRelations.get(parent)) {
		chain.add(0, parent);
	}
	return chain;
}
/**
 * Returns the ids of every chunk currently referenced by any aggregation.
 */
public Set<Object> getAllChunks() {
	Set<Object> chunkIds = new HashSet<>();
	aggregations.values().forEach(container ->
			chunkIds.addAll(container.aggregation.getState().getChunks().keySet()));
	return chunkIds;
}
/**
 * Returns per-aggregation consolidation debug information, keyed by aggregation id.
 */
public Map<String, List<ConsolidationDebugInfo>> getConsolidationDebugInfo() {
	Map<String, List<ConsolidationDebugInfo>> debugInfo = new HashMap<>();
	aggregations.forEach((id, container) ->
			debugInfo.put(id, container.aggregation.getState().getConsolidationDebugInfo()));
	return debugInfo;
}
// Returns the cube's default class loader for generated classes.
public DefiningClassLoader getClassLoader() {
return classLoader;
}
// region temp query() method
/**
 * Executes a cube query asynchronously, recording the elapsed time (success) or
 * the error count and last error (failure) into the JMX stats.
 */
@Override
public Promise<QueryResult> query(CubeQuery cubeQuery) throws QueryException {
DefiningClassLoader queryClassLoader = getQueryClassLoader(new CubeClassLoaderCache.Key(
new LinkedHashSet<>(cubeQuery.getAttributes()),
new LinkedHashSet<>(cubeQuery.getMeasures()),
cubeQuery.getWhere().getDimensions()));
long queryStarted = eventloop.currentTimeMillis();
return new RequestContext<>().execute(queryClassLoader, cubeQuery)
.whenComplete((queryResult, e) -> {
if (e == null) {
queryTimes.recordValue((int) (eventloop.currentTimeMillis() - queryStarted));
} else {
queryErrors++;
queryLastError = e;
// Missing chunk files are logged explicitly: they usually indicate storage drift.
if (e instanceof NoSuchFileException) {
logger.warn("Query failed because of NoSuchFileException. " + cubeQuery.toString(), e);
}
}
});
}
// endregion
/**
 * Resolves the class loader for a query: the shared one when no cache is
 * configured, otherwise a cached per-query-shape loader.
 */
private DefiningClassLoader getQueryClassLoader(CubeClassLoaderCache.Key key) {
	return classLoaderCache == null ? classLoader : classLoaderCache.getOrCreate(key);
}
@SuppressWarnings("rawtypes")
// Per-query working state: collects the resolved dimensions/measures and the
// generated functions needed to produce a QueryResult. One instance per query.
private class RequestContext<R> {
DefiningClassLoader queryClassLoader;
CubeQuery query;
// Simplified WHERE and HAVING predicates of the current query.
AggregationPredicate queryPredicate;
AggregationPredicate queryHaving;
List<AggregationContainer> compatibleAggregations = new ArrayList<>();
Map<String, Object> fullySpecifiedDimensions;
// Result shape, accumulated by prepareDimensions()/prepareMeasures().
final Set<String> resultDimensions = new LinkedHashSet<>();
final Set<String> resultAttributes = new LinkedHashSet<>();
final Set<String> resultMeasures = new LinkedHashSet<>();
final Set<String> resultStoredMeasures = new LinkedHashSet<>();
final Set<String> resultComputedMeasures = new LinkedHashSet<>();
// Generated record class and the bytecode-generated helpers operating on it.
Class<R> resultClass;
Predicate<R> havingPredicate;
final List<String> resultOrderings = new ArrayList<>();
Comparator<R> comparator;
MeasuresFunction<R> measuresFunction;
TotalsFunction<R, R> totalsFunction;
// Field order of the outgoing records.
final List<String> recordAttributes = new ArrayList<>();
final List<String> recordMeasures = new ArrayList<>();
RecordScheme recordScheme;
RecordFunction recordFunction;
/**
 * Drives the whole query: resolves dimensions and measures, generates the
 * result class and helper functions, streams raw records from the matching
 * aggregations and post-processes them. A METADATA-only query short-circuits
 * before any data is read.
 */
Promise<QueryResult> execute(DefiningClassLoader queryClassLoader, CubeQuery query) throws QueryException {
this.queryClassLoader = queryClassLoader;
this.query = query;
queryPredicate = query.getWhere().simplify();
queryHaving = query.getHaving().simplify();
fullySpecifiedDimensions = queryPredicate.getFullySpecifiedDimensions();
prepareDimensions();
prepareMeasures();
resultClass = createResultClass(resultAttributes, resultMeasures, Cube.this, queryClassLoader);
recordScheme = createRecordScheme();
if (query.getReportType() == ReportType.METADATA) {
return Promise.of(QueryResult.createForMetadata(recordScheme, recordAttributes, recordMeasures));
}
measuresFunction = createMeasuresFunction();
totalsFunction = createTotalsFunction();
comparator = createComparator();
havingPredicate = createHavingPredicate();
recordFunction = createRecordFunction();
return queryRawStream(new ArrayList<>(resultDimensions), new ArrayList<>(resultStoredMeasures),
queryPredicate, resultClass, queryClassLoader, compatibleAggregations)
.toList()
.then(this::processResults);
}
/**
 * Resolves the query's attributes into the dimensions and attributes the
 * result must carry. A plain dimension contributes its whole parent chain; a
 * resolver-backed attribute contributes the parent chains of all of the
 * resolver's key dimensions.
 *
 * @throws QueryException if a requested attribute is neither a dimension nor a registered attribute
 */
void prepareDimensions() throws QueryException {
	for (String attribute : query.getAttributes()) {
		recordAttributes.add(attribute);
		List<String> dimensions = new ArrayList<>();
		if (dimensionTypes.containsKey(attribute)) {
			dimensions = getAllParents(attribute);
		} else if (attributes.containsKey(attribute)) {
			AttributeResolverContainer resolverContainer = attributes.get(attribute);
			for (String dimension : resolverContainer.dimensions) {
				dimensions.addAll(getAllParents(dimension));
			}
		} else {
			throw new QueryException("Attribute not found: " + attribute);
		}
		resultDimensions.addAll(dimensions);
		resultAttributes.addAll(dimensions);
		resultAttributes.add(attribute);
	}
}
/**
 * Expands the query's measures into stored dependencies, selects the compatible
 * aggregations, and records — in query order, without duplicates — which
 * requested measures can actually be answered (stored or computed).
 */
void prepareMeasures() {
// Stored measures the query needs: requested stored measures plus the
// dependencies of requested computed measures. Unknown names are dropped.
Set<String> queryStoredMeasures = new HashSet<>();
for (String measure : query.getMeasures()) {
if (computedMeasures.containsKey(measure)) {
queryStoredMeasures.addAll(computedMeasures.get(measure).getMeasureDependencies());
} else if (measures.containsKey(measure)) {
queryStoredMeasures.add(measure);
}
}
compatibleAggregations = getCompatibleAggregationsForQuery(resultDimensions, queryStoredMeasures, queryPredicate);
// Measures answerable by at least one compatible aggregation...
Set<String> compatibleMeasures = new LinkedHashSet<>();
for (AggregationContainer aggregationContainer : compatibleAggregations) {
compatibleMeasures.addAll(aggregationContainer.measures);
}
// ...plus computed measures whose dependencies are all answerable.
for (String computedMeasure : computedMeasures.keySet()) {
if (compatibleMeasures.containsAll(computedMeasures.get(computedMeasure).getMeasureDependencies())) {
compatibleMeasures.add(computedMeasure);
}
}
for (String queryMeasure : query.getMeasures()) {
if (!compatibleMeasures.contains(queryMeasure) || recordMeasures.contains(queryMeasure))
continue;
recordMeasures.add(queryMeasure);
if (measures.containsKey(queryMeasure)) {
resultStoredMeasures.add(queryMeasure);
resultMeasures.add(queryMeasure);
} else if (computedMeasures.containsKey(queryMeasure)) {
ComputedMeasure expression = computedMeasures.get(queryMeasure);
Set<String> dependencies = expression.getMeasureDependencies();
resultStoredMeasures.addAll(dependencies);
resultComputedMeasures.add(queryMeasure);
resultMeasures.addAll(dependencies);
resultMeasures.add(queryMeasure);
}
}
}
/**
 * Builds the output record scheme: attribute fields first (in record order),
 * then measure fields.
 */
RecordScheme createRecordScheme() {
	RecordScheme scheme = RecordScheme.create();
	recordAttributes.forEach(attribute -> scheme.addField(attribute, getAttributeType(attribute)));
	recordMeasures.forEach(measure -> scheme.addField(measure, getMeasureType(measure)));
	return scheme;
}
/**
 * Generates (via bytecode) the function that copies a result object's fields
 * into an output Record: dimensions through copyAttributes, everything else
 * through copyMeasures.
 */
RecordFunction createRecordFunction() {
return ClassBuilder.create(queryClassLoader, RecordFunction.class)
.withClassKey(resultClass)
.withMethod("copyAttributes",
sequence(expressions -> {
for (String field : recordScheme.getFields()) {
int fieldIndex = recordScheme.getFieldIndex(field);
// Only dimension fields are copied here; they go through the
// dimension's FieldType conversion to their public representation.
if (dimensionTypes.containsKey(field)) {
expressions.add(call(arg(1), "put", value(fieldIndex),
cast(dimensionTypes.get(field).toValue(
property(cast(arg(0), resultClass), field)), Object.class)));
}
}
}))
.withMethod("copyMeasures",
sequence(expressions -> {
for (String field : recordScheme.getFields()) {
int fieldIndex = recordScheme.getFieldIndex(field);
if (!dimensionTypes.containsKey(field)) {
if (measures.containsKey(field)) {
// Stored measure: extract the value from its accumulator form.
Variable fieldValue = property(cast(arg(0), resultClass), field);
expressions.add(call(arg(1), "put", value(fieldIndex),
cast(measures.get(field).getFieldType().toValue(
measures.get(field).valueOfAccumulator(fieldValue)), Object.class)));
} else {
// Attribute or computed measure: copied verbatim; '.' in the
// name maps to '$' in the generated property name.
expressions.add(call(arg(1), "put", value(fieldIndex),
cast(property(cast(arg(0), resultClass), field.replace('.', '$')), Object.class)));
}
}
}
}))
.buildClassAndCreateNewInstance();
}
/**
 * Generates (via bytecode) the function that evaluates every computed measure
 * of the result and stores it into the corresponding generated field.
 */
MeasuresFunction<R> createMeasuresFunction() {
return ClassBuilder.create(queryClassLoader, MeasuresFunction.class)
.withClassKey(resultClass, resultComputedMeasures)
.withFields(resultComputedMeasures.stream().collect(toMap(identity(), computedMeasure -> computedMeasures.get(computedMeasure).getType(measures))))
.withMethod("computeMeasures", sequence(list -> {
for (String computedMeasure : resultComputedMeasures) {
Expression record = cast(arg(0), resultClass);
list.add(set(property(record, computedMeasure),
computedMeasures.get(computedMeasure).getExpression(record, measures)));
}
}))
.buildClassAndCreateNewInstance();
}
// Builds the HAVING predicate for the query. The trivial cases are
// short-circuited without code generation; the identity comparison presumably
// relies on alwaysTrue()/alwaysFalse() returning singletons — TODO confirm.
private Predicate<R> createHavingPredicate() {
    if (queryHaving == AggregationPredicates.alwaysTrue()) return o -> true;
    if (queryHaving == AggregationPredicates.alwaysFalse()) return o -> false;
    return ClassBuilder.create(queryClassLoader, Predicate.class)
            .withClassKey(resultClass, queryHaving)
            .withMethod("test",
                    queryHaving.createPredicate(cast(arg(0), resultClass), fieldTypes))
            .buildClassAndCreateNewInstance();
}
// Builds the ORDER BY comparator; returns a no-op comparator when the query
// has no orderings. Side effect: every ordering field actually applied is
// appended to resultOrderings while the comparator class is being built.
@SuppressWarnings("unchecked")
Comparator<R> createComparator() {
    if (query.getOrderings().isEmpty())
        return (o1, o2) -> 0;
    return ClassBuilder.create(queryClassLoader, Comparator.class)
            .withClassKey(resultClass, query.getOrderings())
            .withMethod("compare", of(() -> {
                ExpressionComparator comparator = ExpressionComparator.create();
                for (Ordering ordering : query.getOrderings()) {
                    String field = ordering.getField();
                    // Only order by fields that are part of the result.
                    if (resultMeasures.contains(field) || resultAttributes.contains(field)) {
                        String property = field.replace('.', '$');
                        // Descending order is achieved by swapping the operands.
                        comparator.with(
                                ordering.isAsc() ? leftProperty(resultClass, property) : rightProperty(resultClass, property),
                                ordering.isAsc() ? rightProperty(resultClass, property) : leftProperty(resultClass, property),
                                true);
                        resultOrderings.add(field);
                    }
                }
                return comparator;
            }))
            .buildClassAndCreateNewInstance();
}
/**
 * Computes the totals row and schedules attribute resolution for the raw
 * query results, then delegates to {@code processResults2} to assemble the
 * final {@code QueryResult}.
 */
Promise<QueryResult> processResults(List<R> results) {
    R totals;
    try {
        totals = resultClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new RuntimeException(e);
    }
    if (results.isEmpty()) {
        totalsFunction.zero(totals);
    } else {
        // Initialize totals from the first row, then accumulate the rest;
        // computed measures are evaluated per row before accumulation.
        Iterator<R> iterator = results.iterator();
        R first = iterator.next();
        measuresFunction.computeMeasures(first);
        totalsFunction.init(totals, first);
        while (iterator.hasNext()) {
            R next = iterator.next();
            measuresFunction.computeMeasures(next);
            totalsFunction.accumulate(totals, next);
        }
        totalsFunction.computeMeasures(totals);
    }
    // Fix: removed a dead Record.create(...)/copyMeasures(...) pair that built a
    // totals record here and never used it; the totals record is materialized
    // in processResults2 when the report type requires it.
    List<Promise<Void>> tasks = new ArrayList<>();
    Map<String, Object> filterAttributes = new LinkedHashMap<>();
    // Resolve attributes for every resolver that contributes requested attributes.
    for (AttributeResolverContainer resolverContainer : attributeResolvers) {
        List<String> attributes = new ArrayList<>(resolverContainer.attributes);
        attributes.retainAll(resultAttributes);
        if (!attributes.isEmpty()) {
            tasks.add(io.datakernel.cube.Utils.resolveAttributes(results, resolverContainer.resolver,
                    resolverContainer.dimensions, attributes,
                    fullySpecifiedDimensions, (Class) resultClass, queryClassLoader));
        }
    }
    // Additionally resolve filter attributes for fully specified dimensions.
    for (AttributeResolverContainer resolverContainer : attributeResolvers) {
        if (fullySpecifiedDimensions.keySet().containsAll(resolverContainer.dimensions)) {
            tasks.add(resolveSpecifiedDimensions(resolverContainer, filterAttributes));
        }
    }
    return Promises.all(tasks)
            .map($ -> processResults2(results, totals, filterAttributes));
}
/**
 * Assembles the final QueryResult: applies the HAVING predicate and the
 * limit/offset window, converts surviving rows into records, and builds the
 * result shape requested by the query's report type.
 */
QueryResult processResults2(List<R> results, R totals, Map<String, Object> filterAttributes) {
    List<R> filtered = results.stream().filter(havingPredicate).collect(toList());
    // totalCount reflects the post-HAVING, pre-pagination row count.
    int totalCount = filtered.size();
    List<R> page = applyLimitAndOffset(filtered);
    List<Record> resultRecords = new ArrayList<>(page.size());
    for (R row : page) {
        Record record = Record.create(recordScheme);
        recordFunction.copyAttributes(row, record);
        recordFunction.copyMeasures(row, record);
        resultRecords.add(record);
    }
    ReportType reportType = query.getReportType();
    if (reportType == ReportType.DATA) {
        return QueryResult.createForData(recordScheme,
                resultRecords,
                recordAttributes,
                recordMeasures,
                resultOrderings,
                filterAttributes);
    }
    if (reportType == ReportType.DATA_WITH_TOTALS) {
        // Materialize the totals row only when the report type needs it.
        Record totalRecord = Record.create(recordScheme);
        recordFunction.copyMeasures(totals, totalRecord);
        return QueryResult.createForDataWithTotals(recordScheme,
                resultRecords,
                totalRecord,
                totalCount,
                recordAttributes,
                recordMeasures,
                resultOrderings,
                filterAttributes);
    }
    throw new AssertionError();
}
// Resolves the attributes of a single resolver whose dimensions are all fully
// specified by the query, and stores them into the given map
// (attribute name -> resolved value, or null when nothing was resolved).
private Promise<Void> resolveSpecifiedDimensions(AttributeResolverContainer resolverContainer,
        Map<String, Object> result) {
    // Build the composite lookup key from the fully specified dimension values.
    Object[] key = new Object[resolverContainer.dimensions.size()];
    for (int i = 0; i < resolverContainer.dimensions.size(); i++) {
        String dimension = resolverContainer.dimensions.get(i);
        key[i] = fullySpecifiedDimensions.get(dimension);
    }
    // The resolver reports attributes through a callback; capture them in a Ref.
    Ref<Object> attributesRef = new Ref<>();
    return resolverContainer.resolver.resolveAttributes(singletonList(key),
            result1 -> (Object[]) result1,
            (result12, attributes) -> attributesRef.value = attributes)
            .whenResult(() -> {
                for (int i = 0; i < resolverContainer.attributes.size(); i++) {
                    String attribute = resolverContainer.attributes.get(i);
                    result.put(attribute, attributesRef.value != null ? ((Object[]) attributesRef.value)[i] : null);
                }
            });
}
/**
 * Applies the query's offset/limit window to the results. When an ordering
 * comparator is present, the rows are sorted before the window is applied;
 * otherwise a sublist view of the input is returned.
 */
List applyLimitAndOffset(List results) {
    Integer offset = query.getOffset();
    Integer limit = query.getLimit();
    int start;
    if (offset == null) {
        offset = 0;
        start = 0;
    } else {
        if (offset >= results.size()) {
            // Window starts past the end: nothing to return.
            return new ArrayList<>();
        }
        start = offset;
    }
    int end;
    if (limit == null) {
        limit = results.size();
        end = results.size();
    } else {
        end = min(start + limit, results.size());
    }
    if (comparator == null) {
        return results.subList(start, end);
    }
    return ((List<Object>) results).stream()
            .sorted((Comparator<Object>) comparator)
            .skip(offset)
            .limit(limit)
            .collect(Collectors.toList());
}
// Generates a TotalsFunction with four methods over the stored/computed
// measures of the result class: zero (reset accumulators), init (seed from the
// first row), accumulate (fold another row in) and computeMeasures (evaluate
// the computed measures on the totals row).
TotalsFunction<R, R> createTotalsFunction() {
    return ClassBuilder.create(queryClassLoader, TotalsFunction.class)
            .withClassKey(resultClass, resultStoredMeasures, resultComputedMeasures)
            // zero(totals): reset every stored-measure accumulator.
            .withMethod("zero",
                    sequence(expressions -> {
                        for (String field : resultStoredMeasures) {
                            Measure measure = measures.get(field);
                            expressions.add(measure.zeroAccumulator(
                                    property(cast(arg(0), resultClass), field)));
                        }
                    }))
            // init(totals, first): seed accumulators from the first row's accumulators.
            .withMethod("init",
                    sequence(expressions -> {
                        for (String field : resultStoredMeasures) {
                            Measure measure = measures.get(field);
                            expressions.add(measure.initAccumulatorWithAccumulator(
                                    property(cast(arg(0), resultClass), field),
                                    property(cast(arg(1), resultClass), field)));
                        }
                    }))
            // accumulate(totals, next): reduce the next row into the totals.
            .withMethod("accumulate",
                    sequence(expressions -> {
                        for (String field : resultStoredMeasures) {
                            Measure measure = measures.get(field);
                            expressions.add(measure.reduce(
                                    property(cast(arg(0), resultClass), field),
                                    property(cast(arg(1), resultClass), field)));
                        }
                    }))
            // computeMeasures(totals): evaluate derived measures on the totals row.
            .withMethod("computeMeasures",
                    sequence(expressions -> {
                        for (String computedMeasure : resultComputedMeasures) {
                            Expression result = cast(arg(0), resultClass);
                            expressions.add(set(property(result, computedMeasure),
                                    computedMeasures.get(computedMeasure).getExpression(result, measures)));
                        }
                    }))
            .buildClassAndCreateNewInstance();
}
}
// endregion
/** Renders the cube and its aggregations map for debugging/logging. */
@Override
public String toString() {
    return "Cube{aggregations=" + aggregations + '}';
}
// jmx
// JMX-exposed chunk size used by all aggregations.
@JmxAttribute
public int getAggregationsChunkSize() {
    return aggregationsChunkSize;
}

// JMX setter: propagates the new chunk size to every existing aggregation.
@JmxAttribute
public void setAggregationsChunkSize(int aggregationsChunkSize) {
    this.aggregationsChunkSize = aggregationsChunkSize;
    for (AggregationContainer aggregationContainer : aggregations.values()) {
        aggregationContainer.aggregation.setChunkSize(aggregationsChunkSize);
    }
}

// Builder-style setter; unlike the JMX setter, it only records the value and
// does not propagate it to already-registered aggregations.
public Cube withAggregationsChunkSize(int aggregationsChunkSize) {
    this.aggregationsChunkSize = aggregationsChunkSize;
    return this;
}
// Builder-style setter for the reducer buffer size used during aggregation.
public Cube withAggregationsReducerBufferSize(int aggregationsReducerBufferSize) {
    this.aggregationsReducerBufferSize = aggregationsReducerBufferSize;
    return this;
}
// JMX-exposed number of items the aggregation sorter keeps in memory.
@JmxAttribute
public int getAggregationsSorterItemsInMemory() {
    return aggregationsSorterItemsInMemory;
}

// JMX setter: propagates the new value to every existing aggregation.
@JmxAttribute
public void setAggregationsSorterItemsInMemory(int aggregationsSorterItemsInMemory) {
    this.aggregationsSorterItemsInMemory = aggregationsSorterItemsInMemory;
    for (AggregationContainer aggregationContainer : aggregations.values()) {
        aggregationContainer.aggregation.setSorterItemsInMemory(aggregationsSorterItemsInMemory);
    }
}

// Builder-style setter; records the value without updating existing aggregations.
public Cube withAggregationsSorterItemsInMemory(int aggregationsSorterItemsInMemory) {
    this.aggregationsSorterItemsInMemory = aggregationsSorterItemsInMemory;
    return this;
}
// JMX-exposed maximum number of chunks consolidated in one pass.
@JmxAttribute
public int getAggregationsMaxChunksToConsolidate() {
    return aggregationsMaxChunksToConsolidate;
}

// JMX setter: propagates the new value to every existing aggregation.
@JmxAttribute
public void setAggregationsMaxChunksToConsolidate(int aggregationsMaxChunksToConsolidate) {
    this.aggregationsMaxChunksToConsolidate = aggregationsMaxChunksToConsolidate;
    for (AggregationContainer aggregationContainer : aggregations.values()) {
        aggregationContainer.aggregation.setMaxChunksToConsolidate(aggregationsMaxChunksToConsolidate);
    }
}

// Builder-style setter; records the value without updating existing aggregations.
public Cube withAggregationsMaxChunksToConsolidate(int aggregationsMaxChunksToConsolidate) {
    this.aggregationsMaxChunksToConsolidate = aggregationsMaxChunksToConsolidate;
    return this;
}
// JMX-exposed flag: whether chunk-reading exceptions are ignored.
@JmxAttribute
public boolean getAggregationsIgnoreChunkReadingExceptions() {
    return aggregationsIgnoreChunkReadingExceptions;
}

// JMX setter: propagates the flag to every existing aggregation.
@JmxAttribute
public void setAggregationsIgnoreChunkReadingExceptions(boolean aggregationsIgnoreChunkReadingExceptions) {
    this.aggregationsIgnoreChunkReadingExceptions = aggregationsIgnoreChunkReadingExceptions;
    for (AggregationContainer aggregation : aggregations.values()) {
        aggregation.aggregation.setIgnoreChunkReadingExceptions(aggregationsIgnoreChunkReadingExceptions);
    }
}

// Builder-style setter; records the flag without updating existing aggregations.
public Cube withAggregationsIgnoreChunkReadingExceptions(boolean aggregationsIgnoreChunkReadingExceptions) {
    this.aggregationsIgnoreChunkReadingExceptions = aggregationsIgnoreChunkReadingExceptions;
    return this;
}
// JMX-exposed threshold of overlapping chunks before log processing.
@JmxAttribute
public int getMaxOverlappingChunksToProcessLogs() {
    return maxOverlappingChunksToProcessLogs;
}

@JmxAttribute
public void setMaxOverlappingChunksToProcessLogs(int maxOverlappingChunksToProcessLogs) {
    this.maxOverlappingChunksToProcessLogs = maxOverlappingChunksToProcessLogs;
}

// Builder-style variant of the setter above.
public Cube withMaxOverlappingChunksToProcessLogs(int maxOverlappingChunksToProcessLogs) {
    this.maxOverlappingChunksToProcessLogs = maxOverlappingChunksToProcessLogs;
    return this;
}
// JMX-exposed maximum period for incremental reloads.
@JmxAttribute
public Duration getMaxIncrementalReloadPeriod() {
    return maxIncrementalReloadPeriod;
}

@JmxAttribute
public void setMaxIncrementalReloadPeriod(Duration maxIncrementalReloadPeriod) {
    this.maxIncrementalReloadPeriod = maxIncrementalReloadPeriod;
}

// Builder-style variant of the setter above.
public Cube withMaxIncrementalReloadPeriod(Duration maxIncrementalReloadPeriod) {
    this.maxIncrementalReloadPeriod = maxIncrementalReloadPeriod;
    return this;
}
// Read-only JMX statistics about query execution.
@JmxAttribute
public ValueStats getQueryTimes() {
    return queryTimes;
}

@JmxAttribute
public long getQueryErrors() {
    return queryErrors;
}

// Last error raised by a query, if any.
@JmxAttribute
public Throwable getQueryLastError() {
    return queryLastError;
}

@JmxAttribute
public AggregationStats getAggregationStats() {
    return aggregationStats;
}
// Exposes the eventloop this cube runs on (eventloop-service contract).
@NotNull
@Override
public Eventloop getEventloop() {
    return eventloop;
}
}
| |
package de.bwaldvogel.mongo.backend;
import static de.bwaldvogel.mongo.TestUtils.json;
import static de.bwaldvogel.mongo.wire.BsonConstants.LENGTH_OBJECTID;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.UUID;
import org.junit.jupiter.api.Test;
import de.bwaldvogel.mongo.bson.BsonTimestamp;
import de.bwaldvogel.mongo.bson.Decimal128;
import de.bwaldvogel.mongo.bson.LegacyUUID;
import de.bwaldvogel.mongo.bson.MaxKey;
import de.bwaldvogel.mongo.bson.MinKey;
import de.bwaldvogel.mongo.bson.ObjectId;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
/**
 * Tests for {@code ValueComparator}: verifies ordering across nulls/missing
 * values, min/max keys, lists, numbers, strings, timestamps, dates, binary
 * data, documents, UUIDs and Decimal128 values. The helpers at the bottom
 * assert symmetric behavior (a<b implies b>a, a==b implies b==a).
 */
public class ValueComparatorTest {
    // Ascending comparator used by the assertion helpers below.
    private final Comparator<Object> comparator = ValueComparator.asc();

    @Test
    void testReverse() throws Exception {
        // asc()/desc() are each other's reverse and appear to be singletons
        // (isSameAs asserts reference identity).
        assertThat(ValueComparator.asc().reversed()).isSameAs(ValueComparator.desc());
        assertThat(ValueComparator.desc().reversed()).isSameAs(ValueComparator.asc());
    }

    @Test
    void testCompareNullsAndMissings() {
        // null and Missing compare as equal to each other.
        assertComparesTheSame(null, null);
        assertComparesTheSame(Missing.getInstance(), Missing.getInstance());
        assertComparesTheSame(null, Missing.getInstance());
    }

    @Test
    void testCompareMinMaxKeys() {
        // MinKey sorts before everything (including null); MaxKey after everything.
        assertComparesTheSame(MaxKey.getInstance(), MaxKey.getInstance());
        assertComparesTheSame(MinKey.getInstance(), MinKey.getInstance());
        assertFirstValueBeforeSecondValue(MinKey.getInstance(), MaxKey.getInstance());
        assertFirstValueBeforeSecondValue(MinKey.getInstance(), "abc");
        assertFirstValueBeforeSecondValue(MinKey.getInstance(), null);
        assertFirstValueBeforeSecondValue(MinKey.getInstance(), Long.MIN_VALUE);
        assertFirstValueBeforeSecondValue("abc", MaxKey.getInstance());
        assertFirstValueBeforeSecondValue(Long.MAX_VALUE, MaxKey.getInstance());
        assertFirstValueBeforeSecondValue(MinKey.getInstance(), new ObjectId());
        assertFirstValueBeforeSecondValue(new ObjectId(), MaxKey.getInstance());
    }

    @Test
    void testCompareNullWithValue() throws Exception {
        assertFirstValueBeforeSecondValue(null, 1.0);
    }

    @Test
    void testCompareList() throws Exception {
        // Without element-wise list handling, lists compare lexicographically
        // and numeric elements compare across int/double types.
        assertThat(ValueComparator.ascWithoutListHandling().compare(Arrays.asList(1, 2), Arrays.asList(1))).isGreaterThan(0);
        assertThat(ValueComparator.ascWithoutListHandling().compare(Arrays.asList(1, 2), Arrays.asList(1, 2))).isZero();
        assertThat(ValueComparator.ascWithoutListHandling().compare(Arrays.asList(1, 2.0), Arrays.asList(1.0, 2))).isZero();
    }

    @Test
    void testCompareListsInAscendingOrder() throws Exception {
        // In ascending order a list compares by its minimal element.
        assertComparesTheSame(1, Arrays.asList(1, 2));
        assertFirstValueBeforeSecondValue(Arrays.asList(1, 2), "abc");
        assertFirstValueBeforeSecondValue(Arrays.asList(1, 2), json("a: 1"));
        assertFirstValueBeforeSecondValue(Arrays.asList(1, 2), true);
        assertFirstValueBeforeSecondValue(Arrays.asList(1, 2), new ObjectId());
        assertComparesTheSame(Collections.emptyList(), Collections.emptyList());
        assertComparesTheSame(Collections.singletonList(1), 1);
        assertComparesTheSame(null, Arrays.asList(null, 1, 2));
        assertFirstValueBeforeSecondValue(Collections.singletonList(1), 2);
        assertFirstValueBeforeSecondValue(1, Collections.singletonList(2));
        assertFirstValueBeforeSecondValue(Collections.emptyList(), null);
        assertFirstValueBeforeSecondValue(Collections.emptyList(), Arrays.asList(null, 1, 2, 3));
        assertFirstValueBeforeSecondValue(Collections.emptyList(), Missing.getInstance());
        assertComparesTheSame(Arrays.asList(1, 2, 3), Arrays.asList(1, 2, 3));
        assertFirstValueBeforeSecondValue(Arrays.asList(1, 2), Arrays.asList(2, 3));
        assertFirstValueBeforeSecondValue(Collections.emptyList(), Arrays.asList(1, 2));
        assertFirstValueBeforeSecondValue(Missing.getInstance(), Arrays.asList(1, 2));
        // NOTE(review): duplicates the emptyList-vs-Missing assertion a few lines above.
        assertFirstValueBeforeSecondValue(Collections.emptyList(), Missing.getInstance());
    }

    @Test
    void testCompareListsInDescendingOrder() throws Exception {
        // In descending order a list compares by its maximal element.
        assertThat(ValueComparator.desc().compare(Arrays.asList(2, 3), Arrays.asList(1, 2))).isLessThan(0);
        assertThat(ValueComparator.desc().compare(Arrays.asList(1, "abc", 2), Collections.singletonList(3))).isLessThan(0);
        assertThat(ValueComparator.desc().compare(Collections.singletonList(3), Arrays.asList(1, "abc", 2))).isGreaterThan(0);
    }

    @Test
    void testCompareMissingWithValue() throws Exception {
        assertFirstValueBeforeSecondValue(Missing.getInstance(), 1.0);
    }

    @Test
    void testCompareObjectIds() {
        // ObjectIds compare by their byte representation (see convert() below).
        assertComparesTheSame(objectId(123000), objectId(123000));
        assertFirstValueBeforeSecondValue(objectId(123000), objectId(223000));
        assertFirstValueBeforeSecondValue(objectId(123000), objectId(124000));
    }

    @Test
    void testCompareStringValues() {
        assertComparesTheSame("abc", "abc");
        assertFirstValueBeforeSecondValue("abc", "zzz");
        assertFirstValueBeforeSecondValue(null, "abc");
    }

    @Test
    void testCompareNumberValues() {
        // Numeric comparison is cross-type (int/long/double) and treats -0.0 == 0.0.
        assertComparesTheSame(123, 123.0);
        assertFirstValueBeforeSecondValue(17L, 17.3);
        assertFirstValueBeforeSecondValue(58.9999, 59);
        assertFirstValueBeforeSecondValue(null, 27);
        assertComparesTheSame(-0.0, 0.0);
    }

    @Test
    void testCompareTimestamps() {
        BsonTimestamp bsonTimestamp = new BsonTimestamp(12345L);
        BsonTimestamp bsonTimestamp2 = new BsonTimestamp(67890L);
        assertComparesTheSame(bsonTimestamp, new BsonTimestamp(12345L));
        assertFirstValueBeforeSecondValue(bsonTimestamp, bsonTimestamp2);
    }

    @Test
    void testCompareDateValues() {
        assertComparesTheSame(Instant.ofEpochSecond(17), Instant.ofEpochSecond(17));
        assertFirstValueBeforeSecondValue(Instant.ofEpochSecond(28), Instant.ofEpochSecond(29));
        assertFirstValueBeforeSecondValue(null, Instant.now());
    }

    @Test
    void testCompareByteArrayValues() {
        // Byte arrays compare first by length, then unsigned byte-wise.
        assertComparesTheSame(new byte[] { 1 }, new byte[] { 1 });
        assertFirstValueBeforeSecondValue(new byte[] { 1 }, new byte[] { 1, 2 });
        assertFirstValueBeforeSecondValue(new byte[] { 0x00 }, new byte[] { (byte) 0xFF });
        assertFirstValueBeforeSecondValue(null, new byte[] { 1 });
    }

    @Test
    void testCompareDocuments() throws Exception {
        // Documents compare field-by-field; numeric values compare cross-type.
        assertComparesTheSame(json("a: 1"), json("a: 1.0"));
        assertComparesTheSame(json("a: 0"), json("a: -0.0"));
        assertComparesTheSame(json("a: {b: 1}"), json("a: {b: 1.0}"));
        assertDocumentComparison("a: -1", "a: 0");
        assertDocumentComparison("a: 1", "a: 1, b: 1");
        assertDocumentComparison("a: {b: 1}", "a: {c: 2}");
        assertDocumentComparison("a: {b: 1}", "a: {c: 0}");
        assertDocumentComparison("a: {b: -1.0}", "a: {b: 1}");
        assertDocumentComparison("a: {b: 1}", "a: {b: 1, c: 1}");
        assertDocumentComparison("a: {b: 1}", "a: {b: 1, c: null}");
        assertDocumentComparison("a: {b: 1, c: 0}", "a: {b: {c: 1}}");
        assertDocumentComparison("a: {b: null, c: 0}", "a: {b: {c: 0}}");
        assertDocumentComparison("a: {c: 0}", "a: {b: 'abc'}");
        assertDocumentComparison("a: {c: 0}", "a: {b: {c: 0}}");
    }

    @Test
    void testCompareUuids() throws Exception {
        assertComparesTheSame(new UUID(1, 1), new UUID(1, 1));
        assertFirstValueBeforeSecondValue(null, new UUID(1, 2));
        assertFirstValueBeforeSecondValue(new UUID(0, 1), new UUID(1, 1));
        assertFirstValueBeforeSecondValue(new byte[0], new UUID(0, 1));
        assertFirstValueBeforeSecondValue(UUID.fromString("5542cbb9-7833-96a2-b456-f13b6ae1bc80"), UUID.fromString("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"));
        byte[] highBytes = new byte[16];
        Arrays.fill(highBytes, (byte) 0xFF);
        // Generic binary sorts before UUIDs regardless of byte content —
        // presumably ordering is by binary subtype first; confirm in ValueComparator.
        assertFirstValueBeforeSecondValue(new byte[0], highBytes);
        assertFirstValueBeforeSecondValue(highBytes, new UUID(0, 1));
    }

    @Test
    void testCompareLegacyUuids() throws Exception {
        assertComparesTheSame(new LegacyUUID(1, 1), new LegacyUUID(1, 1));
        assertFirstValueBeforeSecondValue(null, new LegacyUUID(1, 2));
        assertFirstValueBeforeSecondValue(new LegacyUUID(0, 1), new LegacyUUID(1, 1));
        // Ordering is reversed relative to testCompareUuids — presumably due to
        // the legacy UUID byte layout; confirm against LegacyUUID's encoding.
        assertFirstValueBeforeSecondValue(LegacyUUID.fromString("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"), LegacyUUID.fromString("5542cbb9-7833-96a2-b456-f13b6ae1bc80"));
        assertFirstValueBeforeSecondValue(new byte[0], new LegacyUUID(0, 1));
        byte[] highBytes = new byte[16];
        Arrays.fill(highBytes, (byte) 0xFF);
        assertFirstValueBeforeSecondValue(new byte[0], highBytes);
        // NOTE(review): compares against UUID, not LegacyUUID — looks like a
        // copy-paste from testCompareUuids; confirm whether LegacyUUID was intended.
        assertFirstValueBeforeSecondValue(highBytes, new UUID(0, 1));
    }

    @Test
    void testCompareDecimal128() throws Exception {
        // Decimal128 compares numerically against all other numeric types,
        // including signed zeros, NaN and infinities.
        assertComparesTheSame(Decimal128.ONE, Decimal128.ONE);
        assertComparesTheSame(Decimal128.ONE, 1);
        assertComparesTheSame(Decimal128.ONE, 1L);
        assertComparesTheSame(Decimal128.ONE, 1.0F);
        assertComparesTheSame(Decimal128.ONE, 1.0);
        assertComparesTheSame(Decimal128.POSITIVE_ZERO, 0.0);
        assertComparesTheSame(Decimal128.NEGATIVE_ZERO, 0.0);
        assertComparesTheSame(Decimal128.NEGATIVE_ZERO, Decimal128.POSITIVE_ZERO);
        assertComparesTheSame(Decimal128.NaN, Double.NaN);
        assertComparesTheSame(Decimal128.POSITIVE_INFINITY, Double.POSITIVE_INFINITY);
        assertComparesTheSame(Decimal128.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY);
        assertFirstValueBeforeSecondValue(Decimal128.ONE, Decimal128.TWO);
    }

    // Asserts document1 sorts strictly before document2 (both parsed from JSON).
    private void assertDocumentComparison(String document1, String document2) {
        assertFirstValueBeforeSecondValue(json(document1), json(document2));
    }

    // Asserts value1 < value2 and, symmetrically, value2 > value1.
    private void assertFirstValueBeforeSecondValue(Object value1, Object value2) {
        assertThat(comparator.compare(value1, value2)).isLessThan(0);
        assertThat(comparator.compare(value2, value1)).isGreaterThan(0);
    }

    // Asserts the two values compare as equal in both directions.
    private void assertComparesTheSame(Object value1, Object value2) {
        assertThat(comparator.compare(value1, value2)).isZero();
        assertThat(comparator.compare(value2, value1)).isZero();
    }

    private ObjectId objectId(long value) {
        return new ObjectId(convert(value));
    }

    // Packs the long (big-endian) followed by a zero int into an ObjectId-sized
    // byte array, so larger longs yield byte-wise larger ObjectIds.
    private static byte[] convert(long value) {
        ByteBuf buffer = Unpooled.buffer(LENGTH_OBJECTID);
        try {
            buffer.writeLong(value);
            buffer.writeInt(0);
            byte[] data = new byte[LENGTH_OBJECTID];
            System.arraycopy(buffer.array(), 0, data, 0, data.length);
            return data;
        } finally {
            buffer.release();
        }
    }
}
| |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.examination.persistence;
import java.io.IOException;
import java.math.BigInteger;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoUnit;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import org.apache.commons.lang3.builder.CompareToBuilder;
import org.optaplanner.examples.common.persistence.AbstractTxtSolutionImporter;
import org.optaplanner.examples.common.persistence.SolutionConverter;
import org.optaplanner.examples.examination.app.ExaminationApp;
import org.optaplanner.examples.examination.domain.Exam;
import org.optaplanner.examples.examination.domain.Examination;
import org.optaplanner.examples.examination.domain.FollowingExam;
import org.optaplanner.examples.examination.domain.InstitutionParametrization;
import org.optaplanner.examples.examination.domain.LeadingExam;
import org.optaplanner.examples.examination.domain.Period;
import org.optaplanner.examples.examination.domain.PeriodPenalty;
import org.optaplanner.examples.examination.domain.PeriodPenaltyType;
import org.optaplanner.examples.examination.domain.Room;
import org.optaplanner.examples.examination.domain.RoomPenalty;
import org.optaplanner.examples.examination.domain.RoomPenaltyType;
import org.optaplanner.examples.examination.domain.Student;
import org.optaplanner.examples.examination.domain.Topic;
public class ExaminationImporter extends AbstractTxtSolutionImporter<Examination> {
private static final String INPUT_FILE_SUFFIX = "exam";
private static final String SPLIT_REGEX = "\\,\\ ?";
/**
 * Converts every examination input file in the data directory into the
 * solution format via a {@link SolutionConverter}.
 */
public static void main(String[] args) {
    SolutionConverter<Examination> converter = SolutionConverter.createImportConverter(
            ExaminationApp.DATA_DIR_NAME, new ExaminationImporter(), Examination.class);
    converter.convertAll();
}
// Input files for this importer use the "exam" suffix.
@Override
public String getInputFileSuffix() {
    return INPUT_FILE_SUFFIX;
}
// Factory for the stateful per-file input builder defined below.
@Override
public TxtInputBuilder<Examination> createTxtInputBuilder() {
    return new ExaminationInputBuilder();
}
public static class ExaminationInputBuilder extends TxtInputBuilder<Examination> {
private Examination examination;
private Map<Topic, Set<Topic>> coincidenceMap;
private Map<Topic, Set<Topic>> exclusionMap;
private Map<Topic, Set<Topic>> afterMap;
/**
 * Reads a complete examination instance: topics/students, periods, rooms,
 * period/room penalties and institutional weightings, then tags front-load
 * data and creates the exam list. Section order in the input file is fixed.
 *
 * @return the fully populated {@link Examination}
 * @throws IOException if the input cannot be read
 */
@Override
public Examination readSolution() throws IOException {
    examination = new Examination();
    examination.setId(0L);
    readTopicListAndStudentList();
    readPeriodList();
    readRoomList();
    readPeriodPenaltyList();
    readRoomPenaltyList();
    readInstitutionalWeighting();
    tagFrontLoadLargeTopics();
    tagFrontLoadLastPeriods();
    createExamList();
    // Search-space estimate: each exam can go to any (period, room) pair.
    int possibleForOneExamSize = examination.getPeriodList().size() * examination.getRoomList().size();
    BigInteger possibleSolutionSize = BigInteger.valueOf(possibleForOneExamSize).pow(
            examination.getExamList().size());
    logger.info("Examination {} has {} students, {} exams, {} periods, {} rooms, {} period constraints"
            + " and {} room constraints with a search space of {}.",
            getInputId(),
            examination.getStudentList().size(),
            examination.getExamList().size(),
            examination.getPeriodList().size(),
            examination.getRoomList().size(),
            examination.getPeriodPenaltyList().size(),
            examination.getRoomPenaltyList().size(),
            getFlooredPossibleSolutionSize(possibleSolutionSize));
    return examination;
}
// Reads the "Exams" section: each line is "duration, studentId, studentId, ...".
// Students are deduplicated across topics via studentMap. Also seeds the
// coincidence/exclusion/after maps with one empty set per topic, so later
// penalty parsing can assume every topic has an entry.
private void readTopicListAndStudentList() throws IOException {
    coincidenceMap = new LinkedHashMap<>();
    exclusionMap = new LinkedHashMap<>();
    afterMap = new LinkedHashMap<>();
    Map<Integer, Student> studentMap = new HashMap<>();
    int examSize = readHeaderWithNumber("Exams");
    List<Topic> topicList = new ArrayList<>(examSize);
    for (int i = 0; i < examSize; i++) {
        Topic topic = new Topic();
        topic.setId((long) i);
        String line = bufferedReader.readLine();
        String[] lineTokens = line.split(SPLIT_REGEX);
        // First token is the duration; the rest are student ids.
        topic.setDuration(Integer.parseInt(lineTokens[0]));
        List<Student> topicStudentList = new ArrayList<>(lineTokens.length - 1);
        for (int j = 1; j < lineTokens.length; j++) {
            topicStudentList.add(findOrCreateStudent(studentMap, Integer.parseInt(lineTokens[j])));
        }
        topic.setStudentList(topicStudentList);
        topic.setFrontLoadLarge(false);
        topicList.add(topic);
        coincidenceMap.put(topic, new HashSet<>());
        exclusionMap.put(topic, new HashSet<>());
        afterMap.put(topic, new HashSet<>());
    }
    examination.setTopicList(topicList);
    List<Student> studentList = new ArrayList<>(studentMap.values());
    examination.setStudentList(studentList);
}
/**
 * Returns the cached Student for the given id, creating and registering a
 * new one in the map on first sight.
 */
private Student findOrCreateStudent(Map<Integer, Student> studentMap, int id) {
    Student existing = studentMap.get(id);
    if (existing != null) {
        return existing;
    }
    Student created = new Student();
    created.setId((long) id);
    studentMap.put(id, created);
    return created;
}
/**
 * Reads the "Periods" section: each line is "date, time, duration, penalty".
 * Day indexes are computed relative to the first period's date and must not
 * go backwards (periods are expected in ascending order).
 *
 * @throws IOException if reading from the input fails
 */
private void readPeriodList() throws IOException {
    int periodSize = readHeaderWithNumber("Periods");
    List<Period> periodList = new ArrayList<>(periodSize);
    // Fix: local variable renamed from DATE_FORMAT — UPPER_SNAKE_CASE is
    // reserved for constants; this is a method-local formatter.
    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("dd:MM:yyyy HH:mm:ss", Locale.UK);
    LocalDateTime referenceDateTime = null;
    for (int i = 0; i < periodSize; i++) {
        Period period = new Period();
        period.setId((long) i);
        String line = bufferedReader.readLine();
        String[] lineTokens = line.split(SPLIT_REGEX);
        if (lineTokens.length != 4) {
            throw new IllegalArgumentException("Read line (" + line + ") is expected to contain 4 tokens.");
        }
        String startDateTimeString = lineTokens[0] + " " + lineTokens[1];
        period.setStartDateTimeString(startDateTimeString);
        period.setPeriodIndex(i);
        LocalDateTime dateTime;
        try {
            dateTime = LocalDateTime.parse(startDateTimeString, dateTimeFormatter);
        } catch (DateTimeParseException e) {
            throw new IllegalArgumentException("Illegal startDateTimeString (" + startDateTimeString + ").", e);
        }
        // The first period's date is the reference point for day indexes.
        if (referenceDateTime == null) {
            referenceDateTime = dateTime;
        }
        int dayIndex = (int) ChronoUnit.DAYS.between(referenceDateTime, dateTime);
        if (dayIndex < 0) {
            throw new IllegalStateException("The periods should be in ascending order.");
        }
        period.setDayIndex(dayIndex);
        period.setDuration(Integer.parseInt(lineTokens[2]));
        period.setPenalty(Integer.parseInt(lineTokens[3]));
        periodList.add(period);
    }
    examination.setPeriodList(periodList);
}
/**
 * Reads the "Rooms" section: each line is "capacity, penalty".
 *
 * @throws IOException if reading from the input fails
 */
private void readRoomList() throws IOException {
    int roomCount = readHeaderWithNumber("Rooms");
    List<Room> rooms = new ArrayList<>(roomCount);
    for (int roomIndex = 0; roomIndex < roomCount; roomIndex++) {
        String line = bufferedReader.readLine();
        String[] tokens = line.split(SPLIT_REGEX);
        if (tokens.length != 2) {
            throw new IllegalArgumentException("Read line (" + line + ") is expected to contain 2 tokens.");
        }
        Room room = new Room();
        room.setId((long) roomIndex);
        room.setCapacity(Integer.parseInt(tokens[0]));
        room.setPenalty(Integer.parseInt(tokens[1]));
        rooms.add(room);
    }
    examination.setRoomList(rooms);
}
private void readPeriodPenaltyList() throws IOException {
readConstantLine("\\[PeriodHardConstraints\\]");
List<Topic> topicList = examination.getTopicList();
List<PeriodPenalty> periodPenaltyList = new ArrayList<>();
String line = bufferedReader.readLine();
int id = 0;
while (!line.equals("[RoomHardConstraints]")) {
String[] lineTokens = line.split(SPLIT_REGEX);
if (lineTokens.length != 3) {
throw new IllegalArgumentException("Read line (" + line + ") is expected to contain 3 tokens.");
}
PeriodPenalty periodPenalty = new PeriodPenalty();
periodPenalty.setId((long) id);
id++;
Topic leftTopic = topicList.get(Integer.parseInt(lineTokens[0]));
periodPenalty.setLeftTopic(leftTopic);
PeriodPenaltyType periodPenaltyType = PeriodPenaltyType.valueOf(lineTokens[1]);
periodPenalty.setPeriodPenaltyType(periodPenaltyType);
Topic rightTopic = topicList.get(Integer.parseInt(lineTokens[2]));
periodPenalty.setRightTopic(rightTopic);
boolean ignorePenalty = false;
switch (periodPenaltyType) {
case EXAM_COINCIDENCE:
if (leftTopic.getId().equals(rightTopic.getId())) {
logger.warn(" Filtering out periodPenalty (" + periodPenalty
+ ") because the left and right topic are the same.");
ignorePenalty = true;
} else if (!Collections.disjoint(leftTopic.getStudentList(), rightTopic.getStudentList())) {
throw new IllegalStateException("PeriodPenalty (" + periodPenalty
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ")'s left and right topic share students.");
} else if (coincidenceMap.get(leftTopic).contains(rightTopic)) {
logger.trace(" Filtering out periodPenalty (" + periodPenalty
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") because it is mentioned twice.");
ignorePenalty = true;
} else {
boolean added = coincidenceMap.get(leftTopic).add(rightTopic)
&& coincidenceMap.get(rightTopic).add(leftTopic);
if (!added) {
throw new IllegalStateException("The periodPenaltyType (" + periodPenaltyType
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") was not successfully added twice.");
}
}
break;
case EXCLUSION:
if (leftTopic.getId().equals(rightTopic.getId())) {
logger.warn(" Filtering out periodPenalty (" + periodPenalty
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") because the left and right topic are the same.");
ignorePenalty = true;
} else if (exclusionMap.get(leftTopic).contains(rightTopic)) {
logger.trace(" Filtering out periodPenalty (" + periodPenalty
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") because it is mentioned twice.");
ignorePenalty = true;
} else {
boolean added = exclusionMap.get(leftTopic).add(rightTopic)
&& exclusionMap.get(rightTopic).add(leftTopic);
if (!added) {
throw new IllegalStateException("The periodPenaltyType (" + periodPenaltyType
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") was not successfully added twice.");
}
}
break;
case AFTER:
if (afterMap.get(leftTopic).contains(rightTopic)) {
ignorePenalty = true;
} else {
boolean added = afterMap.get(leftTopic).add(rightTopic);
if (!added) {
throw new IllegalStateException("The periodPenaltyType (" + periodPenaltyType
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") was not successfully added.");
}
}
break;
default:
throw new IllegalStateException("The periodPenaltyType ("
+ periodPenalty.getPeriodPenaltyType() + ") is not implemented.");
}
if (!ignorePenalty) {
periodPenaltyList.add(periodPenalty);
}
line = bufferedReader.readLine();
}
// createIndirectPeriodPenalties of type EXAM_COINCIDENCE
for (Map.Entry<Topic, Set<Topic>> entry : coincidenceMap.entrySet()) {
Topic leftTopic = entry.getKey();
Set<Topic> middleTopicSet = entry.getValue();
for (Topic middleTopic : new ArrayList<>(middleTopicSet)) {
for (Topic rightTopic : new ArrayList<>(coincidenceMap.get(middleTopic))) {
if (rightTopic != leftTopic
&& !middleTopicSet.contains(rightTopic)) {
PeriodPenalty indirectPeriodPenalty = new PeriodPenalty();
indirectPeriodPenalty.setId((long) id);
id++;
indirectPeriodPenalty.setPeriodPenaltyType(PeriodPenaltyType.EXAM_COINCIDENCE);
indirectPeriodPenalty.setLeftTopic(leftTopic);
indirectPeriodPenalty.setRightTopic(rightTopic);
periodPenaltyList.add(indirectPeriodPenalty);
boolean added = coincidenceMap.get(leftTopic).add(rightTopic)
&& coincidenceMap.get(rightTopic).add(leftTopic);
if (!added) {
throw new IllegalStateException("The periodPenalty (" + indirectPeriodPenalty
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") was not successfully added twice.");
}
}
}
}
}
// createIndirectPeriodPenalties of type AFTER
for (Map.Entry<Topic, Set<Topic>> entry : afterMap.entrySet()) {
Topic leftTopic = entry.getKey();
Set<Topic> afterLeftSet = entry.getValue();
Queue<Topic> queue = new ArrayDeque<>();
for (Topic topic : afterMap.get(leftTopic)) {
queue.add(topic);
queue.addAll(coincidenceMap.get(topic));
}
while (!queue.isEmpty()) {
Topic rightTopic = queue.poll();
if (!afterLeftSet.contains(rightTopic)) {
PeriodPenalty indirectPeriodPenalty = new PeriodPenalty();
indirectPeriodPenalty.setId((long) id);
id++;
indirectPeriodPenalty.setPeriodPenaltyType(PeriodPenaltyType.AFTER);
indirectPeriodPenalty.setLeftTopic(leftTopic);
indirectPeriodPenalty.setRightTopic(rightTopic);
periodPenaltyList.add(indirectPeriodPenalty);
boolean added = afterMap.get(leftTopic).add(rightTopic);
if (!added) {
throw new IllegalStateException("The periodPenalty (" + indirectPeriodPenalty
+ ") for leftTopic (" + leftTopic + ") and rightTopic (" + rightTopic
+ ") was not successfully added.");
}
}
for (Topic topic : afterMap.get(rightTopic)) {
queue.add(topic);
queue.addAll(coincidenceMap.get(topic));
}
}
}
examination.setPeriodPenaltyList(periodPenaltyList);
}
private void readRoomPenaltyList() throws IOException {
List<Topic> topicList = examination.getTopicList();
List<RoomPenalty> roomPenaltyList = new ArrayList<>();
String line = bufferedReader.readLine();
int id = 0;
while (!line.equals("[InstitutionalWeightings]")) {
String[] lineTokens = line.split(SPLIT_REGEX);
if (lineTokens.length != 2) {
throw new IllegalArgumentException("Read line (" + line + ") is expected to contain 2 tokens.");
}
RoomPenalty roomPenalty = new RoomPenalty();
roomPenalty.setId((long) id);
roomPenalty.setTopic(topicList.get(Integer.parseInt(lineTokens[0])));
roomPenalty.setRoomPenaltyType(RoomPenaltyType.valueOf(lineTokens[1]));
roomPenaltyList.add(roomPenalty);
line = bufferedReader.readLine();
id++;
}
examination.setRoomPenaltyList(roomPenaltyList);
}
private int readHeaderWithNumber(String header) throws IOException {
String line = bufferedReader.readLine();
if (!line.startsWith("[" + header + ":") || !line.endsWith("]")) {
throw new IllegalStateException("Read line (" + line + " is not the expected header (["
+ header + ":number])");
}
return Integer.parseInt(line.substring(header.length() + 2, line.length() - 1));
}
private void readInstitutionalWeighting() throws IOException {
InstitutionParametrization institutionParametrization = new InstitutionParametrization();
institutionParametrization.setId(0L);
String[] lineTokens;
lineTokens = readInstitutionalWeightingProperty("TWOINAROW", 2);
institutionParametrization.setTwoInARowPenalty(Integer.parseInt(lineTokens[1]));
lineTokens = readInstitutionalWeightingProperty("TWOINADAY", 2);
institutionParametrization.setTwoInADayPenalty(Integer.parseInt(lineTokens[1]));
lineTokens = readInstitutionalWeightingProperty("PERIODSPREAD", 2);
institutionParametrization.setPeriodSpreadLength(Integer.parseInt(lineTokens[1]));
institutionParametrization.setPeriodSpreadPenalty(1); // constant
lineTokens = readInstitutionalWeightingProperty("NONMIXEDDURATIONS", 2);
institutionParametrization.setMixedDurationPenalty(Integer.parseInt(lineTokens[1]));
lineTokens = readInstitutionalWeightingProperty("FRONTLOAD", 4);
institutionParametrization.setFrontLoadLargeTopicSize(Integer.parseInt(lineTokens[1]));
institutionParametrization.setFrontLoadLastPeriodSize(Integer.parseInt(lineTokens[2]));
institutionParametrization.setFrontLoadPenalty(Integer.parseInt(lineTokens[3]));
examination.setInstitutionParametrization(institutionParametrization);
}
private String[] readInstitutionalWeightingProperty(String property,
int propertySize) throws IOException {
String[] lineTokens;
lineTokens = bufferedReader.readLine().split(SPLIT_REGEX);
if (!lineTokens[0].equals(property) || lineTokens.length != propertySize) {
throw new IllegalArgumentException("Read line (" + Arrays.toString(lineTokens)
+ ") is expected to contain " + propertySize + " tokens and start with " + property + ".");
}
return lineTokens;
}
private void tagFrontLoadLargeTopics() {
List<Topic> sortedTopicList = new ArrayList<>(examination.getTopicList());
Collections.sort(sortedTopicList, new Comparator<Topic>() {
@Override
public int compare(Topic a, Topic b) {
return new CompareToBuilder()
.append(a.getStudentSize(), b.getStudentSize()) // Ascending
.append(b.getId(), a.getId()) // Descending (according to spec)
.toComparison();
}
});
int frontLoadLargeTopicSize = examination.getInstitutionParametrization().getFrontLoadLargeTopicSize();
if (frontLoadLargeTopicSize == 0) {
return;
}
int minimumTopicId = sortedTopicList.size() - frontLoadLargeTopicSize;
if (minimumTopicId < 0) {
logger.warn("The frontLoadLargeTopicSize (" + frontLoadLargeTopicSize
+ ") is bigger than topicListSize (" + sortedTopicList.size()
+ "). Tagging all topic as frontLoadLarge...");
minimumTopicId = 0;
}
for (Topic topic : sortedTopicList.subList(minimumTopicId, sortedTopicList.size())) {
topic.setFrontLoadLarge(true);
}
}
private void tagFrontLoadLastPeriods() {
List<Period> periodList = examination.getPeriodList();
int frontLoadLastPeriodSize = examination.getInstitutionParametrization().getFrontLoadLastPeriodSize();
if (frontLoadLastPeriodSize == 0) {
return;
}
int minimumPeriodId = periodList.size() - frontLoadLastPeriodSize;
if (minimumPeriodId < 0) {
logger.warn("The frontLoadLastPeriodSize (" + frontLoadLastPeriodSize
+ ") is bigger than periodListSize (" + periodList.size()
+ "). Tagging all periods as frontLoadLast...");
minimumPeriodId = 0;
}
for (Period period : periodList.subList(minimumPeriodId, periodList.size())) {
period.setFrontLoadLast(true);
}
}
private void createExamList() {
List<Topic> topicList = examination.getTopicList();
List<Exam> examList = new ArrayList<>(topicList.size());
Map<Topic, LeadingExam> leadingTopicToExamMap = new HashMap<>(topicList.size());
for (Topic topic : topicList) {
Exam exam;
Topic leadingTopic = topic;
for (Topic coincidenceTopic : coincidenceMap.get(topic)) {
if (coincidenceTopic.getId() < leadingTopic.getId()) {
leadingTopic = coincidenceTopic;
}
}
if (leadingTopic == topic) {
LeadingExam leadingExam = new LeadingExam();
leadingExam.setFollowingExamList(new ArrayList<>(10));
leadingTopicToExamMap.put(topic, leadingExam);
exam = leadingExam;
} else {
FollowingExam followingExam = new FollowingExam();
LeadingExam leadingExam = leadingTopicToExamMap.get(leadingTopic);
if (leadingExam == null) {
throw new IllegalStateException("The followingExam (" + topic.getId()
+ ")'s leadingExam (" + leadingExam + ") cannot be null.");
}
followingExam.setLeadingExam(leadingExam);
leadingExam.getFollowingExamList().add(followingExam);
exam = followingExam;
}
exam.setId(topic.getId());
exam.setTopic(topic);
// Notice that we leave the PlanningVariable properties on null
examList.add(exam);
}
examination.setExamList(examList);
}
}
}
| |
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.adwords.awreporting.model.entities;
import com.google.api.ads.adwords.awreporting.model.csv.annotation.CsvField;
import com.google.api.ads.adwords.awreporting.model.csv.annotation.CsvReport;
import com.google.api.ads.adwords.awreporting.model.csv.annotation.MoneyField;
import com.google.api.ads.adwords.awreporting.model.util.BigDecimalUtil;
import com.google.api.ads.adwords.lib.jaxb.v201809.ReportDefinitionReportType;
import java.math.BigDecimal;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Lob;
import javax.persistence.Table;
/**
* Specific report class for AudiencePerformanceReport.
*
*/
@Entity
@Table(name = "AW_AudiencePerformanceReport")
@CsvReport(value = ReportDefinitionReportType.AUDIENCE_PERFORMANCE_REPORT)
public class AudiencePerformanceReport extends DateReport {
  // Report row fields. Each field is mapped twice: to a database column via
  // @Column and to an AdWords report CSV header via @CsvField (resolved by
  // reflection at load time), so these names and annotation values must not be
  // changed independently of the mapping code.
  // Conventions visible below:
  //  - @MoneyField marks monetary amounts (micros conversion handled elsewhere
  //    by the CSV loader - NOTE(review): confirm against the loader).
  //  - Rate/percentage metrics are stored as BigDecimal and set from the CSV's
  //    String form (see the String setters further down).
  @Column(name = "AccountCurrencyCode")
  @CsvField(value = "Currency", reportField = "AccountCurrencyCode")
  private String accountCurrencyCode;
  @Column(name = "AccountDescriptiveName")
  @CsvField(value = "Account", reportField = "AccountDescriptiveName")
  private String accountDescriptiveName;
  @Column(name = "AccountTimeZone")
  @CsvField(value = "Time zone", reportField = "AccountTimeZone")
  private String accountTimeZone;
  @Column(name = "ActiveViewCpm")
  @CsvField(value = "Active View avg. CPM", reportField = "ActiveViewCpm")
  @MoneyField
  private BigDecimal activeViewCpm;
  @Column(name = "ActiveViewCtr")
  @CsvField(value = "Active View viewable CTR", reportField = "ActiveViewCtr")
  private BigDecimal activeViewCtr;
  @Column(name = "ActiveViewImpressions")
  @CsvField(value = "Active View viewable impressions", reportField = "ActiveViewImpressions")
  private Long activeViewImpressions;
  @Column(name = "ActiveViewMeasurability")
  @CsvField(value = "Active View measurable impr. / impr.", reportField = "ActiveViewMeasurability")
  private BigDecimal activeViewMeasurability;
  @Column(name = "ActiveViewMeasurableCost")
  @CsvField(value = "Active View measurable cost", reportField = "ActiveViewMeasurableCost")
  @MoneyField
  private BigDecimal activeViewMeasurableCost;
  @Column(name = "ActiveViewMeasurableImpressions")
  @CsvField(value = "Active View measurable impr.", reportField = "ActiveViewMeasurableImpressions")
  private Long activeViewMeasurableImpressions;
  @Column(name = "ActiveViewViewability")
  @CsvField(value = "Active View viewable impr. / measurable impr.", reportField = "ActiveViewViewability")
  private BigDecimal activeViewViewability;
  @Column(name = "AdGroupId")
  @CsvField(value = "Ad group ID", reportField = "AdGroupId")
  private Long adGroupId;
  @Column(name = "AdGroupName")
  @CsvField(value = "Ad group", reportField = "AdGroupName")
  private String adGroupName;
  @Column(name = "AdGroupStatus")
  @CsvField(value = "Ad group state", reportField = "AdGroupStatus")
  private String adGroupStatus;
  @Column(name = "AdNetworkType1")
  @CsvField(value = "Network", reportField = "AdNetworkType1")
  private String adNetworkType1;
  @Column(name = "AdNetworkType2")
  @CsvField(value = "Network (with search partners)", reportField = "AdNetworkType2")
  private String adNetworkType2;
  @Column(name = "AllConversionRate")
  @CsvField(value = "All conv. rate", reportField = "AllConversionRate")
  private BigDecimal allConversionRate;
  @Column(name = "AllConversions")
  @CsvField(value = "All conv.", reportField = "AllConversions")
  private BigDecimal allConversions;
  @Column(name = "AllConversionValue")
  @CsvField(value = "All conv. value", reportField = "AllConversionValue")
  private BigDecimal allConversionValue;
  @Column(name = "AverageCost")
  @CsvField(value = "Avg. Cost", reportField = "AverageCost")
  @MoneyField
  private BigDecimal averageCost;
  @Column(name = "AverageCpc")
  @CsvField(value = "Avg. CPC", reportField = "AverageCpc")
  @MoneyField
  private BigDecimal averageCpc;
  @Column(name = "AverageCpe")
  @CsvField(value = "Avg. CPE", reportField = "AverageCpe")
  private BigDecimal averageCpe;
  @Column(name = "AverageCpm")
  @CsvField(value = "Avg. CPM", reportField = "AverageCpm")
  @MoneyField
  private BigDecimal averageCpm;
  @Column(name = "AverageCpv")
  @CsvField(value = "Avg. CPV", reportField = "AverageCpv")
  private BigDecimal averageCpv;
  @Column(name = "AveragePosition")
  @CsvField(value = "Avg. position", reportField = "AveragePosition")
  private BigDecimal averagePosition;
  @Column(name = "BaseAdGroupId")
  @CsvField(value = "Base Ad group ID", reportField = "BaseAdGroupId")
  private Long baseAdGroupId;
  @Column(name = "BaseCampaignId")
  @CsvField(value = "Base Campaign ID", reportField = "BaseCampaignId")
  private Long baseCampaignId;
  @Column(name = "BiddingStrategyId")
  @CsvField(value = "Bid Strategy ID", reportField = "BiddingStrategyId")
  private Long biddingStrategyId;
  @Column(name = "BiddingStrategyName")
  @CsvField(value = "Bid Strategy Name", reportField = "BiddingStrategyName")
  private String biddingStrategyName;
  @Column(name = "BiddingStrategyType")
  @CsvField(value = "Bid Strategy Type", reportField = "BiddingStrategyType")
  private String biddingStrategyType;
  @Column(name = "BidModifier")
  @CsvField(value = "Bid adj.", reportField = "BidModifier")
  private BigDecimal bidModifier;
  @Column(name = "CampaignId")
  @CsvField(value = "Campaign ID", reportField = "CampaignId")
  private Long campaignId;
  @Column(name = "CampaignName")
  @CsvField(value = "Campaign", reportField = "CampaignName")
  private String campaignName;
  @Column(name = "CampaignStatus")
  @CsvField(value = "Campaign state", reportField = "CampaignStatus")
  private String campaignStatus;
  @Column(name = "Clicks")
  @CsvField(value = "Clicks", reportField = "Clicks")
  private Long clicks;
  @Column(name = "ClickType")
  @CsvField(value = "Click type", reportField = "ClickType")
  private String clickType;
  @Column(name = "ConversionCategoryName")
  @CsvField(value = "Conversion category", reportField = "ConversionCategoryName")
  private String conversionCategoryName;
  @Column(name = "ConversionRate")
  @CsvField(value = "Conv. rate", reportField = "ConversionRate")
  private BigDecimal conversionRate;
  @Column(name = "Conversions")
  @CsvField(value = "Conversions", reportField = "Conversions")
  private BigDecimal conversions;
  @Column(name = "ConversionTrackerId")
  @CsvField(value = "Conversion Tracker Id", reportField = "ConversionTrackerId")
  private Long conversionTrackerId;
  @Column(name = "ConversionTypeName")
  @CsvField(value = "Conversion name", reportField = "ConversionTypeName")
  private String conversionTypeName;
  @Column(name = "ConversionValue")
  @CsvField(value = "Total conv. value", reportField = "ConversionValue")
  private BigDecimal conversionValue;
  @Column(name = "Cost")
  @CsvField(value = "Cost", reportField = "Cost")
  @MoneyField
  private BigDecimal cost;
  @Column(name = "CostPerAllConversion")
  @CsvField(value = "Cost / all conv.", reportField = "CostPerAllConversion")
  @MoneyField
  private BigDecimal costPerAllConversion;
  @Column(name = "CostPerConversion")
  @CsvField(value = "Cost / conv.", reportField = "CostPerConversion")
  @MoneyField
  private BigDecimal costPerConversion;
  @Column(name = "CpcBid")
  @CsvField(value = "Max. CPC", reportField = "CpcBid")
  @MoneyField
  private BigDecimal cpcBid;
  @Column(name = "CpcBidSource")
  @CsvField(value = "Max CPC source", reportField = "CpcBidSource")
  private String cpcBidSource;
  @Column(name = "CpmBid")
  @CsvField(value = "Max. CPM", reportField = "CpmBid")
  @MoneyField
  private BigDecimal cpmBid;
  @Column(name = "CpmBidSource")
  @CsvField(value = "Max CPM Source", reportField = "CpmBidSource")
  private String cpmBidSource;
  @Column(name = "Criteria")
  @CsvField(value = "Audience", reportField = "Criteria")
  private String criteria;
  // URL-ish fields are @Lob: they can exceed the default VARCHAR length.
  @Column(name = "CriteriaDestinationUrl")
  @Lob
  @CsvField(value = "Destination URL", reportField = "CriteriaDestinationUrl")
  private String criteriaDestinationUrl;
  @Column(name = "CriterionAttachmentLevel")
  @CsvField(value = "Level", reportField = "CriterionAttachmentLevel")
  private String criterionAttachmentLevel;
  @Column(name = "CrossDeviceConversions")
  @CsvField(value = "Cross-device conv.", reportField = "CrossDeviceConversions")
  private BigDecimal crossDeviceConversions;
  @Column(name = "Ctr")
  @CsvField(value = "CTR", reportField = "Ctr")
  private BigDecimal ctr;
  @Column(name = "CustomerDescriptiveName")
  @CsvField(value = "Client name", reportField = "CustomerDescriptiveName")
  private String customerDescriptiveName;
  @Column(name = "Device")
  @CsvField(value = "Device", reportField = "Device")
  private String device;
  @Column(name = "EngagementRate")
  @CsvField(value = "Engagement rate", reportField = "EngagementRate")
  private BigDecimal engagementRate;
  @Column(name = "Engagements")
  @CsvField(value = "Engagements", reportField = "Engagements")
  private Long engagements;
  @Column(name = "ExternalConversionSource")
  @CsvField(value = "Conversion source", reportField = "ExternalConversionSource")
  private String externalConversionSource;
  @Column(name = "FinalAppUrls")
  @Lob
  @CsvField(value = "App final URL", reportField = "FinalAppUrls")
  private String finalAppUrls;
  @Column(name = "FinalMobileUrls")
  @Lob
  @CsvField(value = "Mobile final URL", reportField = "FinalMobileUrls")
  private String finalMobileUrls;
  @Column(name = "FinalUrls")
  @Lob
  @CsvField(value = "Final URL", reportField = "FinalUrls")
  private String finalUrls;
  @Column(name = "GmailForwards")
  @CsvField(value = "Gmail forwards", reportField = "GmailForwards")
  private Long gmailForwards;
  @Column(name = "GmailSaves")
  @CsvField(value = "Gmail saves", reportField = "GmailSaves")
  private Long gmailSaves;
  @Column(name = "GmailSecondaryClicks")
  @CsvField(value = "Gmail clicks to website", reportField = "GmailSecondaryClicks")
  private Long gmailSecondaryClicks;
  // Criterion ID of the audience row (not the JPA row id, which DateReport manages).
  @Column(name = "Id")
  @CsvField(value = "Criterion ID", reportField = "Id")
  private Long id;
  @Column(name = "Impressions")
  @CsvField(value = "Impressions", reportField = "Impressions")
  private Long impressions;
  @Column(name = "InteractionRate")
  @CsvField(value = "Interaction Rate", reportField = "InteractionRate")
  private BigDecimal interactionRate;
  @Column(name = "Interactions")
  @CsvField(value = "Interactions", reportField = "Interactions")
  private Long interactions;
  @Column(name = "InteractionTypes")
  @CsvField(value = "Interaction Types", reportField = "InteractionTypes")
  private String interactionTypes;
  @Column(name = "IsRestrict")
  @CsvField(value = "Is restricting", reportField = "IsRestrict")
  private String isRestrict;
  @Column(name = "Slot")
  @CsvField(value = "Top vs. Other", reportField = "Slot")
  private String slot;
  @Column(name = "Status")
  @CsvField(value = "Audience state", reportField = "Status")
  private String status;
  @Column(name = "TrackingUrlTemplate")
  @Lob
  @CsvField(value = "Tracking template", reportField = "TrackingUrlTemplate")
  private String trackingUrlTemplate;
  @Column(name = "UrlCustomParameters")
  @Lob
  @CsvField(value = "Custom parameter", reportField = "UrlCustomParameters")
  private String urlCustomParameters;
  @Column(name = "UserListName")
  @CsvField(value = "User list name", reportField = "UserListName")
  private String userListName;
  @Column(name = "ValuePerAllConversion")
  @CsvField(value = "Value / all conv.", reportField = "ValuePerAllConversion")
  private BigDecimal valuePerAllConversion;
  @Column(name = "ValuePerConversion")
  @CsvField(value = "Value / conv.", reportField = "ValuePerConversion")
  private BigDecimal valuePerConversion;
  @Column(name = "VideoQuartile100Rate")
  @CsvField(value = "Video played to 100%", reportField = "VideoQuartile100Rate")
  private BigDecimal videoQuartile100Rate;
  @Column(name = "VideoQuartile25Rate")
  @CsvField(value = "Video played to 25%", reportField = "VideoQuartile25Rate")
  private BigDecimal videoQuartile25Rate;
  @Column(name = "VideoQuartile50Rate")
  @CsvField(value = "Video played to 50%", reportField = "VideoQuartile50Rate")
  private BigDecimal videoQuartile50Rate;
  @Column(name = "VideoQuartile75Rate")
  @CsvField(value = "Video played to 75%", reportField = "VideoQuartile75Rate")
  private BigDecimal videoQuartile75Rate;
  @Column(name = "VideoViewRate")
  @CsvField(value = "View rate", reportField = "VideoViewRate")
  private BigDecimal videoViewRate;
  @Column(name = "VideoViews")
  @CsvField(value = "Views", reportField = "VideoViews")
  private Long videoViews;
  @Column(name = "ViewThroughConversions")
  @CsvField(value = "View-through conv.", reportField = "ViewThroughConversions")
  private Long viewThroughConversions;
  /**
   * Hibernate needs an empty constructor
   */
  public AudiencePerformanceReport() {
  }
  /**
   * Creates a report row scoped to the given accounts.
   *
   * @param topAccountId the MCC (manager) account id
   * @param accountId the client account id the row belongs to
   */
  public AudiencePerformanceReport(Long topAccountId, Long accountId){
    super(topAccountId, accountId);
  }
public String getAccountCurrencyCode() {
return accountCurrencyCode;
}
public void setAccountCurrencyCode(String accountCurrencyCode) {
this.accountCurrencyCode = accountCurrencyCode;
}
public String getAccountDescriptiveName() {
return accountDescriptiveName;
}
public void setAccountDescriptiveName(String accountDescriptiveName) {
this.accountDescriptiveName = accountDescriptiveName;
}
public String getAccountTimeZone() {
return accountTimeZone;
}
public void setAccountTimeZone(String accountTimeZone) {
this.accountTimeZone = accountTimeZone;
}
public BigDecimal getActiveViewCpm() {
return activeViewCpm;
}
public void setActiveViewCpm(BigDecimal activeViewCpm) {
this.activeViewCpm = activeViewCpm;
}
public String getActiveViewCtr() {
return BigDecimalUtil.formatAsReadable(activeViewCtr);
}
public BigDecimal getActiveViewCtrBigDecimal() {
return activeViewCtr;
}
public void setActiveViewCtr(String activeViewCtr) {
this.activeViewCtr = BigDecimalUtil.parseFromNumberString(activeViewCtr);
}
public Long getActiveViewImpressions() {
return activeViewImpressions;
}
public void setActiveViewImpressions(Long activeViewImpressions) {
this.activeViewImpressions = activeViewImpressions;
}
public String getActiveViewMeasurability() {
return BigDecimalUtil.formatAsReadable(activeViewMeasurability);
}
public BigDecimal getActiveViewMeasurabilityBigDecimal() {
return activeViewMeasurability;
}
public void setActiveViewMeasurability(String activeViewMeasurability) {
this.activeViewMeasurability = BigDecimalUtil.parseFromNumberString(activeViewMeasurability);
}
public BigDecimal getActiveViewMeasurableCost() {
return activeViewMeasurableCost;
}
public void setActiveViewMeasurableCost(BigDecimal activeViewMeasurableCost) {
this.activeViewMeasurableCost = activeViewMeasurableCost;
}
public Long getActiveViewMeasurableImpressions() {
return activeViewMeasurableImpressions;
}
public void setActiveViewMeasurableImpressions(Long activeViewMeasurableImpressions) {
this.activeViewMeasurableImpressions = activeViewMeasurableImpressions;
}
public String getActiveViewViewability() {
return BigDecimalUtil.formatAsReadable(activeViewViewability);
}
public BigDecimal getActiveViewViewabilityBigDecimal() {
return activeViewViewability;
}
public void setActiveViewViewability(String activeViewViewability) {
this.activeViewViewability = BigDecimalUtil.parseFromNumberString(activeViewViewability);
}
public Long getAdGroupId() {
return adGroupId;
}
public void setAdGroupId(Long adGroupId) {
this.adGroupId = adGroupId;
}
public String getAdGroupName() {
return adGroupName;
}
public void setAdGroupName(String adGroupName) {
this.adGroupName = adGroupName;
}
public String getAdGroupStatus() {
return adGroupStatus;
}
public void setAdGroupStatus(String adGroupStatus) {
this.adGroupStatus = adGroupStatus;
}
public String getAdNetworkType1() {
return adNetworkType1;
}
public void setAdNetworkType1(String adNetworkType1) {
this.adNetworkType1 = adNetworkType1;
}
public String getAdNetworkType2() {
return adNetworkType2;
}
public void setAdNetworkType2(String adNetworkType2) {
this.adNetworkType2 = adNetworkType2;
}
public String getAllConversionRate() {
return BigDecimalUtil.formatAsReadable(allConversionRate);
}
public BigDecimal getAllConversionRateBigDecimal() {
return allConversionRate;
}
public void setAllConversionRate(String allConversionRate) {
this.allConversionRate = BigDecimalUtil.parseFromNumberString(allConversionRate);
}
public String getAllConversions() {
return BigDecimalUtil.formatAsReadable(allConversions);
}
public BigDecimal getAllConversionsBigDecimal() {
return allConversions;
}
public void setAllConversions(String allConversions) {
this.allConversions = BigDecimalUtil.parseFromNumberString(allConversions);
}
public String getAllConversionValue() {
return BigDecimalUtil.formatAsReadable(allConversionValue);
}
public BigDecimal getAllConversionValueBigDecimal() {
return allConversionValue;
}
public void setAllConversionValue(String allConversionValue) {
this.allConversionValue = BigDecimalUtil.parseFromNumberString(allConversionValue);
}
public BigDecimal getAverageCost() {
return averageCost;
}
public void setAverageCost(BigDecimal averageCost) {
this.averageCost = averageCost;
}
public BigDecimal getAverageCpc() {
return averageCpc;
}
public void setAverageCpc(BigDecimal averageCpc) {
this.averageCpc = averageCpc;
}
public String getAverageCpe() {
return BigDecimalUtil.formatAsReadable(averageCpe);
}
public BigDecimal getAverageCpeBigDecimal() {
return averageCpe;
}
public void setAverageCpe(String averageCpe) {
this.averageCpe = BigDecimalUtil.parseFromNumberString(averageCpe);
}
public BigDecimal getAverageCpm() {
return averageCpm;
}
public void setAverageCpm(BigDecimal averageCpm) {
this.averageCpm = averageCpm;
}
public String getAverageCpv() {
return BigDecimalUtil.formatAsReadable(averageCpv);
}
public BigDecimal getAverageCpvBigDecimal() {
return averageCpv;
}
public void setAverageCpv(String averageCpv) {
this.averageCpv = BigDecimalUtil.parseFromNumberString(averageCpv);
}
public String getAveragePosition() {
return BigDecimalUtil.formatAsReadable(averagePosition);
}
public BigDecimal getAveragePositionBigDecimal() {
return averagePosition;
}
public void setAveragePosition(String averagePosition) {
this.averagePosition = BigDecimalUtil.parseFromNumberString(averagePosition);
}
public Long getBaseAdGroupId() {
return baseAdGroupId;
}
public void setBaseAdGroupId(Long baseAdGroupId) {
this.baseAdGroupId = baseAdGroupId;
}
public Long getBaseCampaignId() {
return baseCampaignId;
}
public void setBaseCampaignId(Long baseCampaignId) {
this.baseCampaignId = baseCampaignId;
}
public Long getBiddingStrategyId() {
return biddingStrategyId;
}
public void setBiddingStrategyId(Long biddingStrategyId) {
this.biddingStrategyId = biddingStrategyId;
}
public String getBiddingStrategyName() {
return biddingStrategyName;
}
public void setBiddingStrategyName(String biddingStrategyName) {
this.biddingStrategyName = biddingStrategyName;
}
public String getBiddingStrategyType() {
return biddingStrategyType;
}
public void setBiddingStrategyType(String biddingStrategyType) {
this.biddingStrategyType = biddingStrategyType;
}
public String getBidModifier() {
return BigDecimalUtil.formatAsReadable(bidModifier);
}
public BigDecimal getBidModifierBigDecimal() {
return bidModifier;
}
public void setBidModifier(String bidModifier) {
this.bidModifier = BigDecimalUtil.parseFromNumberString(bidModifier);
}
public Long getCampaignId() {
return campaignId;
}
public void setCampaignId(Long campaignId) {
this.campaignId = campaignId;
}
public String getCampaignName() {
return campaignName;
}
public void setCampaignName(String campaignName) {
this.campaignName = campaignName;
}
public String getCampaignStatus() {
return campaignStatus;
}
public void setCampaignStatus(String campaignStatus) {
this.campaignStatus = campaignStatus;
}
public Long getClicks() {
return clicks;
}
public void setClicks(Long clicks) {
this.clicks = clicks;
}
public String getClickType() {
return clickType;
}
public void setClickType(String clickType) {
this.clickType = clickType;
}
public String getConversionCategoryName() {
return conversionCategoryName;
}
public void setConversionCategoryName(String conversionCategoryName) {
this.conversionCategoryName = conversionCategoryName;
}
public String getConversionRate() {
return BigDecimalUtil.formatAsReadable(conversionRate);
}
public BigDecimal getConversionRateBigDecimal() {
return conversionRate;
}
public void setConversionRate(String conversionRate) {
this.conversionRate = BigDecimalUtil.parseFromNumberString(conversionRate);
}
public String getConversions() {
return BigDecimalUtil.formatAsReadable(conversions);
}
public BigDecimal getConversionsBigDecimal() {
return conversions;
}
public void setConversions(String conversions) {
this.conversions = BigDecimalUtil.parseFromNumberString(conversions);
}
public Long getConversionTrackerId() {
return conversionTrackerId;
}
public void setConversionTrackerId(Long conversionTrackerId) {
this.conversionTrackerId = conversionTrackerId;
}
public String getConversionTypeName() {
return conversionTypeName;
}
public void setConversionTypeName(String conversionTypeName) {
this.conversionTypeName = conversionTypeName;
}
public String getConversionValue() {
return BigDecimalUtil.formatAsReadable(conversionValue);
}
public BigDecimal getConversionValueBigDecimal() {
return conversionValue;
}
public void setConversionValue(String conversionValue) {
this.conversionValue = BigDecimalUtil.parseFromNumberString(conversionValue);
}
public BigDecimal getCost() {
return cost;
}
public void setCost(BigDecimal cost) {
this.cost = cost;
}
public BigDecimal getCostPerAllConversion() {
return costPerAllConversion;
}
// --- Cost, bid and criteria accessors -------------------------------------
// Plain bean accessors; BigDecimal-backed metrics expose a dual API:
// getX() -> human-readable String via BigDecimalUtil.formatAsReadable,
// getXBigDecimal() -> raw stored value.
public void setCostPerAllConversion(BigDecimal costPerAllConversion) {
this.costPerAllConversion = costPerAllConversion;
}
public BigDecimal getCostPerConversion() {
return costPerConversion;
}
public void setCostPerConversion(BigDecimal costPerConversion) {
this.costPerConversion = costPerConversion;
}
public BigDecimal getCpcBid() {
return cpcBid;
}
public void setCpcBid(BigDecimal cpcBid) {
this.cpcBid = cpcBid;
}
public String getCpcBidSource() {
return cpcBidSource;
}
public void setCpcBidSource(String cpcBidSource) {
this.cpcBidSource = cpcBidSource;
}
public BigDecimal getCpmBid() {
return cpmBid;
}
public void setCpmBid(BigDecimal cpmBid) {
this.cpmBid = cpmBid;
}
public String getCpmBidSource() {
return cpmBidSource;
}
public void setCpmBidSource(String cpmBidSource) {
this.cpmBidSource = cpmBidSource;
}
public String getCriteria() {
return criteria;
}
public void setCriteria(String criteria) {
this.criteria = criteria;
}
public String getCriteriaDestinationUrl() {
return criteriaDestinationUrl;
}
public void setCriteriaDestinationUrl(String criteriaDestinationUrl) {
this.criteriaDestinationUrl = criteriaDestinationUrl;
}
public String getCriterionAttachmentLevel() {
return criterionAttachmentLevel;
}
public void setCriterionAttachmentLevel(String criterionAttachmentLevel) {
this.criterionAttachmentLevel = criterionAttachmentLevel;
}
// Dual getters: formatted String for display, raw BigDecimal for computation.
public String getCrossDeviceConversions() {
return BigDecimalUtil.formatAsReadable(crossDeviceConversions);
}
public BigDecimal getCrossDeviceConversionsBigDecimal() {
return crossDeviceConversions;
}
public void setCrossDeviceConversions(String crossDeviceConversions) {
this.crossDeviceConversions = BigDecimalUtil.parseFromNumberString(crossDeviceConversions);
}
public String getCtr() {
return BigDecimalUtil.formatAsReadable(ctr);
}
public BigDecimal getCtrBigDecimal() {
return ctr;
}
// Strips a trailing '%' before parsing (CTR arrives as e.g. "1.23%");
// null input is preserved as null rather than parsed.
public void setCtr(String ctr) {
this.ctr = (ctr == null ? null : BigDecimalUtil.parseFromNumberString(ctr.replace("%","")));
}
public String getCustomerDescriptiveName() {
return customerDescriptiveName;
}
public void setCustomerDescriptiveName(String customerDescriptiveName) {
this.customerDescriptiveName = customerDescriptiveName;
}
public String getDevice() {
return device;
}
public void setDevice(String device) {
this.device = device;
}
// --- Engagement, Gmail, interaction and segmentation accessors ------------
// NOTE(review): unlike setCtr above, the BigDecimal-parsing setters here pass
// their argument straight to BigDecimalUtil.parseFromNumberString without a
// null guard — confirm that utility tolerates null input.
public String getEngagementRate() {
return BigDecimalUtil.formatAsReadable(engagementRate);
}
public BigDecimal getEngagementRateBigDecimal() {
return engagementRate;
}
public void setEngagementRate(String engagementRate) {
this.engagementRate = BigDecimalUtil.parseFromNumberString(engagementRate);
}
public Long getEngagements() {
return engagements;
}
public void setEngagements(Long engagements) {
this.engagements = engagements;
}
public String getExternalConversionSource() {
return externalConversionSource;
}
public void setExternalConversionSource(String externalConversionSource) {
this.externalConversionSource = externalConversionSource;
}
public String getFinalAppUrls() {
return finalAppUrls;
}
public void setFinalAppUrls(String finalAppUrls) {
this.finalAppUrls = finalAppUrls;
}
public String getFinalMobileUrls() {
return finalMobileUrls;
}
public void setFinalMobileUrls(String finalMobileUrls) {
this.finalMobileUrls = finalMobileUrls;
}
public String getFinalUrls() {
return finalUrls;
}
public void setFinalUrls(String finalUrls) {
this.finalUrls = finalUrls;
}
public Long getGmailForwards() {
return gmailForwards;
}
public void setGmailForwards(Long gmailForwards) {
this.gmailForwards = gmailForwards;
}
public Long getGmailSaves() {
return gmailSaves;
}
public void setGmailSaves(Long gmailSaves) {
this.gmailSaves = gmailSaves;
}
public Long getGmailSecondaryClicks() {
return gmailSecondaryClicks;
}
public void setGmailSecondaryClicks(Long gmailSecondaryClicks) {
this.gmailSecondaryClicks = gmailSecondaryClicks;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Long getImpressions() {
return impressions;
}
public void setImpressions(Long impressions) {
this.impressions = impressions;
}
public String getInteractionRate() {
return BigDecimalUtil.formatAsReadable(interactionRate);
}
public BigDecimal getInteractionRateBigDecimal() {
return interactionRate;
}
public void setInteractionRate(String interactionRate) {
this.interactionRate = BigDecimalUtil.parseFromNumberString(interactionRate);
}
public Long getInteractions() {
return interactions;
}
public void setInteractions(Long interactions) {
this.interactions = interactions;
}
public String getInteractionTypes() {
return interactionTypes;
}
public void setInteractionTypes(String interactionTypes) {
this.interactionTypes = interactionTypes;
}
public String getIsRestrict() {
return isRestrict;
}
public void setIsRestrict(String isRestrict) {
this.isRestrict = isRestrict;
}
public String getSlot() {
return slot;
}
public void setSlot(String slot) {
this.slot = slot;
}
// --- Status, URL, value-per-conversion and video-metric accessors ---------
// Same dual-getter convention: formatted String for reporting output, raw
// BigDecimal for arithmetic; String setters parse via BigDecimalUtil.
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getTrackingUrlTemplate() {
return trackingUrlTemplate;
}
public void setTrackingUrlTemplate(String trackingUrlTemplate) {
this.trackingUrlTemplate = trackingUrlTemplate;
}
public String getUrlCustomParameters() {
return urlCustomParameters;
}
public void setUrlCustomParameters(String urlCustomParameters) {
this.urlCustomParameters = urlCustomParameters;
}
public String getUserListName() {
return userListName;
}
public void setUserListName(String userListName) {
this.userListName = userListName;
}
public String getValuePerAllConversion() {
return BigDecimalUtil.formatAsReadable(valuePerAllConversion);
}
public BigDecimal getValuePerAllConversionBigDecimal() {
return valuePerAllConversion;
}
public void setValuePerAllConversion(String valuePerAllConversion) {
this.valuePerAllConversion = BigDecimalUtil.parseFromNumberString(valuePerAllConversion);
}
public String getValuePerConversion() {
return BigDecimalUtil.formatAsReadable(valuePerConversion);
}
public BigDecimal getValuePerConversionBigDecimal() {
return valuePerConversion;
}
public void setValuePerConversion(String valuePerConversion) {
this.valuePerConversion = BigDecimalUtil.parseFromNumberString(valuePerConversion);
}
// Video quartile rates: fraction of views reaching 25/50/75/100% of the video.
public String getVideoQuartile100Rate() {
return BigDecimalUtil.formatAsReadable(videoQuartile100Rate);
}
public BigDecimal getVideoQuartile100RateBigDecimal() {
return videoQuartile100Rate;
}
public void setVideoQuartile100Rate(String videoQuartile100Rate) {
this.videoQuartile100Rate = BigDecimalUtil.parseFromNumberString(videoQuartile100Rate);
}
public String getVideoQuartile25Rate() {
return BigDecimalUtil.formatAsReadable(videoQuartile25Rate);
}
public BigDecimal getVideoQuartile25RateBigDecimal() {
return videoQuartile25Rate;
}
public void setVideoQuartile25Rate(String videoQuartile25Rate) {
this.videoQuartile25Rate = BigDecimalUtil.parseFromNumberString(videoQuartile25Rate);
}
public String getVideoQuartile50Rate() {
return BigDecimalUtil.formatAsReadable(videoQuartile50Rate);
}
public BigDecimal getVideoQuartile50RateBigDecimal() {
return videoQuartile50Rate;
}
public void setVideoQuartile50Rate(String videoQuartile50Rate) {
this.videoQuartile50Rate = BigDecimalUtil.parseFromNumberString(videoQuartile50Rate);
}
public String getVideoQuartile75Rate() {
return BigDecimalUtil.formatAsReadable(videoQuartile75Rate);
}
public BigDecimal getVideoQuartile75RateBigDecimal() {
return videoQuartile75Rate;
}
public void setVideoQuartile75Rate(String videoQuartile75Rate) {
this.videoQuartile75Rate = BigDecimalUtil.parseFromNumberString(videoQuartile75Rate);
}
public String getVideoViewRate() {
return BigDecimalUtil.formatAsReadable(videoViewRate);
}
public BigDecimal getVideoViewRateBigDecimal() {
return videoViewRate;
}
public void setVideoViewRate(String videoViewRate) {
this.videoViewRate = BigDecimalUtil.parseFromNumberString(videoViewRate);
}
public Long getVideoViews() {
return videoViews;
}
public void setVideoViews(Long videoViews) {
this.videoViews = videoViews;
}
public Long getViewThroughConversions() {
return viewThroughConversions;
}
public void setViewThroughConversions(Long viewThroughConversions) {
this.viewThroughConversions = viewThroughConversions;
}
/**
 * Builds a synthetic unique row id by joining, with '-' separators:
 * customer id, then campaign/ad-group/criterion ids (when set), the date
 * label, and every non-empty segmentation field. The resulting string is
 * stored in {@code rowId}.
 *
 * NOTE(review): field values containing '-' could in principle make two
 * different rows collide — confirm upstream values are sanitized.
 */
@Override
public void setRowId() {
// General fields for generating unique id.
StringBuilder idBuilder = new StringBuilder(getCustomerId().toString());
if (campaignId != null) {
idBuilder.append("-").append(campaignId);
}
if (adGroupId != null) {
idBuilder.append("-").append(adGroupId);
}
if (id != null) {
idBuilder.append("-").append(id);
}
idBuilder.append("-").append(getDateLabel());
// Include all segmentation fields (if set).
if (!StringUtils.isEmpty(adGroupName)) {
idBuilder.append("-").append(adGroupName);
}
if (!StringUtils.isEmpty(adGroupStatus)) {
idBuilder.append("-").append(adGroupStatus);
}
if (!StringUtils.isEmpty(adNetworkType1)) {
idBuilder.append("-").append(adNetworkType1);
}
if (!StringUtils.isEmpty(adNetworkType2)) {
idBuilder.append("-").append(adNetworkType2);
}
if (!StringUtils.isEmpty(clickType)) {
idBuilder.append("-").append(clickType);
}
if (!StringUtils.isEmpty(conversionCategoryName)) {
idBuilder.append("-").append(conversionCategoryName);
}
// conversionTrackerId is a numeric (nullable) id, hence the null check
// instead of StringUtils.isEmpty.
if (conversionTrackerId != null) {
idBuilder.append("-").append(conversionTrackerId);
}
if (!StringUtils.isEmpty(conversionTypeName)) {
idBuilder.append("-").append(conversionTypeName);
}
if (!StringUtils.isEmpty(device)) {
idBuilder.append("-").append(device);
}
if (!StringUtils.isEmpty(externalConversionSource)) {
idBuilder.append("-").append(externalConversionSource);
}
if (!StringUtils.isEmpty(slot)) {
idBuilder.append("-").append(slot);
}
this.rowId = idBuilder.toString();
}
/**
 * Value equality over every report field, including superclass fields via
 * {@code appendSuper}. Must be kept in sync with {@link #hashCode()}: any
 * field added to the class should be appended to both builders.
 */
@Override
public boolean equals(Object obj) {
if (obj == null) { return false; }
if (obj == this) { return true; }
// Exact class match (not instanceof) keeps equals symmetric across subclasses.
if (obj.getClass() != getClass()) { return false; }
AudiencePerformanceReport other = (AudiencePerformanceReport) obj;
return new EqualsBuilder()
.appendSuper(super.equals(obj))
.append(accountCurrencyCode, other.accountCurrencyCode)
.append(accountDescriptiveName, other.accountDescriptiveName)
.append(accountTimeZone, other.accountTimeZone)
.append(activeViewCpm, other.activeViewCpm)
.append(activeViewCtr, other.activeViewCtr)
.append(activeViewImpressions, other.activeViewImpressions)
.append(activeViewMeasurability, other.activeViewMeasurability)
.append(activeViewMeasurableCost, other.activeViewMeasurableCost)
.append(activeViewMeasurableImpressions, other.activeViewMeasurableImpressions)
.append(activeViewViewability, other.activeViewViewability)
.append(adGroupId, other.adGroupId)
.append(adGroupName, other.adGroupName)
.append(adGroupStatus, other.adGroupStatus)
.append(adNetworkType1, other.adNetworkType1)
.append(adNetworkType2, other.adNetworkType2)
.append(allConversionRate, other.allConversionRate)
.append(allConversions, other.allConversions)
.append(allConversionValue, other.allConversionValue)
.append(averageCost, other.averageCost)
.append(averageCpc, other.averageCpc)
.append(averageCpe, other.averageCpe)
.append(averageCpm, other.averageCpm)
.append(averageCpv, other.averageCpv)
.append(averagePosition, other.averagePosition)
.append(baseAdGroupId, other.baseAdGroupId)
.append(baseCampaignId, other.baseCampaignId)
.append(biddingStrategyId, other.biddingStrategyId)
.append(biddingStrategyName, other.biddingStrategyName)
.append(biddingStrategyType, other.biddingStrategyType)
.append(bidModifier, other.bidModifier)
.append(campaignId, other.campaignId)
.append(campaignName, other.campaignName)
.append(campaignStatus, other.campaignStatus)
.append(clicks, other.clicks)
.append(clickType, other.clickType)
.append(conversionCategoryName, other.conversionCategoryName)
.append(conversionRate, other.conversionRate)
.append(conversions, other.conversions)
.append(conversionTrackerId, other.conversionTrackerId)
.append(conversionTypeName, other.conversionTypeName)
.append(conversionValue, other.conversionValue)
.append(cost, other.cost)
.append(costPerAllConversion, other.costPerAllConversion)
.append(costPerConversion, other.costPerConversion)
.append(cpcBid, other.cpcBid)
.append(cpcBidSource, other.cpcBidSource)
.append(cpmBid, other.cpmBid)
.append(cpmBidSource, other.cpmBidSource)
.append(criteria, other.criteria)
.append(criteriaDestinationUrl, other.criteriaDestinationUrl)
.append(criterionAttachmentLevel, other.criterionAttachmentLevel)
.append(crossDeviceConversions, other.crossDeviceConversions)
.append(ctr, other.ctr)
.append(customerDescriptiveName, other.customerDescriptiveName)
.append(device, other.device)
.append(engagementRate, other.engagementRate)
.append(engagements, other.engagements)
.append(externalConversionSource, other.externalConversionSource)
.append(finalAppUrls, other.finalAppUrls)
.append(finalMobileUrls, other.finalMobileUrls)
.append(finalUrls, other.finalUrls)
.append(gmailForwards, other.gmailForwards)
.append(gmailSaves, other.gmailSaves)
.append(gmailSecondaryClicks, other.gmailSecondaryClicks)
.append(id, other.id)
.append(impressions, other.impressions)
.append(interactionRate, other.interactionRate)
.append(interactions, other.interactions)
.append(interactionTypes, other.interactionTypes)
.append(isRestrict, other.isRestrict)
.append(slot, other.slot)
.append(status, other.status)
.append(trackingUrlTemplate, other.trackingUrlTemplate)
.append(urlCustomParameters, other.urlCustomParameters)
.append(userListName, other.userListName)
.append(valuePerAllConversion, other.valuePerAllConversion)
.append(valuePerConversion, other.valuePerConversion)
.append(videoQuartile100Rate, other.videoQuartile100Rate)
.append(videoQuartile25Rate, other.videoQuartile25Rate)
.append(videoQuartile50Rate, other.videoQuartile50Rate)
.append(videoQuartile75Rate, other.videoQuartile75Rate)
.append(videoViewRate, other.videoViewRate)
.append(videoViews, other.videoViews)
.append(viewThroughConversions, other.viewThroughConversions)
.isEquals();
}
/**
 * Hash over the same field set (and superclass state) as
 * {@link #equals(Object)}, so equal objects always hash equally. The two
 * builders must stay in lockstep when fields are added or removed.
 */
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.appendSuper(super.hashCode())
.append(accountCurrencyCode)
.append(accountDescriptiveName)
.append(accountTimeZone)
.append(activeViewCpm)
.append(activeViewCtr)
.append(activeViewImpressions)
.append(activeViewMeasurability)
.append(activeViewMeasurableCost)
.append(activeViewMeasurableImpressions)
.append(activeViewViewability)
.append(adGroupId)
.append(adGroupName)
.append(adGroupStatus)
.append(adNetworkType1)
.append(adNetworkType2)
.append(allConversionRate)
.append(allConversions)
.append(allConversionValue)
.append(averageCost)
.append(averageCpc)
.append(averageCpe)
.append(averageCpm)
.append(averageCpv)
.append(averagePosition)
.append(baseAdGroupId)
.append(baseCampaignId)
.append(biddingStrategyId)
.append(biddingStrategyName)
.append(biddingStrategyType)
.append(bidModifier)
.append(campaignId)
.append(campaignName)
.append(campaignStatus)
.append(clicks)
.append(clickType)
.append(conversionCategoryName)
.append(conversionRate)
.append(conversions)
.append(conversionTrackerId)
.append(conversionTypeName)
.append(conversionValue)
.append(cost)
.append(costPerAllConversion)
.append(costPerConversion)
.append(cpcBid)
.append(cpcBidSource)
.append(cpmBid)
.append(cpmBidSource)
.append(criteria)
.append(criteriaDestinationUrl)
.append(criterionAttachmentLevel)
.append(crossDeviceConversions)
.append(ctr)
.append(customerDescriptiveName)
.append(device)
.append(engagementRate)
.append(engagements)
.append(externalConversionSource)
.append(finalAppUrls)
.append(finalMobileUrls)
.append(finalUrls)
.append(gmailForwards)
.append(gmailSaves)
.append(gmailSecondaryClicks)
.append(id)
.append(impressions)
.append(interactionRate)
.append(interactions)
.append(interactionTypes)
.append(isRestrict)
.append(slot)
.append(status)
.append(trackingUrlTemplate)
.append(urlCustomParameters)
.append(userListName)
.append(valuePerAllConversion)
.append(valuePerConversion)
.append(videoQuartile100Rate)
.append(videoQuartile25Rate)
.append(videoQuartile50Rate)
.append(videoQuartile75Rate)
.append(videoViewRate)
.append(videoViews)
.append(viewThroughConversions)
.toHashCode();
}
}
| |
/*
* Copyright 2012 JBoss by Red Hat.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.task;
import static org.junit.Assert.assertEquals;
import java.io.StringReader;
import java.util.HashMap;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import org.jbpm.services.task.impl.factories.TaskFactory;
import org.jbpm.services.task.impl.model.TaskImpl;
import org.junit.Assume;
import org.junit.Test;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.Task;
import bitronix.tm.TransactionManagerServices;
/**
*
*/
/**
 * Base tests for the parent/sub-task strategies: the default
 * {@code NoAction}, {@code EndParentOnAllSubTasksEnd} (auto-complete parent)
 * and {@code SkipAllSubTasksOnParentSkip}. Subclasses supply the
 * persistence unit via {@link #getEmf()}.
 *
 * <p>Fix over previous revision: several {@code assertEquals} calls passed
 * arguments as (actual, expected); JUnit's contract is (expected, actual),
 * so failure messages were misleading. Pass/fail behavior is unchanged.
 */
public abstract class SubTasksBaseTest extends HumanTaskServicesBaseTest{

    /** @return the entity manager factory backing the task service under test. */
    public abstract EntityManagerFactory getEmf();

    /**
     * Default strategy ({@code SubTasksStrategy.NoAction}): completing the
     * sub task leaves the parent in progress; it must be completed explicitly.
     */
    @Test
    public void noActionStrategy() throws Exception {
        // One potential owner, should go straight to state Reserved
        String parentTaskstr = "(with (new Task()) { priority = 55, taskData = (with( new TaskData()) { } ), ";
        parentTaskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('salaboy') ],businessAdministrators = [ new User('Administrator') ], }),";
        parentTaskstr += "names = [ new I18NText( 'en-UK', 'This is my task Parent name')] })";
        // By default the task will contain a SubTask SubTasksStrategy.NoAction
        Task parentTask = TaskFactory.evalTask(new StringReader(parentTaskstr));
        taskService.addTask(parentTask, new HashMap<String, Object>());
        long taskParentId = parentTask.getId();
        // Single potential owner, so the task is Reserved (not Created).
        Task parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(Status.Reserved, parentTask1.getTaskData().getStatus());
        taskService.start(taskParentId, "salaboy");
        String child1Taskstr = "(with (new Task()) { priority = 55, taskData = (with( new TaskData()) { parentId= "+taskParentId+" } ), ";
        child1Taskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('Darth Vader') ],businessAdministrators = [ new User('Administrator') ], }),";
        child1Taskstr += "names = [ new I18NText( 'en-UK', 'This is my task Child 1 name')] })";
        Task child1Task = TaskFactory.evalTask(new StringReader(child1Taskstr));
        taskService.addTask(child1Task, new HashMap<String, Object>());
        long child1TaskId = child1Task.getId();
        // Test if the task is successfully created
        assertEquals(1, taskService.getPendingSubTasksByParent(taskParentId));
        Task childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(taskParentId, childTask1.getTaskData().getParentId());
        taskService.start(child1TaskId, "Darth Vader");
        taskService.complete(child1TaskId, "Darth Vader", null);
        childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(Status.Completed, childTask1.getTaskData().getStatus());
        // NoAction: completing the child must NOT complete the parent.
        parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(Status.InProgress, parentTask1.getTaskData().getStatus());
        assertEquals(0, taskService.getPendingSubTasksByParent(taskParentId));
        taskService.complete(taskParentId, "salaboy", null);
        parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(Status.Completed, parentTask1.getTaskData().getStatus());
    }

    /**
     * {@code EndParentOnAllSubTasksEnd}: completing the last pending sub task
     * auto-completes the parent.
     */
    @Test
    public void onSubtaskCompletionAutoCompleteParentStrategy() throws Exception {
        // One potential owner, should go straight to state Reserved
        String parentTaskstr = "(with (new Task()) { subTaskStrategy = SubTasksStrategy.EndParentOnAllSubTasksEnd, priority = 55, taskData = (with( new TaskData()) { } ), ";
        parentTaskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('salaboy') ],businessAdministrators = [ new User('Administrator') ], }),";
        parentTaskstr += "names = [ new I18NText( 'en-UK', 'This is my task Parent name')] })";
        Task parentTask = TaskFactory.evalTask(new StringReader(parentTaskstr));
        taskService.addTask(parentTask, new HashMap<String, Object>());
        long taskParentId = parentTask.getId();
        // Single potential owner, so the task is Reserved (not Created).
        Task parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(Status.Reserved, parentTask1.getTaskData().getStatus());
        taskService.start(taskParentId, "salaboy");
        String child1Taskstr = "(with (new Task()) { priority = 55, taskData = (with( new TaskData()) { parentId= "+taskParentId+" } ), ";
        child1Taskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('Darth Vader') ],businessAdministrators = [ new User('Administrator') ], }),";
        child1Taskstr += "names = [ new I18NText( 'en-UK', 'This is my task Child 1 name')] })";
        Task child1Task = TaskFactory.evalTask(new StringReader(child1Taskstr));
        taskService.addTask(child1Task, new HashMap<String, Object>());
        long child1TaskId = child1Task.getId();
        // Test if the task is successfully created
        assertEquals(1, taskService.getPendingSubTasksByParent(taskParentId));
        Task childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(taskParentId, childTask1.getTaskData().getParentId());
        taskService.start(child1TaskId, "Darth Vader");
        taskService.complete(child1TaskId, "Darth Vader", null);
        childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(Status.Completed, childTask1.getTaskData().getStatus());
        // Completing the only sub task auto-completes the parent.
        parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(0, taskService.getPendingSubTasksByParent(taskParentId));
        assertEquals(Status.Completed, parentTask1.getTaskData().getStatus());
    }

    /**
     * Loop and create 500 tasks. The reason to do so, is Java caches Long objects for small numbers
     * (http://stackoverflow.com/questions/3130311/weird-integer-boxing-in-java), so the ProcessSubTaskCommand was passing
     * the test, even when failing once there were a sufficient number of tasks in the system.
     *
     * @throws Exception
     */
    @Test
    public void onSubtaskCompletionAutoCompleteParentStrategyWithLotsOfTasks() throws Exception {
        String tableName = TaskImpl.class.getAnnotation(Table.class).name();
        // Bump the task-id sequence past the Long cache (-128..127) so that
        // identity-based (==) comparisons of boxed ids would be caught.
        TransactionManagerServices.getTransactionManager().begin();
        try {
            EntityManager em = getEmf().createEntityManager();
            Query query = em.createNativeQuery(
                    "select SEQUENCE_NAME from INFORMATION_SCHEMA.COLUMNS "
                    + "where TABLE_NAME = '" + tableName.toUpperCase() + "' "
                    + " and SEQUENCE_NAME IS NOT null");
            String seqName = (String) query.getSingleResult();
            query = em.createNativeQuery("alter sequence " + seqName + " increment by 1000");
            query.executeUpdate();
        } catch( Throwable t ) {
            // underlying database is NOT h2, skip test
            Assume.assumeFalse(true);
        } finally {
            TransactionManagerServices.getTransactionManager().commit();
        }
        // One potential owner, should go straight to state Reserved
        String parentTaskstr = "(with (new Task()) { subTaskStrategy = SubTasksStrategy.EndParentOnAllSubTasksEnd, priority = 55, taskData = (with( new TaskData()) { } ), ";
        parentTaskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('salaboy') ],businessAdministrators = [ new User('Administrator') ], }),";
        parentTaskstr += "names = [ new I18NText( 'en-UK', 'This is my task Parent name')] })";
        Task parentTask = TaskFactory.evalTask(new StringReader(parentTaskstr));
        taskService.addTask(parentTask, new HashMap<String, Object>());
        long taskParentId = parentTask.getId();
        // Single potential owner, so the task is Reserved (not Created).
        Task parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(Status.Reserved, parentTask1.getTaskData().getStatus());
        taskService.start(taskParentId, "salaboy");
        String child1Taskstr = "(with (new Task()) { priority = 55, taskData = (with( new TaskData()) { parentId= "+taskParentId+" } ), ";
        child1Taskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('Darth Vader') ],businessAdministrators = [ new User('Administrator') ], }),";
        child1Taskstr += "names = [ new I18NText( 'en-UK', 'This is my task Child 1 name')] })";
        Task child1Task = TaskFactory.evalTask(new StringReader(child1Taskstr));
        taskService.addTask(child1Task, new HashMap<String, Object>());
        long child1TaskId = child1Task.getId();
        // Test if the task is successfully created
        assertEquals(1, taskService.getPendingSubTasksByParent(taskParentId));
        Task childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(taskParentId, childTask1.getTaskData().getParentId());
        taskService.start(child1TaskId, "Darth Vader");
        taskService.complete(child1TaskId, "Darth Vader", null);
        childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(Status.Completed, childTask1.getTaskData().getStatus());
        parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(0, taskService.getPendingSubTasksByParent(taskParentId));
        assertEquals(Status.Completed, parentTask1.getTaskData().getStatus());
    }

    /**
     * {@code SkipAllSubTasksOnParentSkip}: skipping the parent marks both the
     * parent and all of its sub tasks Obsolete.
     */
    @Test
    public void onParentAbortCompleteAllSubTasksStrategy() throws Exception {
        // One potential owner, should go straight to state Reserved
        // Notice skippable in task data
        String parentTaskstr = "(with (new Task()) { subTaskStrategy = SubTasksStrategy.SkipAllSubTasksOnParentSkip , priority = 55, "
                + "taskData = (with( new TaskData()) { skipable = true } ), ";
        parentTaskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('salaboy') ],businessAdministrators = [ new User('Administrator') ], }),";
        parentTaskstr += "names = [ new I18NText( 'en-UK', 'This is my task Parent name')] })";
        Task parentTask = TaskFactory.evalTask(new StringReader(parentTaskstr));
        taskService.addTask(parentTask, new HashMap<String, Object>());
        long taskParentId = parentTask.getId();
        // Single potential owner, so the task is Reserved (not Created).
        Task parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(Status.Reserved, parentTask1.getTaskData().getStatus());
        taskService.start(taskParentId, "salaboy");
        String child1Taskstr = "(with (new Task()) { priority = 55, taskData = (with( new TaskData()) { skipable = true, parentId= "+taskParentId+" } ), ";
        child1Taskstr += "peopleAssignments = (with ( new PeopleAssignments() ) { potentialOwners = [new User('Darth Vader') ],businessAdministrators = [ new User('Administrator') ], }),";
        child1Taskstr += "names = [ new I18NText( 'en-UK', 'This is my task Child 1 name')] })";
        Task child1Task = TaskFactory.evalTask(new StringReader(child1Taskstr));
        taskService.addTask(child1Task, new HashMap<String, Object>());
        long child1TaskId = child1Task.getId();
        // Test if the task is successfully created
        assertEquals(1, taskService.getPendingSubTasksByParent(taskParentId));
        Task childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(taskParentId, childTask1.getTaskData().getParentId());
        taskService.start(child1TaskId, "Darth Vader");
        // Skipping the parent cascades to the in-progress child.
        taskService.skip(taskParentId, "salaboy");
        parentTask1 = taskService.getTaskById(taskParentId);
        assertEquals(0, taskService.getPendingSubTasksByParent(taskParentId));
        assertEquals(Status.Obsolete, parentTask1.getTaskData().getStatus());
        childTask1 = taskService.getTaskById(child1TaskId);
        assertEquals(Status.Obsolete, childTask1.getTaskData().getStatus());
    }
}
| |
/*
* Copyright 2014-2022 Web Firm Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webfirmframework.wffweb.server.page;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.webfirmframework.wffweb.InvalidTagException;
import com.webfirmframework.wffweb.internal.security.object.SecurityObject;
import com.webfirmframework.wffweb.internal.server.page.js.WffJsFile;
import com.webfirmframework.wffweb.internal.tag.html.listener.ChildTagAppendListener;
import com.webfirmframework.wffweb.tag.html.AbstractHtml;
import com.webfirmframework.wffweb.tag.html.TagUtil;
import com.webfirmframework.wffweb.tag.html.html5.attribute.global.DataWffId;
import com.webfirmframework.wffweb.util.data.NameValue;
public final class ChildTagAppendListenerImpl implements ChildTagAppendListener {
private static final long serialVersionUID = 1L;

// Fix: the logger was previously obtained for ChildTagRemoveListenerImpl
// (copy-paste from the sibling listener), so all log records from this class
// were attributed to the wrong logger name.
private static final Logger LOGGER = Logger.getLogger(ChildTagAppendListenerImpl.class.getName());

// Security token required by AbstractHtml's package-restricted accessors.
private final SecurityObject accessObject;

// Page whose client side receives the append notifications.
private final BrowserPage browserPage;

// Lookup of tags by their data-wff-id attribute value; updated on append.
private final Map<String, AbstractHtml> tagByWffId;

/** Unusable no-arg constructor; this listener must be built with its page. */
@SuppressWarnings("unused")
private ChildTagAppendListenerImpl() {
    throw new AssertionError();
}

/**
 * @param browserPage  the owning browser page
 * @param accessObject security object for restricted tag access
 * @param tagByWffId   shared wff-id to tag map to keep updated
 */
ChildTagAppendListenerImpl(final BrowserPage browserPage, final SecurityObject accessObject,
        final Map<String, AbstractHtml> tagByWffId) {
    this.browserPage = browserPage;
    this.accessObject = accessObject;
    this.tagByWffId = tagByWffId;
}
/**
 * Notified when a single child tag is appended to a parent. Assigns a
 * data-wff-id to the appended tag and all of its nested tags, registers them
 * in {@code tagByWffId}, and pushes an APPENDED_CHILD_TAG task to the client
 * containing the parent's tag name/wff-id and the child's serialized bytes.
 */
@Override
public void childAppended(@SuppressWarnings("exports") final Event event) {
try {
final AbstractHtml parentTag = event.parentTag();
final AbstractHtml appendedChildTag = event.appendedChildTag();
// add data-wff-id to all tags including nested tags
// Iterative DFS over the child subtree using an explicit stack.
final Deque<Set<AbstractHtml>> childrenStack = new ArrayDeque<>();
// passed 2 instead of 1 because the load factor is 0.75f
final Set<AbstractHtml> initialSet = new LinkedHashSet<>(2);
initialSet.add(appendedChildTag);
childrenStack.push(initialSet);
Set<AbstractHtml> children;
while ((children = childrenStack.poll()) != null) {
for (final AbstractHtml child : children) {
if (TagUtil.isTagged(child)) {
// Lazily assign a wff-id, then (re)register the tag in the map.
if (child.getDataWffId() == null) {
child.setDataWffId(browserPage.getNewDataWffId());
}
tagByWffId.put(child.getDataWffId().getValue(), child);
}
final Set<AbstractHtml> subChildren = child.getChildren(accessObject);
if (subChildren != null && subChildren.size() > 0) {
childrenStack.push(subChildren);
}
}
}
final DataWffId dataWffId = parentTag.getDataWffId();
if (dataWffId == null && LOGGER.isLoggable(Level.WARNING)) {
LOGGER.warning("Could not find data-wff-id from direct parent tag");
}
// @formatter:off
// appended child task format :-
// { "name": task_byte, "values" : [invoke_method_byte_from_Task_enum]}, {
// "name": data-wff-id, "values" : [ parent_tag_name, html_string ]}
// { "name": 2, "values" : [[3]]}, { "name":"C55", "values" : ["body",
// "<div><div></div></div>"]}
// @formatter:on
final NameValue task = Task.APPENDED_CHILD_TAG.getTaskNameValue();
final NameValue nameValue = new NameValue();
final byte[][] tagNameAndWffId = DataWffIdUtil.getIndexedTagNameAndWffId(accessObject, parentTag);
final byte[] parentWffIdBytes = tagNameAndWffId[1];
nameValue.setName(parentWffIdBytes);
final byte[] parentTagName = tagNameAndWffId[0];
// Serialization format depends on the compile-time compression flag.
if (WffJsFile.COMPRESSED_WFF_DATA) {
nameValue.setValues(parentTagName,
appendedChildTag.toCompressedWffBMBytesV2(StandardCharsets.UTF_8, accessObject));
} else {
nameValue.setValues(parentTagName, appendedChildTag.toWffBMBytes(StandardCharsets.UTF_8, accessObject));
}
browserPage.push(task, nameValue);
} catch (final InvalidTagException e) {
// Empty NoTag children cannot be serialized; warn and drop the push.
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.log(Level.WARNING, "Do not append/add an empty NoTag as child tag, eg: new NoTag(null, \"\").\n"
.concat("To make a tag's children as empty then invoke removeAllChildren() method in it."), e);
}
}
}
/**
 * Notified when multiple child tags are appended to a parent. Assigns a
 * data-wff-id to every appended tag (including nested tags), registers them
 * in {@code tagByWffId}, and pushes a single APPENDED_CHILDREN_TAGS task
 * carrying one NameValue per successfully serialized child.
 *
 * <p>Change over previous revision: the parent's indexed tag name / wff-id
 * pair is loop-invariant, so it is now computed once instead of once per
 * appended child.
 */
@Override
public void childrenAppended(@SuppressWarnings("exports") final Event event) {
    final AbstractHtml parentTag = event.parentTag();
    final Collection<? extends AbstractHtml> appendedChildTags = event.appendedChildrenTags();

    // Assign data-wff-ids and register every tag in the appended subtrees
    // (iterative DFS with an explicit stack, no recursion).
    final Deque<Set<AbstractHtml>> childrenStack = new ArrayDeque<>();
    childrenStack.push(new LinkedHashSet<AbstractHtml>(appendedChildTags));
    Set<AbstractHtml> children;
    while ((children = childrenStack.poll()) != null) {
        for (final AbstractHtml child : children) {
            if (TagUtil.isTagged(child)) {
                if (child.getDataWffId() == null) {
                    child.setDataWffId(browserPage.getNewDataWffId());
                }
                tagByWffId.put(child.getDataWffId().getValue(), child);
            }
            final Set<AbstractHtml> subChildren = child.getChildren(accessObject);
            if (subChildren != null && !subChildren.isEmpty()) {
                childrenStack.push(subChildren);
            }
        }
    }

    final DataWffId dataWffId = parentTag.getDataWffId();
    if (dataWffId == null && LOGGER.isLoggable(Level.WARNING)) {
        LOGGER.warning("Could not find data-wff-id from direct parent tag");
    }

    // @formatter:off
    // appended child task format :-
    // { "name": task_byte, "values" : [invoke_method_byte_from_Task_enum]}, {
    // "name": data-wff-id, "values" : [ parent_tag_name, html_string ]}
    // { "name": 2, "values" : [[3]]}, { "name":"C55", "values" : ["body",
    // "<div><div></div></div>"]}
    // @formatter:on
    final NameValue task = Task.APPENDED_CHILDREN_TAGS.getTaskNameValue();
    final Deque<NameValue> nameValues = new ArrayDeque<>(appendedChildTags.size() + 1);
    nameValues.add(task);

    // Loop-invariant: same parent for every appended child, compute once.
    final byte[][] tagNameAndWffId = DataWffIdUtil.getIndexedTagNameAndWffId(accessObject, parentTag);
    final byte[] parentTagName = tagNameAndWffId[0];
    final byte[] parentWffIdBytes = tagNameAndWffId[1];

    for (final AbstractHtml appendedChildTag : appendedChildTags) {
        final NameValue nameValue = new NameValue();
        nameValue.setName(parentWffIdBytes);
        try {
            // Serialization format depends on the compile-time compression flag.
            if (WffJsFile.COMPRESSED_WFF_DATA) {
                nameValue.setValues(parentTagName,
                        appendedChildTag.toCompressedWffBMBytesV2(StandardCharsets.UTF_8, accessObject));
            } else {
                nameValue.setValues(parentTagName,
                        appendedChildTag.toWffBMBytes(StandardCharsets.UTF_8, accessObject));
            }
        } catch (final InvalidTagException e) {
            // Empty NoTag children cannot be serialized; warn and skip just
            // this child, the remaining children are still pushed.
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.log(Level.WARNING,
                        "Do not append/add an empty NoTag as child tag, eg: new NoTag(null, \"\").\n".concat(
                                "To make a tag's children as empty then invoke removeAllChildren() method in it."),
                        e);
            }
            continue;
        }
        nameValues.add(nameValue);
    }
    browserPage.push(nameValues.toArray(new NameValue[0]));
}
/**
 * Registers the given tag and every tag in its subtree in the wffid map
 * (tagByWffId), keyed by data-wff-id value. Tags without a data-wff-id
 * attribute are skipped.
 *
 * @param tag root of the subtree to register
 * @since 2.0.0
 * @author WFF
 */
private void addInWffIdMap(final AbstractHtml tag) {
// iterative traversal; each pending entry is a single tag from the subtree
final Deque<AbstractHtml> pendingTags = new ArrayDeque<>();
pendingTags.push(tag);
AbstractHtml current;
while ((current = pendingTags.poll()) != null) {
if (TagUtil.isTagged(current)) {
final DataWffId dataWffId = current.getDataWffId();
// only tags that already carry a data-wff-id are registered
if (dataWffId != null) {
tagByWffId.put(dataWffId.getValue(), current);
}
}
final Set<AbstractHtml> childTags = current.getChildren(accessObject);
if (childTags != null && !childTags.isEmpty()) {
for (final AbstractHtml childTag : childTags) {
pendingTags.push(childTag);
}
}
}
}
// Handles a single "child moved" tag event: pushes one MOVED_CHILDREN_TAGS task
// carrying [current_parent_tag_name, moved_child_wff_id, moved_child_tag_name]
// and registers the moved subtree in the wffid map.
@Override
public void childMoved(@SuppressWarnings("exports") final ChildMovedEvent event) {
// @formatter:off
// moved children tags from some parents to another task format (in this method
// moving only one child) :-
// { "name": task_byte, "values" : [MOVED_CHILDREN_TAGS_byte_from_Task_enum]}, {
// "name": new_parent_data-wff-id, "values" : [ new_parent_tag_name,
// child_data-wff-id, child_tag_name ]}
// { "name": 2, "values" : [[3]]}, { "name":"C55", "values" : ["div", "S255",
// "span"]}
// @formatter:on
final AbstractHtml currentParentTag = event.currentParentTag();
final AbstractHtml movedChildTag = event.movedChildTag();
final NameValue task = Task.MOVED_CHILDREN_TAGS.getTaskNameValue();
final DataWffId currentParentDataWffIdAttr = currentParentTag.getDataWffId();
if (currentParentDataWffIdAttr != null) {
final NameValue nameValue = new NameValue();
final byte[][] currentParentTagNameAndWffId = DataWffIdUtil.getIndexedTagNameAndWffId(accessObject,
currentParentTag);
final byte[] parentWffIdBytes = currentParentTagNameAndWffId[1];
nameValue.setName(parentWffIdBytes);
final byte[] currentTagName = currentParentTagNameAndWffId[0];
final byte[][] movedChildTagNameAndWffId = DataWffIdUtil.getIndexedTagNameAndWffId(accessObject,
movedChildTag);
final byte[] movedChildWffIdBytes = movedChildTagNameAndWffId[1];
final byte[] movedChildTagName = movedChildTagNameAndWffId[0];
nameValue.setValues(currentTagName, movedChildWffIdBytes, movedChildTagName);
browserPage.push(task, nameValue);
// the moved subtree may contain tags not yet present in the wffid map
addInWffIdMap(movedChildTag);
} else {
// bug fix: the null check above is on currentParentTag, so the message
// must name the current parent (it previously said "previousParentTag")
LOGGER.severe("Could not find data-wff-id from currentParentTag");
}
}
// Handles a batch of append-or-move child events. For each event: when
// previousParentTag is null the child is brand new and its serialized markup is
// included; otherwise only the ids/names are sent and the client moves the
// existing node. Pushes a single MOVED_CHILDREN_TAGS task for the whole batch.
@Override
public void childrendAppendedOrMoved(@SuppressWarnings("exports") final Collection<ChildMovedEvent> events) {
// @formatter:off
// moved children tags from some parents to another task format (in this method
// moving only one child) :-
// { "name": task_byte, "values" : [MOVED_CHILDREN_TAGS_byte_from_Task_enum]}, {
// "name": new_parent_data-wff-id, "values" : [ new_parent_tag_name,
// child_data-wff-id, child_tag_name ]}
// { "name": 2, "values" : [[3]]}, { "name":"C55", "values" : ["div", "S255",
// "span"]}
// @formatter:on
try {
final NameValue task = Task.MOVED_CHILDREN_TAGS.getTaskNameValue();
final Deque<NameValue> nameValues = new ArrayDeque<>();
nameValues.add(task);
for (final ChildMovedEvent event : events) {
// if previousParentTag == null it means it's appending a new
// child tag
// this checking is done at client side
final AbstractHtml previousParentTag = event.previousParentTag();
final AbstractHtml currentParentTag = event.currentParentTag();
final AbstractHtml movedChildTag = event.movedChildTag();
final DataWffId currentParentDataWffIdAttr = currentParentTag.getDataWffId();
if (currentParentDataWffIdAttr != null) {
final NameValue nameValue = new NameValue();
final byte[][] currentParentTagNameAndWffId = DataWffIdUtil.getIndexedTagNameAndWffId(accessObject,
currentParentTag);
final byte[] parentWffIdBytes = currentParentTagNameAndWffId[1];
nameValue.setName(parentWffIdBytes);
final byte[] currentTagName = currentParentTagNameAndWffId[0];
// NoTag has no tag name; it gets a dedicated placeholder name/id pair
final boolean noTag = movedChildTag.getTagName() == null;
final byte[][] movedChildTagNameAndWffId = noTag ? DataWffIdUtil.getTagNameAndWffIdForNoTag()
: DataWffIdUtil.getIndexedTagNameAndWffId(accessObject, movedChildTag);
final byte[] movedChildWffIdBytes = movedChildTagNameAndWffId[1];
final byte[] movedChildTagName = movedChildTagNameAndWffId[0];
if (previousParentTag == null) {
try {
// if the previousParentTag is null it means it's a
// new
// tag
if (WffJsFile.COMPRESSED_WFF_DATA) {
nameValue.setValues(currentTagName, movedChildWffIdBytes, movedChildTagName,
movedChildTag.toCompressedWffBMBytesV2(StandardCharsets.UTF_8, accessObject));
} else {
nameValue.setValues(currentTagName, movedChildWffIdBytes, movedChildTagName,
movedChildTag.toWffBMBytes(StandardCharsets.UTF_8, accessObject));
}
} catch (final InvalidTagException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.log(Level.WARNING,
"Do not append/add an empty NoTag as child tag, eg: new NoTag(null, \"\").\n"
.concat("To make a tag's children as empty then invoke removeAllChildren() method in it."),
e);
}
// skip only the invalid event; the rest of the batch is still pushed
continue;
}
} else {
nameValue.setValues(currentTagName, movedChildWffIdBytes, movedChildTagName);
}
nameValues.add(nameValue);
addInWffIdMap(movedChildTag);
} else {
// bug fix: the null check above is on currentParentTag, so the message
// must name the current parent (it previously said "previousParentTag")
LOGGER.severe("Could not find data-wff-id from currentParentTag");
}
}
browserPage.push(nameValues.toArray(new NameValue[nameValues.size()]));
} catch (final NoSuchElementException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.log(Level.WARNING, "Do not append/add an empty NoTag as child tag, eg: new NoTag(null, \"\").\n"
.concat("To make a tag's children as empty then invoke removeAllChildren() method in it."), e);
}
} catch (final UnsupportedEncodingException e) {
if (LOGGER.isLoggable(Level.SEVERE)) {
LOGGER.log(Level.SEVERE, e.getMessage(), e);
}
}
}
}
| |
/**
*
*/
package io.pkts.tools;
import io.pkts.Pcap;
import io.pkts.packet.IPPacket;
import io.pkts.packet.sip.SipPacket;
import io.pkts.packet.sip.SipParseException;
import io.pkts.streams.FragmentListener;
import io.pkts.streams.SipStatistics;
import io.pkts.streams.SipStream;
import io.pkts.streams.SipStream.CallState;
import io.pkts.streams.Stream;
import io.pkts.streams.StreamHandler;
import io.pkts.streams.StreamId;
import io.pkts.streams.StreamListener;
import io.pkts.streams.impl.DefaultStreamHandler;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
/**
 * Simple class that takes one or more pcaps, separates out all SIP dialogs
 * from each other and writes them to their own pcap files.
 *
 * This class also serves as an example of how to use the yajpcap library.
 *
 * @author jonas@jonasborjesson.com
 */
public final class SipSplitter implements StreamListener<SipPacket>, FragmentListener {
// streams already counted, keyed by stream id, to avoid double counting
private final Map<StreamId, SipStream> streams = new HashMap<StreamId, SipStream>(20000);
public int count;
public int endCount;
public int fragmented;
public int rejected;
public int completed;
public int failed;
public int trying;
public int inCall;
public int ringing;
public int cancelled;
public int calls;
public long maxPDD;
public long totalPDD;
public long pddCount;
public long maxCallDuration;
public long totalCallDuration;
public long callDurationCount;
/**
 * Default constructor.
 */
public SipSplitter() {
}
/**
 * Placeholder for writing every collected stream to its own pcap file in
 * the given directory; the implementation is currently commented out.
 *
 * @param pcap      the source pcap used to create output streams
 * @param directory target directory, "." when null or empty
 * @throws Exception on any write failure
 */
public void saveAll(final Pcap pcap, final String directory) throws Exception {
/*
 * final String dir = (directory == null) || directory.isEmpty() ? "." :
 * directory; for (final SipStream stream : this.streams) { final
 * StreamId id = stream.getStreamIdentifier(); final PcapOutputStream
 * out = pcap.createOutputStream(new FileOutputStream(dir + "/" + id +
 * ".pcap")); try { stream.write(out); } catch (final IOException e) {
 * e.printStackTrace(); } finally { out.flush(); out.close(); } }
 */
}
/**
 * Reads a hard-coded pcap, feeds it through a stream handler and prints
 * call statistics to stdout.
 */
public static void main(final String[] args) throws Exception {
BasicConfigurator.configure();
Logger.getRootLogger().setLevel(Level.WARN);
final SipSplitter splitter = new SipSplitter();
final String filename = "/home/jonas/development/private/aboutsip/modules/yajpcap/src/test/resources/com/aboutsip/yajpcap/sipp.pcap";
// final String filename = "/home/jonas/development/private/aboutsip/big_pcaps/openser-udp-5060_01871_20121112132549.pcap";
final long start = System.currentTimeMillis();
final InputStream is = new FileInputStream(filename);
final Pcap pcap = Pcap.openStream(is);
final StreamHandler streamHandler = new DefaultStreamHandler();
streamHandler.setFragmentListener(splitter);
streamHandler.addStreamListener(splitter);
pcap.loop(streamHandler);
pcap.close();
final long stop = System.currentTimeMillis();
System.out.println("Processing time(s): " + (stop - start) / 1000.0);
// System.out.println("Fragmented pkts: " + ((DefaultStreamHandler) streamHandler).getNoFragmentedPackets());
final SipStatistics stats = streamHandler.getSipStatistics();
System.out.println(stats.dumpInfo());
// streams still open at EOF were never passed to endStream; count them now
final Map<StreamId, ? extends Stream> unfinishedStreams = streamHandler.getStreams();
for (final Map.Entry<StreamId, ? extends Stream> entry : unfinishedStreams.entrySet()) {
final SipStream stream = (SipStream) entry.getValue();
splitter.count(stream);
}
System.out.println("Start: " + splitter.count);
System.out.println("End : " + splitter.endCount);
System.out.println("Calls : " + splitter.calls);
System.out.println("Fragmented : " + splitter.fragmented);
System.out.println("Max PDD : " + splitter.maxPDD);
System.out.println("Avg PDD : " + splitter.totalPDD / (double) splitter.pddCount);
System.out.println("Max Call Duration : " + splitter.maxCallDuration);
System.out.println("Avg Call Duration : " + splitter.totalCallDuration / (double) splitter.callDurationCount);
System.out.println("Trying : " + splitter.trying);
// bug fix: this line previously printed splitter.trying for "Ringing"
System.out.println("Ringing : " + splitter.ringing);
System.out.println("In Call : " + splitter.inCall);
System.out.println("Rejected : " + splitter.rejected);
System.out.println("Completed : " + splitter.completed);
System.out.println("Failed : " + splitter.failed);
System.out.println("Cancelled : " + splitter.cancelled);
// NOTE(review): counts all final responses with status >= 200 as "bad",
// which includes 2xx successes — confirm the intended threshold
final int[] responses = stats.totalResponses();
int count = 0;
for (int i = 200; i < responses.length; ++i) {
count += responses[i];
}
System.out.println(" total bad responses" + count);
// splitter.saveAll(pcap, null);
}
@Override
public void startStream(final Stream<SipPacket> stream, final SipPacket message) {
try {
// a dialog starting with INFO/MESSAGE/OPTIONS is unexpected here
if (message.isInfo() || message.isMessage() || message.isOptions()) {
System.out.println("Strange...");
System.out.println(message);
}
} catch (final SipParseException e) {
e.printStackTrace();
}
++this.count;
}
@Override
public void packetReceived(final Stream<SipPacket> stream, final SipPacket packet) {
// no per-packet processing needed; statistics are gathered per stream
}
/**
 * Accumulates post-dial-delay statistics for the stream (values in ms
 * after dividing the reported PDD by 1000); -1 means "no PDD available".
 */
private void checkPDD(final SipStream stream) throws SipParseException {
final long pdd = stream.getPostDialDelay() / 1000;
if (pdd > 40000) {
System.out.println("PDD crazy high: " + stream.getStreamIdentifier());
}
if (pdd != -1) {
this.maxPDD = Math.max(this.maxPDD, pdd);
this.totalPDD += pdd;
++this.pddCount;
}
}
/**
 * Accumulates call-duration statistics for the stream; -1 means the
 * duration could not be determined.
 */
private void checkDuration(final SipStream stream) throws SipParseException {
final long duration = stream.getDuration() / 1000;
if (duration != -1) {
this.maxCallDuration = Math.max(this.maxCallDuration, duration);
this.totalCallDuration += duration;
++this.callDurationCount;
}
}
/**
 * Tallies the stream's call state, PDD and duration, and counts it as a
 * call when it contains an INVITE. Streams already seen are skipped.
 */
public void count(final SipStream stream) throws SipParseException {
if (this.streams.containsKey(stream.getStreamIdentifier())) {
return;
}
final SipStream.CallState state = stream.getCallState();
if (state == CallState.REJECTED) {
++this.rejected;
} else if (state == CallState.COMPLETED) {
++this.completed;
} else if (state == CallState.FAILED) {
++this.failed;
} else if (state == CallState.RINGING) {
++this.ringing;
} else if (state == CallState.TRYING) {
++this.trying;
} else if (state == CallState.IN_CALL) {
++this.inCall;
} else if (state == CallState.CANCELLED) {
++this.cancelled;
}
checkPDD(stream);
checkDuration(stream);
for (final SipPacket msg : stream.getPackets()) {
try {
if (msg.isInvite()) {
++this.calls;
break;
}
} catch (final SipParseException e) {
e.printStackTrace();
}
}
}
@Override
public void endStream(final Stream<SipPacket> stream) {
++this.endCount;
try {
count((SipStream) stream);
this.streams.put(stream.getStreamIdentifier(), (SipStream) stream);
} catch (final SipParseException e) {
// bug fix: was e.toString() which silently discarded the failure
e.printStackTrace();
}
}
@Override
public IPPacket handleFragment(final IPPacket ipPacket) {
// fragments are only counted, never reassembled (returning null drops them)
++this.fragmented;
return null;
}
}
| |
//========================================================================
//Copyright 2007-2010 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package com.dyuproject.protostuff.compiler;
import com.dyuproject.protostuff.parser.Annotation;
import com.dyuproject.protostuff.parser.Message;
import com.dyuproject.protostuff.parser.Proto;
import org.antlr.stringtemplate.NoIndentWriter;
import org.antlr.stringtemplate.StringTemplate;
import org.antlr.stringtemplate.StringTemplateGroup;
import java.io.*;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Kind of preprocessor for proto files.
 * Able to extend one message with fields from other ones.
 *
 * @author Ivan Prisyazhniy, Igor Scherbak
 * @created Mar 9, 2012
 */
public class ProtoToProtoCompiler extends STCodeGenerator
{
public static final String LINE_SEPARATOR = System.getProperty("line.separator");
public ProtoToProtoCompiler()
{
super("proto_extender");
}
/**
 * Rewrites the given proto file: for every message carrying an @Extend
 * annotation or an "option extends" declaration, the fields rendered from
 * the referenced base message are injected into the message body.
 *
 * @param module compiler module providing source root and output writer
 * @param proto  parsed proto file to process
 * @throws IOException on read/write failure
 */
public void compile(ProtoModule module, Proto proto) throws IOException
{
StringTemplateGroup group = getSTG("proto_to_proto");
String src = module.getSource().getAbsolutePath();
String path = proto.getFile().getAbsolutePath().replace(src, "").replace(proto.getFile().getName(), "");
// Read the proto file into a buffer; try/finally so the reader is
// closed even when readLine throws (it previously leaked on failure).
StringBuilder builder = new StringBuilder();
BufferedReader reader = new BufferedReader(new FileReader(proto.getFile()));
try
{
String line = reader.readLine();
while (line != null)
{
builder.append(line);
builder.append(LINE_SEPARATOR);
line = reader.readLine();
}
}
finally
{
reader.close();
}
String data = builder.toString();
for (Message message : proto.getMessages())
{
Annotation annotation = message.getAnnotation("Extend");
if (annotation != null)
{
Object byMessageRef = annotation.getValue("by");
if (byMessageRef == null)
throw new IllegalArgumentException("By parameter of attribute @Extend is not specified");
if (!(byMessageRef instanceof Message))
throw new IllegalArgumentException("By parameter have a non Message reference in your @Extend annotation");
Message base = (Message) byMessageRef;
String result = extendBy(group, message, base);
if (result != null && result.length() > 0)
data = injectAfterAnnotation(message, base, data, result);
}
Object extOpt = message.getExtraOption("extends");
if (extOpt != null)
{
if (!(extOpt instanceof Message))
throw new IllegalArgumentException("Option extends specified not a message reference");
Message base = (Message) extOpt;
String result = extendBy(group, message, base);
if (result != null && result.length() > 0)
data = injectAfterOption(message, base, data, result);
}
}
// Write the output last so the target file is not created/truncated when
// processing fails; try/finally guarantees the writer is closed.
Writer writer = CompilerUtil.newWriter(module, path, proto.getFile().getName());
try
{
writer.write(data);
}
finally
{
writer.close();
}
}
/**
 * Renders the "extend_by" template for the given message pair.
 *
 * @return the rendered field block, possibly empty
 */
public static String extendBy(StringTemplateGroup group, Message extend, Message by) throws IOException
{
StringWriter stringer = new StringWriter(16);
NoIndentWriter out = new NoIndentWriter(stringer);
StringTemplate messageBlock = group.getInstanceOf("extend_by");
messageBlock.setAttribute("message", extend);
messageBlock.setAttribute("by", by);
messageBlock.write(out);
return stringer.toString();
}
/**
 * Injects {@code byContent} right after the opening brace of the annotated
 * message and comments out the corresponding @Extend annotation.
 * Returns {@code extendProto} unchanged when the message declaration
 * cannot be located.
 */
public static String injectAfterAnnotation(Message extend, Message by, String extendProto, String byContent)
{
// Insert after annotated message
Pattern messageRegexp = Pattern.compile("[\\n\\r]?([ \\t]*)(message\\s+" + extend.getName() + "\\s+\\{)", Pattern.MULTILINE);
int messageIndex = -1, openBracketIndex = -1;
Matcher matcher = messageRegexp.matcher(extendProto);
if (matcher.find())
{
// Calculate indentation of option
int is = matcher.start(1), ie = matcher.end(1);
String indentation = generateIndentation(extendProto.substring(is, ie), 4 /* spaces */);
// Make a replace
messageIndex = matcher.start(2);
openBracketIndex = matcher.end(2);
extendProto = extendProto.substring(0, openBracketIndex) +
LINE_SEPARATOR + indentation + "// " + generateTimestamp(extend, by) +
LINE_SEPARATOR + insertIndentation(byContent, indentation) +
LINE_SEPARATOR + extendProto.substring(openBracketIndex);
}
// Bug fix: when the message declaration was not found, messageIndex is
// still -1 and substring(0, -1) below would throw; nothing to do then.
if (messageIndex < 0)
return extendProto;
// Comment out the annotation; only search the text before the message
Pattern annotationRegexp = Pattern.compile("[\\n\\r]?([ \\t]*@Extend\\s*\\([^)]+" + by.getName() + "[^)]*\\))");
String annotationSpace = extendProto.substring(0, messageIndex);
matcher = annotationRegexp.matcher(annotationSpace);
int astart = -1, aend = 0;
// keep the LAST occurrence before the message declaration
while (matcher.find(aend))
{
astart = matcher.start(1);
aend = matcher.end(1);
}
if (astart > -1)
extendProto = extendProto.substring(0, astart) + "// " + extendProto.substring(astart);
return extendProto;
}
/**
 * Injects {@code byContent} in place of the "option extends = Base;" line
 * inside the message body, commenting out the original option. Returns the
 * input unchanged when no matching option is found.
 */
public static String injectAfterOption(Message extend, Message by, String extendProto, String byContent)
{
Pattern messageRegexp = Pattern.compile("([\\n\\r]?[ \\t]*message\\s+" + extend.getName() +
"\\s+\\{[^{}]*[\\n\\r][ \\t]*)(option\\s+extends\\s+=\\s+" + by.getName() +
"\\s*;)", Pattern.MULTILINE);
Matcher matcher = messageRegexp.matcher(extendProto);
if (matcher.find())
{
// Calculate indentation of option
Pattern indentRegexp = Pattern.compile("[\\n\\r]([ \\t]+)option\\s+extends\\s+=\\s+" + by.getName() +
"\\s*;", Pattern.MULTILINE);
Matcher indent = indentRegexp.matcher(extendProto.substring(matcher.start(), matcher.end()));
String indentation = "";
if (indent.find())
{
int is = matcher.start() + indent.start(1), ie = matcher.start() + indent.end(1);
indentation = generateIndentation(extendProto.substring(is, ie), 0);
}
// Make a replace
StringBuffer sb = new StringBuffer();
matcher.appendReplacement(sb, "$1" +
"// " + generateTimestamp(extend, by) + LINE_SEPARATOR +
indentation + "// $2" + LINE_SEPARATOR + insertIndentation(byContent, indentation));
matcher.appendTail(sb);
return sb.toString();
}
return extendProto;
}
/**
 * Prefixes every line of {@code content} with {@code indent}.
 */
public static String insertIndentation(String content, String indent)
{
if (!content.startsWith(LINE_SEPARATOR)) content = indent + content;
return content.replace(LINE_SEPARATOR, LINE_SEPARATOR + indent);
}
/**
 * Returns {@code indentation} (empty when null) extended by {@code length}
 * additional spaces.
 */
public static String generateIndentation(String indentation, int length)
{
if (indentation == null)
indentation = "";
StringBuilder builder = new StringBuilder(indentation);
for (int i = 0; i < length; i++)
builder.append(' ');
return builder.toString();
}
/**
 * Builds the marker comment text recorded next to injected content.
 */
public static String generateTimestamp(Message extend, Message by)
{
return "Extended by " + by.getName() + " at " + new Date();
}
}
| |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.user.core.listener;
import org.wso2.carbon.user.api.Permission;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.UserStoreManager;
import java.util.Map;
/**
* This allows an extension point to implement various additional operations before and after
* actual user operation is done.
*/
public interface UserOperationEventListener {
/**
* Get the execution order identifier for this listener.
*
* @return The execution order identifier integer value.
*/
int getExecutionOrderId();
/**
* Define any additional actions before actual authentication happens
*
* @param userName User name of User
* @param credential Credential/password of the user
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreAuthenticate(String userName, Object credential,
UserStoreManager userStoreManager) throws UserStoreException;
/**
* Define any additional actions after actual authentication happens
*
* @param userName User name of User
* @param authenticated where user is authenticated or not
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostAuthenticate(String userName, boolean authenticated,
UserStoreManager userStoreManager) throws UserStoreException;
/**
* Define any additional actions before user is added.
*
* @param userName User name of User
* @param credential Credential/password of the user
* @param roleList role list of user
* @param claims Properties of the user
* @param profile profile name of user
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreAddUser(String userName, Object credential, String[] roleList,
Map<String, String> claims, String profile,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Define any additional actions after user is added.
*
* @param userName User name of User
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostAddUser(String userName, Object credential, String[] roleList,
Map<String, String> claims, String profile,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Define any additional actions before credential is updated by user
*
* @param userName User name of User
* @param newCredential new credential/password of the user
* @param oldCredential Old credential/password of the user
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreUpdateCredential(String userName, Object newCredential,
Object oldCredential,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Define any additional actions after credential is updated by user
*
* @param userName User name of User
* @param credential
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostUpdateCredential(String userName, Object credential, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Define any additional actions before credential is updated by Admin
*
* @param userName User name of User
* @param newCredential new credential/password of the user
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreUpdateCredentialByAdmin(String userName, Object newCredential,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Define any additional actions after credential is updated by Admin
*
* @param userName User name of User
* @param credential
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostUpdateCredentialByAdmin(String userName, Object credential,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Define any additional actions before user is deleted by Admin
*
* @param userName User name of User
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreDeleteUser(String userName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions after user is deleted by Admin
*
* @param userName User name of User
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostDeleteUser(String userName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions before user attribute is set by Admin
*
* @param userName User name of User
* @param claimURI claim uri
* @param claimValue claim value
* @param profileName user profile name
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreSetUserClaimValue(String userName, String claimURI, String claimValue,
String profileName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions after user attribute is set by Admin
*
* @param userName User name of User
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostSetUserClaimValue(String userName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions before user attributes are set by Admin
*
* @param userName User name of User
* @param claims claim uri and claim value map
* @param profileName user profile name
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreSetUserClaimValues(String userName, Map<String, String> claims,
String profileName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions after user attributes are set by Admin
*
* @param userName User name of User
* @param claims claim uri and claim value map
* @param profileName user profile name
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostSetUserClaimValues(String userName, Map<String, String> claims,
String profileName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions before user attributes are deleted by Admin
*
* @param userName User name of User
* @param claims claim uri and claim value map
* @param profileName user profile name
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreDeleteUserClaimValues(String userName, String[] claims, String profileName,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions after user attributes are deleted by Admin
*
* @param userName User name of User
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostDeleteUserClaimValues(String userName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions before user attribute is deleted by Admin
*
* @param userName User name of User
* @param claimURI claim uri
* @param profileName user profile name
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPreDeleteUserClaimValue(String userName, String claimURI, String profileName,
UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions after user attribute is deleted by Admin
*
* @param userName User name of User
* @param userStoreManager The underlying UserStoreManager
* @return Whether execution of this method of the underlying UserStoreManager must happen.
* @throws UserStoreException Thrown by the underlying UserStoreManager
*/
public boolean doPostDeleteUserClaimValue(String userName, UserStoreManager userStoreManager)
throws UserStoreException;
/**
* Defines any additional actions before adding a role.
*
* @param roleName
* @param userList
* @param permissions
* @param userStoreManager
* @return
* @throws UserStoreException
*/
public boolean doPreAddRole(String roleName, String[] userList, Permission[] permissions,
UserStoreManager userStoreManager) throws UserStoreException;
/**
 * Defines any additional actions after adding a role.
 *
 * @param roleName         Name of the role that was added
 * @param userList         User names of the users assigned to the new role
 * @param permissions      Permissions granted to the new role
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPostAddRole(String roleName, String[] userList, Permission[] permissions,
                             UserStoreManager userStoreManager) throws UserStoreException;
/**
 * Defines any additional actions before deleting a role.
 *
 * @param roleName         Name of the role to be deleted
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPreDeleteRole(String roleName, UserStoreManager userStoreManager)
        throws UserStoreException;
/**
 * Defines any additional actions after deleting a role.
 *
 * @param roleName         Name of the role that was deleted
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPostDeleteRole(String roleName, UserStoreManager userStoreManager)
        throws UserStoreException;
/**
 * Defines any additional actions before updating a role name.
 *
 * @param roleName         Current name of the role
 * @param newRoleName      New name for the role
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPreUpdateRoleName(String roleName, String newRoleName,
                                   UserStoreManager userStoreManager) throws UserStoreException;
/**
 * Defines any additional actions after updating a role name.
 *
 * @param roleName         Previous name of the role
 * @param newRoleName      New name of the role
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPostUpdateRoleName(String roleName, String newRoleName,
                                    UserStoreManager userStoreManager)
        throws UserStoreException;
/**
 * Defines any additional actions before updating the user list of a role.
 *
 * @param roleName         Name of the role being updated
 * @param deletedUsers     User names of users to be removed from the role
 * @param newUsers         User names of users to be added to the role
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPreUpdateUserListOfRole(String roleName, String deletedUsers[],
                                         String[] newUsers, UserStoreManager userStoreManager)
        throws UserStoreException;
/**
 * Defines any additional actions after updating the user list of a role.
 *
 * @param roleName         Name of the role that was updated
 * @param deletedUsers     User names of users removed from the role
 * @param newUsers         User names of users added to the role
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPostUpdateUserListOfRole(String roleName, String deletedUsers[],
                                          String[] newUsers, UserStoreManager userStoreManager)
        throws UserStoreException;
/**
 * Defines any additional actions before updating the role list of a user.
 *
 * @param userName         User name of the user being updated
 * @param deletedRoles     Names of roles to be removed from the user
 * @param newRoles         Names of roles to be added to the user
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPreUpdateRoleListOfUser(String userName, String[] deletedRoles,
                                         String[] newRoles,
                                         UserStoreManager userStoreManager)
        throws UserStoreException;
/**
 * Defines any additional actions after updating the role list of a user.
 *
 * @param userName         User name of the user that was updated
 * @param deletedRoles     Names of roles removed from the user
 * @param newRoles         Names of roles added to the user
 * @param userStoreManager The underlying UserStoreManager
 * @return Whether execution of this method of the underlying UserStoreManager must happen.
 * @throws UserStoreException Thrown by the underlying UserStoreManager
 */
public boolean doPostUpdateRoleListOfUser(String userName, String[] deletedRoles,
                                          String[] newRoles,
                                          UserStoreManager userStoreManager)
        throws UserStoreException;
}
| |
/**
* Copyright (C) 2013 by Raphael Michel under the MIT license:
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software
* is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package de.geeksfactory.opacclient.objects;
import java.util.List;
import java8.util.concurrent.CompletableFuture;
import de.geeksfactory.opacclient.searchfields.SearchQuery;
/**
* Object representing a search result
*
* @author Raphael Michel
*/
/**
 * Object representing a single entry in a library search result list.
 * <p>
 * Holds display HTML, an optional identifier, media type, availability status,
 * cover data and (optionally) a child query that re-runs a search instead of
 * opening a detail page.
 *
 * @author Raphael Michel
 */
public class SearchResult implements CoverHolder {
    private MediaType type;
    private int nr;
    private String id;
    private String innerhtml;
    private Status status;
    private byte[] coverBitmap;
    private String cover;
    // Pending asynchronous cover download, if one has been started.
    private CompletableFuture<Void> coverFuture = null;
    private int page;
    private List<SearchQuery> childQuery;
    private String libraryIdent;

    /**
     * Create a new SearchResult object
     *
     * @param type      media type (like "BOOK")
     * @param nr        Position in result list
     * @param innerhtml HTML to display
     */
    public SearchResult(MediaType type, int nr, String innerhtml) {
        this.type = type;
        this.nr = nr;
        this.innerhtml = innerhtml;
    }

    /**
     * Create an empty object (type {@link MediaType#NONE}, position 0, empty HTML).
     */
    public SearchResult() {
        this.type = MediaType.NONE;
        this.nr = 0;
        this.innerhtml = "";
    }

    /**
     * Get the unique identifier of this object
     *
     * @return ID or <code>null</code> if unknown
     */
    public String getId() {
        return id;
    }

    /**
     * Set the unique identifier of this object
     *
     * @param id unique identifier
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * Get this item's media type.
     *
     * @return Media type or <code>null</code> if unknown
     */
    public MediaType getType() {
        return type;
    }

    /**
     * Set this item's media type.
     *
     * @param type Media type
     */
    public void setType(MediaType type) {
        this.type = type;
    }

    /**
     * Get this item's position in result list
     *
     * @return position
     */
    public int getNr() {
        return nr;
    }

    /**
     * Set this item's position in result list
     *
     * @param nr position
     */
    public void setNr(int nr) {
        this.nr = nr;
    }

    /**
     * Get HTML describing the item to the user in a result list.
     *
     * @return simple HTML code describing the item
     */
    public String getInnerhtml() {
        return innerhtml;
    }

    /**
     * Set HTML describing the item to the user in a result list. Only "simple" HTML like
     * {@code <b>}, {@code <i>}, etc. can be used.
     *
     * @param innerhtml simple HTML code
     */
    public void setInnerhtml(String innerhtml) {
        this.innerhtml = innerhtml;
    }

    /**
     * Get item status (if known)
     *
     * @return Status or <code>null</code> if not set.
     * @since 2.0.7
     */
    public Status getStatus() {
        return status;
    }

    /**
     * Set item status (if known)
     *
     * @param status simplified availability status
     * @since 2.0.7
     */
    public void setStatus(Status status) {
        this.status = status;
    }

    /**
     * Get the page this result was found on
     */
    public int getPage() {
        return page;
    }

    /**
     * Set the page this result was found on
     */
    public void setPage(int page) {
        this.page = page;
    }

    /**
     * Get cover image bitmap
     */
    @Override
    public byte[] getCoverBitmap() {
        return coverBitmap;
    }

    /**
     * Set cover image bitmap
     */
    @Override
    public void setCoverBitmap(byte[] coverBitmap) {
        this.coverBitmap = coverBitmap;
    }

    /**
     * Get cover image URL
     */
    @Override
    public String getCover() {
        return cover;
    }

    /**
     * Set cover image URL
     */
    @Override
    public void setCover(String cover) {
        this.cover = cover;
    }

    /**
     * Get the child query (see {@link #setChildQuery} for details)
     */
    public List<SearchQuery> getChildQuery() {
        return childQuery;
    }

    /**
     * Set the child query. If this is set, clicking the item in the UI will not
     * open a detail page, but start another search.
     */
    public void setChildQuery(
            List<SearchQuery> childQuery) {
        this.childQuery = childQuery;
    }

    /**
     * Sets the libraryIdent of the library this search result belongs to.
     */
    public void setLibraryIdent(String libraryIdent) {
        this.libraryIdent = libraryIdent;
    }

    /**
     * Gets the libraryIdent of the library this search result belongs to if set.
     * Returns null if libraryIdent is not set
     */
    public String getLibraryIdent() {
        return libraryIdent;
    }

    /**
     * Get the pending asynchronous cover download, or <code>null</code> if none was started.
     */
    public CompletableFuture<Void> getCoverFuture() {
        return coverFuture;
    }

    /**
     * Set the pending asynchronous cover download.
     */
    public void setCoverFuture(CompletableFuture<Void> coverFuture) {
        this.coverFuture = coverFuture;
    }

    @Override
    public String toString() {
        // Fixed inconsistent spacing: was "[id= " while all other fields used "key=value".
        return "SearchResult [id=" + id + ", type=" + type + ", nr=" + nr
                + ", innerhtml=" + innerhtml + "]";
    }

    /**
     * Supported media types.
     *
     * @since 2.0.3
     */
    public enum MediaType {
        NONE, BOOK, CD, CD_SOFTWARE, CD_MUSIC, DVD, MOVIE, AUDIOBOOK, PACKAGE,
        GAME_CONSOLE, EBOOK, SCORE_MUSIC, PACKAGE_BOOKS, UNKNOWN, NEWSPAPER,
        BOARDGAME, SCHOOL_VERSION, MAP, BLURAY, AUDIO_CASSETTE, ART, MAGAZINE,
        GAME_CONSOLE_WII, GAME_CONSOLE_NINTENDO, GAME_CONSOLE_PLAYSTATION,
        GAME_CONSOLE_XBOX, LP_RECORD, MP3, URL, EVIDEO, EDOC, EAUDIO, DEVICE,
        MICROFORM, FIGURINE
    }

    /**
     * Media status, simplified like a traffic light, e.g. red for "lent out, no reservation
     * possible", yellow for "reservation needed" or green for "available".
     *
     * @since 2.0.7
     */
    public enum Status {
        UNKNOWN, RED, YELLOW, GREEN
    }
}
| |
/* Copyright (C) 2013-2014 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ezbake.locksmith.service;
import static org.junit.Assert.*;
import java.net.UnknownHostException;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.Random;
import com.mongodb.MongoClient;
import ezbake.configuration.ClasspathConfigurationLoader;
import ezbake.configuration.EzConfiguration;
import ezbake.locksmith.service.EzLocksmithHandler;
import ezbake.security.test.MockEzSecurityToken;
import ezbake.crypto.AESCrypto;
import ezbakehelpers.ezconfigurationhelpers.application.EzBakeApplicationConfigurationHelper;
import org.apache.commons.codec.binary.Base64;
import org.apache.thrift.TException;
import org.junit.*;
import ezbake.thrift.ThriftClientPool;
import ezbake.thrift.ThriftServerPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ezbake.base.thrift.EzSecurityToken;
import ezbake.locksmith.db.MongoDBService;
import ezbake.security.lock.smith.thrift.EzLocksmith;
import ezbake.security.lock.smith.thrift.KeyExistsException;
import ezbake.security.lock.smith.thrift.KeyNotFoundException;
import ezbake.security.lock.smith.thrift.KeyType;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
/**
 * Integration tests for {@link EzLocksmithHandler}, exercising AES/RSA key
 * generation, upload, retrieval and removal through a local Thrift server pool.
 */
public class EzLocksmithHandlerTest extends BaseTest {
    private static Logger log = LoggerFactory.getLogger(EzLocksmithHandlerTest.class);

    private static ThriftServerPool serverPool;
    private static ThriftClientPool clientPool;
    private static final String SERVICE_NAME = "SERVICE_NAME";
    private static MongoDBService mongo;
    private static String rsaTable;
    private static String aesTable;
    // BUG FIX: 'port' was a local variable in init() but is also needed by
    // clean() to reach the Mongo instance, which did not compile. It is now a field.
    private static int port;

    /**
     * Starts the locksmith service on a random port in [22999, 23999] and
     * prepares the Mongo collection names used by the assertions below.
     */
    @BeforeClass
    public static void init() throws Exception {
        BaseTest.init();

        Random portChooser = new Random();
        port = portChooser.nextInt((23999 - 22999) + 1) + 22999;

        serverPool = new ThriftServerPool(ezConfiguration, port);
        serverPool.startCommonService(new EzLocksmithHandler(), SERVICE_NAME, "gibberish1234");

        clientPool = new ThriftClientPool(ezConfiguration);
        mongo = new MongoDBService(ezConfiguration);

        EzBakeApplicationConfigurationHelper appConfig = new EzBakeApplicationConfigurationHelper(ezConfiguration);
        rsaTable = String.format("lock_smith_%s_%s_%s", appConfig.getApplicationName(), appConfig.getServiceName(), "rsa_keys");
        aesTable = String.format("lock_smith_%s_%s_%s", appConfig.getApplicationName(), appConfig.getServiceName(), "aes_keys");
    }

    /**
     * Drops the test database before each test so key-existence tests start clean.
     */
    @Before
    public void clean() throws UnknownHostException {
        MongoClient mongoClient = new MongoClient("localhost", port);
        mongoClient.dropDatabase("db");
    }

    /** Generating the same AES key id twice must raise KeyExistsException. */
    @Test(expected = KeyExistsException.class)
    public void testAESGenerateKeyDuplicatedId() throws TException, KeyExistsException {
        log.info("Test Generate AES");
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.generateKey(ezToken, keyId, KeyType.AES, Collections.<String>emptyList());
            client.generateKey(ezToken, keyId, KeyType.AES, Collections.<String>emptyList());
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** Generating the same RSA key id twice must raise KeyExistsException. */
    @Test(expected = KeyExistsException.class)
    public void testRSAGenerateKeyDuplicatedId() throws TException, KeyExistsException {
        log.info("Test Generate RSA");
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.generateKey(ezToken, keyId, KeyType.RSA, Collections.<String>emptyList());
            client.generateKey(ezToken, keyId, KeyType.RSA, Collections.<String>emptyList());
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** Uploading the same AES key id twice must raise KeyExistsException. */
    @Test(expected = KeyExistsException.class)
    public void testAESUploadKeyDuplicatedId() throws TException, KeyExistsException {
        log.info("Test Upload");
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.uploadKey(ezToken, keyId, "", KeyType.AES);
            client.uploadKey(ezToken, keyId, "", KeyType.AES);
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** Uploading the same RSA key id twice must raise KeyExistsException. */
    @Test(expected = KeyExistsException.class)
    public void testRSAUploadKeyDuplicatedId() throws TException, KeyExistsException {
        log.info("Test RSA Upload Key Duplicated Id");
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.uploadKey(ezToken, keyId, "", KeyType.RSA);
            client.uploadKey(ezToken, keyId, "", KeyType.RSA);
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** retrieveKey on an unknown id should generate a usable AES key on the fly. */
    @Test
    public void testRetrieveGeneratesIfNotExists() throws TException {
        String keyId = "RETRIEVE_KEY";
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            String keyData = client.retrieveKey(ezToken, keyId, KeyType.AES);

            // Make sure it's a valid AES key: round-trip an encryption with it.
            SecretKey keySpec = new SecretKeySpec(Base64.decodeBase64(keyData), "AES");
            String data = "encrypt this";
            AESCrypto crypto = new AESCrypto();
            byte[] enc = crypto.encrypt(keySpec, data.getBytes());
            assertEquals(data, new String(crypto.decrypt(keySpec, enc)));
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** A generated AES key must be retrievable and usable for encryption. */
    @Test
    public void testAESGetGeneratedData() throws TException, KeyExistsException, KeyNotFoundException, NoSuchAlgorithmException {
        log.info("Test AES Get Generated Data");
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.generateKey(ezToken, keyId, KeyType.AES, Collections.<String>emptyList());
            String keyData = client.retrieveKey(ezToken, keyId, KeyType.AES);
            assertTrue(keyData != null);

            // Make sure it's a valid AES key: round-trip an encryption with it.
            SecretKey keySpec = new SecretKeySpec(Base64.decodeBase64(keyData), "AES");
            String data = "encrypt this";
            AESCrypto crypto = new AESCrypto();
            byte[] enc = crypto.encrypt(keySpec, data.getBytes());
            assertEquals(data, new String(crypto.decrypt(keySpec, enc)));
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** A generated RSA key must be retrievable. */
    @Test
    public void testRSAGetGeneratedData() throws TException, KeyExistsException, KeyNotFoundException {
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.generateKey(ezToken, keyId, KeyType.RSA, Collections.<String>emptyList());
            String keyData = client.retrieveKey(ezToken, keyId, KeyType.RSA);
            assertTrue(keyData != null);
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** Removing a generated AES key must empty the backing collection. */
    @Test
    public void testAESRemoveGeneratedData() throws KeyExistsException, TException, KeyNotFoundException {
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.generateKey(ezToken, keyId, KeyType.AES, Collections.<String>emptyList());
            client.removeKey(ezToken, keyId, KeyType.AES);
            assertTrue(mongo.collectionCount(aesTable) == 0);
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** Removing a generated RSA key must empty the backing collection. */
    @Test
    public void testRSARemoveGeneratedData() throws TException, KeyExistsException, KeyNotFoundException {
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        try {
            client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
            client.generateKey(ezToken, keyId, KeyType.RSA, Collections.<String>emptyList());
            client.removeKey(ezToken, keyId, KeyType.RSA);
            assertTrue(mongo.collectionCount(rsaTable) == 0);
        } finally {
            clientPool.returnToPool(client);
        }
    }

    /** retrievePublicKey on an unknown id should generate an RSA pair and return the public half. */
    @Test
    public void testRSAGenerateData() throws TException {
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";
        client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
        String pk = client.retrievePublicKey(ezToken, keyId, ezToken.getValidity().getIssuedTo());
        assertTrue(pk != null);
    }

    /** The private half must still be retrievable by the owner after a non-owner fetched the public key. */
    @Test
    public void testRSANonOwnerGeneratesPrivate() throws TException {
        EzSecurityToken ezToken = getTestEzSecurityToken();
        EzLocksmith.Client client = null;
        String keyId = "keyId";

        // Somebody accesses the public key first
        client = clientPool.getClient(SERVICE_NAME, EzLocksmith.Client.class);
        String pk = client.retrievePublicKey(MockEzSecurityToken.getMockUserToken("Not Owner"), keyId, null);
        assertTrue(pk != null);

        // Now attempt to retrieve the private key as the owner
        String priv = client.retrieveKey(MockEzSecurityToken.getMockUserToken("Key Owner"), keyId, KeyType.RSA);
        assertTrue(priv != null);
    }

    /** @return a mock security token representing the key owner. */
    public EzSecurityToken getTestEzSecurityToken() {
        return MockEzSecurityToken.getMockUserToken("Key Owner");
    }
}
| |
package twilightforest.entity;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.entity.Entity;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.EntityAIAttackOnCollide;
import net.minecraft.entity.ai.EntityAITasks;
import net.minecraft.entity.ai.EntityAIWander;
import net.minecraft.entity.ai.EntityAIWatchClosest;
import net.minecraft.entity.ai.attributes.IAttributeInstance;
import net.minecraft.entity.monster.EntityMob;
import net.minecraft.entity.monster.IMob;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.pathfinding.PathNavigate;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.MathHelper;
import net.minecraft.world.World;
import twilightforest.block.TFBlocks;
// Twilight Forest "Tower Golem" mob. Method/field names are obfuscated SRG names
// (MCP, Minecraft 1.7.x); the comments below map them to their likely deobfuscated
// meanings — TODO confirm against the MCP mapping tables for this MC version.
public class EntityTFTowerGolem extends EntityMob
{
// Ticks remaining in the attack animation; set to 10 on each melee hit.
private int attackTimer;
public EntityTFTowerGolem(World par1World)
{
super(par1World);
// presumably setSize(width, height) — a 1.4 x 2.9 block hitbox
func_70105_a(1.4F, 2.9F);
// presumably getNavigator().setAvoidsWater(true)
func_70661_as().func_75491_a(true);
// AI tasks: melee attack players, wander, watch nearby players, idle look.
field_70714_bg.func_75776_a(1, new EntityAIAttackOnCollide(this, EntityPlayer.class, 1.0D, false));
field_70714_bg.func_75776_a(2, new EntityAIWander(this, 1.0D));
field_70714_bg.func_75776_a(3, new EntityAIWatchClosest(this, EntityPlayer.class, 6.0F));
field_70714_bg.func_75776_a(3, new net.minecraft.entity.ai.EntityAILookIdle(this));
// Target tasks: retaliate when hurt, target the nearest player.
field_70715_bh.func_75776_a(1, new net.minecraft.entity.ai.EntityAIHurtByTarget(this, false));
field_70715_bh.func_75776_a(2, new net.minecraft.entity.ai.EntityAINearestAttackableTarget(this, EntityPlayer.class, 0, true));
}
// presumably isAIEnabled() — this mob uses the new AI task system
protected boolean func_70650_aV()
{
return true;
}
// presumably applyEntityAttributes(): 40 health, 0.25 speed, 9 attack damage
protected void func_110147_ax()
{
super.func_110147_ax();
func_110148_a(SharedMonsterAttributes.field_111267_a).func_111128_a(40.0D);
func_110148_a(SharedMonsterAttributes.field_111263_d).func_111128_a(0.25D);
func_110148_a(SharedMonsterAttributes.field_111264_e).func_111128_a(9.0D);
}
// presumably getTotalArmorValue(): base armor + 2, capped at 20
public int func_70658_aO()
{
int var1 = super.func_70658_aO() + 2;
if (var1 > 20)
{
var1 = 20;
}
return var1;
}
// presumably attackEntityAsMob(): starts the attack animation (world event 4),
// knocks the target upward on a successful hit, and plays the golem throw sound.
public boolean func_70652_k(Entity par1Entity)
{
attackTimer = 10;
field_70170_p.func_72960_a(this, (byte)4);
boolean attackSuccess = super.func_70652_k(par1Entity);
if (attackSuccess)
{
// Launch the target upward, like the vanilla iron golem's attack.
field_70181_x += 0.4000000059604645D;
}
func_85030_a("mob.irongolem.throw", 1.0F, 1.0F);
return attackSuccess;
}
// presumably getLivingSound() — "none" means no idle sound is played
protected String func_70639_aQ()
{
return "none";
}
// presumably getHurtSound()
protected String func_70621_aR()
{
return "mob.irongolem.hit";
}
// presumably getDeathSound()
protected String func_70673_aS()
{
return "mob.irongolem.death";
}
// presumably playStepSound(x, y, z, block)
protected void func_145780_a(int par1, int par2, int par3, Block par4)
{
func_85030_a("mob.irongolem.walk", 1.0F, 1.0F);
}
// presumably collideWithEntity(): 10% chance to target a hostile mob it bumps into
protected void func_82167_n(Entity par1Entity)
{
if (((par1Entity instanceof IMob)) && (func_70681_au().nextInt(10) == 0))
{
func_70624_b((net.minecraft.entity.EntityLivingBase)par1Entity);
}
super.func_82167_n(par1Entity);
}
// presumably onLivingUpdate(): ticks down the attack animation and spawns
// block-crack particles while moving plus occasional redstone-dust particles.
public void func_70636_d()
{
super.func_70636_d();
if (attackTimer > 0)
{
attackTimer -= 1;
}
// While moving horizontally, 1-in-5 chance per tick to kick up particles of
// the block underfoot (mirrors the vanilla iron golem's walking effect).
if ((field_70159_w * field_70159_w + field_70179_y * field_70179_y > 2.500000277905201E-7D) && (field_70146_Z.nextInt(5) == 0))
{
int var1 = MathHelper.func_76128_c(field_70165_t);
int var2 = MathHelper.func_76128_c(field_70163_u - 0.20000000298023224D - field_70129_M);
int var3 = MathHelper.func_76128_c(field_70161_v);
Block block = field_70170_p.func_147439_a(var1, var2, var3);
if (block.func_149688_o() != Material.field_151579_a)
{
field_70170_p.func_72869_a("blockcrack_" + Block.func_149682_b(block) + "_" + field_70170_p.func_72805_g(var1, var2, var3), field_70165_t + (field_70146_Z.nextFloat() - 0.5D) * field_70130_N, field_70121_D.field_72338_b + 0.1D, field_70161_v + (field_70146_Z.nextFloat() - 0.5D) * field_70130_N, 4.0D * (field_70146_Z.nextFloat() - 0.5D), 0.5D, (field_70146_Z.nextFloat() - 0.5D) * 4.0D);
}
}
// 50% chance per tick to emit a redstone-dust particle somewhere on the body.
if (field_70146_Z.nextBoolean())
{
field_70170_p.func_72869_a("reddust", field_70165_t + (field_70146_Z.nextDouble() - 0.5D) * field_70130_N, field_70163_u + field_70146_Z.nextDouble() * field_70131_O - 0.25D, field_70161_v + (field_70146_Z.nextDouble() - 0.5D) * field_70130_N, 0.0D, 0.0D, 0.0D);
}
}
// presumably handleHealthUpdate(byte): client-side reaction to world event 4,
// restarting the attack animation and playing the throw sound.
@SideOnly(Side.CLIENT)
public void func_70103_a(byte par1)
{
if (par1 == 4)
{
attackTimer = 10;
func_85030_a("mob.irongolem.throw", 1.0F, 1.0F);
}
else
{
super.func_70103_a(par1);
}
}
// Remaining attack-animation ticks, used by the renderer (client only).
@SideOnly(Side.CLIENT)
public int getAttackTimer()
{
return attackTimer;
}
// presumably dropFewItems(): drops 0-2 iron ingots and 0-2 tower wood blocks
protected void func_70628_a(boolean par1, int par2)
{
int var4 = field_70146_Z.nextInt(3);
for (int i = 0; i < var4; i++)
{
func_145779_a(Items.field_151042_j, 1);
}
var4 = field_70146_Z.nextInt(3);
for (int i = 0; i < var4; i++)
{
func_145779_a(Item.func_150898_a(TFBlocks.towerWood), 1);
}
}
// presumably getMaxSpawnedInChunk()
public int func_70641_bl()
{
return 16;
}
}
| |
package org.ovirt.engine.ui.uicommonweb.models.storage;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.ovirt.engine.core.common.businessentities.StorageDomainSharedStatus;
import org.ovirt.engine.core.common.businessentities.StorageDomainType;
import org.ovirt.engine.core.common.businessentities.StorageFormatType;
import org.ovirt.engine.core.common.businessentities.StorageType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSStatus;
import org.ovirt.engine.core.common.businessentities.VDSType;
import org.ovirt.engine.core.common.businessentities.VdsSpmStatus;
import org.ovirt.engine.core.common.businessentities.storage_domains;
import org.ovirt.engine.core.common.businessentities.storage_pool;
import org.ovirt.engine.core.common.interfaces.SearchType;
import org.ovirt.engine.core.common.queries.SearchParameters;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Event;
import org.ovirt.engine.core.compat.EventArgs;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.StringHelper;
import org.ovirt.engine.core.compat.Version;
import org.ovirt.engine.ui.frontend.AsyncQuery;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.frontend.INewAsyncCallback;
import org.ovirt.engine.ui.uicommonweb.Linq;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ISupportSystemTreeContext;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemModel;
import org.ovirt.engine.ui.uicommonweb.models.SystemTreeItemType;
import org.ovirt.engine.ui.uicommonweb.validation.NotEmptyValidation;
public class StorageModel extends ListModel implements ISupportSystemTreeContext
{
// Sentinel data-center id used for the "unassigned" (empty) data center entry.
public static final Guid UnassignedDataCenterId = Guid.Empty;
// Strategy object customizing this model's behavior (wired in the constructor).
private StorageModelBehavior behavior;
// Local-FS mount prefix fetched asynchronously in the constructor; used for oVirt nodes.
private String localFSPath;
// Covariant accessor: narrows the base ListModel selected item to IStorageModel.
@Override
public IStorageModel getSelectedItem()
{
return (IStorageModel) super.getSelectedItem();
}
// Typed overload delegating to the base ListModel setter.
public void setSelectedItem(IStorageModel value)
{
super.setSelectedItem(value);
}
/**
 * The storage domain being edited, or null when creating a new one.
 */
private storage_domains privateStorage;
public storage_domains getStorage()
{
return privateStorage;
}
public void setStorage(storage_domains value)
{
privateStorage = value;
}
// Storage models that have already been refreshed; public mutable field kept
// for backward compatibility — callers elsewhere read/modify it directly.
public ArrayList<IStorageModel> UpdatedStorageModels = new ArrayList<IStorageModel>();
// Name the storage domain had before editing started (for rename detection).
private String privateOriginalName;
public String getOriginalName()
{
return privateOriginalName;
}
public void setOriginalName(String value)
{
privateOriginalName = value;
}
// --- Simple dialog-field properties (name, data center, host, format, storage
// --- type list, async-context hash). Setter visibility mirrors who may assign them.
private EntityModel privateName;
public EntityModel getName()
{
return privateName;
}
private void setName(EntityModel value)
{
privateName = value;
}
private ListModel privateDataCenter;
public ListModel getDataCenter()
{
return privateDataCenter;
}
private void setDataCenter(ListModel value)
{
privateDataCenter = value;
}
private ListModel privateHost;
public ListModel getHost()
{
return privateHost;
}
public void setHost(ListModel value)
{
privateHost = value;
}
private ListModel privateFormat;
public ListModel getFormat()
{
return privateFormat;
}
private void setFormat(ListModel value)
{
privateFormat = value;
}
// Available storage-type models the user can pick from in the dialog.
private ListModel privateAvailableStorageItems;
public ListModel getAvailableStorageItems()
{
return privateAvailableStorageItems;
}
private void setAvailableStorageItems(ListModel value)
{
privateAvailableStorageItems = value;
}
// Context key correlating async frontend queries with this model instance.
private String privateHash;
public String getHash()
{
return privateHash;
}
public void setHash(String value)
{
privateHash = value;
}
/**
 * Wires the behavior strategy, subscribes to frontend query start/complete
 * events (for the progress indicator), creates the child models, and kicks off
 * an async fetch of the local-FS path prefix.
 */
public StorageModel(StorageModelBehavior behavior)
{
this.behavior = behavior;
this.behavior.setModel(this);
// Progress tracking: listen for query start/complete of the query types below.
Frontend.getQueryStartedEvent().addListener(this);
Frontend.getQueryCompleteEvent().addListener(this);
Frontend.Subscribe(new VdcQueryType[] { VdcQueryType.Search, VdcQueryType.GetConfigurationValue,
VdcQueryType.GetStoragePoolsByStorageDomainId, VdcQueryType.GetStorageDomainsByStoragePoolId,
VdcQueryType.GetLunsByVgId, VdcQueryType.GetAllVdsByStoragePool,
VdcQueryType.DiscoverSendTargets, VdcQueryType.GetDeviceList, VdcQueryType.GetExistingStorageDomainList });
setName(new EntityModel());
setDataCenter(new ListModel());
getDataCenter().getSelectedItemChangedEvent().addListener(this);
setHost(new ListModel());
getHost().getSelectedItemChangedEvent().addListener(this);
setFormat(new ListModel());
setAvailableStorageItems(new ListModel());
getAvailableStorageItems().getSelectedItemChangedEvent().addListener(this);
// Async: cache the local-FS path prefix for later use in Host_SelectedItemChanged.
AsyncDataProvider.GetLocalFSPath(new AsyncQuery(this,
new INewAsyncCallback() {
@Override
public void OnSuccess(Object target, Object returnValue) {
StorageModel storageModel = (StorageModel) target;
storageModel.localFSPath = (String) returnValue;
}
},
getHash()));
}
// Builds a unique async-context hash (name + timestamp), shares it with the
// behavior strategy, then populates the data-center list.
@Override
public void Initialize()
{
super.Initialize();
setHash(getHashName() + new Date());
behavior.setHash(getHash());
InitDataCenter();
}
/**
 * Central event dispatcher: routes selection changes of the child models,
 * NFS path propagation, and frontend query start/complete notifications
 * (the latter only when the event's context matches this model's hash).
 */
@Override
public void eventRaised(Event ev, Object sender, EventArgs args)
{
    super.eventRaised(ev, sender, args);

    if (ev.equals(SelectedItemChangedEventDefinition))
    {
        // Selection changed on one of our child list models.
        if (sender == getDataCenter())
        {
            DataCenter_SelectedItemChanged();
        }
        else if (sender == getHost())
        {
            Host_SelectedItemChanged();
        }
        else if (sender == getAvailableStorageItems())
        {
            Object picked = getAvailableStorageItems().getSelectedItem();
            if (picked instanceof IStorageModel)
            {
                // Reset first so the change notification always fires.
                setSelectedItem(null);
                setSelectedItem((IStorageModel) picked);
            }
        }
        return;
    }

    if (ev.equals(NfsStorageModel.PathChangedEventDefinition))
    {
        NfsStorageModel_PathChanged(sender, args);
        return;
    }

    // Query lifecycle events are only relevant when raised in our own context.
    boolean isOurContext = StringHelper.stringsEqual(Frontend.getCurrentContext(), getHash());
    if (ev.equals(Frontend.QueryStartedEventDefinition) && isOurContext)
    {
        Frontend_QueryStarted();
        return;
    }
    if (ev.equals(Frontend.QueryCompleteEventDefinition) && isOurContext)
    {
        Frontend_QueryComplete();
    }
}
// Number of in-flight frontend queries for this model's context; drives the
// progress indicator below.
private int queryCounter;
// Starts the progress indicator on the first concurrent query.
public void Frontend_QueryStarted()
{
queryCounter++;
if (getProgress() == null)
{
StartProgress(null);
}
}
// Stops the progress indicator once the last concurrent query finishes.
public void Frontend_QueryComplete()
{
queryCounter--;
if (queryCounter == 0)
{
StopProgress();
}
}
/**
 * Keeps the NFS path field synchronized across all NFS storage models:
 * when one model's path changes, copy it to every other NFS model.
 */
private void NfsStorageModel_PathChanged(Object sender, EventArgs args)
{
    NfsStorageModel source = (NfsStorageModel) sender;
    for (Object candidate : getItems())
    {
        // Skip non-NFS models and the model that originated the change.
        if (!(candidate instanceof NfsStorageModel) || candidate == sender)
        {
            continue;
        }
        NfsStorageModel target = (NfsStorageModel) candidate;
        target.getPath().setEntity(source.getPath().getEntity());
    }
}
// When the user picks a storage type, refresh the format list and the host
// list (in that order) for the new selection.
@Override
protected void OnSelectedItemChanged()
{
super.OnSelectedItemChanged();
if (getSelectedItem() != null)
{
UpdateFormat();
UpdateHost();
}
}
/**
 * After the storage-type models are (re)assigned: give each one a back-reference
 * to this container, and subscribe to path changes of every NFS model so paths
 * can be kept in sync (see NfsStorageModel_PathChanged).
 */
@Override
protected void ItemsChanged()
{
    super.ItemsChanged();

    if (getItems() == null)
    {
        return;
    }

    for (Object entry : getItems())
    {
        ((IStorageModel) entry).setContainer(this);

        if (entry instanceof NfsStorageModel)
        {
            ((NfsStorageModel) entry).getPathChangedEvent().addListener(this);
        }
    }
}
// A different data center was picked: recompute which storage types are available.
private void DataCenter_SelectedItemChanged()
{
UpdateItemsAvailability();
}
/**
 * Reacts to a host selection change: clears stale SAN items (unless editing an
 * existing storage), triggers the selected storage model's update command, and
 * — for oVirt nodes — pre-fills and locks the local-storage path field.
 */
private void Host_SelectedItemChanged()
{
    VDS host = (VDS) getHost().getSelectedItem();
    if (getSelectedItem() != null)
    {
        // When changing host clear items for san storage model.
        if (getSelectedItem() instanceof SanStorageModelBase)
        {
            SanStorageModelBase sanStorageModel = (SanStorageModelBase) getSelectedItem();
            sanStorageModel.setHash(getHash());
            if (getStorage() == null) {
                sanStorageModel.setItems(null);
            }
        }
        if (host != null)
        {
            getSelectedItem().getUpdateCommand().Execute();
            // Was re-fetching and re-casting getHost().getSelectedItem() here;
            // use the already-captured 'host' to avoid the redundant lookup.
            VDSType vdsType = host.getvds_type();
            // oVirt nodes mount local storage under a fixed prefix; lock it in.
            String prefix = vdsType.equals(VDSType.oVirtNode) ? localFSPath : ""; //$NON-NLS-1$
            if (!StringHelper.isNullOrEmpty(prefix))
            {
                for (Object item : getItems())
                {
                    if (item instanceof LocalStorageModel)
                    {
                        LocalStorageModel model = (LocalStorageModel) item;
                        model.getPath().setEntity(prefix);
                        model.getPath().setIsChangable(false);
                    }
                }
            }
        }
    }
}
/**
 * Populates the Data Center list model according to the current context.
 *
 * Three cases:
 *  - A non-root system-tree node is selected: the data center is dictated by the
 *    tree selection, so the list is locked to that single entry (read-only).
 *  - New storage, or editing an unattached storage domain: all data centers are
 *    fetched asynchronously and an "(none)" placeholder entry is added.
 *  - Editing an attached storage domain: only the data center the domain belongs
 *    to is offered.
 */
private void InitDataCenter()
{
if (getSystemTreeSelectedItem() != null && getSystemTreeSelectedItem().getType() != SystemTreeItemType.System)
{
// Tree context: derive the single allowed data center from the selected node.
switch (getSystemTreeSelectedItem().getType())
{
case DataCenter:
case Cluster:
case Storages:
case Storage: {
// Walk up the tree to the enclosing DataCenter node and pin the list to it.
SystemTreeItemModel dataCenterItem =
SystemTreeItemModel.FindAncestor(SystemTreeItemType.DataCenter, getSystemTreeSelectedItem());
storage_pool dc = (storage_pool) dataCenterItem.getEntity();
getDataCenter().setItems(new ArrayList<storage_pool>(Arrays.asList(new storage_pool[] { dc })));
getDataCenter().setSelectedItem(dc);
getDataCenter().setIsChangable(false);
getDataCenter().setInfo("Cannot choose Storage's Data Center in tree context"); //$NON-NLS-1$
}
break;
case Host: {
// A host node additionally pins the Host list model to the selected host.
VDS host = (VDS) getSystemTreeSelectedItem().getEntity();
getHost().setIsChangable(false);
getHost().setInfo("Cannot choose Storage's Host in tree context"); //$NON-NLS-1$
getHost().setSelectedItem(host);
SystemTreeItemModel dataCenterItem =
SystemTreeItemModel.FindAncestor(SystemTreeItemType.DataCenter, getSystemTreeSelectedItem());
storage_pool dc = (storage_pool) dataCenterItem.getEntity();
getDataCenter().setItems(new ArrayList<storage_pool>(Arrays.asList(new storage_pool[] { dc })));
getDataCenter().setSelectedItem(dc);
getDataCenter().setIsChangable(false);
getDataCenter().setInfo("Cannot choose Storage's Data Center in tree context"); //$NON-NLS-1$
}
break;
// NOTE(review): other SystemTreeItemType values fall through with no action —
// presumably intentional (no DC restriction applies); confirm.
}
}
else
{
if (getStorage() == null
|| getStorage().getstorage_domain_shared_status() == StorageDomainSharedStatus.Unattached)
// We are either adding a new storage or editing an unattached storage
// -> fill DataCenters drop-down with all possible Data-Centers, choose the empty one:
// [TODO: In case of an Unattached SD, choose only DCs of the same type]
{
AsyncDataProvider.GetDataCenterList(new AsyncQuery(new Object[] { this, behavior },
new INewAsyncCallback() {
@Override
public void OnSuccess(Object target, Object returnValue) {
// target carries both the model and its behavior strategy (packed above).
Object[] array = (Object[]) target;
StorageModel storageModel = (StorageModel) array[0];
StorageModelBehavior storageModelBehavior = (StorageModelBehavior) array[1];
List<storage_pool> dataCenters =
(ArrayList<storage_pool>) returnValue;
dataCenters = storageModelBehavior.FilterDataCenter(dataCenters);
StorageModel.AddEmptyDataCenterToList(dataCenters);
storage_pool oldSelectedItem =
(storage_pool) storageModel.getDataCenter().getSelectedItem();
storageModel.getDataCenter().setItems(dataCenters);
// Keep the previous selection when possible; otherwise default to the first
// entry (new storage) or the "(none)" placeholder (unattached storage).
if (oldSelectedItem != null)
{
storageModel.getDataCenter().setSelectedItem(Linq.FirstOrDefault(dataCenters,
new Linq.DataCenterPredicate(oldSelectedItem.getId())));
}
else
{
storageModel.getDataCenter()
.setSelectedItem(getStorage() == null ? Linq.FirstOrDefault(dataCenters)
: Linq.FirstOrDefault(dataCenters,
new Linq.DataCenterPredicate(UnassignedDataCenterId)));
}
}
}, getHash()));
}
else // "Edit Storage" mode:
{
AsyncDataProvider.GetDataCentersByStorageDomain(new AsyncQuery(this,
new INewAsyncCallback() {
@Override
public void OnSuccess(Object target, Object returnValue) {
StorageModel storageModel = (StorageModel) target;
List<storage_pool> dataCenters = new ArrayList<storage_pool>();
List<storage_pool> dataCentersWithStorage =
(ArrayList<storage_pool>) returnValue;
// No (valid) attached data center found -> offer only the "(none)" entry.
if (dataCentersWithStorage.size() < 1 || dataCentersWithStorage.get(0) == null)
{
StorageModel.AddEmptyDataCenterToList(dataCenters);
}
else
{
dataCenters =
new ArrayList<storage_pool>(Arrays.asList(new storage_pool[] { dataCentersWithStorage.get(0) }));
}
storageModel.getDataCenter().setItems(dataCenters);
storageModel.getDataCenter().setSelectedItem(Linq.FirstOrDefault(dataCenters));
}
},
getHash()),
getStorage().getId());
}
}
}
/**
 * Appends a placeholder "(none)" data center — identified by
 * {@code UnassignedDataCenterId} — to the given list so the user can leave
 * the storage domain unattached.
 */
private static void AddEmptyDataCenterToList(List<storage_pool> dataCenters)
{
    storage_pool unassignedDataCenter = new storage_pool();
    unassignedDataCenter.setname("(none)"); //$NON-NLS-1$
    unassignedDataCenter.setId(UnassignedDataCenterId);
    dataCenters.add(unassignedDataCenter);
}
/**
 * Refreshes the Host list model according to the selected storage type and
 * data center. Three asynchronous paths:
 *  - Local storage with no concrete DC selected: search for "Up" hosts across
 *    all local-FS data centers.
 *  - Non-local with no concrete DC selected: fetch all hosts.
 *  - Concrete DC selected: fetch only that DC's hosts.
 * Results are delivered to {@link #PostUpdateHost(Iterable)}.
 */
void UpdateHost()
{
    // Nothing to do until the DC list is populated and a storage type is chosen.
    if (getDataCenter().getItems() == null)
    {
        return;
    }
    if (getSelectedItem() == null)
    {
        return;
    }
    storage_pool dataCenter = (storage_pool) getDataCenter().getSelectedItem();
    if (getSelectedItem() instanceof LocalStorageModel
        && (dataCenter == null || dataCenter.getId().equals(UnassignedDataCenterId)))
    {
        ArrayList<storage_pool> dataCenterList =
            (ArrayList<storage_pool>) getDataCenter().getItems();
        // Keep only local-FS data centers; only their hosts can serve local storage.
        ArrayList<storage_pool> localDCList = new ArrayList<storage_pool>();
        for (storage_pool storagePool : dataCenterList)
        {
            if (storagePool.getstorage_pool_type() == StorageType.LOCALFS)
            {
                localDCList.add(storagePool);
            }
        }
        if (localDCList.size() > 0)
        {
            // Build "datacenter=a or datacenter=b ..." with a StringBuilder instead of
            // repeated String concatenation inside the loop.
            StringBuilder dataCenterQueryLine = new StringBuilder();
            for (int i = 0; i < localDCList.size(); i++)
            {
                if (i > 0)
                {
                    dataCenterQueryLine.append(" or "); //$NON-NLS-1$
                }
                dataCenterQueryLine.append("datacenter=").append(localDCList.get(i).getname()); //$NON-NLS-1$
            }
            AsyncQuery _asyncQuery = new AsyncQuery();
            _asyncQuery.setModel(this);
            _asyncQuery.setContext(getHash());
            _asyncQuery.asyncCallback = new INewAsyncCallback() {
                @Override
                public void OnSuccess(Object model, Object ReturnValue)
                {
                    StorageModel storageModel = (StorageModel) model;
                    Iterable<VDS> hosts =
                        (ArrayList<VDS>) ((VdcQueryReturnValue) ReturnValue).getReturnValue();
                    storageModel.PostUpdateHost(hosts);
                }
            };
            Frontend.RunQuery(VdcQueryType.Search,
                new SearchParameters("Hosts: status=Up " + dataCenterQueryLine.toString(), //$NON-NLS-1$
                    SearchType.VDS),
                _asyncQuery);
        }
    }
    else
    {
        if (dataCenter == null || dataCenter.getId().equals(UnassignedDataCenterId))
        {
            // No specific data center: offer every host.
            AsyncDataProvider.GetHostList(new AsyncQuery(this,
                new INewAsyncCallback() {
                    @Override
                    public void OnSuccess(Object target, Object returnValue) {
                        StorageModel storageModel = (StorageModel) target;
                        Iterable<VDS> hosts = (Iterable<VDS>) returnValue;
                        storageModel.PostUpdateHost(hosts);
                    }
                }, getHash()));
        }
        else
        {
            // Restrict the host list to the selected data center.
            AsyncDataProvider.GetHostListByDataCenter(new AsyncQuery(this,
                new INewAsyncCallback() {
                    @Override
                    public void OnSuccess(Object target, Object returnValue) {
                        StorageModel storageModel = (StorageModel) target;
                        Iterable<VDS> hosts = (Iterable<VDS>) returnValue;
                        storageModel.PostUpdateHost(hosts);
                    }
                }, getHash()), dataCenter.getId());
        }
    }
}
/**
 * Receives the asynchronously fetched host list, filters it, and updates the
 * Host list model, preferring the previously selected host when still present.
 *
 * @param hosts Candidate hosts delivered by one of the UpdateHost queries.
 */
public void PostUpdateHost(Iterable<VDS> hosts)
{
// Filter hosts
hosts = Linq.Where(hosts, new Linq.HostStatusPredicate(VDSStatus.Up));
// Allow only hosts with version above 2.2 for export storage.
ArrayList<VDS> list = new ArrayList<VDS>();
if (getSelectedItem() != null && getSelectedItem().getRole() == StorageDomainType.ImportExport)
{
for (VDS host : hosts)
{
if (host.getvds_group_compatibility_version().compareTo(new Version("2.2")) >= 0) //$NON-NLS-1$
{
list.add(host);
}
}
hosts = list;
}
VDS oldSelectedItem = (VDS) getHost().getSelectedItem();
VDS selectedItem = null;
// On Edit - only SPM is available.
// NOTE(review): getSPM may return null when no SPM host is in the list; the
// singleton list would then contain a null entry — confirm downstream handling.
if (getStorage() != null) {
hosts = Collections.singletonList(getSPM(hosts));
}
// Try to select previously selected host.
if (oldSelectedItem != null) {
selectedItem = Linq.FirstOrDefault(hosts, new Linq.HostPredicate(oldSelectedItem.getId()));
}
// Select a default - first host in the list.
if (selectedItem == null) {
selectedItem = Linq.FirstOrDefault(hosts);
}
getHost().setItems(hosts);
getHost().setSelectedItem(selectedItem);
}
/**
 * Returns the first host whose SPM status is {@code SPM}, or {@code null}
 * when no such host exists in the given iterable.
 */
private VDS getSPM(Iterable<VDS> hosts) {
    VDS spm = null;
    for (VDS candidate : hosts) {
        if (candidate.getspm_status() == VdsSpmStatus.SPM) {
            spm = candidate;
            break;
        }
    }
    return spm;
}
/**
 * Recomputes the available storage-format options (V1/V2/V3) and the default
 * selection, based on the selected data center's compatibility version and
 * the selected storage type/role. With a concrete DC the format is forced;
 * with the "(none)" DC the user may choose among the applicable formats.
 */
void UpdateFormat()
{
storage_pool dataCenter = (storage_pool) getDataCenter().getSelectedItem();
// V1 is the fallback default unless a rule below selects a newer format.
StorageFormatType selectItem = StorageFormatType.V1;
ArrayList<StorageFormatType> formats = new ArrayList<StorageFormatType>();
if (dataCenter != null && getSelectedItem() != null)
{
if (!dataCenter.getId().equals(UnassignedDataCenterId))
{
// Concrete DC: format is dictated by the DC, not user-selectable.
getFormat().setIsChangable(false);
// If data center has format defined and the selected-item role is Data, choose it.
if (dataCenter.getStoragePoolFormatType() != null
&& getSelectedItem().getRole() == StorageDomainType.Data)
{
formats.add(dataCenter.getStoragePoolFormatType());
selectItem = dataCenter.getStoragePoolFormatType();
}
// If selected-item role is ISO or Export, add only the 'V1' option.
// (*** Note that currently both ISO and Export can be only NFS, so theoretically they are covered by
// the next "else if..." condition; however, just in case we will support non-NFS ISO/Export in the
// future
// and in order to make the code more explicit, it is here. ***)
else if ((getSelectedItem().getRole() == StorageDomainType.ISO
|| getSelectedItem().getRole() == StorageDomainType.ImportExport))
{
formats.add(StorageFormatType.V1);
}
// File-based storage in a pre-3.1 DC only supports V1.
else if ((getSelectedItem().getType() == StorageType.NFS
|| getSelectedItem().getType() == StorageType.LOCALFS)
&& (dataCenter.getcompatibility_version().compareTo(Version.v3_1) < 0))
{
formats.add(StorageFormatType.V1);
}
// Block storage in a pre-3.0 DC only supports V1.
else if ((getSelectedItem().getType() == StorageType.ISCSI || getSelectedItem().getType() == StorageType.FCP)
&& dataCenter.getcompatibility_version().compareTo(Version.v3_0) < 0)
{
formats.add(StorageFormatType.V1);
}
// Block storage in exactly a 3.0 DC uses V2.
else if ((getSelectedItem().getType() == StorageType.ISCSI || getSelectedItem().getType() == StorageType.FCP)
&& dataCenter.getcompatibility_version().compareTo(Version.v3_0) == 0)
{
formats.add(StorageFormatType.V2);
selectItem = StorageFormatType.V2;
}
// Any storage in a 3.1+ DC uses V3.
else if (dataCenter.getcompatibility_version().compareTo(Version.v3_1) >= 0)
{
formats.add(StorageFormatType.V3);
selectItem = StorageFormatType.V3;
}
}
else // Unassigned DC:
{
if ((getSelectedItem().getRole() == StorageDomainType.ISO
|| getSelectedItem().getRole() == StorageDomainType.ImportExport))
{
// ISO/Export domains should not be available for '(none)' DC
return;
}
// User may pick any of the formats applicable to the storage type/role.
getFormat().setIsChangable(true);
formats.add(StorageFormatType.V1);
if ((getSelectedItem().getType() == StorageType.FCP || getSelectedItem().getType() == StorageType.ISCSI)
&& getSelectedItem().getRole() == StorageDomainType.Data)
{
formats.add(StorageFormatType.V2);
}
formats.add(StorageFormatType.V3);
selectItem = StorageFormatType.V3;
}
}
getFormat().setItems(formats);
getFormat().setSelectedItem(selectItem);
}
/**
 * Delegates item-availability recalculation to the behavior strategy, provided
 * the storage-type items have already been populated.
 */
private void UpdateItemsAvailability()
{
    if (getItems() != null)
    {
        behavior.UpdateItemsAvailability();
    }
}
/**
 * Runs non-empty validation on the Host model and on this model's selected
 * item, then reports overall validity including the name field and the
 * selected storage sub-model.
 *
 * NOTE(review): getName()'s validity is read below but no validation is run
 * on it here — presumably it is validated elsewhere before this call; confirm.
 */
public boolean Validate()
{
getHost().ValidateSelectedItem(new NotEmptyValidation[] { new NotEmptyValidation() });
ValidateSelectedItem(new NotEmptyValidation[] { new NotEmptyValidation() });
return getName().getIsValid() && getHost().getIsValid() && getIsValid() && getSelectedItem().Validate();
}
// Backing field for the system-tree selection that drives the tree-context
// behavior in InitDataCenter().
private SystemTreeItemModel privateSystemTreeSelectedItem;
/** Returns the system-tree node this model was opened from, or null. */
@Override
public SystemTreeItemModel getSystemTreeSelectedItem()
{
return privateSystemTreeSelectedItem;
}
/** Records the system-tree node this model was opened from. */
@Override
public void setSystemTreeSelectedItem(SystemTreeItemModel value)
{
privateSystemTreeSelectedItem = value;
}
/**
 * Reports whether the storage domain being edited is active (or mixed).
 * A null storage (i.e. a new domain being created) counts as active.
 */
public boolean isStorageActive() {
    if (getStorage() == null) {
        return true;
    }
    StorageDomainSharedStatus status = getStorage().getstorage_domain_shared_status();
    return status == StorageDomainSharedStatus.Active
        || status == StorageDomainSharedStatus.Mixed;
}
}
| |
package org.apereo.cas.authentication;
import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.authentication.handler.support.AbstractUsernamePasswordAuthenticationHandler;
import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.authentication.support.LdapPasswordPolicyConfiguration;
import org.ldaptive.LdapAttribute;
import org.ldaptive.LdapEntry;
import org.ldaptive.LdapException;
import org.ldaptive.ReturnAttributes;
import org.ldaptive.auth.AuthenticationRequest;
import org.ldaptive.auth.AuthenticationResponse;
import org.ldaptive.auth.AuthenticationResultCode;
import org.ldaptive.auth.Authenticator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.PostConstruct;
import javax.security.auth.login.AccountNotFoundException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginException;
import java.security.GeneralSecurityException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * LDAP authentication handler that uses the ldaptive {@code Authenticator} component underneath.
 * This handler provides simple attribute resolution machinery by reading attributes from the entry
 * corresponding to the DN of the bound user (in the bound security context) upon successful authentication.
 * Principal resolution is controlled by the following properties:
 * <ul>
 * <li>{@link #setPrincipalIdAttribute(String)}</li>
 * <li>{@link #setPrincipalAttributeMap(java.util.Map)}</li>
 * </ul>
 *
 * @author Marvin S. Addison
 * @since 4.0.0
 */
public class LdapAuthenticationHandler extends AbstractUsernamePasswordAuthenticationHandler {
    private static final Logger LOGGER = LoggerFactory.getLogger(LdapAuthenticationHandler.class);

    /**
     * Mapping of LDAP attribute name to principal attribute name.
     */
    protected Map<String, String> principalAttributeMap = Collections.emptyMap();

    /**
     * List of additional attributes to be fetched but are not principal attributes.
     */
    protected List<String> additionalAttributes = Collections.emptyList();

    /**
     * Performs LDAP authentication given username/password.
     **/
    private final Authenticator authenticator;

    /**
     * Name of attribute to be used for resolved principal.
     */
    private String principalIdAttribute;

    /**
     * Flag indicating whether multiple values are allowed for principalIdAttribute.
     */
    private boolean allowMultiplePrincipalAttributeValues;

    /**
     * Flag to indicate whether CAS should block authentication
     * if a specific/configured principal id attribute is not found.
     */
    private boolean allowMissingPrincipalAttributeValue = true;

    /**
     * Set of LDAP attributes fetch from an entry as part of the authentication process.
     */
    private String[] authenticatedEntryAttributes = ReturnAttributes.NONE.value();

    /**
     * Creates a new authentication handler that delegates to the given authenticator.
     *
     * @param authenticator Ldaptive authenticator component.
     */
    public LdapAuthenticationHandler(final Authenticator authenticator) {
        this.authenticator = authenticator;
    }

    /**
     * Sets the name of the LDAP principal attribute whose value should be used for the
     * principal ID.
     *
     * @param attributeName LDAP attribute name.
     */
    public void setPrincipalIdAttribute(final String attributeName) {
        this.principalIdAttribute = attributeName;
    }

    /**
     * Sets a flag that determines whether multiple values are allowed for the {@link #principalIdAttribute}.
     * This flag only has an effect if {@link #principalIdAttribute} is configured. If multiple values are detected
     * when the flag is false, the first value is used and a warning is logged. If multiple values are detected
     * when the flag is true, an exception is raised.
     *
     * @param allowed True to allow multiple principal ID attribute values, false otherwise.
     */
    public void setAllowMultiplePrincipalAttributeValues(final boolean allowed) {
        this.allowMultiplePrincipalAttributeValues = allowed;
    }

    /**
     * Sets the mapping of additional principal attributes where the key is the LDAP attribute
     * name and the value is the principal attribute name. The key set defines the set of
     * attributes read from the LDAP entry at authentication time. Note that the principal ID attribute
     * should not be listed among these attributes.
     *
     * @param attributeNameMap Map of LDAP attribute name to principal attribute name.
     */
    public void setPrincipalAttributeMap(final Map<String, String> attributeNameMap) {
        this.principalAttributeMap = attributeNameMap;
    }

    /**
     * Sets the mapping of additional principal attributes where the key and value is the LDAP attribute
     * name. Note that the principal ID attribute
     * should not be listed among these attributes.
     *
     * @param attributeList List of LDAP attribute names
     */
    public void setPrincipalAttributeList(final List<String> attributeList) {
        this.principalAttributeMap = attributeList.stream().collect(Collectors.toMap(Object::toString, Function.identity()));
    }

    /**
     * Sets the list of additional attributes to be fetched from the user entry during authentication.
     * These attributes are <em>not</em> bound to the principal.
     * <p>
     * A common use case for these attributes is to support password policy machinery.
     *
     * @param additionalAttributes List of operational attributes to fetch when resolving an entry.
     */
    public void setAdditionalAttributes(final List<String> additionalAttributes) {
        this.additionalAttributes = additionalAttributes;
    }

    @Override
    protected HandlerResult authenticateUsernamePasswordInternal(final UsernamePasswordCredential upc,
                                                                 final String originalPassword) throws GeneralSecurityException, PreventedException {
        final AuthenticationResponse response;
        try {
            LOGGER.debug("Attempting LDAP authentication for [{}]. Authenticator pre-configured attributes are [{}], "
                    + "additional requested attributes for this authentication request are [{}]",
                upc, authenticator.getReturnAttributes(), authenticatedEntryAttributes);
            final AuthenticationRequest request = new AuthenticationRequest(upc.getUsername(),
                new org.ldaptive.Credential(upc.getPassword()), authenticatedEntryAttributes);
            response = authenticator.authenticate(request);
        } catch (final LdapException e) {
            LOGGER.trace(e.getMessage(), e);
            throw new PreventedException("Unexpected LDAP error", e);
        }
        LOGGER.debug("LDAP response: [{}]", response);

        // Apply the account-state/password-policy handler, if one is configured.
        final List<MessageDescriptor> messageList;
        final LdapPasswordPolicyConfiguration ldapPasswordPolicyConfiguration = (LdapPasswordPolicyConfiguration) super.getPasswordPolicyConfiguration();
        if (ldapPasswordPolicyConfiguration != null) {
            LOGGER.debug("Applying password policy to [{}]", response);
            messageList = ldapPasswordPolicyConfiguration.getAccountStateHandler().handle(response, ldapPasswordPolicyConfiguration);
        } else {
            LOGGER.debug("No ldap password policy configuration is defined");
            messageList = Collections.emptyList();
        }

        if (response.getResult()) {
            LOGGER.debug("LDAP response returned a result. Creating the final LDAP principal");
            return createHandlerResult(upc, createPrincipal(upc.getUsername(), response.getLdapEntry()), messageList);
        }

        if (AuthenticationResultCode.DN_RESOLUTION_FAILURE == response.getAuthenticationResultCode()) {
            LOGGER.warn("DN resolution failed. [{}]", response.getMessage());
            throw new AccountNotFoundException(upc.getUsername() + " not found.");
        }
        throw new FailedLoginException("Invalid credentials");
    }

    /**
     * Creates a CAS principal with attributes if the LDAP entry contains principal attributes.
     *
     * @param username  Username that was successfully authenticated which is used for principal ID when
     *                  {@link #setPrincipalIdAttribute(String)} is not specified.
     * @param ldapEntry LDAP entry that may contain principal attributes.
     * @return Principal if the LDAP entry contains at least a principal ID attribute value, null otherwise.
     * @throws LoginException On security policy errors related to principal creation.
     */
    protected Principal createPrincipal(final String username, final LdapEntry ldapEntry) throws LoginException {
        LOGGER.debug("Creating LDAP principal for [{}] based on [{}] and attributes [{}]",
            username, ldapEntry.getDn(), ldapEntry.getAttributeNames());
        final String id = getLdapPrincipalIdentifier(username, ldapEntry);

        final Map<String, Object> attributeMap = new LinkedHashMap<>(this.principalAttributeMap.size());
        for (final Map.Entry<String, String> ldapAttr : this.principalAttributeMap.entrySet()) {
            final LdapAttribute attr = ldapEntry.getAttribute(ldapAttr.getKey());
            if (attr != null) {
                LOGGER.debug("Found principal attribute: [{}]", attr);
                final String principalAttrName = ldapAttr.getValue();
                // Multivalued attributes are bound as a collection, single values directly.
                if (attr.size() > 1) {
                    LOGGER.debug("Principal attribute: [{}] is multivalued", attr);
                    attributeMap.put(principalAttrName, attr.getStringValues());
                } else {
                    attributeMap.put(principalAttrName, attr.getStringValue());
                }
            } else {
                LOGGER.warn("Requested LDAP attribute [{}] could not be found on the resolved LDAP entry for [{}]",
                    ldapAttr.getKey(), ldapEntry.getDn());
            }
        }
        // Record the entry DN under a handler-scoped attribute name.
        final String dnAttribute = getName().concat(".").concat(username);
        LOGGER.debug("Recording principal DN attribute as [{}]", dnAttribute);
        attributeMap.put(dnAttribute, ldapEntry.getDn());
        LOGGER.debug("Created LDAP principal for id [{}] and [{}] attributes", id, attributeMap.size());
        return this.principalFactory.createPrincipal(id, attributeMap);
    }

    /**
     * Gets ldap principal identifier. If the principal id attribute is defined, it's retrieved.
     * If no attribute value is found, a warning is generated and the provided username is used instead.
     * If no attribute is defined, username is used instead.
     *
     * @param username  the username
     * @param ldapEntry the ldap entry
     * @return the ldap principal identifier
     * @throws LoginException in case the principal id cannot be determined.
     */
    protected String getLdapPrincipalIdentifier(final String username, final LdapEntry ldapEntry) throws LoginException {
        if (StringUtils.isNotBlank(this.principalIdAttribute)) {
            final LdapAttribute principalAttr = ldapEntry.getAttribute(this.principalIdAttribute);
            if (principalAttr == null || principalAttr.size() == 0) {
                if (this.allowMissingPrincipalAttributeValue) {
                    LOGGER.warn("The principal id attribute [{}] is not found. CAS cannot construct the final authenticated principal "
                            + "if it's unable to locate the attribute that is designated as the principal id. "
                            + "Attributes available on the LDAP entry are [{}]. Since principal id attribute is not available, CAS will "
                            + "fallback to construct the principal based on the provided user id: [{}]",
                        this.principalIdAttribute, ldapEntry.getAttributes(), username);
                    return username;
                }
                // Fix: supply the placeholder argument, and report the configured attribute
                // name (principalAttr is null/empty in this branch).
                LOGGER.error("The principal id attribute [{}] is not found. CAS is configured to disallow missing principal attributes",
                    this.principalIdAttribute);
                throw new LoginException("Principal id attribute is not found for " + this.principalIdAttribute);
            }
            if (principalAttr.size() > 1) {
                if (!this.allowMultiplePrincipalAttributeValues) {
                    throw new LoginException("Multiple principal values are not allowed: " + principalAttr);
                }
                LOGGER.warn("Found multiple values for principal id attribute: [{}]. Using first value=[{}].", principalAttr, principalAttr.getStringValue());
            }
            LOGGER.debug("Retrieved principal id attribute [{}]", principalAttr.getStringValue());
            return principalAttr.getStringValue();
        }
        LOGGER.debug("Principal id attribute is not defined. Using the default provided user id [{}]", username);
        return username;
    }

    public void setAllowMissingPrincipalAttributeValue(final boolean allowMissingPrincipalAttributeValue) {
        this.allowMissingPrincipalAttributeValue = allowMissingPrincipalAttributeValue;
    }

    /**
     * Initialize the handler, setup the authentication entry attributes.
     */
    @PostConstruct
    public void initialize() {
        /*
         * Use a set to ensure we ignore duplicates.
         */
        final Set<String> attributes = new HashSet<>();
        LOGGER.debug("Initializing LDAP attribute configuration...");
        if (StringUtils.isNotBlank(this.principalIdAttribute)) {
            LOGGER.debug("Configured to retrieve principal id attribute [{}]", this.principalIdAttribute);
            attributes.add(this.principalIdAttribute);
        }
        if (this.principalAttributeMap != null && !this.principalAttributeMap.isEmpty()) {
            final Set<String> attrs = this.principalAttributeMap.keySet();
            attributes.addAll(attrs);
            LOGGER.debug("Configured to retrieve principal attribute collection of [{}]", attrs);
        }
        if (this.additionalAttributes != null && !this.additionalAttributes.isEmpty()) {
            attributes.addAll(this.additionalAttributes);
            LOGGER.debug("Configured to retrieve additional attributes [{}]", this.additionalAttributes);
        }
        // Attributes the authenticator already fetches need not be requested again.
        final List<String> authenticatorAttributes = Arrays.asList(authenticator.getReturnAttributes());
        LOGGER.debug("Filtering authentication entry attributes [{}] based on authenticator attributes [{}]",
            authenticatedEntryAttributes, authenticatorAttributes);
        attributes.removeIf(authenticatorAttributes::contains);
        if (!attributes.isEmpty()) {
            this.authenticatedEntryAttributes = attributes.toArray(new String[attributes.size()]);
        }
        LOGGER.debug("LDAP authentication entry attributes for the authentication request are [{}]",
            (Object[]) this.authenticatedEntryAttributes);
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2019 - 2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datasourcesummary.datamodel;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.apache.commons.lang.StringUtils;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.Type;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
/**
* Utilities for getting information about a data source or all data sources
* from the case database.
*/
final class DataSourceInfoUtilities {
/**
 * Gets a count of tsk_files for a particular datasource.
 *
 * @param skCase            The current SleuthkitCase.
 * @param currentDataSource The datasource.
 * @param additionalWhere   Additional sql where clauses.
 *
 * @return The count of files or null if no data source was provided.
 *
 * @throws TskCoreException
 * @throws SQLException
 */
static Long getCountOfTskFiles(SleuthkitCase skCase, DataSource currentDataSource, String additionalWhere)
        throws TskCoreException, SQLException {
    if (currentDataSource == null) {
        return null;
    }
    // Always scope to the data source; append the caller's filter when present.
    String whereClause = "data_source_obj_id=" + currentDataSource.getId();
    if (!StringUtils.isBlank(additionalWhere)) {
        whereClause += " AND " + additionalWhere;
    }
    return skCase.countFilesWhere(whereClause);
}
/**
 * Gets a count of regular files for a particular datasource.
 *
 * @param skCase            The current SleuthkitCase.
 * @param currentDataSource The datasource.
 * @param additionalWhere   Additional sql where clauses.
 *
 * @return The count of files or null on error.
 *
 * @throws TskCoreException
 * @throws SQLException
 */
static Long getCountOfRegularFiles(SleuthkitCase skCase, DataSource currentDataSource, String additionalWhere)
        throws TskCoreException, SQLException {
    // Restrict to regular files, then tack on the caller's filter.
    StringBuilder whereClause = new StringBuilder("meta_type=")
            .append(TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue());
    if (StringUtils.isNotBlank(additionalWhere)) {
        whereClause.append(" AND ").append(additionalWhere);
    }
    return getCountOfTskFiles(skCase, currentDataSource, whereClause.toString());
}
/**
 * Gets a count of regular non-slack files for a particular datasource.
 *
 * @param skCase            The current SleuthkitCase.
 * @param currentDataSource The datasource.
 * @param additionalWhere   Additional sql where clauses.
 *
 * @return The count of files or null on error.
 *
 * @throws TskCoreException
 * @throws SQLException
 */
static Long getCountOfRegNonSlackFiles(SleuthkitCase skCase, DataSource currentDataSource, String additionalWhere)
        throws TskCoreException, SQLException {
    // Regular files only, excluding slack-space pseudo files.
    String baseClause = "meta_type=" + TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue()
            + " AND type<>" + TSK_DB_FILES_TYPE_ENUM.SLACK.getFileType();
    String whereClause = StringUtils.isNotBlank(additionalWhere)
            ? baseClause + " AND " + additionalWhere
            : baseClause;
    return getCountOfTskFiles(skCase, currentDataSource, whereClause);
}
/**
 * An interface for handling a result set and returning a value.
 */
interface ResultSetHandler<T> {
// Converts the supplied ResultSet into a value of type T. The caller owns the
// query lifecycle (the ResultSet is opened and closed by the caller).
T process(ResultSet resultset) throws SQLException;
}
/**
 * Retrieves a result based on the provided query.
 *
 * @param skCase    The current SleuthkitCase.
 * @param query     The query.
 * @param processor The result set handler.
 *
 * @return The ResultSetHandler value or null if no ResultSet could be
 * obtained.
 *
 * @throws TskCoreException
 * @throws SQLException
 */
static <T> T getBaseQueryResult(SleuthkitCase skCase, String query, ResultSetHandler<T> processor)
throws TskCoreException, SQLException {
// try-with-resources closes the CaseDbQuery (and with it the ResultSet)
// after the processor has consumed it.
try (SleuthkitCase.CaseDbQuery dbQuery = skCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
return processor.process(resultSet);
}
}
/**
 * Creates sql where clause that does a bitwise check to see if flag is
 * present.
 *
 * @param flag The flag for which to check.
 *
 * @return The clause.
 */
static String getMetaFlagsContainsStatement(TSK_FS_META_FLAG_ENUM flag) {
    // e.g. "meta_flags & 4 > 0" — true when the flag bit is set on the row.
    StringBuilder clause = new StringBuilder("meta_flags & ");
    clause.append(flag.getValue()).append(" > 0");
    return clause.toString();
}
/**
 * Enum for specifying the sort order for getAttributes.
 */
enum SortOrder {
// Largest attribute value first.
DESCENDING,
// Smallest attribute value first.
ASCENDING
}
/**
 * Returns a list of all artifacts of the given type that have an attribute
 * of the given type sorted by given attribute type value. Artifacts that do
 * not have the given attribute will not be included in the list.
 *
 * Sorting on attributes of type byte[] and JSON is not currently supported.
 *
 * @param skCase        SleuthkitCase instance.
 * @param artifactType  Type of artifacts to sort.
 * @param dataSource    Data Source that the artifact belongs to.
 * @param attributeType Attribute type to sort by.
 * @param sortOrder     Sort order of the attributes, either ascending or
 *                      descending.
 *
 * @return A list of artifacts of type artifactType sorted by the attribute
 *         of attributeType in the given sortOrder. If no artifacts are
 *         found an empty list will be returned.
 *
 * @throws TskCoreException
 */
static List<BlackboardArtifact> getArtifacts(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder) throws TskCoreException {
// Delegate with maxCount = 0, meaning "return all matching artifacts".
return getArtifacts(skCase, artifactType, dataSource, attributeType, sortOrder, 0);
}
/**
 * Return a list of artifacts that have been sorted by their attribute of
 * attributeType. If an artifact of the given type does not have the given
 * attribute it will not be included in the returned list.
 *
 * Sorting on attributes of type byte[] and JSON is not currently supported.
 *
 * @param skCase        SleuthkitCase instance.
 * @param artifactType  Type of artifacts to sort.
 * @param dataSource    Data Source that the artifact belongs to.
 * @param attributeType Attribute type to sort by.
 * @param sortOrder     Sort order of the attributes, either ascending or
 *                      descending.
 * @param maxCount      Maximum number of results to return. To return all
 *                      values maxCount should be 0.
 *
 * @return A list of artifacts of type artifactType sorted by the attribute
 *         of attributeType in the given sortOrder. If no artifacts are
 *         found an empty list will be returned.
 *
 * @throws TskCoreException
 */
static List<BlackboardArtifact> getArtifacts(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder, int maxCount) throws TskCoreException {
    // Reject negative limits up front; 0 means "no limit".
    if (maxCount >= 0) {
        return createListFromMap(getArtifactMap(skCase, artifactType, dataSource, attributeType, sortOrder), maxCount);
    }
    throw new IllegalArgumentException("Invalid maxCount passed to getArtifacts, value must be equal to or greater than 0");
}
/**
 * Empty private constructor
 */
// Utility class: prevents instantiation.
private DataSourceInfoUtilities() {
}
/**
 * Create a Map of lists of artifacts sorted by the given attribute.
 *
 * @param skCase        SleuthkitCase instance.
 * @param artifactType  Type of artifacts to sort.
 * @param dataSource    Data Source that the artifact belongs to.
 * @param attributeType Attribute type to sort by.
 * @param sortOrder     Sort order of the attributes, either ascending or
 *                      descending.
 *
 * @return A Map of lists of artifacts sorted by the value of attribute
 *         given type. Artifacts that do not have an attribute of the given
 *         type will not be included.
 *
 * @throws TskCoreException
 */
static private SortedMap<BlackboardAttribute, List<BlackboardArtifact>> getArtifactMap(SleuthkitCase skCase, BlackboardArtifact.Type artifactType, DataSource dataSource, BlackboardAttribute.Type attributeType, SortOrder sortOrder) throws TskCoreException {
    // TreeMap keyed by the sort attribute; the comparator imposes the requested order.
    SortedMap<BlackboardAttribute, List<BlackboardArtifact>> artifactsByAttribute = new TreeMap<>(new AttributeComparator(sortOrder));
    for (BlackboardArtifact artifact : skCase.getBlackboard().getArtifacts(artifactType.getTypeID(), dataSource.getId())) {
        BlackboardAttribute sortAttribute = artifact.getAttribute(attributeType);
        // Artifacts lacking the sort attribute are dropped.
        if (sortAttribute != null) {
            List<BlackboardArtifact> bucket = artifactsByAttribute.get(sortAttribute);
            if (bucket == null) {
                bucket = new ArrayList<>();
                artifactsByAttribute.put(sortAttribute, bucket);
            }
            bucket.add(artifact);
        }
    }
    return artifactsByAttribute;
}
/**
 * Creates the list of artifacts from the sorted map and the given count.
 *
 * @param sortedMap Sorted map of artifact lists.
 * @param maxCount  Maximum number of artifacts to return (0 = unlimited).
 *
 * @return List of artifacts, list will be empty if none were found.
 */
static private List<BlackboardArtifact> createListFromMap(SortedMap<BlackboardAttribute, List<BlackboardArtifact>> sortedMap, int maxCount) {
    List<BlackboardArtifact> result = new ArrayList<>();
    for (List<BlackboardArtifact> group : sortedMap.values()) {
        // Whole group fits (or no limit): take it all and move on.
        boolean wholeGroupFits = (maxCount == 0) || (result.size() + group.size() <= maxCount);
        if (wholeGroupFits) {
            result.addAll(group);
        } else {
            // Fill the remaining slots one artifact at a time, then stop.
            for (BlackboardArtifact artifact : group) {
                if (result.size() >= maxCount) {
                    break;
                }
                result.add(artifact);
            }
            break;
        }
    }
    return result;
}
/**
* Compares the value of two BlackboardAttributes that are of the same type.
* This comparator is specialized for data source summary and only supports
* the basic attribute types of string, integer, long, datetime (long), and
* double.
*
* Note: A runtime exception will be thrown from the compare if the
* attributes are not of the same type or if their type is not supported.
*/
private static class AttributeComparator implements Comparator<BlackboardAttribute> {
private final SortOrder direction;
AttributeComparator(SortOrder direction) {
this.direction = direction;
}
@Override
public int compare(BlackboardAttribute attribute1, BlackboardAttribute attribute2) {
if (!attribute1.getAttributeType().equals(attribute2.getAttributeType())) {
throw new IllegalArgumentException("Unable to compare attributes of different types");
}
int result = compare(attribute1.getAttributeType(), attribute1, attribute2);
if (direction == SortOrder.DESCENDING) {
result *= -1;
}
return result;
}
/**
* Compared two attributes of the given type. Note, that not all
* attribute types are supported. A runtime exception will be thrown if
* an unsupported attribute is supplied.
*
* @param type Attribute type.
* @param attribute1 First attribute to compare.
* @param attribute2 Second attribute to compare.
*
* @return Compare result.
*/
private int compare(BlackboardAttribute.Type type, BlackboardAttribute attribute1, BlackboardAttribute attribute2) {
switch (type.getValueType()) {
case STRING:
return attribute1.getValueString().compareToIgnoreCase(attribute2.getValueString());
case INTEGER:
return Integer.compare(attribute1.getValueInt(), attribute2.getValueInt());
case LONG:
case DATETIME:
return Long.compare(attribute1.getValueLong(), attribute2.getValueLong());
case DOUBLE:
return Double.compare(attribute1.getValueDouble(), attribute2.getValueDouble());
case BYTE:
case JSON:
default:
throw new IllegalArgumentException("Unable to compare attributes of type " + attribute1.getAttributeType().getTypeName());
}
}
}
/**
* Retrieves attribute from artifact if exists. Returns null if attribute is
* null or underlying call throws exception.
*
* @param artifact The artifact.
* @param attributeType The attribute type to retrieve from the artifact.
*
* @return The attribute or null if could not be received.
*/
private static BlackboardAttribute getAttributeOrNull(BlackboardArtifact artifact, Type attributeType) {
try {
return artifact.getAttribute(attributeType);
} catch (TskCoreException ex) {
return null;
}
}
/**
* Retrieves the string value of a certain attribute type from an artifact.
*
* @param artifact The artifact.
* @param attributeType The attribute type.
*
* @return The 'getValueString()' value or null if the attribute or String
* could not be retrieved.
*/
static String getStringOrNull(BlackboardArtifact artifact, Type attributeType) {
BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType);
return (attr == null) ? null : attr.getValueString();
}
/**
* Retrieves the long value of a certain attribute type from an artifact.
*
* @param artifact The artifact.
* @param attributeType The attribute type.
*
* @return The 'getValueLong()' value or null if the attribute could not be
* retrieved.
*/
static Long getLongOrNull(BlackboardArtifact artifact, Type attributeType) {
BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType);
return (attr == null) ? null : attr.getValueLong();
}
/**
* Retrieves the int value of a certain attribute type from an artifact.
*
* @param artifact The artifact.
* @param attributeType The attribute type.
*
* @return The 'getValueInt()' value or null if the attribute could not be
* retrieved.
*/
static Integer getIntOrNull(BlackboardArtifact artifact, Type attributeType) {
BlackboardAttribute attr = getAttributeOrNull(artifact, attributeType);
return (attr == null) ? null : attr.getValueInt();
}
/**
* Retrieves the long value of a certain attribute type from an artifact and
* converts to date (seconds since epoch).
*
* @param artifact The artifact.
* @param attributeType The attribute type.
*
* @return The date determined from the 'getValueLong()' as seconds from
* epoch or null if the attribute could not be retrieved or is 0.
*/
static Date getDateOrNull(BlackboardArtifact artifact, Type attributeType) {
Long longVal = getLongOrNull(artifact, attributeType);
return (longVal == null || longVal == 0) ? null : new Date(longVal * 1000);
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.undo.UndoUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.psi.*;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.search.PsiElementProcessorAdapter;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiFormatUtilBase;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
 * Quick fix / intention that adds or removes a single Java modifier (e.g.
 * {@code public}, {@code static}, {@code final}) on the element owning the
 * given modifier list. When raising a method's visibility it also offers to
 * raise the visibility of overriding methods in the project.
 */
public class ModifierFix extends LocalQuickFixAndIntentionActionOnPsiElement {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.quickfix.ModifierFix");

  // The modifier keyword toggled by this fix.
  @PsiModifier.ModifierConstant private final String myModifier;
  // true -> the modifier should be added; false -> it should be removed.
  private final boolean myShouldHave;
  private final boolean myShowContainingClass;
  // Presentable action text, computed eagerly at construction time so it stays
  // valid even if the PSI changes later.
  private final String myName;
  // Smart pointer so the target variable survives PSI reparses; null when the
  // fix was created for a bare modifier list or a non-variable owner.
  private final SmartPsiElementPointer<PsiVariable> myVariable;

  public ModifierFix(PsiModifierList modifierList, @PsiModifier.ModifierConstant @NotNull String modifier, boolean shouldHave, boolean showContainingClass) {
    super(modifierList);
    myModifier = modifier;
    myShouldHave = shouldHave;
    myShowContainingClass = showContainingClass;
    myName = format(null, modifierList);
    myVariable = null;
  }

  public ModifierFix(@NotNull PsiModifierListOwner owner, @PsiModifier.ModifierConstant @NotNull String modifier, boolean shouldHave, boolean showContainingClass) {
    // NOTE(review): owner.getModifierList() may be null for some owners
    // (PsiModifierListOwner permits it) — confirm callers only pass owners
    // that actually have a modifier list.
    super(owner.getModifierList());
    myModifier = modifier;
    myShouldHave = shouldHave;
    myShowContainingClass = showContainingClass;
    PsiVariable variable = owner instanceof PsiVariable ? (PsiVariable)owner : null;
    myName = format(variable, owner.getModifierList());
    myVariable = variable == null ? null : SmartPointerManager.getInstance(owner.getProject()).createSmartPsiElementPointer(variable);
  }

  @NotNull
  @Override
  public String getText() {
    return myName;
  }

  /**
   * Builds the presentable action text, e.g. "Make 'foo()' public".
   * Called from the constructors, after myModifier/myShouldHave/
   * myShowContainingClass have been assigned.
   *
   * @param variable     the variable being fixed, or null to derive the target
   *                     from the modifier list's parent
   * @param modifierList the modifier list the fix was created for (may be null)
   */
  private String format(PsiVariable variable, PsiModifierList modifierList) {
    String name = null;
    // Prefer the explicit variable; otherwise fall back to whatever element
    // owns the modifier list.
    PsiElement parent = variable == null ? modifierList == null ? null : modifierList.getParent() : variable;
    if (parent instanceof PsiClass) {
      name = ((PsiClass)parent).getName();
    }
    else {
      int options = PsiFormatUtilBase.SHOW_NAME | (myShowContainingClass ? PsiFormatUtilBase.SHOW_CONTAINING_CLASS : 0);
      if (parent instanceof PsiMethod) {
        name = PsiFormatUtil.formatMethod((PsiMethod)parent, PsiSubstitutor.EMPTY, options, 0);
      }
      else if (parent instanceof PsiVariable) {
        name = PsiFormatUtil.formatVariable((PsiVariable)parent, options, PsiSubstitutor.EMPTY);
      }
      else if (parent instanceof PsiClassInitializer) {
        // Class initializers have no name of their own; describe them through
        // their containing class (with a special label for anonymous classes).
        PsiClass containingClass = ((PsiClassInitializer)parent).getContainingClass();
        String className = containingClass instanceof PsiAnonymousClass
                           ? QuickFixBundle.message("anonymous.class.presentation",
                                                    ((PsiAnonymousClass)containingClass).getBaseClassType().getPresentableText())
                           : containingClass != null ? containingClass.getName() : "unknown";
        name = QuickFixBundle.message("class.initializer.presentation", className);
      }
    }
    String modifierText = VisibilityUtil.toPresentableText(myModifier);
    return QuickFixBundle.message(myShouldHave ? "add.modifier.fix" : "remove.modifier.fix", name, modifierText);
  }

  @Override
  @NotNull
  public String getFamilyName() {
    return QuickFixBundle.message("fix.modifiers.family");
  }

  @Override
  public boolean isAvailable(@NotNull Project project,
                             @NotNull PsiFile file,
                             @NotNull PsiElement startElement,
                             @NotNull PsiElement endElement) {
    final PsiModifierList myModifierList = (PsiModifierList)startElement;
    PsiVariable variable = myVariable == null ? null : myVariable.getElement();
    // Only offer the fix for valid, project-owned elements whose explicit
    // modifier state actually differs from the desired one.
    return myModifierList.isValid() &&
           myModifierList.getManager().isInProject(myModifierList) &&
           myModifierList.hasExplicitModifier(myModifier) != myShouldHave &&
           (variable == null || variable.isValid());
  }

  // Applies the add/remove of the modifier; PSI errors are logged, not rethrown.
  private void changeModifierList (PsiModifierList modifierList) {
    try {
      modifierList.setModifierProperty(myModifier, myShouldHave);
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }

  @Override
  public void invoke(@NotNull Project project,
                     @NotNull PsiFile file,
                     @Nullable("is null when called from inspection") Editor editor,
                     @NotNull PsiElement startElement,
                     @NotNull PsiElement endElement) {
    final PsiModifierList myModifierList = (PsiModifierList)startElement;
    final PsiVariable variable = myVariable == null ? null : myVariable.getElement();
    // Bail out early if the files involved cannot be made writable.
    if (!FileModificationService.getInstance().preparePsiElementForWrite(myModifierList)) return;
    if (variable != null && !FileModificationService.getInstance().preparePsiElementForWrite(variable)) return;
    final List<PsiModifierList> modifierLists = new ArrayList<>();
    final PsiFile containingFile = myModifierList.getContainingFile();
    final PsiModifierList modifierList;
    if (variable != null && variable.isValid()) {
      // Normalizing may split a multi-variable declaration, so it must happen
      // before we grab the variable's (possibly re-created) modifier list.
      ApplicationManager.getApplication().runWriteAction(() -> {
        try {
          variable.normalizeDeclaration();
        }
        catch (IncorrectOperationException e) {
          LOG.error(e);
        }
      });
      modifierList = variable.getModifierList();
      assert modifierList != null;
    }
    else {
      modifierList = myModifierList;
    }
    PsiElement owner = modifierList.getParent();
    if (owner instanceof PsiMethod) {
      // Simulate the change on a copy to compute the resulting access level,
      // then collect overriding methods whose visibility would become narrower
      // than the base method's.
      PsiModifierList copy = (PsiModifierList)myModifierList.copy();
      changeModifierList(copy);
      final int accessLevel = PsiUtil.getAccessLevel(copy);
      OverridingMethodsSearch.search((PsiMethod)owner, owner.getResolveScope(), true).forEach(
        new PsiElementProcessorAdapter<>(new PsiElementProcessor<PsiMethod>() {
          @Override
          public boolean execute(@NotNull PsiMethod inheritor) {
            PsiModifierList list = inheritor.getModifierList();
            if (inheritor.getManager().isInProject(inheritor) && PsiUtil.getAccessLevel(list) < accessLevel) {
              modifierLists.add(list);
            }
            return true;
          }
        }));
    }
    if (!modifierLists.isEmpty()) {
      // Ask the user before touching inheritors in other files.
      if (Messages.showYesNoDialog(project,
                                   QuickFixBundle.message("change.inheritors.visibility.warning.text"),
                                   QuickFixBundle.message("change.inheritors.visibility.warning.title"),
                                   Messages.getQuestionIcon()) == Messages.YES) {
        ApplicationManager.getApplication().runWriteAction(() -> {
          if (!FileModificationService.getInstance().preparePsiElementsForWrite(modifierLists)) {
            return;
          }
          for (final PsiModifierList modifierList1 : modifierLists) {
            changeModifierList(modifierList1);
          }
        });
      }
    }
    // Finally apply the change to the primary element and register it for undo.
    ApplicationManager.getApplication().runWriteAction(() -> {
      changeModifierList(modifierList);
      UndoUtil.markPsiFileForUndo(containingFile);
    });
  }

  @Override
  public boolean startInWriteAction() {
    // Write actions are taken manually above (a modal dialog may be shown in
    // between), so the platform must not wrap invoke() in one.
    return false;
  }
}
| |
package carbon.widget;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.drawable.ColorDrawable;
import android.os.Handler;
import android.util.TypedValue;
import android.view.ContextThemeWrapper;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.DecelerateInterpolator;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.animation.ObjectAnimator;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.view.ViewHelper;
import java.util.ArrayList;
import java.util.List;
import carbon.R;
import carbon.animation.AnimUtils;
/**
* Created by Marcin on 2015-01-07.
*/
/**
 * Material-style snackbar: a message bar with an optional action button that
 * flies in at the bottom of a container, can auto-hide after a duration,
 * supports swipe-to-dismiss and tap-outside-to-dismiss, and queues itself so
 * only one snackbar is visible at a time. Views registered via
 * {@link #addPushedView(View)} are translated up while the bar is showing.
 * <p>
 * Created by Marcin on 2015-01-07.
 */
public class Snackbar extends FrameLayout implements GestureDetector.OnGestureListener {
    // Pass as duration to keep the snackbar on screen until explicitly dismissed.
    public static int INFINITE = -1;
    private Context context;
    // Current horizontal swipe offset of the content, in pixels.
    private float swipe;
    // Non-null while a settle/dismiss animation runs; doubles as a guard so
    // gestures are ignored mid-animation.
    private ValueAnimator animator;
    // Views translated up while the snackbar is visible (e.g. a FAB).
    private List<View> pushedViews = new ArrayList<>();
    GestureDetector gestureDetector = new GestureDetector(this);
    // Scratch rect reused for hit testing; avoids per-event allocation.
    private Rect rect = new Rect();
    private boolean tapOutsideToDismiss;
    private ViewGroup container;

    @Override
    public boolean onDown(MotionEvent e) {
        return false;
    }

    @Override
    public void onShowPress(MotionEvent e) {
    }

    @Override
    public boolean onSingleTapUp(MotionEvent e) {
        return false;
    }

    /**
     * Tracks a horizontal drag on the content. Once the drag passes a quarter
     * of the content width the snackbar animates the rest of the way off
     * screen and is dismissed; pushed views animate back into place.
     */
    @Override
    public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
        if (swipeToDismiss && animator == null && getParent() != null) {
            swipe = e2.getX() - e1.getX();
            ViewHelper.setTranslationX(content, swipe);
            // Alpha fades twice as fast as the drag, reaching 0 at half-width.
            ViewHelper.setAlpha(content, Math.max(0, 1 - 2 * Math.abs(swipe) / content.getMeasuredWidth()));
            if (Math.abs(swipe) > content.getMeasuredWidth() / 4) {
                // Past the threshold: cancel the auto-hide and fling the
                // content off in the direction of the swipe.
                handler.removeCallbacks(hideRunnable);
                animator = ObjectAnimator.ofFloat(swipe, content.getMeasuredWidth() / 2.0f * Math.signum(swipe));
                animator.setDuration(200);
                animator.addUpdateListener(valueAnimator -> {
                    float s = (Float) valueAnimator.getAnimatedValue();
                    ViewHelper.setTranslationX(content, s);
                    float alpha = Math.max(0, 1 - 2 * Math.abs((Float) valueAnimator.getAnimatedValue()) / content.getMeasuredWidth());
                    ViewHelper.setAlpha(content, alpha);
                });
                animator.start();
                animator.addListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        hideInternal();
                        animator = null;
                    }
                });
                // Animate pushed views back down to their resting position.
                for (final View pushedView : pushedViews) {
                    ValueAnimator animator = ValueAnimator.ofFloat(-1, 0);
                    animator.setDuration(200);
                    animator.setInterpolator(new DecelerateInterpolator());
                    animator.addUpdateListener(valueAnimator -> {
                        MarginLayoutParams lp = (MarginLayoutParams) content.getLayoutParams();
                        ViewHelper.setTranslationY(pushedView, (content.getHeight() + lp.bottomMargin) * (Float) valueAnimator.getAnimatedValue());
                        if (pushedView.getParent() != null)
                            ((View) pushedView.getParent()).postInvalidate();
                    });
                    animator.start();
                }
            }
            return true;
        }
        return false;
    }

    @Override
    public void onLongPress(MotionEvent e) {
    }

    @Override
    public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
        return false;
    }

    /** @deprecated use {@link OnDismissListener} instead. */
    @Deprecated
    public interface OnDismissedListener {
        void onDismissed();
    }

    public interface OnDismissListener {
        void onDismiss();
    }

    private TextView message;
    private Button button;
    private Style style;
    private long duration;
    // Posted on 'handler' to auto-hide after 'duration' ms.
    private Runnable hideRunnable = this::dismiss;
    private Handler handler;
    // The actual visible bar; this Snackbar view itself is a full-size overlay.
    private LinearLayout content;
    OnDismissListener onDismissListener;
    boolean swipeToDismiss = true;
    // FIFO queue of pending snackbars; index 0 is the one currently shown.
    static List<Snackbar> next = new ArrayList<>();

    public enum Style {
        Floating, Docked, Auto
    }

    public Snackbar(Context context) {
        super(context);
        this.context = context;
        initSnackbar(R.attr.carbon_snackbarTheme);
    }

    public Snackbar(Context context, String message, String action, int duration) {
        super(context);
        this.context = context;
        initSnackbar(R.attr.carbon_snackbarTheme);
        setMessage(message);
        setAction(action);
        setDuration(duration);
        setTapOutsideToDismissEnabled(false);
    }

    // Inflates the themed snackbar layout and wires up the child views.
    private void initSnackbar(int defStyleAttr) {
        setBackgroundDrawable(new ColorDrawable(context.getResources().getColor(android.R.color.transparent)));
        TypedValue outValue = new TypedValue();
        getContext().getTheme().resolveAttribute(defStyleAttr, outValue, true);
        int theme = outValue.resourceId;
        Context themedContext = new ContextThemeWrapper(getContext(), theme);
        View.inflate(themedContext, R.layout.carbon_snackbar, this);
        content = (LinearLayout) findViewById(R.id.carbon_snackbarContent);
        content.setElevation(getResources().getDimension(R.dimen.carbon_elevationSnackbar));
        setStyle(Style.Auto);
        message = (TextView) content.findViewById(R.id.carbon_messageText);
        button = (Button) content.findViewById(R.id.carbon_actionButton);
        handler = new Handler();
    }

    public void setContainer(ViewGroup container) {
        this.container = container;
    }

    /**
     * Queues this snackbar for display in the given container. If it is at the
     * head of the queue it is shown immediately: added to the container,
     * animated in, pushed views animated up, and the auto-hide scheduled
     * (unless duration is INFINITE).
     */
    public void show(final ViewGroup container) {
        // Class-wide lock guards the shared 'next' queue.
        synchronized (Snackbar.class) {
            this.container = container;
            if (!next.contains(this))
                next.add(this);
            if (next.indexOf(this) == 0) {
                // Bottom padding compensates for system UI (e.g. soft keys)
                // below the visible window frame.
                Rect windowFrame = new Rect();
                container.getWindowVisibleDisplayFrame(windowFrame);
                Rect drawingRect = new Rect();
                container.getDrawingRect(drawingRect);
                setPadding(0, 0, 0, drawingRect.bottom - windowFrame.bottom);
                container.addView(this);
                ViewHelper.setAlpha(content, 0);
                AnimUtils.flyIn(content, null);
                // Push registered views up by the content height (+ margin).
                for (final View pushedView : pushedViews) {
                    ValueAnimator animator = ValueAnimator.ofFloat(0, -1);
                    animator.setDuration(200);
                    animator.setInterpolator(new DecelerateInterpolator());
                    animator.addUpdateListener(valueAnimator -> {
                        MarginLayoutParams lp = (MarginLayoutParams) content.getLayoutParams();
                        ViewHelper.setTranslationY(pushedView, (content.getHeight() + lp.bottomMargin) * (Float) valueAnimator.getAnimatedValue());
                        if (pushedView.getParent() != null)
                            ((View) pushedView.getParent()).postInvalidate();
                    });
                    animator.start();
                }
                if (duration != INFINITE)
                    handler.postDelayed(hideRunnable, duration);
            }
        }
    }

    // Shows in the previously set container (see setContainer / show(ViewGroup)).
    public void show() {
        show(container);
    }

    /** Drops all queued snackbars without animating them out. */
    public static void clearQueue() {
        next.clear();
    }

    /**
     * Dismisses the snackbar: notifies the listener, flies the content out
     * (removal from the container happens in hideInternal when the animation
     * ends), and animates pushed views back down.
     */
    public void dismiss() {
        synchronized (Snackbar.class) {
            handler.removeCallbacks(hideRunnable);
            if (onDismissListener != null)
                onDismissListener.onDismiss();
            AnimUtils.flyOut(content, new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animator) {
                    hideInternal();
                }
            });
            for (final View pushedView : pushedViews) {
                ValueAnimator animator = ValueAnimator.ofFloat(-1, 0);
                animator.setDuration(200);
                animator.setInterpolator(new DecelerateInterpolator());
                animator.addUpdateListener(valueAnimator -> {
                    MarginLayoutParams lp = (MarginLayoutParams) content.getLayoutParams();
                    ViewHelper.setTranslationY(pushedView, (content.getHeight() + lp.bottomMargin) * (Float) valueAnimator.getAnimatedValue());
                    if (pushedView.getParent() != null)
                        ((View) pushedView.getParent()).postInvalidate();
                });
                animator.start();
            }
        }
    }

    // Removes this snackbar from its parent and the queue, then shows the next
    // queued snackbar, if any.
    private void hideInternal() {
        synchronized (Snackbar.class) {
            if (getParent() == null)
                return;
            ((ViewGroup) getParent()).removeView(this);
            if (next.contains(this))
                next.remove(this);
            if (next.size() != 0)
                next.get(0).show();
        }
    }

    // Click listener for the action button (not for the whole snackbar).
    public void setOnClickListener(View.OnClickListener l) {
        button.setOnClickListener(l);
    }

    public void addPushedView(View view) {
        pushedViews.add(view);
    }

    public void removePushedView(View view) {
        pushedViews.remove(view);
    }

    /**
     * Sets the action button text; passing null hides the button and restores
     * symmetric content padding.
     */
    public void setAction(String action) {
        if (action != null) {
            button.setText(action);
            button.setVisibility(View.VISIBLE);
            content.setPadding(content.getPaddingLeft(), 0, (int) context.getResources().getDimension(R.dimen.carbon_paddingHalf), 0);
        } else {
            content.setPadding(content.getPaddingLeft(), 0, content.getPaddingLeft(), 0);
            button.setVisibility(View.GONE);
        }
    }

    public String getAction() {
        return button.getText().toString();
    }

    public void setMessage(String message) {
        this.message.setText(message);
    }

    public String getMessage() {
        return message.getText().toString();
    }

    public Style getStyle() {
        return style;
    }

    /**
     * Applies Floating (margined, rounded, wrap-content) or Docked (full-width,
     * square) layout. Auto resolves to Docked on phones, Floating otherwise.
     * <p>
     * NOTE(review): the layout branch below tests the 'style' PARAMETER, not
     * the resolved 'this.style' — when called with Auto, the Floating branch is
     * never taken even if Auto resolved to Floating (tablets). Looks like a
     * bug; confirm whether the comparison should use this.style.
     */
    public void setStyle(Style style) {
        this.style = style;
        if (style == Style.Auto)
            this.style = getResources().getBoolean(R.bool.carbon_isPhone) ? Style.Docked : Style.Floating;
        FrameLayout.LayoutParams layoutParams = generateDefaultLayoutParams();
        if (style == Style.Floating) {
            layoutParams.width = ViewGroup.LayoutParams.WRAP_CONTENT;
            layoutParams.height = ViewGroup.LayoutParams.WRAP_CONTENT;
            int margin = (int) context.getResources().getDimension(R.dimen.carbon_margin);
            layoutParams.setMargins(margin, 0, margin, margin);
            layoutParams.gravity = Gravity.START | Gravity.BOTTOM;
            content.setCornerRadius((int) context.getResources().getDimension(R.dimen.carbon_cornerRadiusButton));
        } else {
            layoutParams.width = ViewGroup.LayoutParams.MATCH_PARENT;
            layoutParams.height = ViewGroup.LayoutParams.WRAP_CONTENT;
            layoutParams.setMargins(0, 0, 0, 0);
            layoutParams.gravity = Gravity.BOTTOM;
            content.setCornerRadius(0);
        }
        content.setLayoutParams(layoutParams);
    }

    public long getDuration() {
        return duration;
    }

    public void setDuration(long duration) {
        this.duration = duration;
    }

    public boolean isSwipeToDismissEnabled() {
        return swipeToDismiss;
    }

    /**
     * Enables/disables swipe-to-dismiss and (re)installs the touch listeners:
     * touches inside the content go to the gesture detector, touches outside
     * may dismiss, and releasing a partial swipe animates the content back to
     * center before re-scheduling the auto-hide.
     */
    public void setSwipeToDismissEnabled(boolean swipeToDismiss) {
        this.swipeToDismiss = swipeToDismiss;
        setOnDispatchTouchListener((v, event) -> {
            content.getHitRect(rect);
            if (rect.contains((int) event.getX(), (int) event.getY())) {
                return gestureDetector.onTouchEvent(event);
            } else if (isTapOutsideToDismissEnabled()) {
                dismiss();
            }
            return false;
        });
        content.setOnTouchListener((v, event) -> {
            if (isSwipeToDismissEnabled()) {
                if (event.getAction() == MotionEvent.ACTION_DOWN) {
                    swipe = 0;
                    // Pause the auto-hide while the user is touching; if a
                    // settle animation is running, take over from its current
                    // position.
                    handler.removeCallbacks(hideRunnable);
                    if (animator != null) {
                        animator.cancel();
                        animator = null;
                        swipe = ViewHelper.getTranslationX(content);
                    }
                    return true;
                } else if ((event.getAction() == MotionEvent.ACTION_UP || event.getAction() == MotionEvent.ACTION_CANCEL) && animator == null) {
                    // Released below the dismiss threshold: animate back to 0.
                    animator = ObjectAnimator.ofFloat(swipe, 0);
                    animator.setDuration(200);
                    animator.addUpdateListener(animation -> {
                        float s = (Float) animation.getAnimatedValue();
                        ViewHelper.setTranslationX(content, s);
                        ViewHelper.setAlpha(content, Math.max(0, 1 - 2 * Math.abs(s) / content.getWidth()));
                        postInvalidate();
                    });
                    animator.start();
                    animator.addListener(new AnimatorListenerAdapter() {
                        @Override
                        public void onAnimationEnd(Animator animation) {
                            animator.cancel();
                            animator = null;
                            if (duration != INFINITE)
                                handler.postDelayed(hideRunnable, duration);
                        }
                    });
                    return true;
                }
            }
            return false;
        });
    }

    public boolean isTapOutsideToDismissEnabled() {
        return tapOutsideToDismiss;
    }

    public void setTapOutsideToDismissEnabled(boolean tapOutsideToDismiss) {
        this.tapOutsideToDismiss = tapOutsideToDismiss;
    }

    /** @deprecated use {@link #setOnDismissListener(OnDismissListener)}; this adapts the old callback. */
    @Deprecated
    public void setOnDismissedListener(final OnDismissedListener onDismissedListener) {
        this.onDismissListener = new OnDismissListener() {
            @Override
            public void onDismiss() {
                if (onDismissedListener != null)
                    onDismissedListener.onDismissed();
            }
        };
    }

    public void setOnDismissListener(OnDismissListener onDismissListener) {
        this.onDismissListener = onDismissListener;
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2014 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.view.table;
import java.awt.Component;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.swing.JLabel;
import javax.swing.JPopupMenu;
import javax.swing.ListSelectionModel;
import javax.swing.SortOrder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.TableModel;
import org.apache.log4j.Logger;
import org.jdesktop.swingx.JXTable;
import org.jdesktop.swingx.renderer.DefaultTableRenderer;
import org.jdesktop.swingx.renderer.IconValues;
import org.jdesktop.swingx.renderer.MappedValue;
import org.jdesktop.swingx.renderer.StringValues;
import org.jdesktop.swingx.table.ColumnFactory;
import org.jdesktop.swingx.table.TableColumnExt;
import org.parosproxy.paros.db.DatabaseException;
import org.parosproxy.paros.model.HistoryReference;
import org.parosproxy.paros.network.HttpMalformedHeaderException;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.utils.PagingTableModel;
import org.zaproxy.zap.view.ZapTable;
import org.zaproxy.zap.view.messagecontainer.http.DefaultSelectableHistoryReferencesContainer;
import org.zaproxy.zap.view.messagecontainer.http.SelectableHistoryReferencesContainer;
import org.zaproxy.zap.view.renderer.DateFormatStringValue;
import org.zaproxy.zap.view.renderer.SizeBytesStringValue;
import org.zaproxy.zap.view.renderer.TimeDurationStringValue;
import org.zaproxy.zap.view.table.HistoryReferencesTableModel.Column;
import org.zaproxy.zap.view.table.decorator.AlertRiskTableCellItemIconHighlighter;
import org.zaproxy.zap.view.table.decorator.NoteTableCellItemIconHighlighter;
/**
* A table specialised in showing data from {@code HistoryReference}s obtained from {@code HistoryReferencesTableModel}s.
*/
public class HistoryReferencesTable extends ZapTable {
private static final long serialVersionUID = -6988769961088738602L;
private static final Logger LOGGER = Logger.getLogger(HistoryReferencesTable.class);
private static final HistoryReferencesTableColumnFactory DEFAULT_COLUMN_FACTORY = new HistoryReferencesTableColumnFactory();
private static final int MAXIMUM_ROWS_FOR_TABLE_CONFIG = 75;
private int maximumRowsForTableConfig;
    /** Creates the table backed by a new {@code DefaultHistoryReferencesTableModel} with default columns. */
    public HistoryReferencesTable() {
        this(new DefaultHistoryReferencesTableModel());
    }
    /** Creates the table backed by a new {@code DefaultHistoryReferencesTableModel} with the given columns. */
    public HistoryReferencesTable(final Column[] columns) {
        this(new DefaultHistoryReferencesTableModel(columns));
    }
    /** Creates the table with the given model and the default selection listener installed. */
    public HistoryReferencesTable(HistoryReferencesTableModel<?> model) {
        this(model, true);
    }
    /**
     * Creates the table with the given model.
     *
     * @param useDefaultSelectionListener if true, selecting a row displays the
     *                                    corresponding HTTP message in the
     *                                    request/response panels
     */
    public HistoryReferencesTable(HistoryReferencesTableModel<?> model, boolean useDefaultSelectionListener) {
        super(model);
        maximumRowsForTableConfig = MAXIMUM_ROWS_FOR_TABLE_CONFIG;
        setName("GenericHistoryReferenceTable");
        installColumnFactory();
        // Row-only selection, multiple intervals allowed.
        setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
        setSortOrderCycle(SortOrder.ASCENDING, SortOrder.DESCENDING, SortOrder.UNSORTED);
        setColumnSelectionAllowed(false);
        setCellSelectionEnabled(false);
        setRowSelectionAllowed(true);
        if (useDefaultSelectionListener) {
            getSelectionModel().addListSelectionListener(new DisplayMessageOnSelectionValueChange());
        }
        setComponentPopupMenu(new CustomPopupMenu());
    }
    @Override
    protected void createDefaultRenderers() {
        super.createDefaultRenderers();
        // Render Date cells with the shared date format instead of toString().
        setDefaultRenderer(Date.class, new DefaultTableRenderer(new DateFormatStringValue()));
    }
    // Installs the shared column factory and rebuilds/sizes the columns from the model.
    protected void installColumnFactory() {
        setColumnFactory(DEFAULT_COLUMN_FACTORY);
        createDefaultColumnsFromModel();
        initializeColumnWidths();
    }
protected void displayMessage(final HttpMessage msg) {
if (msg == null) {
return;
}
if (msg.getRequestHeader().isEmpty()) {
View.getSingleton().getRequestPanel().clearView(true);
} else {
View.getSingleton().getRequestPanel().setMessage(msg);
}
if (msg.getResponseHeader().isEmpty()) {
View.getSingleton().getResponsePanel().clearView(false);
} else {
View.getSingleton().getResponsePanel().setMessage(msg, true);
}
}
public HistoryReference getSelectedHistoryReference() {
final int selectedRow = getSelectedRow();
if (selectedRow != -1) {
return getHistoryReferenceAtViewRow(selectedRow);
}
return null;
}
public List<HistoryReference> getSelectedHistoryReferences() {
final int[] rows = this.getSelectedRows();
if (rows.length == 0) {
return Collections.emptyList();
}
final List<HistoryReference> hrefList = new ArrayList<>(rows.length);
for (int row : rows) {
HistoryReference hRef = getHistoryReferenceAtViewRow(row);
if (hRef != null) {
hrefList.add(hRef);
}
}
return hrefList;
}
protected HistoryReference getHistoryReferenceAtViewRow(final int row) {
HistoryReferencesTableEntry entry = getModel().getEntry(convertRowIndexToModel(row));
if (entry != null) {
return entry.getHistoryReference();
}
return null;
}
    /**
     * {@inheritDoc}
     * <p>
     * Overridden to only accept models of type {@code HistoryReferencesTableModel}. If the given model extends from
     * {@code PagingTableModel} the maximum page size will be used to set the maximum rows for table configuration.
     *
     * @throws IllegalArgumentException if the {@code dataModel} is not a {@code HistoryReferencesTableModel}.
     * @see PagingTableModel
     * @see PagingTableModel#getMaxPageSize()
     * @see #setMaximumRowsForTableConfiguration(int)
     */
    @Override
    public void setModel(final TableModel dataModel) {
        if (!(dataModel instanceof HistoryReferencesTableModel)) {
            throw new IllegalArgumentException("Parameter dataModel must be a subclass of HistoryReferencesTableModel.");
        }
        if (dataModel instanceof PagingTableModel) {
            // Paged models never expose more than a page at once, so cap the
            // configuration work at the page size.
            setMaximumRowsForTableConfiguration(((PagingTableModel<?>) dataModel).getMaxPageSize());
        }
        super.setModel(dataModel);
    }
    // Safe: setModel() rejects anything that is not a HistoryReferencesTableModel.
    @Override
    public HistoryReferencesTableModel<?> getModel() {
        return (HistoryReferencesTableModel<?>) super.getModel();
    }
    /**
     * Sets the maximum rows that should be taken into account when configuring the table (for example, packing the columns).
     *
     * @param maximumRows the maximum rows that should be taken into account when configuring the table
     * @see #packAll()
     */
    public void setMaximumRowsForTableConfiguration(int maximumRows) {
        this.maximumRowsForTableConfig = maximumRows;
    }
    /**
     * Returns the maximum rows that will be taken into account when configuring the table (for example, packing the columns).
     *
     * @return the maximum rows that will be taken into account when configuring the table
     */
    public int getMaximumRowsForTableConfiguration() {
        return maximumRowsForTableConfig;
    }
    /**
     * Selects and scrolls to the row showing the entry with the given history
     * reference ID; does nothing if the model has no such entry.
     */
    public void selectHistoryReference(final int historyReferenceId) {
        final int modelRowIndex = getModel().getEntryRowIndex(historyReferenceId);
        if (modelRowIndex > -1) {
            // Convert to a view index so sorting/filtering is respected.
            final int viewRowIndex = convertRowIndexToView(modelRowIndex);
            this.getSelectionModel().setSelectionInterval(viewRowIndex, viewRowIndex);
            this.scrollRowToVisible(viewRowIndex);
        }
    }
    /**
     * Selection listener that loads and displays the HTTP message of the
     * selected row in the request/response panels. Errors while reading the
     * message from the database are logged, not propagated.
     */
    protected class DisplayMessageOnSelectionValueChange implements ListSelectionListener {
        @Override
        public void valueChanged(final ListSelectionEvent evt) {
            // Only react to the final event of a selection change sequence.
            if (!evt.getValueIsAdjusting()) {
                HistoryReference hRef = getSelectedHistoryReference();
                if (hRef == null) {
                    return;
                }
                try {
                    displayMessage(hRef.getHttpMessage());
                } catch (HttpMalformedHeaderException | DatabaseException e) {
                    LOGGER.error(e.getMessage(), e);
                }
            }
        }
    }
    /**
     * Popup menu that, instead of showing itself, delegates to ZAP's global
     * popup menu, passing along the currently selected history references.
     */
    protected class CustomPopupMenu extends JPopupMenu {
        private static final long serialVersionUID = 1L;
        @Override
        public void show(Component invoker, int x, int y) {
            SelectableHistoryReferencesContainer messageContainer = new DefaultSelectableHistoryReferencesContainer(
                    HistoryReferencesTable.this.getName(),
                    HistoryReferencesTable.this,
                    Collections.<HistoryReference> emptyList(),
                    getSelectedHistoryReferences());
            View.getSingleton().getPopupMenu().show(messageContainer, x, y);
        }
        /**
         * Returns the selected history references.
         * <p>
         * Defaults to call {@code HistoryReferencesTable#getSelectedHistoryReferences()}
         * </p>
         *
         * @return the selected history references.
         * @see HistoryReferencesTable#getSelectedHistoryReferences()
         */
        protected List<HistoryReference> getSelectedHistoryReferences() {
            return HistoryReferencesTable.this.getSelectedHistoryReferences();
        }
    }
/**
 * A {@code ColumnFactory} tailored to {@code HistoryReferencesTable}s: it caps the number of
 * rows inspected during table configuration and installs renderers/highlighters for the known
 * columns (highest alert, RTT, note and the size columns).
 */
protected static class HistoryReferencesTableColumnFactory extends ColumnFactory {

    public HistoryReferencesTableColumnFactory() {
    }

    @Override
    protected int getRowCount(final JXTable table) {
        final int rowCount = super.getRowCount(table);
        final int maxRowCount = ((HistoryReferencesTable) table).getMaximumRowsForTableConfiguration();
        // A zero or negative maximum means "no limit".
        return (maxRowCount > 0 && rowCount > maxRowCount) ? maxRowCount : rowCount;
    }

    @Override
    public void configureTableColumn(final TableModel model, final TableColumnExt columnExt) {
        super.configureTableColumn(model, columnExt);
        HistoryReferencesTableModel<?> hRefModel = (HistoryReferencesTableModel<?>) model;
        columnExt.setPrototypeValue(hRefModel.getPrototypeValue(columnExt.getModelIndex()));

        final int columnModelIndex = columnExt.getModelIndex();

        // Icon highlighter for the "highest alert" column.
        final int highestAlertColumnIndex = hRefModel.getColumnIndex(Column.HIGHEST_ALERT);
        if (highestAlertColumnIndex != -1
                && columnModelIndex == highestAlertColumnIndex
                && model.getColumnClass(highestAlertColumnIndex) == AlertRiskTableCellItem.class) {
            columnExt.setHighlighters(new AlertRiskTableCellItemIconHighlighter(highestAlertColumnIndex));
        }

        // Human readable duration renderer for the RTT column.
        final int rttColumnIndex = hRefModel.getColumnIndex(Column.RTT);
        if (rttColumnIndex != -1
                && columnModelIndex == rttColumnIndex
                && TimeDurationStringValue.isTargetClass(model.getColumnClass(rttColumnIndex))) {
            columnExt.setCellRenderer(new DefaultTableRenderer(new TimeDurationStringValue()));
        }

        // The note column is a boolean rendered as a centered icon (no text).
        final int noteColumnIndex = hRefModel.getColumnIndex(Column.NOTE);
        if (noteColumnIndex != -1
                && columnModelIndex == noteColumnIndex
                && model.getColumnClass(noteColumnIndex) == Boolean.class) {
            columnExt.setCellRenderer(new DefaultTableRenderer(
                    new MappedValue(StringValues.EMPTY, IconValues.NONE),
                    JLabel.CENTER));
            columnExt.setHighlighters(new NoteTableCellItemIconHighlighter(noteColumnIndex));
        }

        // All size columns share the same bytes renderer.
        installSizeBytesRenderer(columnExt, hRefModel.getColumnIndex(Column.SIZE_MESSAGE), model);
        installSizeBytesRenderer(columnExt, hRefModel.getColumnIndex(Column.SIZE_REQUEST_HEADER), model);
        installSizeBytesRenderer(columnExt, hRefModel.getColumnIndex(Column.SIZE_REQUEST_BODY), model);
        installSizeBytesRenderer(columnExt, hRefModel.getColumnIndex(Column.SIZE_RESPONSE_HEADER), model);
        installSizeBytesRenderer(columnExt, hRefModel.getColumnIndex(Column.SIZE_RESPONSE_BODY), model);
    }

    // Installs a size-in-bytes renderer on the column, when it is the given column and its
    // class is one handled by SizeBytesStringValue.
    private void installSizeBytesRenderer(TableColumnExt columnExt, int columnIndex, TableModel model) {
        if (columnIndex != -1
                && columnExt.getModelIndex() == columnIndex
                && SizeBytesStringValue.isTargetClass(model.getColumnClass(columnIndex))) {
            columnExt.setCellRenderer(new DefaultTableRenderer(new SizeBytesStringValue()));
        }
    }
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io.hfile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.nio.MultiByteBuff;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
@Category({IOTests.class, MediumTests.class})
public class TestHFileBlockIndex {
/**
 * Parameters for the runner: one run of the whole suite per supported compression algorithm.
 *
 * @return the compression algorithms to parameterize the tests with
 */
@Parameters
public static Collection<Object[]> compressionAlgorithms() {
    return HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS_PARAMETERIZED;
}
/**
 * Creates the test instance for one parameterized run.
 *
 * @param compr the compression algorithm used for the HFiles written by this run
 */
public TestHFileBlockIndex(Compression.Algorithm compr) {
    this.compr = compr;
}
private static final Log LOG = LogFactory.getLog(TestHFileBlockIndex.class);
// Number of data blocks written by writeWholeIndex().
private static final int NUM_DATA_BLOCKS = 1000;
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
// Deliberately small block size so written files span many blocks.
private static final int SMALL_BLOCK_SIZE = 4096;
// Number of key/values written by testHFileWriterAndReader().
private static final int NUM_KV = 10000;
private static FileSystem fs;
private Path path;
private Random rand;
// Location and shape of the root index recorded by writeWholeIndex().
private long rootIndexOffset;
private int numRootEntries;
private int numLevels;
// All keys written to the current file, in write order (reset per test).
private static final List<byte[]> keys = new ArrayList<>();
// Compression algorithm under test (parameterized).
private final Compression.Algorithm compr;
private byte[] firstKeyInFile;
private Configuration conf;
// Parallel arrays: for each index chunk size, the expected index depth and
// the expected uncompressed index size asserted by testHFileWriterAndReader.
private static final int[] INDEX_CHUNK_SIZES = { 4096, 512, 384 };
private static final int[] EXPECTED_NUM_LEVELS = { 2, 3, 4 };
private static final int[] UNCOMPRESSED_INDEX_SIZES =
{ 19187, 21813, 23086 };
// Whether written files include MVCC/memstore timestamps.
private static final boolean includesMemstoreTS = true;
// The three parallel arrays above must stay in sync.
static {
assert INDEX_CHUNK_SIZES.length == EXPECTED_NUM_LEVELS.length;
assert INDEX_CHUNK_SIZES.length == UNCOMPRESSED_INDEX_SIZES.length;
}
@Before
public void setUp() throws IOException {
// Reset the state shared through (partly static) fields between tests.
keys.clear();
// Fixed seed so each run generates the same keys.
rand = new Random(2389757);
firstKeyInFile = null;
conf = TEST_UTIL.getConfiguration();
// This test requires at least HFile format version 2.
conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MAX_FORMAT_VERSION);
fs = HFileSystem.get(conf);
}
// Writes and reads back a multi-level block index, first without tags,
// then (after resetting shared state) with tags.
@Test
public void testBlockIndex() throws IOException {
testBlockIndexInternals(false);
clear();
testBlockIndexInternals(true);
}
// Resets the shared state between the two passes of testBlockIndex().
// NOTE(review): this pass pins HFile format version 3 (tags require v3)
// while setUp() uses MAX_FORMAT_VERSION — presumably intentional; confirm.
private void clear() throws IOException {
keys.clear();
rand = new Random(2389757);
firstKeyInFile = null;
conf = TEST_UTIL.getConfiguration();
// This test requires at least HFile format version 2.
conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
fs = HFileSystem.get(conf);
}
// Writes a file with a block index (optionally with tags) and reads it
// back, verifying every key can be located through the index.
private void testBlockIndexInternals(boolean useTags) throws IOException {
path = new Path(TEST_UTIL.getDataTestDir(), "block_index_" + compr + useTags);
writeWholeIndex(useTags);
readIndex(useTags);
}
/**
 * A wrapper around a block reader which only caches the results of the last
 * operation. Not thread-safe.
 */
private static class BlockReaderWrapper implements HFile.CachingBlockReader {
private HFileBlock.FSReader realReader;
// Arguments and result of the previous readBlock() call — a cache of size 1.
private long prevOffset;
private long prevOnDiskSize;
private boolean prevPread;
private HFileBlock prevBlock;
// Counters inspected by the test to verify the index reader's access pattern.
public int hitCount = 0;
public int missCount = 0;
public BlockReaderWrapper(HFileBlock.FSReader realReader) {
this.realReader = realReader;
}
@Override
public void returnBlock(HFileBlock block) {
// No block pooling here, nothing to return.
}
@Override
public HFileBlock readBlock(long offset, long onDiskSize,
boolean cacheBlock, boolean pread, boolean isCompaction,
boolean updateCacheMetrics, BlockType expectedBlockType,
DataBlockEncoding expectedDataBlockEncoding)
throws IOException {
// Serve from the one-entry cache when the same block is requested again.
if (offset == prevOffset && onDiskSize == prevOnDiskSize &&
pread == prevPread) {
hitCount += 1;
return prevBlock;
}
// Cache miss: read from the real reader and remember the call/result.
missCount += 1;
prevBlock = realReader.readBlockData(offset, onDiskSize, pread, false);
prevOffset = offset;
prevOnDiskSize = onDiskSize;
prevPread = pread;
return prevBlock;
}
}
// Re-opens the file written by writeWholeIndex(), loads the root index and
// seeks to every written key, verifying the wrapped reader's hit/miss
// pattern (consecutive keys of a block must hit the one-entry cache).
private void readIndex(boolean useTags) throws IOException {
long fileSize = fs.getFileStatus(path).getLen();
LOG.info("Size of " + path + ": " + fileSize);
FSDataInputStream istream = fs.open(path);
HFileContext meta = new HFileContextBuilder()
.withHBaseCheckSum(true)
.withIncludesMvcc(includesMemstoreTS)
.withIncludesTags(useTags)
.withCompression(compr)
.build();
HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(istream, fs.getFileStatus(path)
.getLen(), meta);
BlockReaderWrapper brw = new BlockReaderWrapper(blockReader);
HFileBlockIndex.BlockIndexReader indexReader =
new HFileBlockIndex.CellBasedKeyBlockIndexReader(
CellComparator.COMPARATOR, numLevels, brw);
indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset,
fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries);
long prevOffset = -1;
int i = 0;
int expectedHitCount = 0;
int expectedMissCount = 0;
LOG.info("Total number of keys: " + keys.size());
for (byte[] key : keys) {
assertTrue(key != null);
assertTrue(indexReader != null);
KeyValue.KeyOnlyKeyValue keyOnlyKey = new KeyValue.KeyOnlyKeyValue(key, 0, key.length);
HFileBlock b =
indexReader.seekToDataBlock(keyOnlyKey, null, true,
true, false, null);
// Keys smaller than the first key of the file map to no block at all.
if (CellComparator.COMPARATOR.compare(keyOnlyKey, firstKeyInFile,
0, firstKeyInFile.length) < 0) {
assertTrue(b == null);
++i;
continue;
}
String keyStr = "key #" + i + ", " + Bytes.toStringBinary(key);
assertTrue("seekToDataBlock failed for " + keyStr, b != null);
// Same block as the previous key => must be a cache hit; a new block
// => a cache miss, and block offsets must be strictly increasing.
if (prevOffset == b.getOffset()) {
assertEquals(++expectedHitCount, brw.hitCount);
} else {
LOG.info("First key in a new block: " + keyStr + ", block offset: "
+ b.getOffset() + ")");
assertTrue(b.getOffset() > prevOffset);
assertEquals(++expectedMissCount, brw.missCount);
prevOffset = b.getOffset();
}
++i;
}
istream.close();
}
// Writes NUM_DATA_BLOCKS small data blocks plus the (possibly multi-level)
// block index, generating 16 ordered keys per block into the shared "keys"
// list and recording the root index offset/shape for readIndex().
private void writeWholeIndex(boolean useTags) throws IOException {
assertEquals(0, keys.size());
HFileContext meta = new HFileContextBuilder()
.withHBaseCheckSum(true)
.withIncludesMvcc(includesMemstoreTS)
.withIncludesTags(useTags)
.withCompression(compr)
.withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
.build();
HFileBlock.Writer hbw = new HFileBlock.Writer(null,
meta);
FSDataOutputStream outputStream = fs.create(path);
HFileBlockIndex.BlockIndexWriter biw =
new HFileBlockIndex.BlockIndexWriter(hbw, null, null);
for (int i = 0; i < NUM_DATA_BLOCKS; ++i) {
// Block content is irrelevant to the index — any bytes will do.
hbw.startWriting(BlockType.DATA).write(String.valueOf(rand.nextInt(1000)).getBytes());
long blockOffset = outputStream.getPos();
hbw.writeHeaderAndData(outputStream);
byte[] firstKey = null;
byte[] family = Bytes.toBytes("f");
byte[] qualifier = Bytes.toBytes("q");
for (int j = 0; j < 16; ++j) {
byte[] k =
new KeyValue(RandomKeyValueUtil.randomOrderedKey(rand, i * 16 + j), family, qualifier,
EnvironmentEdgeManager.currentTime(), KeyValue.Type.Put).getKey();
keys.add(k);
// NOTE(review): despite its name, "firstKey" is the 9th generated key
// of the block (j == 8) — confirm this is the intended index entry.
if (j == 8) {
firstKey = k;
}
}
assertTrue(firstKey != null);
if (firstKeyInFile == null) {
firstKeyInFile = firstKey;
}
biw.addEntry(firstKey, blockOffset, hbw.getOnDiskSizeWithHeader());
// Give the index writer a chance to emit due inline (leaf) index blocks.
writeInlineBlocks(hbw, outputStream, biw, false);
}
// Flush all remaining inline blocks, then the root-level index.
writeInlineBlocks(hbw, outputStream, biw, true);
rootIndexOffset = biw.writeIndexBlocks(outputStream);
outputStream.close();
numLevels = biw.getNumLevels();
numRootEntries = biw.getNumRootEntries();
LOG.info("Index written: numLevels=" + numLevels + ", numRootEntries=" +
numRootEntries + ", rootIndexOffset=" + rootIndexOffset);
}
// Writes every inline index block the index writer currently considers due
// (all remaining ones when isClosing is true) and reports each block's
// location and sizes back to the writer.
private void writeInlineBlocks(HFileBlock.Writer hbw,
FSDataOutputStream outputStream, HFileBlockIndex.BlockIndexWriter biw,
boolean isClosing) throws IOException {
while (biw.shouldWriteBlock(isClosing)) {
long offset = outputStream.getPos();
biw.writeInlineBlock(hbw.startWriting(biw.getInlineBlockType()));
hbw.writeHeaderAndData(outputStream);
biw.blockWritten(offset, hbw.getOnDiskSizeWithHeader(),
hbw.getUncompressedSizeWithoutHeader());
LOG.info("Wrote an inline index block at " + offset + ", size " +
hbw.getOnDiskSizeWithHeader());
}
}
/** Deterministic synthetic file offset for entry {@code i} (test data only). */
private static final long getDummyFileOffset(int i) {
    return 379 + i * 185;
}
/** Deterministic synthetic on-disk block size for entry {@code i} (test data only). */
private static final int getDummyOnDiskSize(int i) {
    return 13 + i * 19 + i * i * 37;
}
// Builds a synthetic non-root index block containing every other generated
// key and verifies binarySearchNonRootIndex()/locateNonRootIndexEntry():
// present keys must be found exactly, absent keys must resolve to the
// preceding entry (or -1 before the first entry).
@Test
public void testSecondaryIndexBinarySearch() throws IOException {
int numTotalKeys = 99;
assertTrue(numTotalKeys % 2 == 1); // Ensure no one made this even.
// We only add odd-index keys into the array that we will binary-search.
int numSearchedKeys = (numTotalKeys - 1) / 2;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
dos.writeInt(numSearchedKeys);
int curAllEntriesSize = 0;
int numEntriesAdded = 0;
// Only odd-index elements of this array are used to keep the secondary
// index entries of the corresponding keys.
int secondaryIndexEntries[] = new int[numTotalKeys];
for (int i = 0; i < numTotalKeys; ++i) {
byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i * 2);
KeyValue cell = new KeyValue(k, Bytes.toBytes("f"), Bytes.toBytes("q"),
Bytes.toBytes("val"));
//KeyValue cell = new KeyValue.KeyOnlyKeyValue(k, 0, k.length);
keys.add(cell.getKey());
// Pad the log prefix to a fixed width for readable output.
String msgPrefix = "Key #" + i + " (" + Bytes.toStringBinary(k) + "): ";
StringBuilder padding = new StringBuilder();
while (msgPrefix.length() + padding.length() < 70)
padding.append(' ');
msgPrefix += padding;
if (i % 2 == 1) {
// Odd-index keys become entries of the secondary (searched) index.
dos.writeInt(curAllEntriesSize);
secondaryIndexEntries[i] = curAllEntriesSize;
LOG.info(msgPrefix + "secondary index entry #" + ((i - 1) / 2) +
", offset " + curAllEntriesSize);
curAllEntriesSize += cell.getKey().length
+ HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;
++numEntriesAdded;
} else {
secondaryIndexEntries[i] = -1;
LOG.info(msgPrefix + "not in the searched array");
}
}
// Make sure the keys are increasing.
for (int i = 0; i < keys.size() - 1; ++i)
assertTrue(CellComparator.COMPARATOR.compare(
new KeyValue.KeyOnlyKeyValue(keys.get(i), 0, keys.get(i).length),
new KeyValue.KeyOnlyKeyValue(keys.get(i + 1), 0, keys.get(i + 1).length)) < 0);
dos.writeInt(curAllEntriesSize);
assertEquals(numSearchedKeys, numEntriesAdded);
int secondaryIndexOffset = dos.size();
// One int for the entry count plus (numSearchedKeys + 1) offsets.
assertEquals(Bytes.SIZEOF_INT * (numSearchedKeys + 2),
secondaryIndexOffset);
// Write the actual entries: (fileOffset, onDiskSize, key) per searched key.
for (int i = 1; i <= numTotalKeys - 1; i += 2) {
assertEquals(dos.size(),
secondaryIndexOffset + secondaryIndexEntries[i]);
long dummyFileOffset = getDummyFileOffset(i);
int dummyOnDiskSize = getDummyOnDiskSize(i);
LOG.debug("Storing file offset=" + dummyFileOffset + " and onDiskSize=" +
dummyOnDiskSize + " at offset " + dos.size());
dos.writeLong(dummyFileOffset);
dos.writeInt(dummyOnDiskSize);
LOG.debug("Stored key " + ((i - 1) / 2) +" at offset " + dos.size());
dos.write(keys.get(i));
}
dos.writeInt(curAllEntriesSize);
ByteBuffer nonRootIndex = ByteBuffer.wrap(baos.toByteArray());
for (int i = 0; i < numTotalKeys; ++i) {
byte[] searchKey = keys.get(i);
byte[] arrayHoldingKey = new byte[searchKey.length +
searchKey.length / 2];
// To make things a bit more interesting, store the key we are looking
// for at a non-zero offset in a new array.
System.arraycopy(searchKey, 0, arrayHoldingKey, searchKey.length / 2,
searchKey.length);
KeyValue.KeyOnlyKeyValue cell = new KeyValue.KeyOnlyKeyValue(
arrayHoldingKey, searchKey.length / 2, searchKey.length);
int searchResult = BlockIndexReader.binarySearchNonRootIndex(cell,
new MultiByteBuff(nonRootIndex), CellComparator.COMPARATOR);
String lookupFailureMsg = "Failed to look up key #" + i + " ("
+ Bytes.toStringBinary(searchKey) + ")";
int expectedResult;
int referenceItem;
if (i % 2 == 1) {
// This key is in the array we search as the element (i - 1) / 2. Make
// sure we find it.
expectedResult = (i - 1) / 2;
referenceItem = i;
} else {
// This key is not in the array but between two elements on the array,
// in the beginning, or in the end. The result should be the previous
// key in the searched array, or -1 for i = 0.
expectedResult = i / 2 - 1;
referenceItem = i - 1;
}
assertEquals(lookupFailureMsg, expectedResult, searchResult);
// Now test we can get the offset and the on-disk-size using a
// higher-level API function.s
boolean locateBlockResult =
(BlockIndexReader.locateNonRootIndexEntry(new MultiByteBuff(nonRootIndex), cell,
CellComparator.COMPARATOR) != -1);
if (i == 0) {
assertFalse(locateBlockResult);
} else {
assertTrue(locateBlockResult);
// After locating, the buffer is positioned at the entry: verify the
// stored dummy offset/size can be read back.
String errorMsg = "i=" + i + ", position=" + nonRootIndex.position();
assertEquals(errorMsg, getDummyFileOffset(referenceItem),
nonRootIndex.getLong());
assertEquals(errorMsg, getDummyOnDiskSize(referenceItem),
nonRootIndex.getInt());
}
}
}
/**
 * Exercises {@code BlockIndexChunk}: verifies that the reported root and non-root
 * serialized sizes match what is actually written, and that sub-entries (entries of
 * deeper-level index blocks, globally 0-indexed) can be mapped back to the chunk entry
 * that contains them — the lookup used for mid-key calculation.
 */
@Test
public void testBlockIndexChunk() throws IOException {
    BlockIndexChunk c = new BlockIndexChunk();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int N = 1000;
    // numSubEntriesAt[i] = cumulative number of sub-entries after adding entry i.
    int[] numSubEntriesAt = new int[N];
    int numSubEntries = 0;
    for (int i = 0; i < N; ++i) {
        // Serialized sizes reported by the chunk must match the bytes written,
        // in both the non-root and the root format.
        baos.reset();
        DataOutputStream dos = new DataOutputStream(baos);
        c.writeNonRoot(dos);
        assertEquals(c.getNonRootSize(), dos.size());
        baos.reset();
        dos = new DataOutputStream(baos);
        c.writeRoot(dos);
        assertEquals(c.getRootSize(), dos.size());
        byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);
        numSubEntries += rand.nextInt(5) + 1;
        // BUG FIX: record the cumulative sub-entry count for entry i. Without this
        // assignment the array stays all zeros and the verification loop below
        // never executes a single assertion.
        numSubEntriesAt[i] = numSubEntries;
        keys.add(k);
        c.add(k, getDummyFileOffset(i), getDummyOnDiskSize(i), numSubEntries);
    }
    // Test the ability to look up the entry that contains a particular
    // deeper-level index block's entry ("sub-entry"), assuming a global
    // 0-based ordering of sub-entries. This is needed for mid-key calculation.
    for (int i = 0; i < N; ++i) {
        for (int j = i == 0 ? 0 : numSubEntriesAt[i - 1];
                j < numSubEntriesAt[i];
                ++j) {
            assertEquals(i, c.getEntryBySubEntry(j));
        }
    }
}
/** Checks if the HeapSize calculator is within reason */
@Test
public void testHeapSizeForBlockIndex() throws IOException {
Class<HFileBlockIndex.BlockIndexReader> cl =
HFileBlockIndex.BlockIndexReader.class;
long expected = ClassSize.estimateBase(cl, false);
HFileBlockIndex.BlockIndexReader bi =
new HFileBlockIndex.ByteArrayKeyBlockIndexReader(1);
long actual = bi.heapSize();
// Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
// int [] blockDataSizes) are all null they are not going to show up in the
// HeapSize calculation, so need to remove those array costs from expected.
// Already the block keys are not there in this case
expected -= ClassSize.align(2 * ClassSize.ARRAY);
// If the first estimate disagrees, re-estimate with debug output enabled
// so the assertion failure shows the field-by-field breakdown.
if (expected != actual) {
expected = ClassSize.estimateBase(cl, true);
assertEquals(expected, actual);
}
}
/**
 * to check if looks good when midKey on a leaf index block boundary
 * @throws IOException
 */
@Test
public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
"hfile_for_midkey");
int maxChunkSize = 512;
conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
// should open hfile.block.index.cacheonwrite
conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);
CacheConfig cacheConf = new CacheConfig(conf);
BlockCache blockCache = cacheConf.getBlockCache();
// Evict all blocks that were cached-on-write by the previous invocation.
blockCache.evictBlocksByHfileName(hfilePath.getName());
// Write the HFile
{
HFileContext meta = new HFileContextBuilder()
.withBlockSize(SMALL_BLOCK_SIZE)
.withCompression(Algorithm.NONE)
.withDataBlockEncoding(DataBlockEncoding.NONE)
.build();
HFile.Writer writer =
HFile.getWriterFactory(conf, cacheConf)
.withPath(fs, hfilePath)
.withFileContext(meta)
.create();
Random rand = new Random(19231737);
byte[] family = Bytes.toBytes("f");
byte[] qualifier = Bytes.toBytes("q");
int kvNumberToBeWritten = 16;
// the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,
// midkey is just on the boundary of the first leaf-index block
for (int i = 0; i < kvNumberToBeWritten; ++i) {
byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);
// Key will be interpreted by KeyValue.KEY_COMPARATOR
KeyValue kv =
new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));
writer.append(kv);
}
writer.close();
}
// close hfile.block.index.cacheonwrite
conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
// Read the HFile
HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
boolean hasArrayIndexOutOfBoundsException = false;
try {
// get the mid-key.
reader.midKey();
} catch (ArrayIndexOutOfBoundsException e) {
// This is the regression being tested for: midKey() must not throw when
// the mid-key falls exactly on a leaf index block boundary.
hasArrayIndexOutOfBoundsException = true;
} finally {
reader.close();
}
// to check if ArrayIndexOutOfBoundsException occurred
assertFalse(hasArrayIndexOutOfBoundsException);
}
/**
 * Testing block index through the HFile writer/reader APIs. Allows to test
 * setting index block size through configuration, intermediate-level index
 * blocks, and caching index blocks on write.
 *
 * @throws IOException
 */
@Test
public void testHFileWriterAndReader() throws IOException {
Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
"hfile_for_block_index");
CacheConfig cacheConf = new CacheConfig(conf);
BlockCache blockCache = cacheConf.getBlockCache();
// One pass per configured index chunk size; smaller chunks force deeper indexes.
for (int testI = 0; testI < INDEX_CHUNK_SIZES.length; ++testI) {
int indexBlockSize = INDEX_CHUNK_SIZES[testI];
int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];
LOG.info("Index block size: " + indexBlockSize + ", compression: "
+ compr);
// Evict all blocks that were cached-on-write by the previous invocation.
blockCache.evictBlocksByHfileName(hfilePath.getName());
conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);
Set<String> keyStrSet = new HashSet<>();
byte[][] keys = new byte[NUM_KV][];
byte[][] values = new byte[NUM_KV][];
// Write the HFile
{
HFileContext meta = new HFileContextBuilder()
.withBlockSize(SMALL_BLOCK_SIZE)
.withCompression(compr)
.build();
HFile.Writer writer =
HFile.getWriterFactory(conf, cacheConf)
.withPath(fs, hfilePath)
.withFileContext(meta)
.create();
Random rand = new Random(19231737);
byte[] family = Bytes.toBytes("f");
byte[] qualifier = Bytes.toBytes("q");
for (int i = 0; i < NUM_KV; ++i) {
byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);
// Key will be interpreted by KeyValue.KEY_COMPARATOR
KeyValue kv =
new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
RandomKeyValueUtil.randomValue(rand));
byte[] k = kv.getKey();
writer.append(kv);
keys[i] = k;
values[i] = CellUtil.cloneValue(kv);
keyStrSet.add(Bytes.toStringBinary(k));
// Generated keys must be strictly increasing.
if (i > 0) {
assertTrue((CellComparator.COMPARATOR.compare(kv, keys[i - 1],
0, keys[i - 1].length)) > 0);
}
}
writer.close();
}
// Read the HFile
HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
assertEquals(expectedNumLevels,
reader.getTrailer().getNumDataIndexLevels());
assertTrue(Bytes.equals(keys[0], ((KeyValue)reader.getFirstKey().get()).getKey()));
assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue)reader.getLastKey().get()).getKey()));
LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));
// Seek to every key forwards and backwards, with and without positional read.
for (boolean pread : new boolean[] { false, true }) {
HFileScanner scanner = reader.getScanner(true, pread);
for (int i = 0; i < NUM_KV; ++i) {
checkSeekTo(keys, scanner, i);
checkKeyValue("i=" + i, keys[i], values[i],
ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());
}
assertTrue(scanner.seekTo());
for (int i = NUM_KV - 1; i >= 0; --i) {
checkSeekTo(keys, scanner, i);
checkKeyValue("i=" + i, keys[i], values[i],
ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());
}
}
// Manually compute the mid-key and validate it.
HFile.Reader reader2 = reader;
HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader();
HFileBlock.BlockIterator iter = fsReader.blockRange(0,
reader.getTrailer().getLoadOnOpenDataOffset());
HFileBlock block;
List<byte[]> blockKeys = new ArrayList<>();
while ((block = iter.nextBlock()) != null) {
// NOTE(review): this "return" exits the whole test on the first
// non-LEAF_INDEX block (and the first block of the range is a data
// block), which skips the mid-key and index-size assertions below —
// a "continue" looks like the intent; confirm before changing.
if (block.getBlockType() != BlockType.LEAF_INDEX)
return;
ByteBuff b = block.getBufferReadOnly();
int n = b.getIntAfterPosition(0);
// One int for the number of items, and n + 1 for the secondary index.
int entriesOffset = Bytes.SIZEOF_INT * (n + 2);
// Get all the keys from the leaf index block. S
for (int i = 0; i < n; ++i) {
int keyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 1));
int nextKeyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 2));
int keyLen = nextKeyRelOffset - keyRelOffset;
int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset +
HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;
byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset
+ keyLen);
String blockKeyStr = Bytes.toString(blockKey);
blockKeys.add(blockKey);
// If the first key of the block is not among the keys written, we
// are not parsing the non-root index block format correctly.
assertTrue("Invalid block key from leaf-level block: " + blockKeyStr,
keyStrSet.contains(blockKeyStr));
}
}
// Validate the mid-key.
assertEquals(
Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),
reader.midKey());
assertEquals(UNCOMPRESSED_INDEX_SIZES[testI],
reader.getTrailer().getUncompressedDataIndexSize());
reader.close();
reader2.close();
}
}
/**
 * Seeks the scanner to key #{@code i} and asserts the exact key was found
 * (seekTo returns 0 on an exact match).
 */
private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i)
        throws IOException {
    String failureMsg =
        "Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")";
    assertEquals(failureMsg, 0, scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])));
}
/**
 * Asserts that the buffer's contents (from position 0 up to its limit) are
 * byte-for-byte equal to the given array.
 */
private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr,
        ByteBuffer buf) {
    int comparison = Bytes.compareTo(arr, 0, arr.length,
        buf.array(), buf.arrayOffset(), buf.limit());
    assertEquals(msgPrefix + ": expected " + Bytes.toStringBinary(arr)
        + ", actual " + Bytes.toStringBinary(buf), 0, comparison);
}
/**
 * Checks a key/value pair after it was read by the reader: both the key and
 * the value buffers must match the expected arrays.
 */
private void checkKeyValue(String msgPrefix, byte[] expectedKey,
        byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) {
    String prefix = msgPrefix.isEmpty() ? msgPrefix : msgPrefix + ". ";
    assertArrayEqualsBuffer(prefix + "Invalid key", expectedKey, keyRead);
    assertArrayEqualsBuffer(prefix + "Invalid value", expectedValue, valueRead);
}
/** Large-key index test using the default minimum of 16 index entries per level. */
@Test(timeout=10000)
public void testIntermediateLevelIndicesWithLargeKeys() throws IOException {
    testIntermediateLevelIndicesWithLargeKeys(16);
}
/** Same as above, but with the minimum index entries per level lowered to 2. */
@Test(timeout=10000)
public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException {
    // because of the large rowKeys, we will end up with a 50-level block index without sanity check
    testIntermediateLevelIndicesWithLargeKeys(2);
}
// Writes rows whose keys are each larger than a whole index chunk (so every
// leaf index chunk holds a single entry), then verifies the file can still
// be opened and every key sought — guarding against runaway index depth.
public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws IOException {
Path hfPath = new Path(TEST_UTIL.getDataTestDir(),
"testIntermediateLevelIndicesWithLargeKeys.hfile");
int maxChunkSize = 1024;
FileSystem fs = FileSystem.get(conf);
CacheConfig cacheConf = new CacheConfig(conf);
conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
conf.setInt(HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY, minNumEntries);
HFileContext context = new HFileContextBuilder().withBlockSize(16).build();
HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)
.withFileContext(context)
.withPath(fs, hfPath).create();
List<byte[]> keys = new ArrayList<>();
// This should result in leaf-level indices and a root level index
for (int i=0; i < 100; i++) {
// Row key bigger than maxChunkSize, with the loop counter encoded at the end.
byte[] rowkey = new byte[maxChunkSize + 1];
byte[] b = Bytes.toBytes(i);
System.arraycopy(b, 0, rowkey, rowkey.length - b.length, b.length);
keys.add(rowkey);
hfw.append(CellUtil.createCell(rowkey));
}
hfw.close();
HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);
// Scanner doesn't do Cells yet. Fix.
HFileScanner scanner = reader.getScanner(true, true);
for (int i = 0; i < keys.size(); ++i) {
scanner.seekTo(CellUtil.createCell(keys.get(i)));
}
reader.close();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.testframework.junits;
import java.util.Map;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteTransactions;
import org.apache.ignite.Ignition;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMemoryMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.util.lang.GridAbsPredicateX;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.configvariations.CacheStartMode;
import org.apache.ignite.transactions.Transaction;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMemoryMode.OFFHEAP_TIERED;
import static org.apache.ignite.cache.CacheMemoryMode.ONHEAP_TIERED;
/**
* Abstract class for cache configuration variations tests.
*/
public abstract class IgniteCacheConfigVariationsAbstractTest extends IgniteConfigVariationsAbstractTest {
/** */
protected static final int CLIENT_NEAR_ONLY_IDX = 2;
/** Test timeout. */
private static final long TEST_TIMEOUT = 30 * 1000;
/** Store map. */
protected static final Map<Object, Object> map = new ConcurrentHashMap8<>();
/** {@inheritDoc} */
@Override protected long getTestTimeout() {
return TEST_TIMEOUT;
}
/** {@inheritDoc} */
@Override protected final void beforeTestsStarted() throws Exception {
assert testsCfg != null;
assert !testsCfg.withClients() || testsCfg.gridCount() >= 3;
assert testsCfg.testedNodeIndex() >= 0 : "testedNodeIdx: " + testedNodeIdx;
testedNodeIdx = testsCfg.testedNodeIndex();
if (testsCfg.isStartCache()) {
final CacheStartMode cacheStartMode = testsCfg.cacheStartMode();
final int cnt = testsCfg.gridCount();
if (cacheStartMode == CacheStartMode.STATIC) {
info("All nodes will be stopped, new " + cnt + " nodes will be started.");
Ignition.stopAll(true);
for (int i = 0; i < cnt; i++) {
String gridName = getTestGridName(i);
IgniteConfiguration cfg = optimize(getConfiguration(gridName));
if (i != CLIENT_NODE_IDX && i != CLIENT_NEAR_ONLY_IDX) {
CacheConfiguration cc = testsCfg.configurationFactory().cacheConfiguration(gridName);
cc.setName(cacheName());
cfg.setCacheConfiguration(cc);
}
startGrid(gridName, cfg, null);
}
if (testsCfg.withClients() && testsCfg.gridCount() > CLIENT_NEAR_ONLY_IDX)
grid(CLIENT_NEAR_ONLY_IDX).createNearCache(cacheName(), new NearCacheConfiguration());
}
else if (cacheStartMode == null || cacheStartMode == CacheStartMode.DYNAMIC) {
super.beforeTestsStarted();
startCachesDinamically();
}
else
throw new IllegalArgumentException("Unknown cache start mode: " + cacheStartMode);
}
if (testsCfg.gridCount() > 1)
checkTopology(testsCfg.gridCount());
awaitPartitionMapExchange();
for (int i = 0; i < gridCount(); i++)
info("Grid " + i + ": " + grid(i).localNode().id());
if (testsCfg.withClients()) {
boolean testedNodeNearEnabled = grid(testedNodeIdx).cachex(cacheName()).context().isNear();
if (testedNodeIdx != SERVER_NODE_IDX)
assertEquals(testedNodeIdx == CLIENT_NEAR_ONLY_IDX, testedNodeNearEnabled);
info(">>> Starting set of tests [testedNodeIdx=" + testedNodeIdx
+ ", id=" + grid(testedNodeIdx).localNode().id()
+ ", isClient=" + grid(testedNodeIdx).configuration().isClientMode()
+ ", nearEnabled=" + testedNodeNearEnabled + "]");
}
}
/**
* Starts caches dinamically.
*/
private void startCachesDinamically() throws Exception {
for (int i = 0; i < gridCount(); i++) {
info("Starting cache dinamically on grid: " + i);
IgniteEx grid = grid(i);
if (i != CLIENT_NODE_IDX && i != CLIENT_NEAR_ONLY_IDX) {
CacheConfiguration cc = testsCfg.configurationFactory().cacheConfiguration(grid.name());
cc.setName(cacheName());
grid.getOrCreateCache(cc);
}
if (testsCfg.withClients() && i == CLIENT_NEAR_ONLY_IDX)
grid(CLIENT_NEAR_ONLY_IDX).createNearCache(cacheName(), new NearCacheConfiguration());
}
awaitPartitionMapExchange();
for (int i = 0; i < gridCount(); i++)
assertNotNull(jcache(i));
for (int i = 0; i < gridCount(); i++)
assertEquals("Cache is not empty [idx=" + i + ", entrySet=" + jcache(i).localEntries() + ']',
0, jcache(i).localSize(CachePeekMode.ALL));
}
/** {@inheritDoc} */
@Override protected boolean expectedClient(String testGridName) {
return getTestGridName(CLIENT_NODE_IDX).equals(testGridName)
|| getTestGridName(CLIENT_NEAR_ONLY_IDX).equals(testGridName);
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
if (testsCfg.isStopCache()) {
for (int i = 0; i < gridCount(); i++) {
info("Destroing cache on grid: " + i);
IgniteCache<String, Integer> cache = jcache(i);
assert i != 0 || cache != null;
if (cache != null)
cache.destroy();
}
}
map.clear();
super.afterTestsStopped();
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
super.beforeTest();
awaitPartitionMapExchange();
assert jcache().unwrap(Ignite.class).transactions().tx() == null;
assertEquals(0, jcache().localSize());
assertEquals(0, jcache().size());
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
Transaction tx = jcache().unwrap(Ignite.class).transactions().tx();
if (tx != null) {
tx.close();
fail("Cache transaction remained after test completion: " + tx);
}
String cacheIsNotEmptyMsg = null;
for (int i = 0; i < gridCount(); i++) {
info("Checking grid: " + i);
while (true) {
try {
final int fi = i;
boolean cacheIsEmpty = GridTestUtils.waitForCondition(
// Preloading may happen as nodes leave, so we need to wait.
new GridAbsPredicateX() {
@Override public boolean applyx() throws IgniteCheckedException {
jcache(fi).removeAll();
if (jcache(fi).size(CachePeekMode.ALL) > 0) {
for (Cache.Entry<?, ?> k : jcache(fi).localEntries())
jcache(fi).remove(k.getKey());
}
int locSize = jcache(fi).localSize(CachePeekMode.ALL);
if (locSize != 0) {
info(">>>>> Debug localSize for grid: " + fi + " is " + locSize);
info(">>>>> Debug ONHEAP localSize for grid: " + fi + " is "
+ jcache(fi).localSize(CachePeekMode.ONHEAP));
info(">>>>> Debug OFFHEAP localSize for grid: " + fi + " is "
+ jcache(fi).localSize(CachePeekMode.OFFHEAP));
info(">>>>> Debug PRIMARY localSize for grid: " + fi + " is "
+ jcache(fi).localSize(CachePeekMode.PRIMARY));
info(">>>>> Debug BACKUP localSize for grid: " + fi + " is "
+ jcache(fi).localSize(CachePeekMode.BACKUP));
info(">>>>> Debug NEAR localSize for grid: " + fi + " is "
+ jcache(fi).localSize(CachePeekMode.NEAR));
info(">>>>> Debug SWAP localSize for grid: " + fi + " is "
+ jcache(fi).localSize(CachePeekMode.SWAP));
}
return locSize == 0;
}
}, 10_000);
if (cacheIsEmpty)
assertTrue("Cache is not empty: " + " localSize = " + jcache(fi).localSize(CachePeekMode.ALL)
+ ", local entries " + entrySet(jcache(fi).localEntries()), cacheIsEmpty);
int primaryKeySize = jcache(i).localSize(CachePeekMode.PRIMARY);
int keySize = jcache(i).localSize();
int size = jcache(i).localSize();
int globalSize = jcache(i).size();
int globalPrimarySize = jcache(i).size(CachePeekMode.PRIMARY);
info("Size after [idx=" + i +
", size=" + size +
", keySize=" + keySize +
", primarySize=" + primaryKeySize +
", globalSize=" + globalSize +
", globalPrimarySize=" + globalPrimarySize +
", entrySet=" + jcache(i).localEntries() + ']');
if (!cacheIsEmpty) {
cacheIsNotEmptyMsg = "Cache is not empty: localSize = "
+ jcache(fi).localSize(CachePeekMode.ALL) + ", local entries "
+ entrySet(jcache(fi).localEntries());
break;
}
assertEquals("Cache is not empty [idx=" + i + ", entrySet=" + jcache(i).localEntries() + ']',
0, jcache(i).localSize(CachePeekMode.ALL));
break;
}
catch (Exception e) {
if (X.hasCause(e, ClusterTopologyCheckedException.class)) {
info("Got topology exception while tear down (will retry in 1000ms).");
U.sleep(1000);
}
else
throw e;
}
}
if (cacheIsNotEmptyMsg != null)
break;
for (Cache.Entry entry : jcache(i).localEntries(CachePeekMode.SWAP))
jcache(i).remove(entry.getKey());
}
assert jcache().unwrap(Ignite.class).transactions().tx() == null;
if (cacheIsNotEmptyMsg == null)
assertEquals("Cache is not empty", 0, jcache().localSize(CachePeekMode.ALL));
resetStore();
// Restore cache if current cache has garbage.
if (cacheIsNotEmptyMsg != null) {
for (int i = 0; i < gridCount(); i++) {
info("Destroing cache on grid: " + i);
IgniteCache<String, Integer> cache = jcache(i);
assert i != 0 || cache != null;
if (cache != null)
cache.destroy();
}
assertTrue(GridTestUtils.waitForCondition(new GridAbsPredicateX() {
@Override public boolean applyx() {
for (int i = 0; i < gridCount(); i++) {
if (jcache(i) != null)
return false;
}
return true;
}
}, 10_000));
startCachesDinamically();
log.warning(cacheIsNotEmptyMsg);
throw new IllegalStateException(cacheIsNotEmptyMsg);
}
assertEquals(0, jcache().localSize());
assertEquals(0, jcache().size());
}
/**
* Cleans up cache store.
*/
protected void resetStore() {
map.clear();
}
/**
* Put entry to cache store.
*
* @param key Key.
* @param val Value.
*/
protected void putToStore(Object key, Object val) {
if (!storeEnabled())
throw new IllegalStateException("Failed to put to store because store is disabled.");
map.put(key, val);
}
/**
* @return Default cache mode.
*/
protected CacheMode cacheMode() {
CacheMode mode = cacheConfiguration().getCacheMode();
return mode == null ? CacheConfiguration.DFLT_CACHE_MODE : mode;
}
/**
* @return Load previous value flag.
*/
protected boolean isLoadPreviousValue() {
return cacheConfiguration().isLoadPreviousValue();
}
/**
* @return Cache atomicity mode.
*/
protected CacheAtomicityMode atomicityMode() {
return cacheConfiguration().getAtomicityMode();
}
/**
* @return {@code True} if values should be stored off-heap.
*/
protected CacheMemoryMode memoryMode() {
return cacheConfiguration().getMemoryMode();
}
/**
* @return {@code True} if swap should happend after localEvict() call.
*/
protected boolean swapAfterLocalEvict() {
if (memoryMode() == OFFHEAP_TIERED)
return false;
return memoryMode() == ONHEAP_TIERED ? (!offheapEnabled() && swapEnabled()) : swapEnabled();
}
/**
* @return {@code True} if store is enabled.
*/
protected boolean storeEnabled() {
return cacheConfiguration().getCacheStoreFactory() != null;
}
/**
* @return {@code True} if offheap memory is enabled.
*/
protected boolean offheapEnabled() {
return cacheConfiguration().getOffHeapMaxMemory() >= 0;
}
/**
* @return {@code True} if swap is enabled.
*/
protected boolean swapEnabled() {
return cacheConfiguration().isSwapEnabled();
}
/**
* @return Write through storage emulator.
*/
public static CacheStore<?, ?> cacheStore() {
return new CacheStoreAdapter<Object, Object>() {
@Override public void loadCache(IgniteBiInClosure<Object, Object> clo,
Object... args) {
for (Map.Entry<Object, Object> e : map.entrySet())
clo.apply(e.getKey(), e.getValue());
}
@Override public Object load(Object key) {
return map.get(key);
}
@Override public void write(Cache.Entry<? extends Object, ? extends Object> e) {
map.put(e.getKey(), e.getValue());
}
@Override public void delete(Object key) {
map.remove(key);
}
};
}
/**
* @return {@code true} if near cache should be enabled.
*/
protected boolean nearEnabled() {
return grid(testedNodeIdx).cachex(cacheName()).context().isNear();
}
/**
* @return {@code True} if transactions are enabled.
* @see #txShouldBeUsed()
*/
protected boolean txEnabled() {
return atomicityMode() == TRANSACTIONAL;
}
/**
* @return Cache configuration.
*/
protected CacheConfiguration cacheConfiguration() {
return testsCfg.configurationFactory().cacheConfiguration(getTestGridName(testedNodeIdx));
}
/**
* @return {@code True} if transactions should be used.
*/
protected boolean txShouldBeUsed() {
return txEnabled() && !isMultiJvm();
}
/**
* @return {@code True} if locking is enabled.
*/
protected boolean lockingEnabled() {
return txEnabled();
}
/**
* @return Default cache instance.
*/
@SuppressWarnings({"unchecked"})
@Override protected <K, V> IgniteCache<K, V> jcache() {
return jcache(testedNodeIdx);
}
/**
* @return A not near-only cache.
*/
protected IgniteCache<String, Integer> serverNodeCache() {
return jcache(SERVER_NODE_IDX);
}
/**
* @return Cache name.
*/
protected String cacheName() {
return "testcache-" + testsCfg.description().hashCode();
}
/**
* @return Transactions instance.
*/
protected IgniteTransactions transactions() {
return grid(0).transactions();
}
/**
* @param idx Index of grid.
* @return Default cache.
*/
@SuppressWarnings({"unchecked"})
@Override protected <K, V> IgniteCache<K, V> jcache(int idx) {
return ignite(idx).cache(cacheName());
}
/**
* @param idx Index of grid.
* @return Cache context.
*/
protected GridCacheContext<String, Integer> context(final int idx) {
if (isRemoteJvm(idx) && !isRemoteJvm())
throw new UnsupportedOperationException("Operation can't be done automatically via proxy. " +
"Send task with this logic on remote jvm instead.");
return ((IgniteKernal)grid(idx)).<String, Integer>internalCache(cacheName()).context();
}
/**
* @param cache Cache.
* @return {@code True} if cache has OFFHEAP_TIERED memory mode.
*/
protected static <K, V> boolean offheapTiered(IgniteCache<K, V> cache) {
return cache.getConfiguration(CacheConfiguration.class).getMemoryMode() == OFFHEAP_TIERED;
}
/**
* Executes regular peek or peek from swap.
*
* @param cache Cache projection.
* @param key Key.
* @return Value.
*/
@Nullable protected static <K, V> V peek(IgniteCache<K, V> cache, K key) {
return offheapTiered(cache) ? cache.localPeek(key, CachePeekMode.SWAP, CachePeekMode.OFFHEAP) :
cache.localPeek(key, CachePeekMode.ONHEAP);
}
/**
* @param cache Cache.
* @param key Key.
* @return {@code True} if cache contains given key.
* @throws Exception If failed.
*/
@SuppressWarnings("unchecked")
protected static boolean containsKey(IgniteCache cache, Object key) throws Exception {
return offheapTiered(cache) ? cache.localPeek(key, CachePeekMode.OFFHEAP) != null : cache.containsKey(key);
}
/**
* Serializable factory.
*/
public static class TestStoreFactory implements Factory<CacheStore> {
@Override public CacheStore create() {
return cacheStore();
}
}
}
| |
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.jasperreports.crosstabs.design;
import java.awt.Color;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.net.URLStreamHandlerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.TimeZone;
import net.sf.jasperreports.crosstabs.CrosstabColumnCell;
import net.sf.jasperreports.crosstabs.CrosstabDeepVisitor;
import net.sf.jasperreports.crosstabs.JRCellContents;
import net.sf.jasperreports.crosstabs.JRCrosstab;
import net.sf.jasperreports.crosstabs.JRCrosstabBucket;
import net.sf.jasperreports.crosstabs.JRCrosstabCell;
import net.sf.jasperreports.crosstabs.JRCrosstabColumnGroup;
import net.sf.jasperreports.crosstabs.JRCrosstabDataset;
import net.sf.jasperreports.crosstabs.JRCrosstabGroup;
import net.sf.jasperreports.crosstabs.JRCrosstabMeasure;
import net.sf.jasperreports.crosstabs.JRCrosstabParameter;
import net.sf.jasperreports.crosstabs.JRCrosstabRowGroup;
import net.sf.jasperreports.crosstabs.base.JRBaseCrosstab;
import net.sf.jasperreports.engine.JRConstants;
import net.sf.jasperreports.engine.JRDefaultStyleProvider;
import net.sf.jasperreports.engine.JRElement;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.JRExpression;
import net.sf.jasperreports.engine.JRExpressionCollector;
import net.sf.jasperreports.engine.JRLineBox;
import net.sf.jasperreports.engine.JRParameter;
import net.sf.jasperreports.engine.JRVariable;
import net.sf.jasperreports.engine.JRVisitor;
import net.sf.jasperreports.engine.ReportContext;
import net.sf.jasperreports.engine.base.JRBaseLineBox;
import net.sf.jasperreports.engine.design.JRDesignElement;
import net.sf.jasperreports.engine.design.JRDesignVariable;
import net.sf.jasperreports.engine.type.CalculationEnum;
import net.sf.jasperreports.engine.type.HorizontalPosition;
import net.sf.jasperreports.engine.type.ModeEnum;
import net.sf.jasperreports.engine.type.RunDirectionEnum;
import net.sf.jasperreports.engine.util.ElementsVisitorUtils;
import net.sf.jasperreports.engine.util.FileResolver;
import net.sf.jasperreports.engine.util.FormatFactory;
import net.sf.jasperreports.engine.util.JRCloneUtils;
import net.sf.jasperreports.engine.util.JRStyleResolver;
import net.sf.jasperreports.engine.util.Pair;
import org.apache.commons.collections.map.LinkedMap;
/**
* Design-time {@link net.sf.jasperreports.crosstabs.JRCrosstab crosstab} implementation.
*
* @author Lucian Chirita (lucianc@users.sourceforge.net)
* @version $Id: JRDesignCrosstab.java 7199 2014-08-27 13:58:10Z teodord $
*/
public class JRDesignCrosstab extends JRDesignElement implements JRCrosstab
{
    private static final long serialVersionUID = JRConstants.SERIAL_VERSION_UID;

    // Names of the properties for which this design object fires change events.
    public static final String PROPERTY_COLUMN_BREAK_OFFSET = "columnBreakOffset";
    public static final String PROPERTY_DATASET = "dataset";
    public static final String PROPERTY_TITLE_CELL = "titleCell";
    public static final String PROPERTY_HEADER_CELL = "headerCell";
    public static final String PROPERTY_PARAMETERS_MAP_EXPRESSION = "parametersMapExpression";
    public static final String PROPERTY_REPEAT_COLUMN_HEADERS = "isRepeatColumnHeaders";
    public static final String PROPERTY_REPEAT_ROW_HEADERS = "isRepeatRowHeaders";
    public static final String PROPERTY_WHEN_NO_DATA_CELL = "whenNoDataCell";
    public static final String PROPERTY_CELLS = "cells";
    public static final String PROPERTY_ROW_GROUPS = "rowGroups";
    public static final String PROPERTY_COLUMN_GROUPS = "columnGroups";
    public static final String PROPERTY_MEASURES = "measures";
    public static final String PROPERTY_PARAMETERS = "parameters";
    public static final String PROPERTY_IGNORE_WIDTH = "ignoreWidth";

    // Crosstab parameters, kept both as an ordered list and as a name-keyed map.
    protected List<JRCrosstabParameter> parametersList;
    protected Map<String, JRCrosstabParameter> parametersMap;

    // used to be a org.apache.commons.collections.SequencedHashMap, we're now using LinkedMap
    protected Map<String, JRVariable> variablesList;
    protected JRExpression parametersMapExpression;
    protected JRDesignCrosstabDataset dataset;

    // Row/column groups and measures. Each *Map field maps an element name to its
    // index in the corresponding list and must be kept in sync on add/remove.
    protected List<JRCrosstabRowGroup> rowGroups;
    protected List<JRCrosstabColumnGroup> columnGroups;
    protected List<JRCrosstabMeasure> measures;
    protected Map<String, Integer> rowGroupsMap;
    protected Map<String, Integer> columnGroupsMap;
    protected Map<String, Integer> measuresMap;

    protected int columnBreakOffset = DEFAULT_COLUMN_BREAK_OFFSET;
    protected boolean repeatColumnHeaders = true;
    protected boolean repeatRowHeaders = true;
    protected RunDirectionEnum runDirectionValue;
    protected HorizontalPosition horizontalPosition;

    // Data cells, as a list and keyed by (row total group name, column total group name).
    protected List<JRCrosstabCell> cellsList;
    protected Map<Pair<String,String>,JRCrosstabCell> cellsMap;
    protected JRDesignCrosstabCell[][] crossCells;
    protected JRDesignCellContents whenNoDataCell;
    protected DesignCrosstabColumnCell titleCell;
    protected JRDesignCellContents headerCell;
    protected Boolean ignoreWidth;
    protected JRLineBox lineBox;
    /**
     * Listener that propagates a measure's value-class change to the crosstab
     * (via {@code measureClassChanged}) so dependent total variables stay consistent.
     */
    private class MeasureClassChangeListener implements PropertyChangeListener, Serializable
    {
        private static final long serialVersionUID = JRConstants.SERIAL_VERSION_UID;

        public void propertyChange(PropertyChangeEvent evt)
        {
            // The event source is always the measure whose value class changed.
            measureClassChanged((JRDesignCrosstabMeasure) evt.getSource(), (String) evt.getNewValue());
        }
    }

    // Single shared listener instance, registered on every added measure.
    private PropertyChangeListener measureClassChangeListener = new MeasureClassChangeListener();
    // Built-in crosstab parameters as flat (name, value class) pairs; consumed
    // pairwise by addBuiltinParameters().
    @SuppressWarnings("deprecation")
    private static final Object[] BUILT_IN_PARAMETERS = new Object[] {
        JRParameter.REPORT_CONTEXT, ReportContext.class,
        JRParameter.REPORT_PARAMETERS_MAP, java.util.Map.class,
        JRParameter.REPORT_LOCALE, Locale.class,
        JRParameter.REPORT_RESOURCE_BUNDLE, ResourceBundle.class,
        JRParameter.REPORT_TIME_ZONE, TimeZone.class,
        JRParameter.REPORT_FORMAT_FACTORY, FormatFactory.class,
        JRParameter.REPORT_CLASS_LOADER, ClassLoader.class,
        JRParameter.REPORT_URL_HANDLER_FACTORY, URLStreamHandlerFactory.class,
        JRParameter.REPORT_FILE_RESOLVER, FileResolver.class};

    // Built-in crosstab variables as flat (name, value class) pairs; consumed
    // pairwise by addBuiltinVariables().
    private static final Object[] BUILT_IN_VARIABLES = new Object[] {
        JRCrosstab.VARIABLE_ROW_COUNT, Integer.class,
        JRCrosstab.VARIABLE_COLUMN_COUNT, Integer.class};
    /**
     * Creates a new crosstab.
     *
     * @param defaultStyleProvider default style provider
     */
    public JRDesignCrosstab(JRDefaultStyleProvider defaultStyleProvider)
    {
        super(defaultStyleProvider);

        parametersList = new ArrayList<JRCrosstabParameter>();
        parametersMap = new HashMap<String, JRCrosstabParameter>();

        rowGroupsMap = new HashMap<String, Integer>();
        rowGroups = new ArrayList<JRCrosstabRowGroup>();
        columnGroupsMap = new HashMap<String, Integer>();
        columnGroups = new ArrayList<JRCrosstabColumnGroup>();
        measuresMap = new HashMap<String, Integer>();
        measures = new ArrayList<JRCrosstabMeasure>();

        cellsMap = new HashMap<Pair<String,String>,JRCrosstabCell>();
        cellsList = new ArrayList<JRCrosstabCell>();

        // Parameters must exist before the built-ins are added; variables use an
        // insertion-ordered LinkedMap so built-in variables come first.
        addBuiltinParameters();

        variablesList = new LinkedMap();
        addBuiltinVariables();

        dataset = new JRDesignCrosstabDataset();
        lineBox = new JRBaseLineBox(this);
    }
    /**
     * Registers the system-defined crosstab parameters listed in
     * {@code BUILT_IN_PARAMETERS}.
     */
    private void addBuiltinParameters()
    {
        // BUILT_IN_PARAMETERS holds (name, class) pairs: the extra i++ in the body
        // advances past the name so the loop increment lands on the next pair.
        for (int i = 0; i < BUILT_IN_PARAMETERS.length; i++)
        {
            JRDesignCrosstabParameter parameter = new JRDesignCrosstabParameter();
            parameter.setName((String) BUILT_IN_PARAMETERS[i++]);
            parameter.setValueClass((Class<?>) BUILT_IN_PARAMETERS[i]);
            parameter.setSystemDefined(true);

            try
            {
                addParameter(parameter);
            }
            catch (JRException e)
            {
                // never reached: duplicate names are impossible for built-ins
            }
        }
    }
    /**
     * Registers the system-defined crosstab variables listed in
     * {@code BUILT_IN_VARIABLES} (row/column counts).
     */
    private void addBuiltinVariables()
    {
        // BUILT_IN_VARIABLES holds (name, class) pairs; ++i inside the body skips
        // to the class element of the current pair.
        for (int i = 0; i < BUILT_IN_VARIABLES.length; ++i)
        {
            JRDesignVariable variable = new JRDesignVariable();
            variable.setName((String) BUILT_IN_VARIABLES[i]);
            variable.setValueClass((Class<?>) BUILT_IN_VARIABLES[++i]);
            variable.setCalculation(CalculationEnum.SYSTEM);
            variable.setSystemDefined(true);
            addVariable(variable);
        }
    }
    /**
     * Creates a new crosstab with no default style provider.
     */
    public JRDesignCrosstab()
    {
        this(null);
    }
    /**
     * The ID of the crosstab is only generated at compile time.
     *
     * @return always {@code 0} for design-time crosstabs
     */
    public int getId()
    {
        return 0;
    }
    /**
     * Returns the crosstab input dataset.
     *
     * @return the crosstab dataset
     */
    public JRCrosstabDataset getDataset()
    {
        return dataset;
    }
    /**
     * Returns the crosstab dataset object to be used for report designing.
     *
     * @return the crosstab dataset design object
     */
    public JRDesignCrosstabDataset getDesignDataset()
    {
        // Same object as getDataset(), exposed with its concrete design type.
        return dataset;
    }
public JRCrosstabRowGroup[] getRowGroups()
{
JRCrosstabRowGroup[] groups = new JRCrosstabRowGroup[rowGroups.size()];
rowGroups.toArray(groups);
return groups;
}
public JRCrosstabColumnGroup[] getColumnGroups()
{
JRCrosstabColumnGroup[] groups = new JRCrosstabColumnGroup[columnGroups.size()];
columnGroups.toArray(groups);
return groups;
}
public JRCrosstabMeasure[] getMeasures()
{
JRCrosstabMeasure[] measureArray = new JRCrosstabMeasure[measures.size()];
measures.toArray(measureArray);
return measureArray;
}
    /**
     * Collects the expressions used by this crosstab.
     *
     * @param collector the expression collector
     */
    public void collectExpressions(JRExpressionCollector collector)
    {
        collector.collect(this);
    }
    /**
     * Accepts a report visitor; optionally performs a deep visit of the
     * elements contained in the crosstab cells.
     */
    public void visit(JRVisitor visitor)
    {
        visitor.visitCrosstab(this);

        if (ElementsVisitorUtils.visitDeepElements(visitor))
        {
            new CrosstabDeepVisitor(visitor).deepVisitCrosstab(this);
        }
    }
    /**
     * Sets the crosstab input dataset.
     *
     * @param dataset the dataset
     * @see JRCrosstab#getDataset()
     */
    public void setDataset(JRDesignCrosstabDataset dataset)
    {
        Object old = this.dataset;
        this.dataset = dataset;
        // Fire with the old value so listeners can observe the transition.
        getEventSupport().firePropertyChange(PROPERTY_DATASET, old, this.dataset);
    }
    /**
     * Adds a row group.
     * <p>
     * This group will be a sub group of the last row group, if any.
     *
     * @param group the group
     * @throws JRException if a group or measure with the same name already exists
     * @see JRCrosstab#getRowGroups()
     */
    public void addRowGroup(JRDesignCrosstabRowGroup group) throws JRException
    {
        // Group and measure names share a single namespace.
        String groupName = group.getName();
        if (rowGroupsMap.containsKey(groupName) ||
                columnGroupsMap.containsKey(groupName) ||
                measuresMap.containsKey(groupName))
        {
            throw new JRException("A group or measure having the same name already exists in the crosstab.");
        }

        // Appending at the end, so the new index is the current list size.
        rowGroupsMap.put(groupName, Integer.valueOf(rowGroups.size()));
        rowGroups.add(group);

        addRowGroupVars(group);

        setParent(group);

        getEventSupport().fireCollectionElementAddedEvent(PROPERTY_ROW_GROUPS, group, rowGroups.size() - 1);
    }
/**
* Adds a row group.
* <p>
* This group will be a sub group of the last row group, if any.
*
* @param group the group
* @param index position
* @throws JRException
* @see JRCrosstab#getRowGroups()
*/
public void addRowGroup(int index, JRDesignCrosstabRowGroup group) throws JRException
{
String groupName = group.getName();
if (rowGroupsMap.containsKey(groupName) ||
columnGroupsMap.containsKey(groupName) ||
measuresMap.containsKey(groupName))
{
throw new JRException("A group or measure having the same name already exists in the crosstab.");
}
rowGroupsMap.put(groupName, Integer.valueOf(rowGroups.size()));
rowGroups.add(index, group);
addRowGroupVars(group);
setParent(group);
getEventSupport().fireCollectionElementAddedEvent(PROPERTY_ROW_GROUPS, group, index);
}
protected void addRowGroupVars(JRDesignCrosstabRowGroup rowGroup)
{
addVariable(rowGroup.getVariable());
for (Iterator<JRCrosstabMeasure> measureIt = measures.iterator(); measureIt.hasNext();)
{
JRCrosstabMeasure measure = measureIt.next();
addTotalVar(measure, rowGroup, null);
for (Iterator<JRCrosstabColumnGroup> colIt = columnGroups.iterator(); colIt.hasNext();)
{
JRCrosstabColumnGroup colGroup = colIt.next();
addTotalVar(measure, rowGroup, colGroup);
}
}
}
    /**
     * Adds a column group.
     * <p>
     * This group will be a sub group of the last column group, if any.
     *
     * @param group the group
     * @throws JRException if a group or measure with the same name already exists
     * @see JRCrosstab#getColumnGroups()
     */
    public void addColumnGroup(JRDesignCrosstabColumnGroup group) throws JRException
    {
        // Group and measure names share a single namespace.
        String groupName = group.getName();
        if (rowGroupsMap.containsKey(groupName) ||
                columnGroupsMap.containsKey(groupName) ||
                measuresMap.containsKey(groupName))
        {
            throw new JRException("A group or measure having the same name already exists in the crosstab.");
        }

        // Appending at the end, so the new index is the current list size.
        columnGroupsMap.put(groupName, Integer.valueOf(columnGroups.size()));
        columnGroups.add(group);

        addColGroupVars(group);

        setParent(group);

        getEventSupport().fireCollectionElementAddedEvent(PROPERTY_COLUMN_GROUPS, group, columnGroups.size() - 1);
    }
/**
* Adds a column group.
* <p>
* This group will be a sub group of the last column group, if any.
*
* @param group the group
* @throws JRException
* @see JRCrosstab#getColumnGroups()
*/
public void addColumnGroup(int index, JRDesignCrosstabColumnGroup group) throws JRException
{
String groupName = group.getName();
if (rowGroupsMap.containsKey(groupName) ||
columnGroupsMap.containsKey(groupName) ||
measuresMap.containsKey(groupName))
{
throw new JRException("A group or measure having the same name already exists in the crosstab.");
}
columnGroupsMap.put(groupName, Integer.valueOf(columnGroups.size()));
columnGroups.add(index, group);
addColGroupVars(group);
setParent(group);
getEventSupport().fireCollectionElementAddedEvent(PROPERTY_COLUMN_GROUPS, group, index);
}
protected void addColGroupVars(JRDesignCrosstabColumnGroup colGroup)
{
addVariable(colGroup.getVariable());
for (Iterator<JRCrosstabMeasure> measureIt = measures.iterator(); measureIt.hasNext();)
{
JRCrosstabMeasure measure = measureIt.next();
addTotalVar(measure, null, colGroup);
for (Iterator<JRCrosstabRowGroup> rowIt = rowGroups.iterator(); rowIt.hasNext();)
{
JRCrosstabRowGroup rowGroup = rowIt.next();
addTotalVar(measure, rowGroup, colGroup);
}
}
}
    /**
     * Adds a measure to the crosstab.
     *
     * @param measure the measure
     * @throws JRException if a group or measure with the same name already exists
     * @see JRCrosstab#getMeasures()
     */
    public void addMeasure(JRDesignCrosstabMeasure measure) throws JRException
    {
        // Group and measure names share a single namespace.
        String measureName = measure.getName();
        if (rowGroupsMap.containsKey(measureName) ||
                columnGroupsMap.containsKey(measureName) ||
                measuresMap.containsKey(measureName))
        {
            throw new JRException("A group or measure having the same name already exists in the crosstab.");
        }

        // Track value-class changes so dependent total variables stay consistent.
        measure.addPropertyChangeListener(JRDesignCrosstabMeasure.PROPERTY_VALUE_CLASS, measureClassChangeListener);

        // Appending at the end, so the new index is the current list size.
        measuresMap.put(measureName, Integer.valueOf(measures.size()));
        measures.add(measure);

        addMeasureVars(measure);

        getEventSupport().fireCollectionElementAddedEvent(PROPERTY_MEASURES, measure, measures.size() - 1);
    }
/**
* Adds a measure to the crosstab.
*
* @param measure the measure
* @throws JRException
* @see JRCrosstab#getMeasures()
*/
public void addMeasure(int index, JRDesignCrosstabMeasure measure) throws JRException
{
String measureName = measure.getName();
if (rowGroupsMap.containsKey(measureName) ||
columnGroupsMap.containsKey(measureName) ||
measuresMap.containsKey(measureName))
{
throw new JRException("A group or measure having the same name already exists in the crosstab.");
}
measure.addPropertyChangeListener(JRDesignCrosstabMeasure.PROPERTY_VALUE_CLASS, measureClassChangeListener);
measuresMap.put(measureName, Integer.valueOf(measures.size()));
measures.add(index, measure);
addMeasureVars(measure);
getEventSupport().fireCollectionElementAddedEvent(PROPERTY_MEASURES, measure, index);
}
protected void addMeasureVars(JRDesignCrosstabMeasure measure)
{
addVariable(measure.getVariable());
for (Iterator<JRCrosstabColumnGroup> colIt = columnGroups.iterator(); colIt.hasNext();)
{
JRCrosstabColumnGroup colGroup = colIt.next();
addTotalVar(measure, null, colGroup);
}
for (Iterator<JRCrosstabRowGroup> rowIt = rowGroups.iterator(); rowIt.hasNext();)
{
JRCrosstabRowGroup rowGroup = rowIt.next();
addTotalVar(measure, rowGroup, null);
for (Iterator<JRCrosstabColumnGroup> colIt = columnGroups.iterator(); colIt.hasNext();)
{
JRCrosstabColumnGroup colGroup = colIt.next();
addTotalVar(measure, rowGroup, colGroup);
}
}
}
    /**
     * Creates and registers the system-defined total variable for a measure at
     * the given (row group, column group) total level; either group may be null.
     */
    protected void addTotalVar(JRCrosstabMeasure measure, JRCrosstabRowGroup rowGroup, JRCrosstabColumnGroup colGroup)
    {
        JRDesignVariable var = new JRDesignVariable();
        var.setCalculation(CalculationEnum.SYSTEM);
        var.setSystemDefined(true);
        var.setName(getTotalVariableName(measure, rowGroup, colGroup));
        // The total variable has the same value class as the measure.
        var.setValueClassName(measure.getValueClassName());
        addVariable(var);
    }
    /**
     * Unregisters the total variable for a measure at the given
     * (row group, column group) total level; either group may be null.
     */
    protected void removeTotalVar(JRCrosstabMeasure measure, JRCrosstabRowGroup rowGroup, JRCrosstabColumnGroup colGroup)
    {
        String varName = getTotalVariableName(measure, rowGroup, colGroup);
        removeVariable(varName);
    }
public static String getTotalVariableName(JRCrosstabMeasure measure, JRCrosstabRowGroup rowGroup, JRCrosstabColumnGroup colGroup)
{
StringBuffer name = new StringBuffer();
name.append(measure.getName());
if (rowGroup != null)
{
name.append('_');
name.append(rowGroup.getName());
}
if (colGroup != null)
{
name.append('_');
name.append(colGroup.getName());
}
name.append("_ALL");
return name.toString();
}
    /**
     * Removes a row group.
     *
     * @param groupName the group name
     * @return the removed group, or null if no group with that name exists
     */
    public JRCrosstabRowGroup removeRowGroup(String groupName)
    {
        JRCrosstabRowGroup removed = null;

        Integer idx = rowGroupsMap.remove(groupName);
        if (idx != null)
        {
            removed = rowGroups.remove(idx.intValue());

            // Removal shifted all following groups left by one: refresh their
            // entries in the name->index map.
            for (ListIterator<JRCrosstabRowGroup> it = rowGroups.listIterator(idx.intValue()); it.hasNext();)
            {
                JRCrosstabRowGroup group = it.next();
                rowGroupsMap.put(group.getName(), Integer.valueOf(it.previousIndex()));
            }

            // Drop every data cell tied to this group's row total.
            for (Iterator<JRCrosstabCell> it = cellsList.iterator(); it.hasNext();)
            {
                JRCrosstabCell cell = it.next();
                String rowTotalGroup = cell.getRowTotalGroup();
                if (rowTotalGroup != null && rowTotalGroup.equals(groupName))
                {
                    it.remove();
                    cellsMap.remove(new Pair<String,String>(rowTotalGroup, cell.getColumnTotalGroup()));
                    getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_CELLS, cell, -1);
                }
            }

            removeRowGroupVars(removed);

            getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_ROW_GROUPS, removed, idx.intValue());
        }

        return removed;
    }
protected void removeRowGroupVars(JRCrosstabRowGroup rowGroup)
{
removeVariable(rowGroup.getVariable());
for (Iterator<JRCrosstabMeasure> measureIt = measures.iterator(); measureIt.hasNext();)
{
JRCrosstabMeasure measure = measureIt.next();
removeTotalVar(measure, rowGroup, null);
for (Iterator<JRCrosstabColumnGroup> colIt = columnGroups.iterator(); colIt.hasNext();)
{
JRCrosstabColumnGroup colGroup = colIt.next();
removeTotalVar(measure, rowGroup, colGroup);
}
}
}
    /**
     * Removes a row group.
     *
     * @param group the group to be removed
     * @return the removed group, or null if the group was not found by name
     */
    public JRCrosstabRowGroup removeRowGroup(JRCrosstabRowGroup group)
    {
        // Delegates to the name-based removal; matching is by name only.
        return removeRowGroup(group.getName());
    }
/**
 * Removes a column group.
 * <p>
 * The remaining groups are re-indexed, the data cells that totaled the
 * removed group are dropped, the group's variables are removed and the
 * corresponding removal events are fired.
 *
 * @param groupName the group name
 * @return the removed group, or null if no group having this name was found
 */
public JRCrosstabColumnGroup removeColumnGroup(String groupName)
{
JRCrosstabColumnGroup removed = null;
Integer idx = columnGroupsMap.remove(groupName);
if (idx != null)
{
removed = columnGroups.remove(idx.intValue());
// re-index the groups located after the removed one
for (ListIterator<JRCrosstabColumnGroup> it = columnGroups.listIterator(idx.intValue()); it.hasNext();)
{
JRCrosstabColumnGroup group = it.next();
columnGroupsMap.put(group.getName(), Integer.valueOf(it.previousIndex()));
}
// drop the data cells that used the removed group as their column total group
for (Iterator<JRCrosstabCell> it = cellsList.iterator(); it.hasNext();)
{
JRCrosstabCell cell = it.next();
String columnTotalGroup = cell.getColumnTotalGroup();
if (columnTotalGroup != null && columnTotalGroup.equals(groupName))
{
it.remove();
cellsMap.remove(new Pair<String,String>(cell.getRowTotalGroup(), columnTotalGroup));
getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_CELLS, cell, -1);
}
}
// remove the group variable and the measure total variables for this group
removeColGroupVars(removed);
getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_COLUMN_GROUPS, removed, idx.intValue());
}
return removed;
}
/**
 * Removes the variables associated with a column group: the group's own
 * variable, plus each measure's total variable for this group alone
 * and for this group combined with every row group.
 *
 * @param colGroup the column group whose variables are removed
 */
protected void removeColGroupVars(JRCrosstabColumnGroup colGroup)
{
	removeVariable(colGroup.getVariable());
	for (JRCrosstabMeasure measure : measures)
	{
		// total of the measure over this column group only
		removeTotalVar(measure, null, colGroup);
		// totals of the measure over each row group and this column group
		for (JRCrosstabRowGroup rowGroup : rowGroups)
		{
			removeTotalVar(measure, rowGroup, colGroup);
		}
	}
}
/**
 * Removes a column group.
 * <p>
 * Delegates to {@link #removeColumnGroup(String)} using the group's name.
 *
 * @param group the group
 * @return the removed group, or null if the group was not found
 */
public JRCrosstabColumnGroup removeColumnGroup(JRCrosstabColumnGroup group)
{
return removeColumnGroup(group.getName());
}
/**
 * Removes a measure.
 * <p>
 * The remaining measures are re-indexed, the measure's variables are
 * removed, the measure is detached from the value class change listener
 * and a removal event is fired.
 *
 * @param measureName the measure name
 * @return the removed measure, or null if no measure having this name was found
 */
public JRCrosstabMeasure removeMeasure(String measureName)
{
JRDesignCrosstabMeasure removed = null;
Integer idx = measuresMap.remove(measureName);
if (idx != null)
{
removed = (JRDesignCrosstabMeasure) measures.remove(idx.intValue());
// re-index the measures located after the removed one
for (ListIterator<JRCrosstabMeasure> it = measures.listIterator(idx.intValue()); it.hasNext();)
{
JRCrosstabMeasure group = it.next();
measuresMap.put(group.getName(), Integer.valueOf(it.previousIndex()));
}
removeMeasureVars(removed);
// stop listening for value class changes on the removed measure
removed.removePropertyChangeListener(JRDesignCrosstabMeasure.PROPERTY_VALUE_CLASS, measureClassChangeListener);
getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_MEASURES, removed, idx.intValue());
}
return removed;
}
/**
 * Removes all variables created for a measure: the measure variable itself,
 * one total per column group, one per row group, and one per row/column
 * group combination.
 *
 * @param measure the measure whose variables are removed
 */
protected void removeMeasureVars(JRDesignCrosstabMeasure measure)
{
	removeVariable(measure.getVariable());
	for (JRCrosstabColumnGroup columnGroup : columnGroups)
	{
		removeTotalVar(measure, null, columnGroup);
	}
	for (JRCrosstabRowGroup rowGroup : rowGroups)
	{
		removeTotalVar(measure, rowGroup, null);
		for (JRCrosstabColumnGroup columnGroup : columnGroups)
		{
			removeTotalVar(measure, rowGroup, columnGroup);
		}
	}
}
/**
 * Removes a measure.
 * <p>
 * Delegates to {@link #removeMeasure(String)} using the measure's name.
 *
 * @param measure the measure
 * @return the removed measure, or null if the measure was not found
 */
public JRCrosstabMeasure removeMeasure(JRCrosstabMeasure measure)
{
return removeMeasure(measure.getName());
}
/**
 * Returns whether the column headers are repeated on row breaks.
 */
public boolean isRepeatColumnHeaders()
{
return repeatColumnHeaders;
}
/**
 * Sets the repeat column headers flag.
 *
 * @param repeatColumnHeaders whether to repeat the column headers on row breaks
 * @see JRCrosstab#isRepeatColumnHeaders()
 */
public void setRepeatColumnHeaders(boolean repeatColumnHeaders)
{
boolean old = this.repeatColumnHeaders;
this.repeatColumnHeaders = repeatColumnHeaders;
getEventSupport().firePropertyChange(PROPERTY_REPEAT_COLUMN_HEADERS, old, this.repeatColumnHeaders);
}
/**
 * Returns whether the row headers are repeated on column breaks.
 */
public boolean isRepeatRowHeaders()
{
return repeatRowHeaders;
}
/**
 * Sets the repeat row headers flag.
 *
 * @param repeatRowHeaders whether to repeat the row headers on column breaks
 * @see JRCrosstab#isRepeatRowHeaders()
 */
public void setRepeatRowHeaders(boolean repeatRowHeaders)
{
boolean old = this.repeatRowHeaders;
this.repeatRowHeaders = repeatRowHeaders;
getEventSupport().firePropertyChange(PROPERTY_REPEAT_ROW_HEADERS, old, this.repeatRowHeaders);
}
/**
 * Returns the computed cell matrix, populated by {@link #preprocess()};
 * null on a crosstab that has not been preprocessed (see clone()).
 */
public JRCrosstabCell[][] getCells()
{
return crossCells;
}
/**
 * Returns the data cells list.
 *
 * @return the data cells list
 * @see #addCell(JRDesignCrosstabCell)
 */
public List<JRCrosstabCell> getCellsList()
{
return cellsList;
}
/**
 * Returns the crosstab cells indexed by corresponding row total group/
 * column total group {@link Pair pairs}.
 *
 * @return the crosstab cells indexed by row/column total groups
 * @see JRCrosstabCell#getRowTotalGroup()
 * @see JRCrosstabCell#getColumnTotalGroup()
 */
public Map<Pair<String,String>,JRCrosstabCell> getCellsMap()
{
return cellsMap;
}
/**
 * Adds a data cell to the crosstab.
 *
 * @param cell the cell
 * @throws JRException if a referenced total group does not exist or a cell
 * for the same row/column total group combination was already added
 * @see JRCrosstab#getCells()
 */
public void addCell(JRDesignCrosstabCell cell) throws JRException
{
	String rowTotalGroup = cell.getRowTotalGroup();
	if (rowTotalGroup != null && !rowGroupsMap.containsKey(rowTotalGroup))
	{
		throw new JRException("Row group " + rowTotalGroup + " does not exist.");
	}
	String columnTotalGroup = cell.getColumnTotalGroup();
	if (columnTotalGroup != null && !columnGroupsMap.containsKey(columnTotalGroup))
	{
		// fixed: this message previously said "Row group" for a missing column group
		throw new JRException("Column group " + columnTotalGroup + " does not exist.");
	}
	// cells are keyed by the (row total group, column total group) pair
	Pair<String,String> cellKey = new Pair<String,String>(rowTotalGroup, columnTotalGroup);
	if (cellsMap.containsKey(cellKey))
	{
		throw new JRException("Duplicate cell in crosstab.");
	}
	cellsMap.put(cellKey, cell);
	cellsList.add(cell);
	setCellOrigin(cell.getContents(),
			new JRCrosstabOrigin(this, JRCrosstabOrigin.TYPE_DATA_CELL,
					rowTotalGroup, columnTotalGroup));
	getEventSupport().fireCollectionElementAddedEvent(PROPERTY_CELLS, cell, cellsList.size() - 1);
}
/**
 * Removes a data cell.
 *
 * @param rowTotalGroup the cell's total row group
 * @param columnTotalGroup the cell's total column group
 * @return the removed cell, or null when no such cell exists
 */
public JRCrosstabCell removeCell(String rowTotalGroup, String columnTotalGroup)
{
	Pair<String,String> cellKey = new Pair<String,String>(rowTotalGroup, columnTotalGroup);
	JRCrosstabCell removedCell = cellsMap.remove(cellKey);
	if (removedCell == null)
	{
		return null;
	}
	cellsList.remove(removedCell);
	// the removal event does not report the cell's list position (-1)
	getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_CELLS, removedCell, -1);
	return removedCell;
}
/**
 * Removes a data cell.
 * <p>
 * Delegates to {@link #removeCell(String, String)} using the cell's
 * row and column total group names.
 *
 * @param cell the cell to be removed
 * @return the removed cell, or null when no such cell exists
 */
public JRCrosstabCell removeCell(JRCrosstabCell cell)
{
return removeCell(cell.getRowTotalGroup(), cell.getColumnTotalGroup());
}
/**
 * Returns the crosstab parameters as an array.
 *
 * @return a new array containing the crosstab parameters
 */
public JRCrosstabParameter[] getParameters()
{
	return parametersList.toArray(new JRCrosstabParameter[parametersList.size()]);
}
/**
 * Returns the parameters list.
 *
 * @return the parameters list
 */
public List<JRCrosstabParameter> getParametersList()
{
return parametersList;
}
/**
 * Returns the parameters indexed by names.
 *
 * @return the parameters indexed by names
 */
public Map<String, JRCrosstabParameter> getParametersMap()
{
return parametersMap;
}
/**
 * Returns the parameters map expression.
 */
public JRExpression getParametersMapExpression()
{
return parametersMapExpression;
}
/**
 * Adds a parameter to the crosstab.
 *
 * @param parameter the parameter
 * @throws JRException if a parameter having the same name already exists
 * @see JRCrosstab#getParameters()
 */
public void addParameter(JRCrosstabParameter parameter) throws JRException
{
	// fixed: the duplicate-name check was accidentally nested inside an
	// identical outer check; a single check is sufficient
	if (parametersMap.containsKey(parameter.getName()))
	{
		throw new JRException("Duplicate declaration of parameter : " + parameter.getName());
	}
	parametersMap.put(parameter.getName(), parameter);
	parametersList.add(parameter);
	getEventSupport().fireCollectionElementAddedEvent(PROPERTY_PARAMETERS, parameter, parametersList.size() - 1);
}
/**
 * Adds a parameter to the crosstab at a specified position.
 *
 * @param index the position at which to insert the parameter
 * @param parameter the parameter
 * @throws JRException if a parameter having the same name already exists
 * @see JRCrosstab#getParameters()
 */
public void addParameter(int index, JRCrosstabParameter parameter) throws JRException
{
	// fixed: the duplicate-name check was accidentally nested inside an
	// identical outer check; a single check is sufficient
	if (parametersMap.containsKey(parameter.getName()))
	{
		throw new JRException("Duplicate declaration of parameter : " + parameter.getName());
	}
	parametersMap.put(parameter.getName(), parameter);
	parametersList.add(index, parameter);
	getEventSupport().fireCollectionElementAddedEvent(PROPERTY_PARAMETERS, parameter, index);
}
/**
 * Removes a parameter.
 *
 * @param parameterName the name of the parameter to be removed
 * @return the removed parameter, or null if no parameter having this name was found
 */
public JRCrosstabParameter removeParameter(String parameterName)
{
JRCrosstabParameter param = parametersMap.remove(parameterName);
if (param != null)
{
int idx = parametersList.indexOf(param);
if (idx >= 0)
{
parametersList.remove(idx);
}
// NOTE(review): if the parameter was in the map but not in the list,
// the event fires with idx == -1 — confirm listeners tolerate this.
getEventSupport().fireCollectionElementRemovedEvent(PROPERTY_PARAMETERS, param, idx);
}
return param;
}
/**
 * Removes a parameter.
 * <p>
 * Delegates to {@link #removeParameter(String)} using the parameter's name.
 *
 * @param parameter the parameter to be removed
 * @return the removed parameter, or null if the parameter was not found
 */
public JRCrosstabParameter removeParameter(JRCrosstabParameter parameter)
{
return removeParameter(parameter.getName());
}
/**
 * Sets the parameters map expression.
 *
 * @param expression the parameters map expression
 * @see JRCrosstab#getParametersMapExpression()
 */
public void setParametersMapExpression(JRExpression expression)
{
Object old = this.parametersMapExpression;
this.parametersMapExpression = expression;
getEventSupport().firePropertyChange(PROPERTY_PARAMETERS_MAP_EXPRESSION, old, this.parametersMapExpression);
}
/**
 * Returns the variables of this crosstab indexed by name.
 *
 * @return a new map from variable name to variable
 */
public Map<String, JRVariable> getVariablesMap()
{
	Map<String, JRVariable> variablesByName = new HashMap<String, JRVariable>();
	for (JRVariable variable : getVariables())
	{
		variablesByName.put(variable.getName(), variable);
	}
	return variablesByName;
}
/**
 * Returns the list of variables created for this crosstab.
 *
 * @return the list of variables created for this crosstab
 * @see JRCrosstabGroup#getVariable()
 * @see JRCrosstabMeasure#getVariable()
 * @see JRCrosstab#VARIABLE_ROW_COUNT
 * @see JRCrosstab#VARIABLE_COLUMN_COUNT
 */
public JRVariable[] getVariables()
{
	JRVariable[] variables = new JRVariable[variablesList.size()];
	int pos = 0;
	for (Object value : variablesList.values())
	{
		variables[pos++] = (JRVariable) value;
	}
	return variables;
}
/**
 * Returns the column break offset.
 */
public int getColumnBreakOffset()
{
return columnBreakOffset;
}
/**
 * Sets the column break offset.
 *
 * @param columnBreakOffset the offset
 * @see JRCrosstab#getColumnBreakOffset()
 */
public void setColumnBreakOffset(int columnBreakOffset)
{
int old = this.columnBreakOffset;
this.columnBreakOffset = columnBreakOffset;
getEventSupport().firePropertyChange(PROPERTY_COLUMN_BREAK_OFFSET, old, this.columnBreakOffset);
}
/**
 * Performs all the calculations required for report compilation.
 */
public void preprocess()
{
// resolve group variable value classes from their bucket expressions
setGroupVariablesClass(rowGroups);
setGroupVariablesClass(columnGroups);
// compute the cell matrix and all cell/header sizes
calculateSizes();
}
/**
 * Copies each group's bucket value class onto the group's design variable,
 * for groups whose bucket has an expression.
 *
 * @param groups the row or column groups to process
 */
protected <T> void setGroupVariablesClass(List<T> groups)
{
	for (T element : groups)
	{
		JRDesignCrosstabGroup group = (JRDesignCrosstabGroup) element;
		JRCrosstabBucket bucket = group.getBucket();
		if (bucket == null)
		{
			continue;
		}
		JRExpression expression = bucket.getExpression();
		if (expression != null)
		{
			group.designVariable.setValueClassName(bucket.getValueClassName());
		}
	}
}
/**
 * Computes the sizes of the crosstab cells and headers.
 */
protected void calculateSizes()
{
setWhenNoDataCellSize();
// build the (rowGroups+1) x (columnGroups+1) cell matrix, inheriting sizes
createCellMatrix();
int rowHeadersWidth = calculateRowHeadersSizes();
int colHeadersHeight = calculateColumnHeadersSizes(rowHeadersWidth);
if (headerCell != null)
{
// the header cell fills the corner above the row headers
headerCell.setWidth(rowHeadersWidth);
headerCell.setHeight(colHeadersHeight);
}
setTitleSize(rowHeadersWidth);
}
/**
 * Sizes the "No data" cell to the crosstab's own width and height,
 * when such a cell is present.
 */
protected void setWhenNoDataCellSize()
{
	if (whenNoDataCell == null)
	{
		return;
	}
	whenNoDataCell.setWidth(getWidth());
	whenNoDataCell.setHeight(getHeight());
}
/**
 * Builds the cell matrix indexed by [row group index][column group index];
 * index rowGroups.size()/columnGroups.size() holds the detail (non-total) position.
 */
protected void createCellMatrix()
{
crossCells = new JRDesignCrosstabCell[rowGroups.size() + 1][columnGroups.size() + 1];
for (Iterator<JRCrosstabCell> it = cellsList.iterator(); it.hasNext();)
{
JRDesignCrosstabCell crosstabCell = (JRDesignCrosstabCell) it.next();
JRDesignCellContents contents = (JRDesignCellContents) crosstabCell.getContents();
String rowTotalGroup = crosstabCell.getRowTotalGroup();
// a null row total group means the detail row position
int rowGroupIndex = rowTotalGroup == null ? rowGroups.size() : (rowGroupsMap.get(rowTotalGroup)).intValue();
Integer cellWidth = crosstabCell.getWidth();
if (cellWidth != null)
{
contents.setWidth(cellWidth.intValue());
}
String columnTotalGroup = crosstabCell.getColumnTotalGroup();
// a null column total group means the detail column position
int columnGroupIndex = columnTotalGroup == null ? columnGroups.size() : (columnGroupsMap.get(columnTotalGroup)).intValue();
Integer cellHeight = crosstabCell.getHeight();
if (cellHeight != null)
{
contents.setHeight(cellHeight.intValue());
}
crossCells[rowGroupIndex][columnGroupIndex] = crosstabCell;
}
// fill in cells that were not explicitly defined
inheritCells();
}
/**
 * Returns the row group at the given index, cast to the design type.
 */
protected JRDesignCrosstabRowGroup getRowGroup(int rowGroupIndex)
{
return (JRDesignCrosstabRowGroup) rowGroups.get(rowGroupIndex);
}
/**
 * Returns the column group at the given index, cast to the design type.
 */
protected JRDesignCrosstabColumnGroup getColumnGroup(int columnGroupIndex)
{
return (JRDesignCrosstabColumnGroup) columnGroups.get(columnGroupIndex);
}
/**
 * Fills in cell matrix positions that were not explicitly defined and clears
 * positions for group combinations that do not have totals.
 */
protected void inheritCells()
{
// walk from the detail position (index == size) towards the outermost totals
for (int i = rowGroups.size(); i >= 0 ; --i)
{
for (int j = columnGroups.size(); j >= 0 ; --j)
{
// a position is used when it is the detail position or its group has a total
boolean used = (i == rowGroups.size() || getRowGroup(i).hasTotal()) &&
(j == columnGroups.size() || getColumnGroup(j).hasTotal());
if (used)
{
if (crossCells[i][j] == null)
{
// try to reuse a compatible neighboring cell
inheritCell(i, j);
if (crossCells[i][j] == null)
{
// none found: create an empty cell and size it from neighbors
crossCells[i][j] = emptyCell(i, j);
inheritCellSize(i, j);
}
}
else
{
inheritCellSize(i, j);
}
}
else
{
crossCells[i][j] = null;
}
}
}
}
/**
 * Creates an empty placeholder cell for matrix position (i, j), setting
 * the row/column total group names for non-detail positions.
 *
 * @param i the row group index (rowGroups.size() = detail)
 * @param j the column group index (columnGroups.size() = detail)
 * @return the new empty cell
 */
private JRDesignCrosstabCell emptyCell(int i, int j)
{
	JRDesignCrosstabCell cell = new JRDesignCrosstabCell();
	if (i < rowGroups.size())
	{
		cell.setRowTotalGroup(rowGroups.get(i).getName());
	}
	if (j < columnGroups.size())
	{
		cell.setColumnTotalGroup(columnGroups.get(j).getName());
	}
	return cell;
}
/**
 * Computes missing width/height for the cell at matrix position (i, j) by
 * inheriting from neighboring cells.
 * <p>
 * A row-total cell inherits its width from the detail-row cell in the same
 * column; a detail-row cell inherits its width from the first following cell
 * on the same row.  Heights are inherited symmetrically.
 *
 * @param i the row group index (rowGroups.size() = detail row)
 * @param j the column group index (columnGroups.size() = detail column)
 */
protected void inheritCellSize(int i, int j)
{
	JRDesignCrosstabCell cell = crossCells[i][j];
	JRDesignCellContents contents = (JRDesignCellContents) cell.getContents();
	if (contents.getWidth() == JRCellContents.NOT_CALCULATED)
	{
		if (i < rowGroups.size())
		{
			// row-total cell: take the width of the detail cell in the same column
			JRDesignCrosstabCell rowCell = crossCells[rowGroups.size()][j];
			if (rowCell != null)
			{
				contents.setWidth(rowCell.getContents().getWidth());
			}
		}
		else
		{
			// detail row: take the width of the first following cell on this row
			for (int k = j + 1; k <= columnGroups.size(); ++k)
			{
				if (crossCells[i][k] != null)
				{
					contents.setWidth(crossCells[i][k].getContents().getWidth());
					break;
				}
			}
		}
	}
	if (contents.getHeight() == JRCellContents.NOT_CALCULATED)
	{
		if (j < columnGroups.size())
		{
			// column-total cell: take the height of the detail cell in the same row
			JRDesignCrosstabCell colCell = crossCells[i][columnGroups.size()];
			if (colCell != null)
			{
				contents.setHeight(colCell.getContents().getHeight());
			}
		}
		else
		{
			// detail column: take the height of the first following cell on this
			// column.  fixed: a missing break let later cells keep overwriting the
			// height (last match won), inconsistent with the width branch above.
			for (int k = i + 1; k <= rowGroups.size(); ++k)
			{
				if (crossCells[k][j] != null)
				{
					contents.setHeight(crossCells[k][j].getContents().getHeight());
					break;
				}
			}
		}
	}
}
/**
 * Attempts to fill matrix position (i, j) by reusing an already-defined cell.
 * <p>
 * First scans following cells on the same row whose width matches the
 * detail-row cell of the same column; failing that, scans following cells on
 * the same column whose height matches the detail-column cell of the same
 * row.  Leaves the position null when no compatible cell is found.
 */
protected void inheritCell(int i, int j)
{
JRDesignCrosstabCell inheritedCell = null;
if (j < columnGroups.size())
{
// reference width: the detail-row cell in the same column
JRDesignCrosstabCell colCell = crossCells[rowGroups.size()][j];
JRDesignCellContents colContents = colCell == null ? null : (JRDesignCellContents) colCell.getContents();
for (int k = j + 1; inheritedCell == null && k <= columnGroups.size(); ++k)
{
JRDesignCrosstabCell cell = crossCells[i][k];
if (cell != null)
{
JRDesignCellContents contents = (JRDesignCellContents) cell.getContents();
if (colContents == null || contents.getWidth() == colContents.getWidth())
{
inheritedCell = cell;
}
}
}
}
if (inheritedCell == null && i < rowGroups.size())
{
// reference height: the detail-column cell in the same row
JRDesignCrosstabCell rowCell = crossCells[i][columnGroups.size()];
JRDesignCellContents rowContents = rowCell == null ? null : (JRDesignCellContents) rowCell.getContents();
for (int k = i + 1; inheritedCell == null && k <= rowGroups.size(); ++k)
{
JRDesignCrosstabCell cell = crossCells[k][j];
if (cell != null)
{
JRDesignCellContents contents = (JRDesignCellContents) cell.getContents();
if (rowContents == null || contents.getHeight() == rowContents.getHeight())
{
inheritedCell = cell;
}
}
}
}
crossCells[i][j] = inheritedCell;
}
/**
 * Computes the sizes of the row group headers and total headers.
 *
 * @return the accumulated width of the row headers area
 */
protected int calculateRowHeadersSizes()
{
int widthSum = 0;
// walk from the innermost row group outwards, accumulating widths and heights
for (int i = rowGroups.size() - 1, heightSum = 0; i >= 0; --i)
{
JRDesignCrosstabRowGroup group = (JRDesignCrosstabRowGroup) rowGroups.get(i);
widthSum += group.getWidth();
// the cell on the detail column for the inner (i + 1) row position
JRDesignCrosstabCell cell = crossCells[i + 1][columnGroups.size()];
if (cell != null)
{
heightSum += cell.getContents().getHeight();
}
JRDesignCellContents header = (JRDesignCellContents) group.getHeader();
header.setHeight(heightSum);
header.setWidth(group.getWidth());
if (group.hasTotal())
{
// the total header spans this group and all inner groups
JRDesignCellContents totalHeader = (JRDesignCellContents) group.getTotalHeader();
totalHeader.setWidth(widthSum);
JRDesignCrosstabCell totalCell = crossCells[i][columnGroups.size()];
if (totalCell != null)
{
totalHeader.setHeight(totalCell.getContents().getHeight());
}
}
}
return widthSum;
}
/**
 * Computes the sizes of the column group headers and total headers.
 *
 * @param rowHeadersWidth the computed width of the row headers area
 * @return the accumulated height of the column headers area
 */
protected int calculateColumnHeadersSizes(int rowHeadersWidth)
{
int heightSum = 0;
// walk from the innermost column group outwards, accumulating heights and widths
for (int i = columnGroups.size() - 1, widthSum = 0; i >= 0; --i)
{
JRDesignCrosstabColumnGroup group = (JRDesignCrosstabColumnGroup) columnGroups.get(i);
heightSum += group.getHeight();
// the cell on the detail row for the inner (i + 1) column position
JRDesignCrosstabCell cell = crossCells[rowGroups.size()][i + 1];
if (cell != null)
{
widthSum += cell.getContents().getWidth();
}
// the crosstab header (above the row headers) spans the row headers area
JRDesignCellContents crosstabHeader = (JRDesignCellContents) group.getCrosstabHeader();
if (crosstabHeader != null)
{
crosstabHeader.setWidth(rowHeadersWidth);
crosstabHeader.setHeight(group.getHeight());
}
JRDesignCellContents header = (JRDesignCellContents) group.getHeader();
header.setHeight(group.getHeight());
header.setWidth(widthSum);
if (group.hasTotal())
{
// the total header spans this group and all inner groups
JRDesignCellContents totalHeader = (JRDesignCellContents) group.getTotalHeader();
totalHeader.setHeight(heightSum);
JRDesignCrosstabCell totalCell = crossCells[rowGroups.size()][i];
if (totalCell != null)
{
totalHeader.setWidth(totalCell.getContents().getWidth());
}
}
}
return heightSum;
}
/**
 * Computes the size of the title cell: the title's declared height, and a
 * width spanning the row headers plus the first column group's header
 * (and total header, when that group has a total).
 *
 * @param rowHeadersWidth the computed width of the row headers area
 */
protected void setTitleSize(int rowHeadersWidth)
{
if (titleCell != null && titleCell.getDesignCellContents() != null)
{
JRDesignCellContents titleContents = titleCell.getDesignCellContents();
titleContents.setHeight(titleCell.getHeight());
int titleWidth = rowHeadersWidth;
if (!columnGroups.isEmpty())
{
JRCrosstabColumnGroup firstGroup = columnGroups.get(0);
titleWidth += firstGroup.getHeader().getWidth();
if (firstGroup.hasTotal())
{
titleWidth += firstGroup.getTotalHeader().getWidth();
}
}
titleContents.setWidth(titleWidth);
}
}
/**
 * Returns the "No data" cell.
 */
public JRCellContents getWhenNoDataCell()
{
return whenNoDataCell;
}
/**
 * Sets the "No data" cell.
 *
 * @param whenNoDataCell the cell
 * @see JRCrosstab#getWhenNoDataCell()
 */
public void setWhenNoDataCell(JRDesignCellContents whenNoDataCell)
{
	Object previous = this.whenNoDataCell;
	this.whenNoDataCell = whenNoDataCell;
	// stamp the cell with its origin within this crosstab
	setCellOrigin(this.whenNoDataCell, new JRCrosstabOrigin(this, JRCrosstabOrigin.TYPE_WHEN_NO_DATA_CELL));
	getEventSupport().firePropertyChange(PROPERTY_WHEN_NO_DATA_CELL, previous, this.whenNoDataCell);
}
/**
 * Locates an element within the crosstab by key, delegating to the base
 * crosstab implementation.
 */
public JRElement getElementByKey(String elementKey)
{
return JRBaseCrosstab.getElementByKey(this, elementKey);
}
/**
 * Returns the element mode, defaulting to transparent when not set.
 */
public ModeEnum getModeValue()
{
return JRStyleResolver.getMode(this, ModeEnum.TRANSPARENT);
}
/**
 * Returns the title cell.
 */
public CrosstabColumnCell getTitleCell()
{
return titleCell;
}
/**
 * Sets the title cell.
 *
 * @param titleCell the title cell
 */
public void setTitleCell(DesignCrosstabColumnCell titleCell)
{
	Object previous = this.titleCell;
	this.titleCell = titleCell;
	if (titleCell != null)
	{
		// stamp the cell contents with their origin within this crosstab
		setCellOrigin(titleCell.getCellContents(), new JRCrosstabOrigin(this, JRCrosstabOrigin.TYPE_TITLE_CELL));
	}
	getEventSupport().firePropertyChange(PROPERTY_TITLE_CELL, previous, this.titleCell);
}
/**
 * Returns the header cell, rendered at the upper-left corner of the crosstab.
 */
public JRCellContents getHeaderCell()
{
return headerCell;
}
/**
 * Sets the crosstab header cell (this cell will be rendered at the
 * upper-left corner of the crosstab).
 *
 * @param headerCell the cell
 * @see JRCrosstab#getHeaderCell()
 */
public void setHeaderCell(JRDesignCellContents headerCell)
{
	Object previous = this.headerCell;
	this.headerCell = headerCell;
	// stamp the cell with its origin within this crosstab
	setCellOrigin(this.headerCell, new JRCrosstabOrigin(this, JRCrosstabOrigin.TYPE_HEADER_CELL));
	getEventSupport().firePropertyChange(PROPERTY_HEADER_CELL, previous, this.headerCell);
}
/**
 * Propagates a measure's new value class to all of its total variables:
 * one per column group, one per row group, and one per row/column group
 * combination.
 *
 * @param measure the measure whose value class changed
 * @param valueClassName the new value class name
 */
protected void measureClassChanged(JRDesignCrosstabMeasure measure, String valueClassName)
{
	for (JRCrosstabColumnGroup columnGroup : columnGroups)
	{
		setTotalVarClass(measure, null, columnGroup, valueClassName);
	}
	for (JRCrosstabRowGroup rowGroup : rowGroups)
	{
		setTotalVarClass(measure, rowGroup, null, valueClassName);
		for (JRCrosstabColumnGroup columnGroup : columnGroups)
		{
			setTotalVarClass(measure, rowGroup, columnGroup, valueClassName);
		}
	}
}
/**
 * Sets the value class of the total variable corresponding to the given
 * measure and row/column group combination.
 */
protected void setTotalVarClass(JRCrosstabMeasure measure, JRCrosstabRowGroup rowGroup, JRCrosstabColumnGroup colGroup, String valueClassName)
{
JRDesignVariable variable = getVariable(getTotalVariableName(measure, rowGroup, colGroup));
variable.setValueClassName(valueClassName);
}
/**
 * Registers a variable in the crosstab's variables list, keyed by name.
 */
private void addVariable(JRVariable variable)
{
variablesList.put(variable.getName(), variable);
}
/**
 * Removes a variable from the crosstab's variables list.
 */
private void removeVariable(JRVariable variable)
{
removeVariable(variable.getName());
}
/**
 * Removes the variable with the given name from the variables list.
 */
private void removeVariable(String varName)
{
variablesList.remove(varName);
}
/**
 * Looks up a variable by name in the variables list.
 */
private JRDesignVariable getVariable(String varName)
{
return (JRDesignVariable) variablesList.get(varName);
}
/**
 * Returns the crosstab's run direction.
 */
public RunDirectionEnum getRunDirectionValue()
{
return this.runDirectionValue;
}
/**
 * Sets the crosstab's run direction and fires a property change event.
 */
public void setRunDirection(RunDirectionEnum runDirectionValue)
{
RunDirectionEnum old = this.runDirectionValue;
this.runDirectionValue = runDirectionValue;
getEventSupport().firePropertyChange(JRBaseCrosstab.PROPERTY_RUN_DIRECTION, old, this.runDirectionValue);
}
@Override
public HorizontalPosition getHorizontalPosition()
{
return horizontalPosition;
}
@Override
public void setHorizontalPosition(HorizontalPosition horizontalPosition)
{
HorizontalPosition old = this.horizontalPosition;
this.horizontalPosition = horizontalPosition;
getEventSupport().firePropertyChange(JRBaseCrosstab.PROPERTY_HORIZONTAL_POSITION, old, this.horizontalPosition);
}
/**
 * Sets the origin on a cell, when the cell is of the design type;
 * other implementations are left untouched.
 */
protected void setCellOrigin(JRCellContents cell, JRCrosstabOrigin origin)
{
if (cell instanceof JRDesignCellContents)
{
setCellOrigin((JRDesignCellContents) cell, origin);
}
}
/**
 * Sets the origin on a design cell; null cells are ignored.
 */
protected void setCellOrigin(JRDesignCellContents cell, JRCrosstabOrigin origin)
{
if (cell != null)
{
cell.setOrigin(origin);
}
}
/**
 * Sets this crosstab as the parent of the given group; null groups are ignored.
 */
protected void setParent(JRDesignCrosstabGroup group)
{
if (group != null)
{
group.setParent(this);
}
}
/**
 * Creates a deep clone of this crosstab.
 * <p>
 * Parameters, dataset, line box, groups, measures, variables and cells are
 * cloned, and the origins of all cloned cell contents are re-pointed at the
 * cloned crosstab.  The clone is not preprocessed: its computed cell matrix
 * is reset to null.
 */
public Object clone()
{
	JRDesignCrosstab clone = (JRDesignCrosstab)super.clone();
	if (parametersList != null)
	{
		clone.parametersList = new ArrayList<JRCrosstabParameter>(parametersList.size());
		clone.parametersMap = new HashMap<String, JRCrosstabParameter>(parametersList.size());
		for(int i = 0; i < parametersList.size(); i++)
		{
			JRCrosstabParameter parameter = JRCloneUtils.nullSafeClone(parametersList.get(i));
			clone.parametersList.add(parameter);
			clone.parametersMap.put(parameter.getName(), parameter);
		}
	}
	clone.parametersMapExpression = JRCloneUtils.nullSafeClone(parametersMapExpression);
	clone.dataset = JRCloneUtils.nullSafeClone(dataset);
	clone.lineBox = lineBox.clone(clone);
	// keep group and measure cloned variables to reuse the clone instances
	// in the variables list
	Map<JRVariable,JRVariable> clonedVariables = new HashMap<JRVariable,JRVariable>();
	if (rowGroups != null)
	{
		clone.rowGroups = new ArrayList<JRCrosstabRowGroup>(rowGroups.size());
		clone.rowGroupsMap = new HashMap<String, Integer>(rowGroups.size());
		for(int i = 0; i < rowGroups.size(); i++)
		{
			JRDesignCrosstabRowGroup group =
				(JRDesignCrosstabRowGroup) rowGroups.get(i);
			JRDesignCrosstabRowGroup groupClone =
				(JRDesignCrosstabRowGroup) group.clone(clone);
			clone.rowGroups.add(groupClone);
			clone.rowGroupsMap.put(groupClone.getName(), Integer.valueOf(i));
			// re-point the cloned cell contents' origins at the cloned crosstab
			adjustCrosstabReference(clone, (JRDesignCellContents) groupClone.getTotalHeader());
			adjustCrosstabReference(clone, (JRDesignCellContents) groupClone.getHeader());
			if (group.designVariable != null)
			{
				clonedVariables.put(group.designVariable, groupClone.designVariable);
			}
		}
	}
	if (columnGroups != null)
	{
		clone.columnGroups = new ArrayList<JRCrosstabColumnGroup>(columnGroups.size());
		clone.columnGroupsMap = new HashMap<String, Integer>(columnGroups.size());
		for(int i = 0; i < columnGroups.size(); i++)
		{
			JRDesignCrosstabColumnGroup group =
				(JRDesignCrosstabColumnGroup) columnGroups.get(i);
			JRDesignCrosstabColumnGroup groupClone =
				(JRDesignCrosstabColumnGroup) group.clone(clone);
			clone.columnGroups.add(groupClone);
			clone.columnGroupsMap.put(groupClone.getName(), Integer.valueOf(i));
			adjustCrosstabReference(clone,(JRDesignCellContents) groupClone.getCrosstabHeader());
			adjustCrosstabReference(clone,(JRDesignCellContents) groupClone.getTotalHeader());
			adjustCrosstabReference(clone,(JRDesignCellContents) groupClone.getHeader());
			if (group.designVariable != null)
			{
				clonedVariables.put(group.designVariable, groupClone.designVariable);
			}
		}
	}
	if (measures != null)
	{
		clone.measures = new ArrayList<JRCrosstabMeasure>(measures.size());
		clone.measuresMap = new HashMap<String,Integer>(measures.size());
		for(int i = 0; i < measures.size(); i++)
		{
			JRDesignCrosstabMeasure measure =
				(JRDesignCrosstabMeasure) measures.get(i);
			JRDesignCrosstabMeasure clonedMeasure = JRCloneUtils.nullSafeClone(measure);
			clone.measures.add(clonedMeasure);
			clone.measuresMap.put(clonedMeasure.getName(), Integer.valueOf(i));
			if (clonedMeasure.designVariable != null)
			{
				clonedVariables.put(measure.designVariable,
						clonedMeasure.designVariable);
			}
		}
	}
	if (variablesList != null)
	{
		clone.variablesList = new LinkedMap(variablesList.size());
		for(Iterator<?> it = variablesList.values().iterator(); it.hasNext();)
		{
			JRVariable variable = (JRVariable) it.next();
			// check whether the variable was already cloned as part of a group
			// or measure.  fixed: the looked-up clone was previously overwritten
			// unconditionally, which broke instance reuse between the cloned
			// groups/measures and the variables list.
			JRVariable variableClone = clonedVariables.get(variable);
			if (variableClone == null)
			{
				variableClone = JRCloneUtils.nullSafeClone(variable);
			}
			clone.variablesList.put(variableClone.getName(), variableClone);
		}
	}
	if (cellsList != null)
	{
		clone.cellsList = new ArrayList<JRCrosstabCell>(cellsList.size());
		clone.cellsMap = new HashMap<Pair<String,String>,JRCrosstabCell>(cellsList.size());
		for(int i = 0; i < cellsList.size(); i++)
		{
			JRCrosstabCell cell = JRCloneUtils.nullSafeClone(cellsList.get(i));
			adjustCrosstabReference(clone, (JRDesignCellContents) cell.getContents());
			clone.cellsList.add(cell);
			clone.cellsMap.put(new Pair<String,String>(cell.getRowTotalGroup(), cell.getColumnTotalGroup()), cell);
		}
	}
	// clone not preprocessed
	clone.crossCells = null;
	clone.whenNoDataCell = JRCloneUtils.nullSafeClone(whenNoDataCell);
	adjustCrosstabReference(clone, clone.whenNoDataCell);
	clone.titleCell = JRCloneUtils.nullSafeClone(titleCell);
	if (clone.titleCell != null)
	{
		adjustCrosstabReference(clone, clone.titleCell.getDesignCellContents());
	}
	clone.headerCell = JRCloneUtils.nullSafeClone(headerCell);
	adjustCrosstabReference(clone, clone.headerCell);
	return clone;
}
/**
 * Adjusts the crosstab reference inside the cell contents' origin to point
 * to the cloned crosstab.  Used by the clone method; null contents are
 * ignored.
 *
 * @param clone the cloned crosstab the origin should reference
 * @param contents the cloned cell contents, possibly null
 */
private void adjustCrosstabReference(JRDesignCrosstab clone, JRDesignCellContents contents)
{
if (contents == null)
{
return;
}
// rebuild the origin with the same type/group names but the cloned crosstab
contents.setOrigin(
new JRCrosstabOrigin(
clone,
contents.getOrigin().getType(),
contents.getOrigin().getRowGroupName(),
contents.getOrigin().getColumnGroupName()
)
);
}
/**
 * Returns the row groups list.
 */
public List<JRCrosstabRowGroup> getRowGroupsList()
{
return rowGroups;
}
/**
 * Returns the row group indices, keyed by group name.
 */
public Map<String, Integer> getRowGroupIndicesMap()
{
return rowGroupsMap;
}
/**
 * Returns the column groups list.
 */
public List<JRCrosstabColumnGroup> getColumnGroupsList()
{
return columnGroups;
}
/**
 * Returns the column group indices, keyed by group name.
 */
public Map<String, Integer> getColumnGroupIndicesMap()
{
return columnGroupsMap;
}
/**
 * Returns the measures list.
 * NOTE: the method name contains a typo ("Mesures") but is kept for
 * backward compatibility with existing callers.
 */
public List<JRCrosstabMeasure> getMesuresList()
{
return measures;
}
/**
 * Returns the measure indices, keyed by measure name.
 */
public Map<String, Integer> getMeasureIndicesMap()
{
return measuresMap;
}
/**
 * Returns the ignore-width flag, possibly null when not set.
 */
public Boolean getIgnoreWidth()
{
return ignoreWidth;
}
/**
 * Sets the ignore-width flag and fires a property change event.
 */
public void setIgnoreWidth(Boolean ignoreWidth)
{
Object old = this.ignoreWidth;
this.ignoreWidth = ignoreWidth;
getEventSupport().firePropertyChange(PROPERTY_IGNORE_WIDTH,
old, this.ignoreWidth);
}
/**
 * Primitive convenience overload of {@link #setIgnoreWidth(Boolean)}.
 */
public void setIgnoreWidth(boolean ignoreWidth)
{
setIgnoreWidth(Boolean.valueOf(ignoreWidth));
}
/**
 * Returns the default line color: the crosstab's forecolor.
 */
public Color getDefaultLineColor()
{
return getForecolor();
}
/**
 * Returns the crosstab's line box.
 */
public JRLineBox getLineBox()
{
return lineBox;
}
/*
 * These fields are only for serialization backward compatibility.
 */
private int PSEUDO_SERIAL_VERSION_UID = JRConstants.PSEUDO_SERIAL_VERSION_UID; //NOPMD
/**
 * @deprecated replaced by runDirectionValue; retained only so that old
 * serialized instances can still be read
 */
private byte runDirection;
// Custom deserialization: migrates data written by older versions.
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException
{
in.defaultReadObject();
// pre-3.7.2 instances stored the run direction as a byte
if (PSEUDO_SERIAL_VERSION_UID < JRConstants.PSEUDO_SERIAL_VERSION_UID_3_7_2)
{
runDirectionValue = RunDirectionEnum.getByValue(runDirection);
}
if (lineBox == null)
{
lineBox = new JRBaseLineBox(this);
}
// this will work as long as SequencedHashMap is part of commons collections
// we could also look at PSEUDO_SERIAL_VERSION_UID
if (variablesList instanceof org.apache.commons.collections.SequencedHashMap)
{
// converting to the new type
variablesList = new LinkedMap(variablesList);
}
}
}
| |
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
// Start of user code for imports
import java.util.ArrayList;
import java.util.List;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.ProxyOutSequenceOutputConnectorPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code
/**
*
*
*/
public class ProxyOutSequenceOutputConnectorPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, ProxyOutSequenceOutputConnectorPropertiesEditionPart {
// Table widget for the comment mediators reference list.
protected ReferencesTable commentMediators;
// Business-level viewer filters applied to the comment mediators table.
protected List<ViewerFilter> commentMediatorsBusinessFilters = new ArrayList<ViewerFilter>();
// UI-level viewer filters applied to the comment mediators table.
protected List<ViewerFilter> commentMediatorsFilters = new ArrayList<ViewerFilter>();
/**
 * For {@link ISection} use only.
 */
public ProxyOutSequenceOutputConnectorPropertiesEditionPartForm() { super(); }
/**
 * Default constructor
 * @param editionComponent the {@link IPropertiesEditionComponent} that manages this part
 *
 */
public ProxyOutSequenceOutputConnectorPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
super(editionComponent);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
*
*/
public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent);
Form form = scrolledForm.getForm();
view = form.getBody();
GridLayout layout = new GridLayout();
layout.numColumns = 3;
view.setLayout(layout);
createControls(widgetFactory, view);
return scrolledForm;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
* createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
*
*/
public void createControls(final FormToolkit widgetFactory, Composite view) {
CompositionSequence proxyOutSequenceOutputConnectorStep = new BindingCompositionSequence(propertiesEditionComponent);
proxyOutSequenceOutputConnectorStep
.addStep(EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.class)
.addStep(EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators);
composer = new PartComposer(proxyOutSequenceOutputConnectorStep) {
@Override
public Composite addToPart(Composite parent, Object key) {
if (key == EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.class) {
return createPropertiesGroup(widgetFactory, parent);
}
if (key == EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators) {
return createCommentMediatorsTableComposition(widgetFactory, parent);
}
return parent;
}
};
composer.compose(view);
}
/**
*
*/
protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
propertiesSection.setText(EsbMessages.ProxyOutSequenceOutputConnectorPropertiesEditionPart_PropertiesGroupLabel);
GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
propertiesSectionData.horizontalSpan = 3;
propertiesSection.setLayoutData(propertiesSectionData);
Composite propertiesGroup = widgetFactory.createComposite(propertiesSection);
GridLayout propertiesGroupLayout = new GridLayout();
propertiesGroupLayout.numColumns = 3;
propertiesGroup.setLayout(propertiesGroupLayout);
propertiesSection.setClient(propertiesGroup);
return propertiesGroup;
}
/**
* @param container
*
*/
protected Composite createCommentMediatorsTableComposition(FormToolkit widgetFactory, Composite parent) {
this.commentMediators = new ReferencesTable(getDescription(EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, EsbMessages.ProxyOutSequenceOutputConnectorPropertiesEditionPart_CommentMediatorsLabel), new ReferencesTableListener() {
public void handleAdd() {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ProxyOutSequenceOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null));
commentMediators.refresh();
}
public void handleEdit(EObject element) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ProxyOutSequenceOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element));
commentMediators.refresh();
}
public void handleMove(EObject element, int oldIndex, int newIndex) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ProxyOutSequenceOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
commentMediators.refresh();
}
public void handleRemove(EObject element) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ProxyOutSequenceOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
commentMediators.refresh();
}
public void navigateTo(EObject element) { }
});
for (ViewerFilter filter : this.commentMediatorsFilters) {
this.commentMediators.addFilter(filter);
}
this.commentMediators.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, EsbViewsRepository.FORM_KIND));
this.commentMediators.createControls(parent, widgetFactory);
this.commentMediators.addSelectionListener(new SelectionAdapter() {
public void widgetSelected(SelectionEvent e) {
if (e.item != null && e.item.getData() instanceof EObject) {
propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ProxyOutSequenceOutputConnectorPropertiesEditionPartForm.this, EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
}
}
});
GridData commentMediatorsData = new GridData(GridData.FILL_HORIZONTAL);
commentMediatorsData.horizontalSpan = 3;
this.commentMediators.setLayoutData(commentMediatorsData);
this.commentMediators.setLowerBound(0);
this.commentMediators.setUpperBound(-1);
commentMediators.setID(EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators);
commentMediators.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$
// Start of user code for createCommentMediatorsTableComposition
// End of user code
return parent;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
*
*/
public void firePropertiesChanged(IPropertiesEditionEvent event) {
// Start of user code for tab synchronization
// End of user code
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ProxyOutSequenceOutputConnectorPropertiesEditionPart#initCommentMediators(EObject current, EReference containingFeature, EReference feature)
*/
public void initCommentMediators(ReferencesTableSettings settings) {
if (current.eResource() != null && current.eResource().getResourceSet() != null)
this.resourceSet = current.eResource().getResourceSet();
ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
commentMediators.setContentProvider(contentProvider);
commentMediators.setInput(settings);
boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.ProxyOutSequenceOutputConnector.Properties.commentMediators);
if (eefElementEditorReadOnlyState && commentMediators.isEnabled()) {
commentMediators.setEnabled(false);
commentMediators.setToolTipText(EsbMessages.ProxyOutSequenceOutputConnector_ReadOnly);
} else if (!eefElementEditorReadOnlyState && !commentMediators.isEnabled()) {
commentMediators.setEnabled(true);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ProxyOutSequenceOutputConnectorPropertiesEditionPart#updateCommentMediators()
*
*/
public void updateCommentMediators() {
commentMediators.refresh();
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ProxyOutSequenceOutputConnectorPropertiesEditionPart#addFilterCommentMediators(ViewerFilter filter)
*
*/
public void addFilterToCommentMediators(ViewerFilter filter) {
commentMediatorsFilters.add(filter);
if (this.commentMediators != null) {
this.commentMediators.addFilter(filter);
}
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ProxyOutSequenceOutputConnectorPropertiesEditionPart#addBusinessFilterCommentMediators(ViewerFilter filter)
*
*/
public void addBusinessFilterToCommentMediators(ViewerFilter filter) {
commentMediatorsBusinessFilters.add(filter);
}
/**
* {@inheritDoc}
*
* @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ProxyOutSequenceOutputConnectorPropertiesEditionPart#isContainedInCommentMediatorsTable(EObject element)
*
*/
public boolean isContainedInCommentMediatorsTable(EObject element) {
return ((ReferencesTableSettings)commentMediators.getInput()).contains(element);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
*
*/
public String getTitle() {
return EsbMessages.ProxyOutSequenceOutputConnector_Part_Title;
}
// Start of user code additional methods
// End of user code
}
| |
/* ************************************************************************
LEBAH PORTAL FRAMEWORK, http://lebah.sf.net
Copyright (C) 2007 Shamsul Bahrin
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* ************************************************************************ */
package lebah.portal.velocity;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.app.VelocityEngine;
import lebah.util.*;
/**
* @author Shamsul Bahrin Abd Mutalib
* @version 1.01
*/
public abstract class VTemplate extends javax.servlet.http.HttpServlet {
protected VelocityEngine engine;
protected VelocityContext context;
protected HttpServletRequest request;
protected HttpServletResponse response;
protected ServletContext servletContext;
protected ServletConfig servletConfig;
protected String id = "";
protected String templateName = "";
protected String parsedTemplateName = "";
protected boolean isActionTemplate = false;
protected boolean showVM = false;
protected boolean isDiv = false;
protected String submit = "";
protected String user = "";
protected VTemplate() {
}
protected VTemplate(VelocityEngine engine, VelocityContext context, HttpServletRequest request, HttpServletResponse response) {
this.engine = engine;
this.context = context;
this.request = request;
this.response = response;
}
public void setEnvironment(VelocityEngine engine, VelocityContext context, HttpServletRequest request, HttpServletResponse response) {
this.engine = engine;
this.context = context;
this.request = request;
this.response = response;
}
public void setId(String id) {
this.id = id;
}
public String getId() {
return id;
}
public void setServletContext(ServletContext ctx ) {
this.servletContext = ctx;
}
//protected ServletContext getServletContext() {
// return servletContext;
//}
public void setServletConfig(ServletConfig cfg ) {
this.servletConfig = cfg;
}
public ServletConfig getServletConfig() {
return servletConfig;
}
//public abstract Template doTemplate() throws Exception; //remove abstract
public Template doTemplate() throws Exception {
//subclass need to implement body
return null;
}
public StringBuffer getBuffer() throws Exception {
StringBuffer sb = new StringBuffer("");
try {
//String resourceLoaderPath = (String) engine.getProperty(Velocity.FILE_RESOURCE_LOADER_PATH);
Template template = doTemplate();
templateName = template.getName();
StringWriter writer = new StringWriter();
template.merge(context, writer);
writer.close();
sb = writer.getBuffer();
} catch ( Exception ex ) {
throw ex;
}
return sb;
}
public StringBuffer getBuffer(HttpSession session) throws Exception {
StringBuffer sb = new StringBuffer("");
try {
Template template = doTemplate();
//Tue Feb 6, 2006, shamsul
//LogActivity.log(this, submit, (String) session.getAttribute("_portal_login"));
//
templateName = template.getName();
StringWriter writer = new StringWriter();
template.merge(context, writer);
writer.close();
sb = writer.getBuffer();
} catch ( Exception ex ) {
ex.printStackTrace();
throw ex;
}
return sb;
}
public void setShowVM(boolean b) {
showVM = b;
}
public void print() throws Exception {
PrintWriter out = response.getWriter();
out.print(getBuffer());
printVM(out);
}
public void print(HttpSession session) throws Exception {
PrintWriter out = response.getWriter();
out.print(getBuffer(session));
printVM(out);
}
private void printVM(PrintWriter out) {
if ( showVM ) {
out.print("<br/><table width=\"100%\"><tr><td align=\"right\"><font size=\"1\">");
if ( isActionTemplate ) {
out.print(parsedTemplateName);
} else {
out.print(templateName);
}
out.print("</font></tr></td></table>");
}
}
//a handy method for getParameter
protected String getParam(String param) {
return request.getParameter(param) != null ? request.getParameter(param) : "";
}
protected int getParamAsInteger(String param) {
return getParam(param) != "" ? Integer.parseInt(getParam(param)) : 0;
}
protected boolean post(HttpSession session) {
return session.getAttribute("doPost") != null ?
"true".equals((String) session.getAttribute("doPost")) ? true : false : true;
}
protected String getParam(HttpServletRequest request, String param) {
return request.getParameter(param) != null ? request.getParameter(param) : "";
}
/**
* @return Returns the parsedTemplateName.
*/
public String getParsedTemplateName() {
return parsedTemplateName;
}
/**
* @param parsedTemplateName The parsedTemplateName to set.
*/
public void setParsedTemplateName(String parsedTemplateName) {
this.parsedTemplateName = parsedTemplateName;
}
/**
* @return Returns the isActionTemplate.
*/
public boolean isActionTemplate() {
return isActionTemplate;
}
/**
* @param isActionTemplate The isActionTemplate to set.
*/
public void setActionTemplate(boolean isActionTemplate) {
this.isActionTemplate = isActionTemplate;
}
public boolean isDiv() {
return isDiv;
}
public void setDiv(boolean isDiv) {
this.isDiv = isDiv;
}
}
| |
/** Copyright 2015 Goshi Noguchi (noggon54@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package org.nognog.jmatcher.client;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.InterfaceAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.nognog.jmatcher.Host;
import org.nognog.jmatcher.JMatcher;
import org.nognog.jmatcher.SpecialHostAddress;
import org.nognog.jmatcher.udp.request.ConnectionRequest;
import org.nognog.jmatcher.udp.response.ConnectionResponse;
/**
* This is a class to communicate with a JMatcherEntryClient. This class is not
* thread-safe.
*
* @author goshi 2015/11/27
*/
public class Connector {
private String name;
private Logger logger;
private String jmatcherServer;
private int jmatcherServerPort;
private int internalNetworkPortTellerPort = JMatcher.PORT;
private int retryCount = defaultRetryCount;
private int receiveBuffSize = defaultBuffSize;
private static final int defaultRetryCount = 2;
private static final int defaultBuffSize = JMatcherClientMessage.buffSizeToReceiveSerializedMessage;
private static final int defaultUdpSocketTimeoutMillSec = 4000;
private static final int maxCountOfReceivePacketsAtOneTime = 10;
/**
* @param name
* @param host
*/
public Connector(String name, String host) {
this(name, host, JMatcher.PORT);
}
/**
* @param name
* @param host
* @param port
*/
public Connector(String name, String host, int port) {
this.setName(name);
this.jmatcherServer = host;
this.jmatcherServerPort = port;
}
/**
* @return the name
*/
public String getName() {
return this.name;
}
/**
* @param name
* the name to set
*/
public void setName(String name) {
if (!JMatcherClientMessage.regardsAsValidName(name)) {
final String message = new StringBuilder().append("name is too long : ").append(name).toString(); //$NON-NLS-1$
throw new IllegalArgumentException(message);
}
this.name = name;
}
/**
* @return the logger
*/
public Logger getLogger() {
return this.logger;
}
/**
* @param logger
* the logger to set
*/
public void setLogger(Logger logger) {
this.logger = logger;
}
/**
* @return the jmatcherServer
*/
public String getJmatcherServer() {
return this.jmatcherServer;
}
/**
* @param jmatcherServer
* the jmatcherServer to set
*/
public void setJmatcherServer(String jmatcherServer) {
this.jmatcherServer = jmatcherServer;
}
/**
* @return the jmatcherServerPort
*/
public int getJmatcherServerPort() {
return this.jmatcherServerPort;
}
/**
* @param jmatcherServerPort
* the jmatcherServerPort to set
*/
public void setJmatcherServerPort(int jmatcherServerPort) {
this.jmatcherServerPort = jmatcherServerPort;
}
/**
* @return the retryCount
*/
public int getRetryCount() {
return this.retryCount;
}
/**
* @param retryCount
* the retryCount to set
*/
public void setRetryCount(int retryCount) {
this.retryCount = retryCount;
}
/**
* @return the receiveBuffSize
*/
public int getReceiveBuffSize() {
return this.receiveBuffSize;
}
/**
* set receiveBuffSize, but the min value is restricted by
* {@link JMatcherClientMessage#buffSizeToReceiveSerializedMessage}}
*
* @param receiveBuffSize
* the receiveBuffSize to set
*/
public void setReceiveBuffSize(int receiveBuffSize) {
this.receiveBuffSize = Math.max(receiveBuffSize, JMatcherClientMessage.buffSizeToReceiveSerializedMessage);
}
@SuppressWarnings("static-method")
protected void setupUDPSocket(final DatagramSocket udpSocket) throws SocketException {
// overridden when configure the option of udp-socket
udpSocket.setSoTimeout(defaultUdpSocketTimeoutMillSec);
}
/**
* @return the internalNetworkPortTellerPort
*/
public int getInternalNetworkPortTellerPort() {
return this.internalNetworkPortTellerPort;
}
/**
* It has to be called before {@link #connect(int)}
*
* @param internalNetworkPortTellerPort
* the internalNetworkPortTellerPort to set
*/
public void setInternalNetworkPortTellerPort(int internalNetworkPortTellerPort) {
this.internalNetworkPortTellerPort = internalNetworkPortTellerPort;
}
/**
* @param key
* @return a peer which has connection to another peer that the key is
* corresponding with, or null is returned if failed to connect
* @throws IOException
* thrown if an I/O error occurs
*/
@SuppressWarnings("resource")
public ConnectorPeer connect(int key) throws IOException {
final DatagramSocket socket = new DatagramSocket();
this.setupUDPSocket(socket);
this.log(Level.INFO, "start to try to connect to ", Integer.valueOf(key)); //$NON-NLS-1$
try {
final ConnectorPeer peer = this.tryToConnect(key, socket);
if (peer == null) {
JMatcherClientUtil.close(socket);
return null;
}
this.log(Level.INFO, "succeeded in connecting to ", Integer.valueOf(key)); //$NON-NLS-1$
return peer;
} catch (Exception e) {
JMatcherClientUtil.close(socket);
this.log(Level.ERROR, new StringBuilder("falied to connect to ").append(key).toString(), e); //$NON-NLS-1$
throw e;
}
}
private ConnectorPeer tryToConnect(int key, DatagramSocket socket) throws IOException {
Host connectionTargetHost = this.getTargetHostFromServer(key, socket);
if (connectionTargetHost == null) {
this.log(Level.INFO, "could not find ", Integer.valueOf(key)); //$NON-NLS-1$
return null;
}
if (SpecialHostAddress.ON_INTERNAL_NETWORK_HOST.equals(connectionTargetHost.getAddress())) {
this.log(Level.INFO, "target host(", Integer.valueOf(key), ") is on my internal network"); //$NON-NLS-1$ //$NON-NLS-2$
connectionTargetHost = this.findInternalNetworkEntryHost(key, socket);
if (connectionTargetHost == null) {
this.log(Level.INFO, "could not find ", Integer.valueOf(key), " on internal network"); //$NON-NLS-1$ //$NON-NLS-2$
return null;
}
}
this.log(Level.DEBUG, "target host is ", connectionTargetHost); //$NON-NLS-1$
for (int i = 0; i < this.retryCount; i++) {
this.log(Level.DEBUG, "count of trying to connect : ", Integer.valueOf(i)); //$NON-NLS-1$
final ConnectorPeer peer = this.tryToConnectTo(connectionTargetHost, socket);
if (peer != null) {
return peer;
}
}
return null;
}
private Host getTargetHostFromServer(int key, DatagramSocket socket) throws IOException {
for (int i = 0; i < this.retryCount; i++) {
try {
JMatcherClientUtil.sendUDPRequest(socket, new ConnectionRequest(Integer.valueOf(key)), new InetSocketAddress(this.jmatcherServer, this.jmatcherServerPort));
this.log(Level.DEBUG, "sent connection request to ", this.jmatcherServer, ":", Integer.valueOf(this.jmatcherServerPort)); //$NON-NLS-1$ //$NON-NLS-2$
final ConnectionResponse response = (ConnectionResponse) JMatcherClientUtil.receiveUDPResponse(socket, this.receiveBuffSize);
this.log(Level.DEBUG, "received connection response"); //$NON-NLS-1$
return response.getHost();
} catch (SocketTimeoutException | ClassCastException | IllegalArgumentException | NullPointerException e) {
// failed
this.log(Level.DEBUG, "caught exception while getting target host from server", e); //$NON-NLS-1$
}
}
return null;
}
private Host findInternalNetworkEntryHost(int key, DatagramSocket socket) {
try {
final Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
while (interfaces.hasMoreElements()) {
final NetworkInterface networkInterface = interfaces.nextElement();
for (final InterfaceAddress interfaceAddress : networkInterface.getInterfaceAddresses()) {
try {
if (interfaceAddress.getBroadcast() == null) {
continue;
}
final InetSocketAddress broadcastSocketAddress = new InetSocketAddress(interfaceAddress.getBroadcast(), this.internalNetworkPortTellerPort);
JMatcherClientUtil.sendMessage(socket, String.valueOf(key), broadcastSocketAddress);
this.log(Level.DEBUG, "sent connection request to broadcast address ", broadcastSocketAddress); //$NON-NLS-1$
final DatagramPacket packet = JMatcherClientUtil.receiveUDPPacket(socket, this.receiveBuffSize);
final String hostAddress = packet.getAddress().getHostAddress();
this.log(Level.DEBUG, "received packet from ", hostAddress); //$NON-NLS-1$
final int toldPort = Integer.valueOf(JMatcherClientUtil.getMessageFrom(packet)).intValue();
this.log(Level.DEBUG, "told port is ", Integer.valueOf(toldPort)); //$NON-NLS-1$
return new Host(hostAddress, toldPort);
} catch (NumberFormatException | IOException e) {
// when toldPort was invalid or the broadcast didn't
// reach
this.log(Level.DEBUG, "caught exception while searching on ", interfaceAddress, e); //$NON-NLS-1$
}
}
}
} catch (IOException e) {
this.log(Level.DEBUG, "caught exception while finding entry host being on the internal network on the same network", e); //$NON-NLS-1$
}
return null;
}
private ConnectorPeer tryToConnectTo(final Host connectionTargetHost, DatagramSocket socket) throws IOException {
JMatcherClientUtil.sendJMatcherClientMessage(socket, JMatcherClientMessageType.CONNECT_REQUEST, this.name, connectionTargetHost);
this.log(Level.DEBUG, "sent connection request to ", connectionTargetHost, "to do hole-panching"); //$NON-NLS-1$ //$NON-NLS-2$
try {
for (int i = 0; i < maxCountOfReceivePacketsAtOneTime; i++) {
final JMatcherClientMessage receivedJMatcherMessage = this.tryToReceiveJMatcherMessageFrom(connectionTargetHost, socket);
if (receivedJMatcherMessage == null) {
continue;
}
final JMatcherClientMessageType messageType = receivedJMatcherMessage.getType();
this.log(Level.DEBUG, "received ", messageType); //$NON-NLS-1$
if (messageType == JMatcherClientMessageType.CONNECT_REQUEST) {
JMatcherClientUtil.sendJMatcherClientMessage(socket, JMatcherClientMessageType.GOT_CONNECT_REQUEST, this.name, connectionTargetHost);
continue;
}
if (messageType == JMatcherClientMessageType.ENTRY_CLIENT_IS_FULL || messageType == JMatcherClientMessageType.CANCEL) {
return null;
}
if (messageType == JMatcherClientMessageType.GOT_CONNECT_REQUEST) {
connectionTargetHost.setName(receivedJMatcherMessage.getSenderName());
return new ConnectorPeer(this.name, socket, connectionTargetHost, this.receiveBuffSize, this.retryCount);
}
}
} catch (SocketTimeoutException e) {
// one of the end conditions
}
return null;
}
private JMatcherClientMessage tryToReceiveJMatcherMessageFrom(Host host, DatagramSocket socket) throws SocketTimeoutException, IOException {
final DatagramPacket packet = this.tryToReceiveUDPPacketFrom(host, socket);
final JMatcherClientMessage result = JMatcherClientUtil.getJMatcherMessageFrom(packet);
this.log(Level.DEBUG, "receive ", result, " from ", host); //$NON-NLS-1$ //$NON-NLS-2$
return result;
}
private DatagramPacket tryToReceiveUDPPacketFrom(Host host, DatagramSocket socket) throws SocketTimeoutException, IOException {
final DatagramPacket packet = JMatcherClientUtil.receiveUDPPacket(socket, this.receiveBuffSize);
this.log(Level.DEBUG, "received packet from", packet.getSocketAddress()); //$NON-NLS-1$
if (JMatcherClientUtil.packetCameFrom(host, packet) == false) {
return null;
}
this.log(Level.DEBUG, "accept packet which came from", packet.getAddress(), ":", Integer.valueOf(packet.getPort())); //$NON-NLS-1$ //$NON-NLS-2$
return packet;
}
private void log(Level level, Object... msgs) {
if (this.logger == null) {
return;
}
StringBuilder sb = new StringBuilder();
for (Object msg : msgs) {
sb.append(msg);
}
this.logger.log(level, sb.toString());
}
private void log(Level level, String msg, Throwable t) {
if (this.logger == null) {
return;
}
this.logger.log(level, msg, t);
}
/**
* @author goshi 2016/02/08
*/
public static class ConnectorPeer implements Peer {
private String name;
private final DatagramSocket socket;
private Host connectingHost;
private int receiveBuffSize;
private int retryCount;
private Set<PeerObserver> observers;
private Thread communicationThread;
private ReceivedMessageBuffer receivedMessageBuffer;
private volatile boolean isDisconnecting;
ConnectorPeer(String name, DatagramSocket socket, Host connectingHost, int receiveBuffSize, int retryCount) {
if (socket == null || connectingHost == null) {
throw new IllegalArgumentException();
}
this.name = name;
this.socket = socket;
this.connectingHost = connectingHost;
this.receiveBuffSize = receiveBuffSize;
this.retryCount = retryCount;
this.observers = new HashSet<>();
this.receivedMessageBuffer = new ReceivedMessageBuffer();
this.communicationThread = new Thread() {
@Override
public void run() {
ConnectorPeer.this.performCommunicationLoop();
}
};
this.communicationThread.start();
}
/**
*
*/
public void performCommunicationLoop() {
try {
while (this.connectingHost != null && this.socket.isClosed() == false) {
try {
this.receivePacketAndHandle();
} catch (IOException e) {
throw e;
} catch (Exception e) {
// it should be logged
}
}
} catch (IOException e) {
// IOException is mainly caused by closing socket
}
}
private void receivePacketAndHandle() throws IOException {
final DatagramPacket packet = this.tryToReceiveUDPPacketFrom(this.connectingHost);
if (packet == null) {
return;
}
final String receivedMessage = JMatcherClientUtil.getMessageFrom(packet);
final JMatcherClientMessage jmatcherClientMessage = tryTransformToJMatcherClientMessage(receivedMessage);
if (jmatcherClientMessage == null) {
this.receivedMessageBuffer.store(this.connectingHost, receivedMessage);
} else if (JMatcherClientMessageType.CANCEL == jmatcherClientMessage.getType()) {
final Host removedHost = this.connectingHost;
this.connectingHost = null;
this.notifyObservers(UpdateEvent.REMOVE, removedHost);
} else if (JMatcherClientMessageType.CANCELLED == jmatcherClientMessage.getType()) {
synchronized (this) {
this.notifyAll();
}
this.isDisconnecting = false;
}
}
private DatagramPacket tryToReceiveUDPPacketFrom(Host host) throws IOException {
try {
final DatagramPacket packet = JMatcherClientUtil.receiveUDPPacket(this.socket, this.receiveBuffSize);
if (JMatcherClientUtil.packetCameFrom(host, packet) == false) {
return null;
}
return packet;
} catch (SocketTimeoutException e) {
return null;
}
}
private static JMatcherClientMessage tryTransformToJMatcherClientMessage(final String receiveMessage) {
return JMatcherClientMessage.deserialize(receiveMessage);
}
private void notifyObservers(UpdateEvent event, Host target) {
for (PeerObserver observer : this.observers) {
observer.updateConnectingHosts(new HashSet<Host>(), event, target);
}
}
@Override
public void disconnect(Host host) {
if (this.connectingHost == null || host != this.connectingHost) {
return;
}
this.sendDisconnectionMessage();
this.connectingHost = null;
this.notifyObservers(UpdateEvent.REMOVE, host);
}
/**
* Disconnect
*/
public void disconnect() {
this.disconnect(this.connectingHost);
}
@Override
public void close() {
if (this.socket.isClosed()) {
return;
}
if (this.connectingHost == null) {
this.closeWithoutNotificationToConnectingHost();
return;
}
this.sendDisconnectionMessage();
this.connectingHost = null;
this.closeWithoutNotificationToConnectingHost();
this.notifyObservers(UpdateEvent.CLEAR, null);
}
/**
* Close it without notification to the connecting host. We should
* generally use {@link #close()} instead of this method
*/
public void closeWithoutNotificationToConnectingHost() {
JMatcherClientUtil.close(this.socket);
}
/**
* The disconnection message may not reach the target (connecting host).
*/
private void sendDisconnectionMessage() {
try {
this.isDisconnecting = true;
for (int i = 0; i < this.retryCount; i++) {
JMatcherClientUtil.sendJMatcherClientMessage(this.socket, JMatcherClientMessageType.CANCEL, this.name, this.connectingHost);
synchronized (this) {
this.wait(this.socket.getSoTimeout());
}
if (this.isDisconnecting == false) {
break;
}
}
} catch (Exception e) {
// terminate forcely
} finally {
this.isDisconnecting = false;
}
}
@Override
public ReceivedMessage receiveMessage() {
if (this.socket.isClosed() || this.connectingHost == null) {
return null;
}
try {
return this.receivedMessageBuffer.poll(this.socket.getSoTimeout());
} catch (Exception e) {
return null;
}
}
@Override
public String receiveMessageFrom(Host host) {
if (!host.equals(this.connectingHost)) {
return null;
}
final ReceivedMessage receiveMessage = this.receiveMessage();
if (receiveMessage == null) {
return null;
}
return receiveMessage.getMessage();
}
@Override
public Host[] sendMessageTo(String message, Host... hosts) {
if (hosts.length != 1 || hosts[0].equals(this.connectingHost) == false) {
return new Host[0];
}
final boolean success = this.sendMessage(message);
if (success) {
final Host[] result = new Host[1];
result[0] = hosts[0];
return result;
}
return new Host[0];
}
/**
* @param message
* @return true if succeed in sending
* @throws IOException
* thrown if an I/O error occurs
*/
public boolean sendMessage(String message) {
if (message == null || this.socket.isClosed() || this.connectingHost == null) {
return false;
}
try {
JMatcherClientUtil.sendMessage(this.socket, message, this.connectingHost);
} catch (IOException e) {
return false;
}
return true;
}
/**
* @return the connectingHost
*/
public Host getConnectingHost() {
return this.connectingHost;
}
/**
 * Returns the hosts this peer is connected to: a mutable set with at most
 * one element, empty when the socket is closed or no connection exists.
 */
@Override
public Set<Host> getConnectingHosts() {
    final Set<Host> hosts = new HashSet<>();
    final boolean connected = !this.socket.isClosed() && this.connectingHost != null;
    if (connected) {
        hosts.add(this.connectingHost);
    }
    return hosts;
}
/**
 * @return the socket this peer communicates on
 */
@Override
public DatagramSocket getSocket() {
return this.socket;
}
/**
 * Sets the receive buffer size, clamped so it can never be smaller than
 * the size needed to receive a serialized JMatcherClientMessage.
 */
@Override
public void setReceiveBuffSize(int buffSize) {
this.receiveBuffSize = Math.max(buffSize, JMatcherClientMessage.buffSizeToReceiveSerializedMessage);
}
/**
 * @return the current receive buffer size
 */
@Override
public int getReceiveBuffSize() {
return this.receiveBuffSize;
}
/**
 * @return the name this peer identifies itself with
 */
public String getName() {
return this.name;
}
/**
 * @param name
 *            the name to set
 */
public void setName(String name) {
this.name = name;
}
/**
 * @return true while the underlying socket is still open
 */
@Override
public boolean isOnline() {
return !this.socket.isClosed();
}
/**
 * Registers an observer to be notified of peer events.
 *
 * @param observer the observer to add
 */
@Override
public void addObserver(PeerObserver observer) {
this.observers.add(observer);
}
/**
 * Unregisters a previously added observer.
 *
 * @param observer the observer to remove
 */
@Override
public void removeObserver(PeerObserver observer) {
this.observers.remove(observer);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.testbeans.gui;
import java.awt.Component;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.beans.PropertyDescriptor;
import java.beans.PropertyEditorSupport;
import java.util.HashMap;
import java.util.Map;
import java.util.ResourceBundle;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.text.JTextComponent;
import org.apache.jmeter.gui.ClearGui;
import org.apache.jmeter.util.JMeterUtils;
/**
* This class implements a property editor for possibly null String properties
* that supports custom editing (i.e.: provides a GUI component) based on a
* combo box.
* <p>
* The provided GUI is a combo box with:
* <ul>
* <li>An option for "undefined" (corresponding to the null value), unless the
* <b>noUndefined</b> property is set.
* <li>An option for each value in the <b>tags</b> property.
* <li>The possibility to write your own value, unless the <b>noEdit</b>
* property is set.
* </ul>
*
*/
class ComboStringEditor extends PropertyEditorSupport implements ItemListener, ClearGui {

    private static final String[] EMPTY_STRING_ARRAY = new String[0];

    /**
     * The list of options to be offered by this editor.
     */
    private final String[] tags;

    /**
     * The edited property's default value.
     */
    private String initialEditValue;

    private final JComboBox combo;

    private final DefaultComboBoxModel model;

    /*
     * Map of translations for tags; only created if there is at least
     * one tag and a ResourceBundle has been provided.
     */
    private final Map<String, String> validTranslations;

    /*
     * True iff we're currently processing an event triggered by the user
     * selecting the "Edit" option. Used to prevent reverting the combo to
     * non-editable during processing of secondary events.
     */
    private boolean startingEdit = false;

    // Needs to be visible to test cases
    final Object UNDEFINED = new UniqueObject("property_undefined"); //$NON-NLS-1$

    private final Object EDIT = new UniqueObject("property_edit"); //$NON-NLS-1$

    // The minimum index of the tags in the combo box
    private final int minTagIndex;

    // The maximum index of the tags in the combo box
    private final int maxTagIndex;

    @Deprecated // only for use from test code
    ComboStringEditor() {
        this(null, false, false);
    }

    ComboStringEditor(PropertyDescriptor descriptor) {
        this((String[]) descriptor.getValue(GenericTestBeanCustomizer.TAGS),
                GenericTestBeanCustomizer.notExpression(descriptor),
                GenericTestBeanCustomizer.notNull(descriptor),
                (ResourceBundle) descriptor.getValue(GenericTestBeanCustomizer.RESOURCE_BUNDLE));
    }

    ComboStringEditor(String[] tags, boolean noEdit, boolean noUndefined) {
        this(tags, noEdit, noUndefined, null);
    }

    private ComboStringEditor(String[] pTags, boolean noEdit, boolean noUndefined, ResourceBundle rb) {
        tags = pTags == null ? EMPTY_STRING_ARRAY : pTags.clone();
        model = new DefaultComboBoxModel();

        if (rb != null && tags.length > 0) {
            validTranslations = new HashMap<String, String>();
            for (String tag : this.tags) {
                validTranslations.put(tag, rb.getString(tag));
            }
        } else {
            validTranslations = null;
        }

        if (!noUndefined) {
            model.addElement(UNDEFINED);
        }
        if (tags.length == 0) {
            // Sentinel values guarantee the range check in getAsText() never matches.
            this.minTagIndex = Integer.MAX_VALUE;
            this.maxTagIndex = Integer.MIN_VALUE;
        } else {
            this.minTagIndex = model.getSize(); // track where tags start ...
            for (String tag : this.tags) {
                model.addElement(translate(tag));
            }
            // BUG FIX: model.getSize() is one PAST the last tag entry. Without
            // the -1, the inclusive range check in getAsText() also matched
            // the entry following the tags (typically EDIT) and indexed
            // tags[] out of bounds.
            this.maxTagIndex = model.getSize() - 1; // ... and where they end
        }
        if (!noEdit) {
            model.addElement(EDIT);
        }

        combo = new JComboBox(model);
        combo.addItemListener(this);
        combo.setEditable(false);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean supportsCustomEditor() {
        return true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Component getCustomEditor() {
        return combo;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Object getValue() {
        return getAsText();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getAsText() {
        final Object value = combo.getSelectedItem();
        if (UNDEFINED.equals(value)) {
            return null;
        }
        final int item = combo.getSelectedIndex();
        // Check if the entry index corresponds to a tag, if so return the tag.
        // This also works if the tags were not translated.
        if (item >= minTagIndex && item <= maxTagIndex) {
            return tags[item - minTagIndex];
        }
        // Not a tag entry, return the original value
        return (String) value;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setValue(Object value) {
        setAsText((String) value);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setAsText(String value) {
        combo.setEditable(true);

        if (value == null) {
            combo.setSelectedItem(UNDEFINED);
        } else {
            combo.setSelectedItem(translate(value));
        }

        // Only lock the combo again when the value matched an existing entry
        // and we are not in the middle of starting an edit.
        if (!startingEdit && combo.getSelectedIndex() >= 0) {
            combo.setEditable(false);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void itemStateChanged(ItemEvent e) {
        if (e.getStateChange() == ItemEvent.SELECTED) {
            if (EDIT.equals(e.getItem())) {
                startingEdit = true;
                startEditing();
                startingEdit = false;
            } else {
                if (!startingEdit && combo.getSelectedIndex() >= 0) {
                    combo.setEditable(false);
                }
                firePropertyChange();
            }
        }
    }

    // Switch the combo into editable mode, seed it with the initial edit
    // value and position the caret conveniently (inside "${}" if present).
    private void startEditing() {
        JTextComponent textField = (JTextComponent) combo.getEditor().getEditorComponent();

        combo.setEditable(true);

        textField.requestFocusInWindow();

        String text = translate(initialEditValue);
        if (text == null) {
            text = ""; // will revert to last valid value if invalid
        }

        combo.setSelectedItem(text);

        int i = text.indexOf("${}");
        if (i != -1) {
            textField.setCaretPosition(i + 2);
        } else {
            textField.selectAll();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String[] getTags() {
        return tags.clone();
    }

    /**
     * @param object
     *            the initial value offered when the user starts editing
     */
    public void setInitialEditValue(String object) {
        initialEditValue = object;
    }

    /**
     * This is a funny hack: if you use a plain String, entering the text of the
     * string in the editor will make the combo revert to that option -- which
     * actually amounts to making that string 'reserved'. I preferred to avoid
     * this by using a different type having a controlled .toString().
     */
    private static class UniqueObject {
        private final String propKey;
        private final String propValue;

        UniqueObject(String propKey) {
            this.propKey = propKey;
            this.propValue = JMeterUtils.getResString(propKey);
        }

        @Override
        public String toString() {
            return propValue;
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other instanceof UniqueObject) {
                return propKey.equals(((UniqueObject) other).propKey);
            }
            return false;
        }

        // Added: equals() was overridden without hashCode(), breaking the
        // equals/hashCode contract for hashed collections.
        @Override
        public int hashCode() {
            return propKey.hashCode();
        }
    }

    @Override
    public void clearGui() {
        setAsText(initialEditValue);
    }

    // Replace a string with its translation, if one exists
    private String translate(String input) {
        if (validTranslations != null) {
            final String entry = validTranslations.get(input);
            return entry != null ? entry : input;
        }
        return input;
    }
}
| |
// ========================================================================
// Copyright (c) 2006-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.client;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.concurrent.atomic.AtomicInteger;
import org.eclipse.jetty.client.security.SecurityListener;
import org.eclipse.jetty.http.HttpFields;
import org.eclipse.jetty.http.HttpHeaders;
import org.eclipse.jetty.http.HttpMethods;
import org.eclipse.jetty.http.HttpSchemes;
import org.eclipse.jetty.http.HttpURI;
import org.eclipse.jetty.http.HttpVersions;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.BufferCache.CachedBuffer;
import org.eclipse.jetty.io.ByteArrayBuffer;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.thread.Timeout;
/**
* <p>
* An HTTP client API that encapsulates an exchange (a request and its response) with a HTTP server.
* </p>
*
* This object encapsulates:
* <ul>
* <li>The HTTP server address, see {@link #setAddress(Address)}, or {@link #setURI(URI)}, or {@link #setURL(String)})
* <li>The HTTP request method, URI and HTTP version (see {@link #setMethod(String)}, {@link #setRequestURI(String)}, and {@link #setVersion(int)})
* <li>The request headers (see {@link #addRequestHeader(String, String)} or {@link #setRequestHeader(String, String)})
* <li>The request content (see {@link #setRequestContent(Buffer)} or {@link #setRequestContentSource(InputStream)})
* <li>The status of the exchange (see {@link #getStatus()})
* <li>Callbacks to handle state changes (see the onXxx methods such as {@link #onRequestComplete()} or {@link #onResponseComplete()})
* <li>The ability to intercept callbacks (see {@link #setEventListener(HttpEventListener)}
* </ul>
*
* <p>
* The HttpExchange class is intended to be used by a developer wishing to have close asynchronous interaction with the the exchange.<br />
* Typically a developer will extend the HttpExchange class with a derived class that overrides some or all of the onXxx callbacks. <br />
* There are also some predefined HttpExchange subtypes that can be used as a basis, see {@link org.eclipse.jetty.client.ContentExchange} and
* {@link org.eclipse.jetty.client.CachedExchange}.
* </p>
*
* <p>
* Typically the HttpExchange is passed to the {@link HttpClient#send(HttpExchange)} method, which in turn selects a {@link HttpDestination} and calls its
* {@link HttpDestination#send(HttpExchange)}, which then creates or selects a {@link AbstractHttpConnection} and calls its {@link AbstractHttpConnection#send(HttpExchange)}. A
* developer may wish to directly call send on the destination or connection if they wish to bypass some handling provided (eg Cookie handling in the
* HttpDestination).
* </p>
*
* <p>
* In some circumstances, the HttpClient or HttpDestination may wish to retry a HttpExchange (eg. failed pipeline request, authentication retry or redirection).
* In such cases, the HttpClient and/or HttpDestination may insert their own HttpExchangeListener to intercept and filter the call backs intended for the
* HttpExchange.
* </p>
*/
public class HttpExchange
{
static final Logger LOG = Log.getLogger(HttpExchange.class);
// Exchange lifecycle states, in normal progression order; legal transitions
// are enforced by setStatus().
public static final int STATUS_START = 0;
public static final int STATUS_WAITING_FOR_CONNECTION = 1;
public static final int STATUS_WAITING_FOR_COMMIT = 2;
public static final int STATUS_SENDING_REQUEST = 3;
public static final int STATUS_WAITING_FOR_RESPONSE = 4;
public static final int STATUS_PARSING_HEADERS = 5;
public static final int STATUS_PARSING_CONTENT = 6;
public static final int STATUS_COMPLETED = 7;
public static final int STATUS_EXPIRED = 8;
public static final int STATUS_EXCEPTED = 9;
public static final int STATUS_CANCELLING = 10;
public static final int STATUS_CANCELLED = 11;
// HTTP protocol fields
private String _method = HttpMethods.GET;
private Buffer _scheme = HttpSchemes.HTTP_BUFFER;
private String _uri;
private int _version = HttpVersions.HTTP_1_1_ORDINAL;
private Address _address;
private final HttpFields _requestFields = new HttpFields();
private Buffer _requestContent;
private InputStream _requestContentSource;
// Current lifecycle state; updated via CAS in setStatus()
private AtomicInteger _status = new AtomicInteger(STATUS_START);
private boolean _retryStatus = false;
// controls if the exchange will have listeners autoconfigured by the destination
private boolean _configureListeners = true;
private HttpEventListener _listener = new Listener();
private volatile AbstractHttpConnection _connection;
// local address of the connection; only set once the exchange is associated
private Address _localAddress = null;
// a timeout for this exchange
private long _timeout = -1;
private volatile Timeout.Task _timeoutTask;
// Diagnostic bookkeeping used by toString(): last transition time, send time
// and the previous state with how long it lasted.
private long _lastStateChange=System.currentTimeMillis();
private long _sent=-1;
private int _lastState=-1;
private int _lastStatePeriod=-1;
boolean _onRequestCompleteDone;
boolean _onResponseCompleteDone;
boolean _onDone; // == onConnectionFail || onException || onExpired || onCancelled || onResponseCompleted && onRequestCompleted
/**
 * Marks this exchange as expired (unless it already reached a terminal
 * state at or beyond COMPLETED) and notifies the destination and, if one
 * is still associated, the connection.
 */
protected void expire(HttpDestination destination)
{
if (getStatus() < HttpExchange.STATUS_COMPLETED)
setStatus(HttpExchange.STATUS_EXPIRED);
destination.exchangeExpired(this);
// Take a snapshot of the volatile field before the null check.
AbstractHttpConnection connection = _connection;
if (connection != null)
connection.exchangeExpired(this);
}
/**
 * @return the current lifecycle status (one of the STATUS_* constants)
 */
public int getStatus()
{
return _status.get();
}
/**
 * @param status
 * the status to wait for
 * @throws InterruptedException
 * if the waiting thread is interrupted
 * @deprecated Use {@link #waitForDone()} instead
 */
@Deprecated
public void waitForStatus(int status) throws InterruptedException
{
// Deliberately unsupported; retained only for binary compatibility.
throw new UnsupportedOperationException();
}
/**
 * Wait until the exchange is "done". Done is defined as when a final state has been passed to the HttpExchange via the associated onXxx call. Note that an
 * exchange can transit a final state when being used as part of a dialog (eg {@link SecurityListener}. Done status is thus defined as:
 *
 * <pre>
 * done == onConnectionFailed || onException || onExpire || onRequestComplete && onResponseComplete
 * </pre>
 *
 * @return the done status
 * @throws InterruptedException
 * if the waiting thread is interrupted
 */
public int waitForDone() throws InterruptedException
{
synchronized (this)
{
// Loop guards against spurious wakeups; done() calls notifyAll().
while (!isDone())
this.wait();
return _status.get();
}
}
/**
 * Resets the exchange so it can be reused: clears the timeout task, the
 * completion flags and returns the status to START.
 */
public void reset()
{
// TODO - this should do a cancel and wakeup everybody that was waiting.
// might need a version number concept
synchronized (this)
{
_timeoutTask = null;
_onRequestCompleteDone = false;
_onResponseCompleteDone = false;
_onDone = false;
setStatus(STATUS_START);
}
}
/* ------------------------------------------------------------ */
/**
 * Attempts to transition the exchange into {@code newStatus}, enforcing
 * the legal state-machine transitions and firing the matching listener
 * callbacks (onRequestCommitted, onResponseHeaderComplete,
 * onResponseComplete, onExpire, done) when a transition wins the CAS.
 * Illegal transitions that are not explicitly ignored raise an
 * IllegalStateException.
 *
 * @param newStatus
 * @return True if the status was actually set.
 */
boolean setStatus(int newStatus)
{
boolean set = false;
try
{
int oldStatus = _status.get();
boolean ignored = false;
if (oldStatus != newStatus)
{
// Record diagnostics (used by toString()) before attempting the CAS.
long now = System.currentTimeMillis();
_lastStatePeriod=(int)(now-_lastStateChange);
_lastState=oldStatus;
_lastStateChange=now;
if (newStatus==STATUS_SENDING_REQUEST)
_sent=_lastStateChange;
}
// State machine: from which old status you can go into which new status
switch (oldStatus)
{
case STATUS_START:
switch (newStatus)
{
case STATUS_START:
case STATUS_WAITING_FOR_CONNECTION:
case STATUS_WAITING_FOR_COMMIT:
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_WAITING_FOR_CONNECTION:
switch (newStatus)
{
case STATUS_WAITING_FOR_COMMIT:
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_WAITING_FOR_COMMIT:
switch (newStatus)
{
case STATUS_SENDING_REQUEST:
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_SENDING_REQUEST:
switch (newStatus)
{
case STATUS_WAITING_FOR_RESPONSE:
// Intentional assignment-in-condition: only the CAS winner fires
// the callback, so each listener event is delivered exactly once.
if (set = _status.compareAndSet(oldStatus,newStatus))
getEventListener().onRequestCommitted();
break;
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_WAITING_FOR_RESPONSE:
switch (newStatus)
{
case STATUS_PARSING_HEADERS:
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_PARSING_HEADERS:
switch (newStatus)
{
case STATUS_PARSING_CONTENT:
if (set = _status.compareAndSet(oldStatus,newStatus))
getEventListener().onResponseHeaderComplete();
break;
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_PARSING_CONTENT:
switch (newStatus)
{
case STATUS_COMPLETED:
if (set = _status.compareAndSet(oldStatus,newStatus))
getEventListener().onResponseComplete();
break;
case STATUS_CANCELLING:
case STATUS_EXCEPTED:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_EXPIRED:
set = setStatusExpired(newStatus,oldStatus);
break;
}
break;
case STATUS_COMPLETED:
switch (newStatus)
{
case STATUS_START:
case STATUS_EXCEPTED:
case STATUS_WAITING_FOR_RESPONSE:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_CANCELLING:
case STATUS_EXPIRED:
// Don't change the status, it's too late
ignored = true;
break;
}
break;
case STATUS_CANCELLING:
switch (newStatus)
{
case STATUS_EXCEPTED:
case STATUS_CANCELLED:
if (set = _status.compareAndSet(oldStatus,newStatus))
done();
break;
default:
// Ignore other statuses, we're cancelling
ignored = true;
break;
}
break;
case STATUS_EXCEPTED:
case STATUS_EXPIRED:
case STATUS_CANCELLED:
switch (newStatus)
{
case STATUS_START:
set = _status.compareAndSet(oldStatus,newStatus);
break;
case STATUS_COMPLETED:
// Already in a terminal failure state; still mark done so waiters wake.
ignored = true;
done();
break;
default:
ignored = true;
break;
}
break;
default:
// Here means I allowed to set a state that I don't recognize
throw new AssertionError(oldStatus + " => " + newStatus);
}
if (!set && !ignored)
throw new IllegalStateException(toState(oldStatus) + " => " + toState(newStatus));
LOG.debug("setStatus {} {}",newStatus,this);
}
catch (IOException x)
{
// Listener callbacks may throw IOException; log and report failure.
LOG.warn(x);
}
return set;
}
/**
 * Attempts the CAS transition into the EXPIRED state and, when this call
 * wins the race, fires the onExpire callback exactly once.
 */
private boolean setStatusExpired(int newStatus, int oldStatus)
{
    final boolean updated = _status.compareAndSet(oldStatus, newStatus);
    if (updated)
        getEventListener().onExpire();
    return updated;
}
/**
 * @return true once a final onXxx callback has completed (see the
 *         definition of "done" on {@link #waitForDone()})
 */
public boolean isDone()
{
synchronized (this)
{
return _onDone;
}
}
/**
 * @param status ignored
 * @return the same as {@link #isDone()}
 * @deprecated
 */
@Deprecated
public boolean isDone(int status)
{
return isDone();
}
/**
 * @return the listener that receives the exchange's lifecycle callbacks
 */
public HttpEventListener getEventListener()
{
return _listener;
}
/**
 * @param listener the listener to receive the exchange's lifecycle callbacks
 */
public void setEventListener(HttpEventListener listener)
{
_listener = listener;
}
/**
 * @param timeout the per-exchange timeout in milliseconds; values &lt;= 0
 *            fall back to the client default (see scheduleTimeout)
 */
public void setTimeout(long timeout)
{
_timeout = timeout;
}
/**
 * @return the per-exchange timeout in milliseconds (-1 if not set)
 */
public long getTimeout()
{
return _timeout;
}
/**
 * Convenience wrapper around {@link #setURI(URI)}.
 *
 * @param url
 * an absolute URL (for example 'http://localhost/foo/bar?a=1')
 */
public void setURL(String url)
{
setURI(URI.create(url));
}
/**
 * @param address
 * the address of the server
 */
public void setAddress(Address address)
{
_address = address;
}
/**
 * @return the address of the server
 */
public Address getAddress()
{
return _address;
}
/**
 * the local address used by the connection
 *
 * Note: this method will not be populated unless the exchange has been executed by the HttpClient
 *
 * @return the local address used for the running of the exchange if available, null otherwise.
 */
public Address getLocalAddress()
{
return _localAddress;
}
/**
 * @param scheme
 * the scheme of the URL (for example 'http')
 */
public void setScheme(Buffer scheme)
{
_scheme = scheme;
}
/**
 * Sets the scheme from a string, reusing the shared http/https buffers
 * where possible; a null scheme leaves the current value unchanged.
 *
 * @param scheme
 * the scheme of the URL (for example 'http')
 */
public void setScheme(String scheme)
{
if (scheme != null)
{
if (HttpSchemes.HTTP.equalsIgnoreCase(scheme))
setScheme(HttpSchemes.HTTP_BUFFER);
else if (HttpSchemes.HTTPS.equalsIgnoreCase(scheme))
setScheme(HttpSchemes.HTTPS_BUFFER);
else
setScheme(new ByteArrayBuffer(scheme));
}
}
/**
 * @return the scheme of the URL
 */
public Buffer getScheme()
{
return _scheme;
}
/**
 * @param version
 * the HTTP protocol version as integer, 9, 10 or 11 for 0.9, 1.0 or 1.1
 */
public void setVersion(int version)
{
_version = version;
}
/**
 * Sets the HTTP version from its string form; an unrecognised string
 * falls back to HTTP/1.0 (ordinal 10).
 *
 * @param version
 * the HTTP protocol version as string
 */
public void setVersion(String version)
{
CachedBuffer v = HttpVersions.CACHE.get(version);
if (v == null)
_version = 10;
else
_version = v.getOrdinal();
}
/**
 * @return the HTTP protocol version as integer
 * @see #setVersion(int)
 */
public int getVersion()
{
return _version;
}
/**
 * @param method
 * the HTTP method (for example 'GET')
 */
public void setMethod(String method)
{
_method = method;
}
/**
 * @return the HTTP method
 */
public String getMethod()
{
return _method;
}
/**
 * @return request URI
 * @see #getRequestURI()
 * @deprecated use {@link #getRequestURI()}
 */
@Deprecated
public String getURI()
{
return getRequestURI();
}
/**
 * @return request URI
 */
public String getRequestURI()
{
return _uri;
}
/**
 * Set the request URI
 *
 * @param uri
 * new request URI
 * @see #setRequestURI(String)
 * @deprecated use {@link #setRequestURI(String)}
 */
@Deprecated
public void setURI(String uri)
{
setRequestURI(uri);
}
/**
 * Set the request URI
 *
 * Per RFC 2616 sec5, Request-URI = "*" | absoluteURI | abs_path | authority<br/>
 * where:<br/>
 * <br/>
 * "*" - request applies to server itself<br/>
 * absoluteURI - required for proxy requests, e.g. http://localhost:8080/context<br/>
 * (this form is generated automatically by HttpClient)<br/>
 * abs_path - used for most methods, e.g. /context<br/>
 * authority - used for CONNECT method only, e.g. localhost:8080<br/>
 * <br/>
 * For complete definition of URI components, see RFC 2396 sec3.<br/>
 *
 * @param uri
 * new request URI
 */
public void setRequestURI(String uri)
{
_uri = uri;
}
/* ------------------------------------------------------------ */
/**
 * Populates scheme, address and request URI from an absolute,
 * non-opaque URI.
 *
 * @param uri
 * an absolute URI (for example 'http://localhost/foo/bar?a=1')
 * @throws IllegalArgumentException if the URI is relative or opaque
 */
public void setURI(URI uri)
{
if (!uri.isAbsolute())
throw new IllegalArgumentException("!Absolute URI: " + uri);
if (uri.isOpaque())
throw new IllegalArgumentException("Opaque URI: " + uri);
if (LOG.isDebugEnabled())
LOG.debug("URI = {}",uri.toASCIIString());
String scheme = uri.getScheme();
int port = uri.getPort();
// No explicit port: default to 443 for https, 80 otherwise.
if (port <= 0)
port = "https".equalsIgnoreCase(scheme)?443:80;
setScheme(scheme);
setAddress(new Address(uri.getHost(),port));
HttpURI httpUri = new HttpURI(uri);
String completePath = httpUri.getCompletePath();
// An empty path still needs a valid Request-URI, hence "/".
setRequestURI(completePath == null?"/":completePath);
}
/**
 * Adds the specified request header (may result in multiple values for
 * the same name)
 *
 * @param name
 * the header name
 * @param value
 * the header value
 */
public void addRequestHeader(String name, String value)
{
getRequestFields().add(name,value);
}
/**
 * Adds the specified request header
 *
 * @param name
 * the header name
 * @param value
 * the header value
 */
public void addRequestHeader(Buffer name, Buffer value)
{
getRequestFields().add(name,value);
}
/**
 * Sets the specified request header, replacing any existing value
 *
 * @param name
 * the header name
 * @param value
 * the header value
 */
public void setRequestHeader(String name, String value)
{
getRequestFields().put(name,value);
}
/**
 * Sets the specified request header
 *
 * @param name
 * the header name
 * @param value
 * the header value
 */
public void setRequestHeader(Buffer name, Buffer value)
{
getRequestFields().put(name,value);
}
/**
 * Convenience method that sets the Content-Type request header
 *
 * @param value
 * the content type of the request
 */
public void setRequestContentType(String value)
{
getRequestFields().put(HttpHeaders.CONTENT_TYPE_BUFFER,value);
}
/**
 * @return the request headers
 */
public HttpFields getRequestFields()
{
return _requestFields;
}
/**
 * @param requestContent
 * the request content
 */
public void setRequestContent(Buffer requestContent)
{
_requestContent = requestContent;
}
/**
 * Sets the request content source. If the stream supports mark/reset it
 * is marked here so {@link #onRetry()} can rewind it for a resend.
 *
 * @param stream
 * the request content as a stream
 */
public void setRequestContentSource(InputStream stream)
{
_requestContentSource = stream;
if (_requestContentSource != null && _requestContentSource.markSupported())
_requestContentSource.mark(Integer.MAX_VALUE);
}
/**
 * @return the request content as a stream
 */
public InputStream getRequestContentSource()
{
return _requestContentSource;
}
/**
 * Reads the next chunk of request content from the content source into
 * the given buffer (allocating one if null).
 *
 * @param buffer the buffer to fill, or null to allocate a fresh one
 * @return the filled buffer, or null when there is no content source or
 *         the source is exhausted
 * @throws IOException if reading the content source fails
 */
public Buffer getRequestContentChunk(Buffer buffer) throws IOException
{
synchronized (this)
{
if (_requestContentSource!=null)
{
if (buffer == null)
buffer = new ByteArrayBuffer(8192); // TODO configure
int space = buffer.space();
int length = _requestContentSource.read(buffer.array(),buffer.putIndex(),space);
if (length >= 0)
{
buffer.setPutIndex(buffer.putIndex()+length);
return buffer;
}
}
return null;
}
}
/**
 * @return the request content
 */
public Buffer getRequestContent()
{
return _requestContent;
}
/**
 * @return whether a retry will be attempted or not
 */
public boolean getRetryStatus()
{
return _retryStatus;
}
/**
 * @param retryStatus
 * whether a retry will be attempted or not
 */
public void setRetryStatus(boolean retryStatus)
{
_retryStatus = retryStatus;
}
/**
 * Initiates the cancelling of this exchange. The status of the exchange is set to {@link #STATUS_CANCELLING}. Cancelling the exchange is an asynchronous
 * operation with respect to the request/response, and as such checking the request/response status of a cancelled exchange may return undefined results
 * (for example it may have only some of the response headers being sent by the server). The cancelling of the exchange is completed when the exchange
 * status (see {@link #getStatus()}) is {@link #STATUS_CANCELLED}, and this can be waited using {@link #waitForDone()}.
 */
public void cancel()
{
setStatus(STATUS_CANCELLING);
abort();
}
/**
 * Marks the exchange as done, releases the connection and wakes every
 * thread blocked in {@link #waitForDone()}.
 */
private void done()
{
synchronized (this)
{
disassociate();
_onDone = true;
notifyAll();
}
}
/**
 * Closes the associated connection (if any), then disassociates it from
 * this exchange; close failures are logged at debug level only.
 */
private void abort()
{
AbstractHttpConnection httpConnection = _connection;
if (httpConnection != null)
{
try
{
// Closing the connection here will cause the connection
// to be returned in HttpConnection.handle()
httpConnection.close();
}
catch (IOException x)
{
LOG.debug(x);
}
finally
{
disassociate();
}
}
}
/**
 * Associates the exchange with its connection, records the local address
 * and, if a cancel raced the association, aborts immediately.
 */
void associate(AbstractHttpConnection connection)
{
if (connection.getEndPoint().getLocalAddr() != null)
_localAddress = new Address(connection.getEndPoint().getLocalAddr(),connection.getEndPoint().getLocalPort());
_connection = connection;
if (getStatus() == STATUS_CANCELLING)
abort();
}
// True while a connection is associated with this exchange.
boolean isAssociated()
{
return this._connection != null;
}
/**
 * Clears the connection association; completes a pending cancel by
 * moving CANCELLING to CANCELLED.
 *
 * @return the connection that was associated, or null
 */
AbstractHttpConnection disassociate()
{
AbstractHttpConnection result = _connection;
this._connection = null;
if (getStatus() == STATUS_CANCELLING)
setStatus(STATUS_CANCELLED);
return result;
}
/**
 * Converts a STATUS_* code into its short human-readable name;
 * returns "UNKNOWN" for any value outside the defined range.
 *
 * @param s one of the STATUS_* constants
 * @return the state name
 */
public static String toState(int s)
{
    // Indexed by the STATUS_* constants, which run 0..11 in this order.
    final String[] names = {
        "START",      // STATUS_START
        "CONNECTING", // STATUS_WAITING_FOR_CONNECTION
        "CONNECTED",  // STATUS_WAITING_FOR_COMMIT
        "SENDING",    // STATUS_SENDING_REQUEST
        "WAITING",    // STATUS_WAITING_FOR_RESPONSE
        "HEADERS",    // STATUS_PARSING_HEADERS
        "CONTENT",    // STATUS_PARSING_CONTENT
        "COMPLETED",  // STATUS_COMPLETED
        "EXPIRED",    // STATUS_EXPIRED
        "EXCEPTED",   // STATUS_EXCEPTED
        "CANCELLING", // STATUS_CANCELLING
        "CANCELLED"   // STATUS_CANCELLED
    };
    return (s >= 0 && s < names.length) ? names[s] : "UNKNOWN";
}
@Override
public String toString()
{
String state=toState(getStatus());
long now=System.currentTimeMillis();
long forMs = now -_lastStateChange;
// Include the previous state and its duration when one was recorded,
// e.g. "HttpExchange@1a2b=GET//host/path#SENDING(3ms)->WAITING(5ms)".
String s= _lastState>=0
?String.format("%s@%x=%s//%s%s#%s(%dms)->%s(%dms)",getClass().getSimpleName(),hashCode(),_method,_address,_uri,toState(_lastState),_lastStatePeriod,state,forMs)
:String.format("%s@%x=%s//%s%s#%s(%dms)",getClass().getSimpleName(),hashCode(),_method,_address,_uri,state,forMs);
if (getStatus()>=STATUS_SENDING_REQUEST && _sent>0)
s+="sent="+(now-_sent)+"ms";
return s;
}
/**
 * Hook invoked to obtain a replacement {@link Connection} when the
 * protocol is switched; this base implementation returns null.
 * NOTE(review): the semantics of a non-null return are inferred from the
 * signature — confirm with the caller of this hook.
 *
 * @param endp the endpoint of the current connection
 * @return the replacement connection, or null
 * @throws IOException allowed to be thrown by overriding code
 */
protected Connection onSwitchProtocol(EndPoint endp) throws IOException
{
return null;
}
/**
 * Callback called when the request headers have been sent to the server. This implementation does nothing.
 *
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onRequestCommitted() throws IOException
{
}
/**
 * Callback called when the request and its body have been sent to the server. This implementation does nothing.
 *
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onRequestComplete() throws IOException
{
}
/**
 * Callback called when a response status line has been received from the server. This implementation does nothing.
 *
 * @param version
 * the HTTP version
 * @param status
 * the HTTP status code
 * @param reason
 * the HTTP status reason string
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onResponseStatus(Buffer version, int status, Buffer reason) throws IOException
{
}
/**
 * Callback called for each response header received from the server. This implementation does nothing.
 *
 * @param name
 * the header name
 * @param value
 * the header value
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onResponseHeader(Buffer name, Buffer value) throws IOException
{
}
/**
 * Callback called when the response headers have been completely received from the server. This implementation does nothing.
 *
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onResponseHeaderComplete() throws IOException
{
}
/**
 * Callback called for each chunk of the response content received from the server. This implementation does nothing.
 *
 * @param content
 * the buffer holding the content chunk
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onResponseContent(Buffer content) throws IOException
{
}
/**
 * Callback called when the entire response has been received from the server This implementation does nothing.
 *
 * @throws IOException
 * allowed to be thrown by overriding code
 */
protected void onResponseComplete() throws IOException
{
}
/**
 * Callback called when an exception was thrown during an attempt to establish the connection with the server (for example the server is not listening).
 * This implementation logs a warning.
 *
 * @param x
 * the exception thrown attempting to establish the connection with the server
 */
protected void onConnectionFailed(Throwable x)
{
LOG.warn("CONNECTION FAILED " + this,x);
}
/**
 * Callback called when any other exception occurs during the handling of this exchange. This implementation logs a warning.
 *
 * @param x
 * the exception thrown during the handling of this exchange
 */
protected void onException(Throwable x)
{
LOG.warn("EXCEPTION " + this,x);
}
/**
 * Callback called when no response has been received within the timeout. This implementation logs a warning.
 */
protected void onExpire()
{
LOG.warn("EXPIRED " + this);
}
/**
 * Callback called when the request is retried (due to failures or authentication).
 * Resets any consumable request content so it can be re-sent; implementations must
 * reset any additional consumable content of their own.
 *
 * @throws IOException
 *             if the request content source cannot be rewound for a retry
 */
protected void onRetry() throws IOException
{
    // Nothing to rewind when there is no streamed request content.
    if (_requestContentSource == null)
    {
        return;
    }

    // A retry can only re-send content from a stream that supports mark/reset.
    if (!_requestContentSource.markSupported())
    {
        throw new IOException("Unsupported retry attempt");
    }

    _requestContent = null;
    _requestContentSource.reset();
}
/**
 * @return true if the exchange should have listeners configured for it by the destination, false if this is being managed elsewhere
 * @see #setConfigureListeners(boolean)
 */
public boolean configureListeners()
{
    // Simple accessor for the flag set via setConfigureListeners(boolean).
    return _configureListeners;
}
/**
 * @param autoConfigure
 *            whether the listeners are configured by the destination or elsewhere
 */
public void setConfigureListeners(boolean autoConfigure)
{
    // Simple mutator; read back via configureListeners().
    this._configureListeners = autoConfigure;
}
/**
 * Schedules a timeout task for this exchange against the given destination.
 * When the task fires, the exchange is expired via {@code expire(destination)}.
 *
 * @param destination
 *            the destination whose HttpClient performs the scheduling
 */
protected void scheduleTimeout(final HttpDestination destination)
{
    // A timeout task must not already be pending for this exchange.
    assert _timeoutTask == null;
    _timeoutTask = new Timeout.Task()
    {
        @Override
        public void expired()
        {
            HttpExchange.this.expire(destination);
        }
    };
    HttpClient httpClient = destination.getHttpClient();
    long timeout = getTimeout();
    // A positive per-exchange timeout overrides the client's default schedule.
    if (timeout > 0)
        httpClient.schedule(_timeoutTask,timeout);
    else
        httpClient.schedule(_timeoutTask);
}
/**
 * Cancels any pending timeout task for this exchange and clears the reference.
 *
 * @param httpClient
 *            the client that scheduled the task
 */
protected void cancelTimeout(HttpClient httpClient)
{
    // Snapshot the field so the null check and the cancel act on the same reference.
    final Timeout.Task pending = _timeoutTask;
    if (pending != null)
    {
        httpClient.cancel(pending);
    }
    _timeoutTask = null;
}
/**
 * Bridges {@code HttpEventListener} callbacks from the connection to the protected
 * template methods on the enclosing {@code HttpExchange}, and tracks completion of
 * both the request and response sides so the exchange is disassociated exactly once
 * and waiters are woken.
 */
private class Listener implements HttpEventListener
{
    public void onConnectionFailed(Throwable ex)
    {
        try
        {
            HttpExchange.this.onConnectionFailed(ex);
        }
        finally
        {
            // Always mark the exchange done, even if the callback throws.
            done();
        }
    }
    public void onException(Throwable ex)
    {
        try
        {
            HttpExchange.this.onException(ex);
        }
        finally
        {
            // Always mark the exchange done, even if the callback throws.
            done();
        }
    }
    public void onExpire()
    {
        try
        {
            HttpExchange.this.onExpire();
        }
        finally
        {
            // Always mark the exchange done, even if the callback throws.
            done();
        }
    }
    public void onRequestCommitted() throws IOException
    {
        HttpExchange.this.onRequestCommitted();
    }
    public void onRequestComplete() throws IOException
    {
        try
        {
            HttpExchange.this.onRequestComplete();
        }
        finally
        {
            // Completion bookkeeping must run even if the user callback throws.
            synchronized (HttpExchange.this)
            {
                _onRequestCompleteDone = true;
                // Member _onDone may already be true, for example
                // because the exchange expired or has been canceled
                _onDone |= _onResponseCompleteDone;
                if (_onDone)
                    disassociate();
                // Wake any thread blocked waiting for this exchange to finish.
                HttpExchange.this.notifyAll();
            }
        }
    }
    public void onResponseComplete() throws IOException
    {
        try
        {
            HttpExchange.this.onResponseComplete();
        }
        finally
        {
            // Mirror of onRequestComplete(): the exchange is done only when BOTH
            // the request and response sides have completed.
            synchronized (HttpExchange.this)
            {
                _onResponseCompleteDone = true;
                // Member _onDone may already be true, for example
                // because the exchange expired or has been canceled
                _onDone |= _onRequestCompleteDone;
                if (_onDone)
                    disassociate();
                HttpExchange.this.notifyAll();
            }
        }
    }
    public void onResponseContent(Buffer content) throws IOException
    {
        HttpExchange.this.onResponseContent(content);
    }
    public void onResponseHeader(Buffer name, Buffer value) throws IOException
    {
        HttpExchange.this.onResponseHeader(name,value);
    }
    public void onResponseHeaderComplete() throws IOException
    {
        HttpExchange.this.onResponseHeaderComplete();
    }
    public void onResponseStatus(Buffer version, int status, Buffer reason) throws IOException
    {
        HttpExchange.this.onResponseStatus(version,status,reason);
    }
    public void onRetry()
    {
        // Record the retry before invoking the user hook so the status is visible
        // even if onRetry() fails.
        HttpExchange.this.setRetryStatus(true);
        try
        {
            HttpExchange.this.onRetry();
        }
        catch (IOException e)
        {
            // A failed rewind is logged at debug; the retry machinery handles the outcome.
            LOG.debug(e);
        }
    }
}
/**
 * Backwards-compatibility shim that forwards to the relocated class.
 *
 * @deprecated use {@link org.eclipse.jetty.client.CachedExchange} instead
 */
@Deprecated
public static class CachedExchange extends org.eclipse.jetty.client.CachedExchange
{
    public CachedExchange(boolean cacheFields)
    {
        super(cacheFields);
    }
}
/**
 * Backwards-compatibility shim that forwards to the relocated class.
 *
 * @deprecated use {@link org.eclipse.jetty.client.ContentExchange} instead
 */
@Deprecated
public static class ContentExchange extends org.eclipse.jetty.client.ContentExchange
{
}
}
| |
/**
* Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package javanamespace.implementation;
import javanamespace.SwaggerPetstore;
import com.microsoft.rest.ServiceClient;
import com.microsoft.rest.RestClient;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.ServiceResponseWithHeaders;
import java.io.IOException;
import java.util.List;
import javanamespace.models.ErrorException;
import javanamespace.models.ListPetsHeaders;
import javanamespace.models.Pet;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.POST;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* Initializes a new instance of the SwaggerPetstore class.
*/
public class SwaggerPetstoreImpl extends ServiceClient implements SwaggerPetstore {
    /**
     * The Retrofit service to perform REST calls.
     */
    private SwaggerPetstoreService service;

    /**
     * Initializes an instance of SwaggerPetstore client.
     */
    public SwaggerPetstoreImpl() {
        this("http://petstore.swagger.io/v1");
    }

    /**
     * Initializes an instance of SwaggerPetstore client.
     *
     * @param baseUrl the base URL of the host
     */
    public SwaggerPetstoreImpl(String baseUrl) {
        super(baseUrl);
        initialize();
    }

    /**
     * Initializes an instance of SwaggerPetstore client.
     *
     * @param clientBuilder the builder for building an OkHttp client, bundled with user configurations
     * @param restBuilder the builder for building an Retrofit client, bundled with user configurations
     */
    public SwaggerPetstoreImpl(OkHttpClient.Builder clientBuilder, Retrofit.Builder restBuilder) {
        // The delegated constructor already calls initialize(); the original generated
        // code called initialize() again here, creating the Retrofit service twice.
        this("http://petstore.swagger.io/v1", clientBuilder, restBuilder);
    }

    /**
     * Initializes an instance of SwaggerPetstore client.
     *
     * @param baseUrl the base URL of the host
     * @param clientBuilder the builder for building an OkHttp client, bundled with user configurations
     * @param restBuilder the builder for building an Retrofit client, bundled with user configurations
     */
    public SwaggerPetstoreImpl(String baseUrl, OkHttpClient.Builder clientBuilder, Retrofit.Builder restBuilder) {
        super(baseUrl, clientBuilder, restBuilder);
        initialize();
    }

    /**
     * Initializes an instance of SwaggerPetstore client.
     *
     * @param restClient the REST client containing pre-configured settings
     */
    public SwaggerPetstoreImpl(RestClient restClient) {
        super(restClient);
        initialize();
    }

    private void initialize() {
        initializeService();
    }

    // Builds the Retrofit proxy once per client instance.
    private void initializeService() {
        service = retrofit().create(SwaggerPetstoreService.class);
    }

    /**
     * The interface defining all the services for SwaggerPetstore to be
     * used by Retrofit to perform actually REST calls.
     */
    interface SwaggerPetstoreService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: javanamespace.SwaggerPetstore listPets" })
        @GET("pets")
        Observable<Response<ResponseBody>> listPets(@Query("limit") Integer limit);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: javanamespace.SwaggerPetstore createPets" })
        @POST("pets")
        Observable<Response<ResponseBody>> createPets();

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: javanamespace.SwaggerPetstore showPetById" })
        @GET("pets/{petId}")
        Observable<Response<ResponseBody>> showPetById(@Path("petId") String petId);
    }

    /**
     * List all pets.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;Pet&gt; object if successful.
     */
    public List<Pet> listPets() {
        return listPetsWithServiceResponseAsync().toBlocking().single().body();
    }

    /**
     * List all pets.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<Pet>> listPetsAsync(final ServiceCallback<List<Pet>> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(listPetsWithServiceResponseAsync(), serviceCallback);
    }

    /**
     * List all pets.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;Pet&gt; object
     */
    public Observable<List<Pet>> listPetsAsync() {
        return listPetsWithServiceResponseAsync().map(new Func1<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>, List<Pet>>() {
            @Override
            public List<Pet> call(ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders> response) {
                return response.body();
            }
        });
    }

    /**
     * List all pets.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;Pet&gt; object
     */
    public Observable<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>> listPetsWithServiceResponseAsync() {
        // No-argument overload: the limit query parameter is omitted from the request.
        final Integer limit = null;
        return service.listPets(limit)
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders> clientResponse = listPetsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    /**
     * List all pets.
     *
     * @param limit How many items to return at one time (max 100)
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;Pet&gt; object if successful.
     */
    public List<Pet> listPets(Integer limit) {
        return listPetsWithServiceResponseAsync(limit).toBlocking().single().body();
    }

    /**
     * List all pets.
     *
     * @param limit How many items to return at one time (max 100)
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<Pet>> listPetsAsync(Integer limit, final ServiceCallback<List<Pet>> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(listPetsWithServiceResponseAsync(limit), serviceCallback);
    }

    /**
     * List all pets.
     *
     * @param limit How many items to return at one time (max 100)
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;Pet&gt; object
     */
    public Observable<List<Pet>> listPetsAsync(Integer limit) {
        return listPetsWithServiceResponseAsync(limit).map(new Func1<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>, List<Pet>>() {
            @Override
            public List<Pet> call(ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders> response) {
                return response.body();
            }
        });
    }

    /**
     * List all pets.
     *
     * @param limit How many items to return at one time (max 100)
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;Pet&gt; object
     */
    public Observable<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>> listPetsWithServiceResponseAsync(Integer limit) {
        return service.listPets(limit)
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders> clientResponse = listPetsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw HTTP response for listPets: 200 -> List<Pet> plus typed headers,
    // anything registered as an error -> ErrorException.
    private ServiceResponseWithHeaders<List<Pet>, ListPetsHeaders> listPetsDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
        return this.restClient().responseBuilderFactory().<List<Pet>, ErrorException>newInstance(this.serializerAdapter())
                .register(200, new TypeToken<List<Pet>>() { }.getType())
                .registerError(ErrorException.class)
                .buildWithHeaders(response, ListPetsHeaders.class);
    }

    /**
     * Create a pet.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void createPets() {
        createPetsWithServiceResponseAsync().toBlocking().single().body();
    }

    /**
     * Create a pet.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> createPetsAsync(final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(createPetsWithServiceResponseAsync(), serviceCallback);
    }

    /**
     * Create a pet.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<Void> createPetsAsync() {
        return createPetsWithServiceResponseAsync().map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Create a pet.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> createPetsWithServiceResponseAsync() {
        return service.createPets()
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = createPetsDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw HTTP response for createPets: only 201 (Created) is a success.
    private ServiceResponse<Void> createPetsDelegate(Response<ResponseBody> response) throws ErrorException, IOException {
        return this.restClient().responseBuilderFactory().<Void, ErrorException>newInstance(this.serializerAdapter())
                .register(201, new TypeToken<Void>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * Info for a specific pet.
     *
     * @param petId The id of the pet to retrieve
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;Pet&gt; object if successful.
     */
    public List<Pet> showPetById(String petId) {
        return showPetByIdWithServiceResponseAsync(petId).toBlocking().single().body();
    }

    /**
     * Info for a specific pet.
     *
     * @param petId The id of the pet to retrieve
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<Pet>> showPetByIdAsync(String petId, final ServiceCallback<List<Pet>> serviceCallback) {
        return ServiceFuture.fromResponse(showPetByIdWithServiceResponseAsync(petId), serviceCallback);
    }

    /**
     * Info for a specific pet.
     *
     * @param petId The id of the pet to retrieve
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;Pet&gt; object
     */
    public Observable<List<Pet>> showPetByIdAsync(String petId) {
        return showPetByIdWithServiceResponseAsync(petId).map(new Func1<ServiceResponse<List<Pet>>, List<Pet>>() {
            @Override
            public List<Pet> call(ServiceResponse<List<Pet>> response) {
                return response.body();
            }
        });
    }

    /**
     * Info for a specific pet.
     *
     * @param petId The id of the pet to retrieve
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;Pet&gt; object
     */
    public Observable<ServiceResponse<List<Pet>>> showPetByIdWithServiceResponseAsync(String petId) {
        if (petId == null) {
            throw new IllegalArgumentException("Parameter petId is required and cannot be null.");
        }
        return service.showPetById(petId)
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<Pet>>>>() {
                @Override
                public Observable<ServiceResponse<List<Pet>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<List<Pet>> clientResponse = showPetByIdDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Maps the raw HTTP response for showPetById: 200 -> List<Pet>.
    private ServiceResponse<List<Pet>> showPetByIdDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.restClient().responseBuilderFactory().<List<Pet>, ErrorException>newInstance(this.serializerAdapter())
                .register(200, new TypeToken<List<Pet>>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.discovery.graph;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.atlas.AtlasException;
import org.apache.atlas.query.Expressions;
import org.apache.atlas.query.GraphPersistenceStrategies;
import org.apache.atlas.query.GraphPersistenceStrategies$class;
import org.apache.atlas.query.IntSequence;
import org.apache.atlas.query.TypeUtils;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.IDataType;
import org.apache.atlas.typesystem.types.Multiplicity;
import org.apache.atlas.typesystem.types.StructType;
import org.apache.atlas.typesystem.types.TraitType;
import org.apache.atlas.typesystem.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/**
* Default implementation of GraphPersistenceStrategy.
*/
/**
 * Default implementation of GraphPersistenceStrategy, delegating most lookups to the
 * graph-backed metadata repository.
 */
public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategies {

    private static final Logger LOG = LoggerFactory.getLogger(DefaultGraphPersistenceStrategy.class);

    private final GraphBackedMetadataRepository metadataRepository;

    public DefaultGraphPersistenceStrategy(MetadataRepository metadataRepository) {
        // This strategy only works against the graph-backed repository implementation.
        this.metadataRepository = (GraphBackedMetadataRepository) metadataRepository;
    }

    @Override
    public String typeAttributeName() {
        return metadataRepository.getTypeAttributeName();
    }

    @Override
    public String superTypeAttributeName() {
        return metadataRepository.getSuperTypeAttributeName();
    }

    @Override
    public String edgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
        return metadataRepository.getEdgeLabel(dataType, aInfo);
    }

    @Override
    public String traitLabel(IDataType<?> dataType, String traitName) {
        return metadataRepository.getTraitLabel(dataType, traitName);
    }

    @Override
    public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
        try {
            return metadataRepository.getFieldNameInVertex(dataType, aInfo);
        } catch (AtlasException e) {
            // Interface method cannot throw AtlasException; preserve it as the cause.
            throw new RuntimeException(e);
        }
    }

    @Override
    public List<String> traitNames(TitanVertex vertex) {
        return metadataRepository.getTraitNames(vertex);
    }

    @Override
    public String fieldPrefixInSelect() {
        return "it";
    }

    @Override
    public Id getIdFromVertex(String dataTypeName, TitanVertex vertex) {
        return metadataRepository.getIdFromVertex(dataTypeName, vertex);
    }

    /**
     * Converts a persisted graph value/vertex back into a typed instance of the given
     * data type. Returns null when the category is unsupported here (MAP, TRAIT) or
     * when conversion fails (the failure is logged).
     */
    @Override
    public <U> U constructInstance(IDataType<U> dataType, Object value) {
        try {
            switch (dataType.getTypeCategory()) {
            case PRIMITIVE:
            case ENUM:
                return dataType.convert(value, Multiplicity.OPTIONAL);

            case ARRAY:
                DataTypes.ArrayType arrType = (DataTypes.ArrayType) dataType;
                IDataType<?> elemType = arrType.getElemType();
                ImmutableCollection.Builder result = ImmutableList.builder();
                List list = (List) value;
                for (Object listElement : list) {
                    // Null entries (e.g. unsupported element categories) are dropped.
                    Object collectionEntry = constructCollectionEntry(elemType, listElement);
                    if (collectionEntry != null) {
                        result.add(collectionEntry);
                    }
                }
                return (U) result.build();

            case MAP:
                // todo
                break;

            case STRUCT:
                TitanVertex structVertex = (TitanVertex) value;
                StructType structType = (StructType) dataType;
                ITypedStruct structInstance = structType.createInstance();
                TypeSystem.IdType idType = TypeSystem.getInstance().getIdType();
                if (dataType.getName().equals(idType.getName())) {
                    // The synthetic Id struct is populated directly from vertex properties.
                    structInstance.set(idType.typeNameAttrName(), structVertex.getProperty(typeAttributeName()));
                    structInstance.set(idType.idAttrName(), structVertex.getProperty(idAttributeName()));
                } else {
                    metadataRepository.getGraphToInstanceMapper()
                            .mapVertexToInstance(structVertex, structInstance, structType.fieldMapping().fields);
                }
                return dataType.convert(structInstance, Multiplicity.OPTIONAL);

            case TRAIT:
                TitanVertex traitVertex = (TitanVertex) value;
                TraitType traitType = (TraitType) dataType;
                ITypedStruct traitInstance = traitType.createInstance();
                // todo - this is not right, we should load the Instance associated with this
                // trait. for now just loading the trait struct.
                // metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
                //     traitVertex, dataType.getName(), , traitType, traitInstance);
                metadataRepository.getGraphToInstanceMapper()
                        .mapVertexToInstance(traitVertex, traitInstance, traitType.fieldMapping().fields);
                // NOTE(review): the mapped trait instance is discarded and null is returned
                // (pre-existing behavior, kept as-is); see the todo above.
                break;

            case CLASS:
                TitanVertex classVertex = (TitanVertex) value;
                ITypedReferenceableInstance classInstance = metadataRepository.getGraphToInstanceMapper()
                        .mapGraphToTypedInstance(classVertex.<String>getProperty(Constants.GUID_PROPERTY_KEY),
                                classVertex);
                return dataType.convert(classInstance, Multiplicity.OPTIONAL);

            default:
                // Fixed: the original message was missing the space before "is".
                throw new UnsupportedOperationException("Load for type " + dataType + " is not supported");
            }
        } catch (AtlasException e) {
            LOG.error("error while constructing an instance", e);
        }
        return null;
    }

    /**
     * Converts a single array element. STRUCT/CLASS elements are stored as the id of
     * the outgoing edge that links to the referenced vertex, so they are resolved via
     * the referred entity; nested ARRAY/MAP/TRAIT elements are not supported and
     * yield null.
     */
    public <U> U constructCollectionEntry(IDataType<U> elementType, Object value) throws AtlasException {
        switch (elementType.getTypeCategory()) {
        case PRIMITIVE:
        case ENUM:
            return constructInstance(elementType, value);
        //The array values in case of STRUCT, CLASS contain the edgeId if the outgoing edge which links to the STRUCT, CLASS vertex referenced
        case STRUCT:
        case CLASS:
            String edgeId = (String) value;
            return (U) metadataRepository.getGraphToInstanceMapper().getReferredEntity(edgeId, elementType);
        case ARRAY:
        case MAP:
        case TRAIT:
            return null;
        default:
            throw new UnsupportedOperationException("Load for type " + elementType + " in collections is not supported");
        }
    }

    @Override
    public String edgeLabel(TypeUtils.FieldInfo fInfo) {
        // Reverse-direction fields use the label of the reverse data type.
        return fInfo.reverseDataType() == null ? edgeLabel(fInfo.dataType(), fInfo.attrInfo()) :
                edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
    }

    @Override
    public String gremlinCompOp(Expressions.ComparisonExpression op) {
        return GraphPersistenceStrategies$class.gremlinCompOp(this, op);
    }

    @Override
    public String loopObjectExpression(IDataType<?> dataType) {
        return GraphPersistenceStrategies$class.loopObjectExpression(this, dataType);
    }

    @Override
    public String instanceToTraitEdgeDirection() {
        return "out";
    }

    @Override
    public String traitToInstanceEdgeDirection() {
        return "in";
    }

    @Override
    public String idAttributeName() {
        return metadataRepository.getIdAttributeName();
    }

    @Override
    public scala.collection.Seq<String> typeTestExpression(String typeName, IntSequence intSeq) {
        return GraphPersistenceStrategies$class.typeTestExpression(this, typeName, intSeq);
    }

    @Override
    public boolean collectTypeInstancesIntoVar() {
        return GraphPersistenceStrategies$class.collectTypeInstancesIntoVar(this);
    }

    @Override
    public boolean addGraphVertexPrefix(scala.collection.Traversable<String> preStatements) {
        return GraphPersistenceStrategies$class.addGraphVertexPrefix(this, preStatements);
    }
}
| |
package org.recommender101.recommender.extensions.funksvd.impl;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* From previous Apache Mahout implementation (0.4)
* @see FastMap
* @see FastIDSet
*/
public final class FastByIDMap<V> implements Cloneable {
public static final int NO_MAX_SIZE = Integer.MAX_VALUE;
private static final double ALLOWED_LOAD_FACTOR = 1.5;
/** Dummy object used to represent a key that has been removed. */
private static final long REMOVED = Long.MAX_VALUE;
private static final long NULL = Long.MIN_VALUE;
private long[] keys;
private V[] values;
private int numEntries;
private int numSlotsUsed;
private int maxSize;
private BitSet recentlyAccessed;
private final boolean countingAccesses;
/** Creates a new map with default capacity and no maximum size. */
public FastByIDMap() {
  this(2, NO_MAX_SIZE);
}
/** Creates a new map sized for {@code size} entries, with no maximum size. */
public FastByIDMap(int size) {
  this(size, NO_MAX_SIZE);
}
/**
 * Creates a new map whose capacity can accommodate the given number of entries without
 * rehash.
 *
 * @param size
 *          desired capacity
 * @param maxSize
 *          max capacity
 * @throws IllegalArgumentException
 *           if size is less than 0, maxSize is less than 1, or at least half of
 *           {@link RandomUtils#MAX_INT_SMALLER_TWIN_PRIME}
 */
@SuppressWarnings("unchecked")
public FastByIDMap(int size, int maxSize) {
  if (size < 0) {
    throw new IllegalArgumentException("size must be at least 0");
  }
  // Cap the requested size so the scaled hash table cannot exceed the largest usable prime.
  int max = (int) (RandomUtils.MAX_INT_SMALLER_TWIN_PRIME / ALLOWED_LOAD_FACTOR);
  if (size >= max) {
    throw new IllegalArgumentException("size must be less than " + max);
  }
  if (maxSize < 1) {
    throw new IllegalArgumentException("maxSize must be at least 1");
  }
  // Over-allocate by the load factor and round up to a twin prime — presumably so the
  // double-hashing probe in find()/findForAdd() cycles the whole table; TODO confirm.
  int hashSize = RandomUtils.nextTwinPrime((int) (ALLOWED_LOAD_FACTOR * size));
  keys = new long[hashSize];
  Arrays.fill(keys, NULL);
  values = (V[]) new Object[hashSize];
  this.maxSize = maxSize;
  // Access tracking for stale-entry eviction is only needed when a max size is enforced.
  this.countingAccesses = maxSize != Integer.MAX_VALUE;
  this.recentlyAccessed = countingAccesses ? new BitSet(hashSize) : null;
}
/**
 * Locates the slot index for {@code key} using double hashing.
 * Unlike {@link #findForAdd(long)}, REMOVED slots are probed past, so lookups still
 * reach keys inserted before a removal in the same probe chain.
 *
 * @see #findForAdd(long)
 */
private int find(long key) {
  int theHashCode = (int) key & 0x7FFFFFFF; // make sure it's positive
  long[] keys = this.keys;
  int hashSize = keys.length;
  // Secondary step in [1, hashSize-2], derived from the same hash.
  int jump = 1 + theHashCode % (hashSize - 2);
  int index = theHashCode % hashSize;
  long currentKey = keys[index];
  while ((currentKey != NULL) && (key != currentKey)) {
    // Step backwards by 'jump' modulo hashSize without a division.
    if (index < jump) {
      index += hashSize - jump;
    } else {
      index -= jump;
    }
    currentKey = keys[index];
  }
  return index;
}
/**
 * Locates a slot for inserting {@code key}: stops at the first NULL *or* REMOVED
 * slot (so removed slots are reused), or at an existing slot holding the key.
 *
 * @see #find(long)
 */
private int findForAdd(long key) {
  int theHashCode = (int) key & 0x7FFFFFFF; // make sure it's positive
  long[] keys = this.keys;
  int hashSize = keys.length;
  int jump = 1 + theHashCode % (hashSize - 2);
  int index = theHashCode % hashSize;
  long currentKey = keys[index];
  while ((currentKey != NULL) && (currentKey != REMOVED) && (key != currentKey)) { // Different
    // here
    // Step backwards by 'jump' modulo hashSize without a division.
    if (index < jump) {
      index += hashSize - jump;
    } else {
      index -= jump;
    }
    currentKey = keys[index];
  }
  return index;
}
/**
 * Returns the value mapped to {@code key}, or null when absent.
 * When a maximum size is enforced, the access is recorded for stale-entry eviction.
 */
public V get(long key) {
  // NULL is the sentinel for an empty slot and can never be a real key.
  if (key == NULL) {
    return null;
  }
  final int slot = find(key);
  if (countingAccesses) {
    recentlyAccessed.set(slot);
  }
  return values[slot];
}
/** Returns the number of live entries (REMOVED tombstones are not counted). */
public int size() {
  return numEntries;
}
/** Returns true when the map holds no live entries. */
public boolean isEmpty() {
  return numEntries == 0;
}
/** Returns true if {@code key} is present; sentinel values NULL/REMOVED are never keys. */
public boolean containsKey(long key) {
  return (key != NULL) && (key != REMOVED) && (keys[find(key)] != NULL);
}
/**
 * Returns true if some entry maps to {@code value} (by equals); null never matches
 * because null slots mark empty/removed positions.
 */
public boolean containsValue(Object value) {
  if (value == null) {
    return false;
  }
  for (int i = 0; i < values.length; i++) {
    final V candidate = values[i];
    if (candidate != null && value.equals(candidate)) {
      return true;
    }
  }
  return false;
}
/**
 * Associates {@code value} with {@code key}, returning the previous value or null.
 * May grow or rehash first, and may evict a stale entry when the max size is reached.
 *
 * @throws IllegalArgumentException if key is one of the reserved sentinel values
 * @throws NullPointerException if value is null (null marks empty slots internally)
 */
public V put(long key, V value) {
  if ((key == NULL) || (key == REMOVED)) {
    throw new IllegalArgumentException();
  }
  if (value == null) {
    throw new NullPointerException();
  }
  // If less than half the slots are open, let's clear it up
  if (numSlotsUsed * ALLOWED_LOAD_FACTOR >= keys.length) {
    // If over half the slots used are actual entries, let's grow
    if (numEntries * ALLOWED_LOAD_FACTOR >= numSlotsUsed) {
      growAndRehash();
    } else {
      // Otherwise just rehash to clear REMOVED entries and don't grow
      rehash();
    }
  }
  // Here we may later consider implementing Brent's variation described on page 532
  int index = findForAdd(key);
  long keyIndex = keys[index];
  if (keyIndex == key) {
    // Key already present: overwrite in place and report the old value.
    V oldValue = values[index];
    values[index] = value;
    return oldValue;
  } else {
    // If size is limited,
    if (countingAccesses && (numEntries >= maxSize)) {
      // and we're too large, clear some old-ish entry
      clearStaleEntry(index);
    }
    keys[index] = key;
    values[index] = value;
    numEntries++;
    // Only a previously-NULL slot increases the used-slot count; reusing a
    // REMOVED slot does not.
    if (keyIndex == NULL) {
      numSlotsUsed++;
    }
    return null;
  }
}
/**
 * Evicts one entry to make room when the map is at max size. Scans backwards
 * (wrapping) from {@code index}; entries whose recently-accessed bit is set get a
 * second chance (the bit is cleared), and the first entry found without the bit
 * is deleted.
 */
private void clearStaleEntry(int index) {
  while (true) {
    long currentKey;
    do {
      // Walk backwards with wrap-around until a live entry is found.
      if (index == 0) {
        index = keys.length - 1;
      } else {
        index--;
      }
      currentKey = keys[index];
    } while ((currentKey == NULL) || (currentKey == REMOVED));
    if (recentlyAccessed.get(index)) {
      // Second chance: clear the bit and keep scanning.
      recentlyAccessed.clear(index);
    } else {
      break;
    }
  }
  // Delete the entry
  keys[index] = REMOVED;
  numEntries--;
  values[index] = null;
}
/**
 * Removes the mapping for {@code key}, returning the previous value or null.
 * The slot is tombstoned with REMOVED rather than cleared, so probe chains
 * through it stay intact.
 */
public V remove(long key) {
  if ((key == NULL) || (key == REMOVED)) {
    return null;
  }
  int index = find(key);
  if (keys[index] == NULL) {
    return null;
  } else {
    keys[index] = REMOVED;
    numEntries--;
    V oldValue = values[index];
    values[index] = null;
    // don't decrement numSlotsUsed
    return oldValue;
  }
  // Could un-set recentlyAccessed's bit but doesn't matter
}
/** Removes all entries and resets all slots (including REMOVED tombstones) to empty. */
public void clear() {
  numEntries = 0;
  numSlotsUsed = 0;
  Arrays.fill(keys, NULL);
  Arrays.fill(values, null);
  if (countingAccesses) {
    recentlyAccessed.clear();
  }
}
/** Returns a primitive iterator over the live keys (backed by this map). */
public LongPrimitiveIterator keySetIterator() {
  return new KeyIterator();
}
/** Returns a live entry-set view of this map. */
public Set<Map.Entry<Long,V>> entrySet() {
  return new EntrySet();
}
/** Rehashes into a table sized for the current live entries, dropping REMOVED tombstones. */
public void rehash() {
  rehash(RandomUtils.nextTwinPrime((int) (ALLOWED_LOAD_FACTOR * numEntries)));
}
/** Grows the table by the load factor and rehashes; fails once the prime limit is reached. */
private void growAndRehash() {
  if (keys.length * ALLOWED_LOAD_FACTOR >= RandomUtils.MAX_INT_SMALLER_TWIN_PRIME) {
    throw new IllegalStateException("Can't grow any more");
  }
  rehash(RandomUtils.nextTwinPrime((int) (ALLOWED_LOAD_FACTOR * keys.length)));
}
/**
 * Rebuilds the table at {@code newHashSize}, re-inserting every live entry.
 * Counters are reset and repopulated by put(); access-tracking bits are discarded.
 */
@SuppressWarnings("unchecked")
private void rehash(int newHashSize) {
  long[] oldKeys = keys;
  V[] oldValues = values;
  numEntries = 0;
  numSlotsUsed = 0;
  if (countingAccesses) {
    recentlyAccessed = new BitSet(newHashSize);
  }
  keys = new long[newHashSize];
  Arrays.fill(keys, NULL);
  values = (V[]) new Object[newHashSize];
  int length = oldKeys.length;
  for (int i = 0; i < length; i++) {
    long key = oldKeys[i];
    // Skip empty slots and tombstones; put() recomputes each slot in the new table.
    if ((key != NULL) && (key != REMOVED)) {
      put(key, oldValues[i]);
    }
  }
}
/**
 * Removes the entry at the iterator's last-returned position.
 *
 * @throws NoSuchElementException if the position is past the end of the table
 * @throws IllegalStateException if next() has not been called yet (position is -1)
 */
void iteratorRemove(int lastNext) {
  if (lastNext >= values.length) {
    throw new NoSuchElementException();
  }
  if (lastNext < 0) {
    throw new IllegalStateException();
  }
  values[lastNext] = null;
  keys[lastNext] = REMOVED;
  numEntries--;
}
/**
 * Returns a copy with its own key/value arrays. Access-tracking state is reset
 * (fresh BitSet), and values themselves are shared, not deep-copied.
 */
@SuppressWarnings("unchecked")
@Override
public FastByIDMap<V> clone() {
  FastByIDMap<V> clone;
  try {
    clone = (FastByIDMap<V>) super.clone();
  } catch (CloneNotSupportedException cnse) {
    // Cannot happen: this class implements Cloneable.
    throw new AssertionError();
  }
  clone.keys = keys.clone();
  clone.values = values.clone();
  clone.recentlyAccessed = countingAccesses ? new BitSet(keys.length) : null;
  return clone;
}
/**
 * Renders the map as {@code {key=value,key=value}}; an empty map renders
 * as {@code {}}.
 */
@Override
public String toString() {
    if (isEmpty()) {
        return "{}";
    }
    StringBuilder buffer = new StringBuilder();
    buffer.append('{');
    for (int slot = 0; slot < keys.length; slot++) {
        long candidate = keys[slot];
        if ((candidate == NULL) || (candidate == REMOVED)) {
            continue; // empty slot or tombstone — not a live entry
        }
        buffer.append(candidate).append('=').append(values[slot]).append(',');
    }
    // Overwrite the trailing comma with the closing brace.
    buffer.setCharAt(buffer.length() - 1, '}');
    return buffer.toString();
}
/** Iterates over the keys of live entries, in table-slot order. */
private final class KeyIterator extends AbstractLongPrimitiveIterator {
    // Next table slot to examine.
    private int position;
    // Slot of the most recently returned key; -1 until nextLong() is called.
    private int lastNext = -1;
    @Override
    public boolean hasNext() {
        goToNext();
        return position < keys.length;
    }
    @Override
    public long nextLong() {
        goToNext();
        // Note: lastNext is recorded before the bounds check, so it can end
        // up pointing one past the table when the iterator is exhausted.
        lastNext = position;
        if (position >= keys.length) {
            throw new NoSuchElementException();
        }
        return keys[position++];
    }
    @Override
    public long peek() {
        // Returns the next key without advancing the iterator.
        goToNext();
        if (position >= keys.length) {
            throw new NoSuchElementException();
        }
        return keys[position];
    }
    // Advances position to the next slot holding a live entry (non-null value).
    private void goToNext() {
        int length = values.length;
        while ((position < length) && (values[position] == null)) {
            position++;
        }
    }
    @Override
    public void remove() {
        iteratorRemove(lastNext);
    }
    @Override
    public void skip(int n) {
        // NOTE(review): this skips n raw table slots, not n live entries —
        // confirm that callers expect slot-based skipping.
        position += n;
    }
}
/**
 * Live {@code Set} view of the map's entries. Mutation is supported only via
 * {@link #clear()} and the iterator's {@code remove()}; all other mutators
 * throw {@code UnsupportedOperationException}.
 */
private final class EntrySet extends AbstractSet<Map.Entry<Long,V>> {
    @Override
    public int size() {
        return FastByIDMap.this.size();
    }
    @Override
    public boolean isEmpty() {
        return FastByIDMap.this.isEmpty();
    }
    @Override
    public boolean contains(Object o) {
        // NOTE(review): unlike the Map.entrySet() contract, this tests key
        // membership and casts its argument to Long — a non-Long argument
        // throws ClassCastException. Confirm callers only pass Long keys.
        return containsKey((Long) o);
    }
    @Override
    public Iterator<Map.Entry<Long,V>> iterator() {
        return new EntryIterator();
    }
    // Additions are unsupported; entries are created through the enclosing map.
    @Override
    public boolean add(Map.Entry<Long,V> t) {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean addAll(Collection<? extends Map.Entry<Long,V>> ts) {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean retainAll(Collection<?> objects) {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean removeAll(Collection<?> objects) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void clear() {
        FastByIDMap.this.clear();
    }
    /** Lightweight entry backed directly by a table slot. */
    private final class MapEntry implements Map.Entry<Long,V> {
        private final int index;
        private MapEntry(int index) {
            this.index = index;
        }
        @Override
        public Long getKey() {
            return keys[index];
        }
        @Override
        public V getValue() {
            return values[index];
        }
        @Override
        public V setValue(V value) {
            // Null values are disallowed: a null slot marks an empty entry.
            if (value == null) {
                throw new IllegalArgumentException();
            }
            V oldValue = values[index];
            values[index] = value;
            return oldValue;
        }
    }
    /** Iterates over live entries in table-slot order; supports remove(). */
    private final class EntryIterator implements Iterator<Map.Entry<Long,V>> {
        // Next table slot to examine.
        private int position;
        // Slot of the most recently returned entry; -1 until next() is called.
        private int lastNext = -1;
        @Override
        public boolean hasNext() {
            goToNext();
            return position < keys.length;
        }
        @Override
        public Map.Entry<Long,V> next() {
            goToNext();
            // lastNext is recorded before the bounds check, mirroring
            // KeyIterator's behavior.
            lastNext = position;
            if (position >= keys.length) {
                throw new NoSuchElementException();
            }
            return new MapEntry(position++);
        }
        // Advances position to the next slot with a non-null value.
        private void goToNext() {
            int length = values.length;
            while ((position < length) && (values[position] == null)) {
                position++;
            }
        }
        @Override
        public void remove() {
            iteratorRemove(lastNext);
        }
    }
}
}
| |
package com.bernard.beaconportal.activities.activity.setup;
import java.net.URI;
import java.net.URISyntaxException;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.text.Editable;
import android.text.TextWatcher;
import android.text.method.DigitsKeyListener;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.bernard.beaconportal.activities.Account;
import com.bernard.beaconportal.activities.MAIL;
import com.bernard.beaconportal.activities.Preferences;
import com.bernard.beaconportal.activities.R;
import com.bernard.beaconportal.activities.activity.MAILActivityMaterial;
import com.bernard.beaconportal.activities.activity.setup.AccountSetupCheckSettings.CheckDirection;
import com.bernard.beaconportal.activities.helper.Utility;
import com.bernard.beaconportal.activities.mail.AuthType;
import com.bernard.beaconportal.activities.mail.ConnectionSecurity;
import com.bernard.beaconportal.activities.mail.ServerSettings;
import com.bernard.beaconportal.activities.mail.Transport;
import com.bernard.beaconportal.activities.mail.transport.SmtpTransport;
/**
 * Setup screen for an account's outgoing (SMTP) server. Pre-fills the form
 * from the account's stored transport URI, re-validates the fields as the
 * user types, and hands off to {@link AccountSetupCheckSettings} to verify
 * the server when "Next" is pressed.
 */
public class AccountSetupOutgoing extends MAILActivityMaterial implements
        OnClickListener, OnCheckedChangeListener {
    private static final String EXTRA_ACCOUNT = "account";
    private static final String EXTRA_MAKE_DEFAULT = "makeDefault";
    // Default SMTP ports: 587 for NONE/STARTTLS, 465 for SSL/TLS.
    private static final String SMTP_PORT = "587";
    private static final String SMTP_SSL_PORT = "465";
    private EditText mUsernameView;
    private EditText mPasswordView;
    private EditText mServerView;
    private EditText mPortView;
    private CheckBox mRequireLoginView;
    private ViewGroup mRequireLoginSettingsView;
    private Spinner mSecurityTypeView;
    private Spinner mAuthTypeView;
    private ArrayAdapter<AuthType> mAuthTypeAdapter;
    private Button mNextButton;
    private Account mAccount;
    private boolean mMakeDefault;
    /** Launches this screen for the given account. */
    public static void actionOutgoingSettings(Context context, Account account,
            boolean makeDefault) {
        Intent i = new Intent(context, AccountSetupOutgoing.class);
        i.putExtra(EXTRA_ACCOUNT, account.getUuid());
        i.putExtra(EXTRA_MAKE_DEFAULT, makeDefault);
        context.startActivity(i);
    }
    /** Launches this screen in edit mode (ACTION_EDIT) for an existing account. */
    public static void actionEditOutgoingSettings(Context context,
            Account account) {
        context.startActivity(intentActionEditOutgoingSettings(context, account));
    }
    /** Builds the ACTION_EDIT intent used by {@link #actionEditOutgoingSettings}. */
    public static Intent intentActionEditOutgoingSettings(Context context,
            Account account) {
        Intent i = new Intent(context, AccountSetupOutgoing.class);
        i.setAction(Intent.ACTION_EDIT);
        i.putExtra(EXTRA_ACCOUNT, account.getUuid());
        return i;
    }
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.account_setup_outgoing);
        // Tint the stock action-bar title white.
        int titleId = getResources().getIdentifier("action_bar_title", "id",
                "android");
        TextView abTitle = (TextView) findViewById(titleId);
        abTitle.setTextColor(getResources().getColor((R.color.white)));
        // Apply the user-chosen action-bar color (default blue when no
        // preference is stored) and, on API 21+, a matching status-bar color.
        SharedPreferences sharedpref = getSharedPreferences("actionbar_color",
                Context.MODE_PRIVATE);
        if (!sharedpref.contains("actionbar_color")) {
            getSupportActionBar().setBackgroundDrawable(
                    new ColorDrawable(Color.parseColor("#4285f4")));
            if (Build.VERSION.SDK_INT >= 21) {
                Window window = getWindow();
                window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
                window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
                window.setStatusBarColor(Color.parseColor("#3367d6"));
            }
        } else {
            String actionbar_colors = sharedpref.getString("actionbar_color",
                    null);
            getSupportActionBar().setBackgroundDrawable(
                    new ColorDrawable(Color.parseColor(actionbar_colors)));
            // Also tint the split action bar, when the device uses one.
            final int splitBarId = getResources().getIdentifier(
                    "split_action_bar", "id", "android");
            final View splitActionBar = findViewById(splitBarId);
            if (splitActionBar != null) {
                splitActionBar.setBackgroundDrawable(
                        new ColorDrawable(Color.parseColor(actionbar_colors)));
            }
            if (Build.VERSION.SDK_INT >= 21) {
                Window window = getWindow();
                window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
                window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
                window.setStatusBarColor(Color.parseColor(actionbar_colors));
            }
        }
        ActionBar bar = getSupportActionBar();
        bar.setIcon(new ColorDrawable(getResources().getColor(
                android.R.color.transparent)));
        String accountUuid = getIntent().getStringExtra(EXTRA_ACCOUNT);
        mAccount = Preferences.getPreferences(this).getAccount(accountUuid);
        // For webdav store schemes the transport URI mirrors the store URI,
        // and the settings check is kicked off immediately.
        try {
            if (new URI(mAccount.getStoreUri()).getScheme()
                    .startsWith("webdav")) {
                mAccount.setTransportUri(mAccount.getStoreUri());
                AccountSetupCheckSettings.actionCheckSettings(this, mAccount,
                        CheckDirection.OUTGOING);
            }
        } catch (URISyntaxException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        // Wire up the form widgets.
        mUsernameView = (EditText) findViewById(R.id.account_username);
        mPasswordView = (EditText) findViewById(R.id.account_password);
        mServerView = (EditText) findViewById(R.id.account_server);
        mPortView = (EditText) findViewById(R.id.account_port);
        mRequireLoginView = (CheckBox) findViewById(R.id.account_require_login);
        mRequireLoginSettingsView = (ViewGroup) findViewById(R.id.account_require_login_settings);
        mSecurityTypeView = (Spinner) findViewById(R.id.account_security_type);
        mAuthTypeView = (Spinner) findViewById(R.id.account_auth_type);
        mNextButton = (Button) findViewById(R.id.next);
        mNextButton.setOnClickListener(this);
        mRequireLoginView.setOnCheckedChangeListener(this);
        ArrayAdapter<ConnectionSecurity> securityTypesAdapter = new ArrayAdapter<ConnectionSecurity>(
                this, android.R.layout.simple_spinner_item,
                ConnectionSecurity.values());
        securityTypesAdapter
                .setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mSecurityTypeView.setAdapter(securityTypesAdapter);
        mAuthTypeAdapter = AuthType.getArrayAdapter(this);
        mAuthTypeView.setAdapter(mAuthTypeAdapter);
        /*
         * Calls validateFields() which enables or disables the Next button
         * based on the fields' validity.
         */
        TextWatcher validationTextWatcher = new TextWatcher() {
            @Override
            public void afterTextChanged(Editable s) {
                validateFields();
            }
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count,
                    int after) {
            }
            @Override
            public void onTextChanged(CharSequence s, int start, int before,
                    int count) {
            }
        };
        mUsernameView.addTextChangedListener(validationTextWatcher);
        mPasswordView.addTextChangedListener(validationTextWatcher);
        mServerView.addTextChangedListener(validationTextWatcher);
        mPortView.addTextChangedListener(validationTextWatcher);
        /*
         * Only allow digits in the port field.
         */
        mPortView.setKeyListener(DigitsKeyListener.getInstance("0123456789"));
        // FIXME: get Account object again? (this re-fetch duplicates the
        // lookup done near the top of onCreate)
        accountUuid = getIntent().getStringExtra(EXTRA_ACCOUNT);
        mAccount = Preferences.getPreferences(this).getAccount(accountUuid);
        mMakeDefault = getIntent().getBooleanExtra(EXTRA_MAKE_DEFAULT, false);
        /*
         * If we're being reloaded we override the original account with the one
         * we saved
         */
        if (savedInstanceState != null
                && savedInstanceState.containsKey(EXTRA_ACCOUNT)) {
            accountUuid = savedInstanceState.getString(EXTRA_ACCOUNT);
            mAccount = Preferences.getPreferences(this).getAccount(accountUuid);
        }
        try {
            // Pre-fill the form from the account's stored transport settings.
            ServerSettings settings = Transport.decodeTransportUri(mAccount
                    .getTransportUri());
            String username = settings.username;
            String password = settings.password;
            if (username != null) {
                mUsernameView.setText(username);
                mRequireLoginView.setChecked(true);
            }
            if (password != null) {
                mPasswordView.setText(password);
            }
            updateAuthPlainTextFromSecurityType(settings.connectionSecurity);
            // The first item is selected if settings.authenticationType is null
            // or is not in mAuthTypeAdapter
            int position = mAuthTypeAdapter
                    .getPosition(settings.authenticationType);
            mAuthTypeView.setSelection(position, false);
            // Select currently configured security type
            mSecurityTypeView.setSelection(
                    settings.connectionSecurity.ordinal(), false);
            /*
             * Updates the port when the user changes the security type. This
             * allows us to show a reasonable default which the user can change.
             *
             * Note: It's important that we set the listener *after* an initial
             * option has been selected by the code above. Otherwise the
             * listener might be called after onCreate() has been processed and
             * the current port value set later in this method is overridden
             * with the default port for the selected security type.
             */
            mSecurityTypeView
                    .setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
                        @Override
                        public void onItemSelected(AdapterView<?> parent,
                                View view, int position, long id) {
                            updatePortFromSecurityType();
                        }
                        @Override
                        public void onNothingSelected(AdapterView<?> parent) { /* unused */
                        }
                    });
            if (settings.host != null) {
                mServerView.setText(settings.host);
            }
            if (settings.port != -1) {
                mPortView.setText(Integer.toString(settings.port));
            } else {
                // No stored port: show the default for the security type.
                updatePortFromSecurityType();
            }
            validateFields();
        } catch (Exception e) {
            /*
             * We should always be able to parse our own settings.
             */
            failure(e);
        }
    }
    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Persist the account UUID so onCreate() can re-load it after a
        // configuration change.
        outState.putString(EXTRA_ACCOUNT, mAccount.getUuid());
    }
    /**
     * Enables the Next button only when server and port are valid and, if
     * login is required, username and password are filled in.
     */
    private void validateFields() {
        mNextButton.setEnabled(Utility.domainFieldValid(mServerView)
                && Utility.requiredFieldValid(mPortView)
                && (!mRequireLoginView.isChecked() || (Utility
                        .requiredFieldValid(mUsernameView) && Utility
                        .requiredFieldValid(mPasswordView))));
        // Dim the button's drawables while it is disabled.
        Utility.setCompoundDrawablesAlpha(mNextButton,
                mNextButton.isEnabled() ? 255 : 128);
    }
    /** Resets the port field to the default for the selected security type. */
    private void updatePortFromSecurityType() {
        ConnectionSecurity securityType = (ConnectionSecurity) mSecurityTypeView
                .getSelectedItem();
        mPortView.setText(getDefaultSmtpPort(securityType));
        updateAuthPlainTextFromSecurityType(securityType);
    }
    /**
     * Maps a security type to its default SMTP port string; returns "" (and
     * logs) for an unrecognized type.
     */
    private String getDefaultSmtpPort(ConnectionSecurity securityType) {
        String port;
        switch (securityType) {
        case NONE:
        case STARTTLS_REQUIRED:
            port = SMTP_PORT;
            break;
        case SSL_TLS_REQUIRED:
            port = SMTP_SSL_PORT;
            break;
        default:
            port = "";
            Log.e(MAIL.LOG_TAG, "Unhandled ConnectionSecurity type encountered");
        }
        return port;
    }
    /**
     * Marks the PLAIN auth option as insecure in the adapter when no
     * connection security is selected.
     */
    private void updateAuthPlainTextFromSecurityType(
            ConnectionSecurity securityType) {
        switch (securityType) {
        case NONE:
            AuthType.PLAIN.useInsecureText(true, mAuthTypeAdapter);
            break;
        default:
            AuthType.PLAIN.useInsecureText(false, mAuthTypeAdapter);
        }
    }
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Called when AccountSetupCheckSettings finishes: save in edit mode,
        // otherwise continue to the options screen.
        if (resultCode == RESULT_OK) {
            if (Intent.ACTION_EDIT.equals(getIntent().getAction())) {
                mAccount.save(Preferences.getPreferences(this));
                finish();
            } else {
                AccountSetupOptions.actionOptions(this, mAccount, mMakeDefault);
                finish();
            }
        }
    }
    /**
     * Builds a transport URI from the form fields, stores it on the account,
     * and launches the outgoing-settings check.
     */
    protected void onNext() {
        ConnectionSecurity securityType = (ConnectionSecurity) mSecurityTypeView
                .getSelectedItem();
        String uri;
        String username = null;
        String password = null;
        AuthType authType = null;
        if (mRequireLoginView.isChecked()) {
            username = mUsernameView.getText().toString();
            password = mPasswordView.getText().toString();
            authType = (AuthType) mAuthTypeView.getSelectedItem();
        }
        String newHost = mServerView.getText().toString();
        int newPort = Integer.parseInt(mPortView.getText().toString());
        String type = SmtpTransport.TRANSPORT_TYPE;
        ServerSettings server = new ServerSettings(type, newHost, newPort,
                securityType, authType, username, password);
        uri = Transport.createTransportUri(server);
        // Drop any previously accepted certificate for this host/port before
        // re-checking the connection.
        mAccount.deleteCertificate(newHost, newPort, CheckDirection.OUTGOING);
        mAccount.setTransportUri(uri);
        AccountSetupCheckSettings.actionCheckSettings(this, mAccount,
                CheckDirection.OUTGOING);
    }
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
        case R.id.next:
            onNext();
            break;
        }
    }
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        // Show the credential fields only while "require login" is checked.
        mRequireLoginSettingsView.setVisibility(isChecked ? View.VISIBLE
                : View.GONE);
        validateFields();
    }
    /** Logs the error and shows a toast describing the bad settings URI. */
    private void failure(Exception use) {
        Log.e(MAIL.LOG_TAG, "Failure", use);
        String toastText = getString(R.string.account_setup_bad_uri,
                use.getMessage());
        Toast toast = Toast.makeText(getApplication(), toastText,
                Toast.LENGTH_LONG);
        toast.show();
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/environment.proto
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The request message for [Environments.CreateEnvironment][google.cloud.dialogflow.cx.v3beta1.Environments.CreateEnvironment].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest}
*/
public final class CreateEnvironmentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest)
CreateEnvironmentRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CreateEnvironmentRequest.newBuilder() to construct.
  private CreateEnvironmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes default field values for new/default instances.
  private CreateEnvironmentRequest() {
    parent_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked by the protobuf runtime to create fresh instances.
    return new CreateEnvironmentRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  /**
   * Generated parsing constructor: reads the message from the wire,
   * preserving any unknown fields.
   */
  private CreateEnvironmentRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 10:
            {
              // Field 1 (parent): length-delimited UTF-8 string.
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 18:
            {
              // Field 2 (environment): embedded message, merged with any
              // previously read value.
              com.google.cloud.dialogflow.cx.v3beta1.Environment.Builder subBuilder = null;
              if (environment_ != null) {
                subBuilder = environment_.toBuilder();
              }
              environment_ =
                  input.readMessage(
                      com.google.cloud.dialogflow.cx.v3beta1.Environment.parser(),
                      extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(environment_);
                environment_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              // Unrecognized tags are stashed in unknownFields.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.EnvironmentProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateEnvironmentRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3beta1.EnvironmentProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateEnvironmentRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.class,
            com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; the accessors below convert
  // lazily and cache the converted form.
  private volatile java.lang.Object parent_;
  /**
   *
   *
   * <pre>
   * Required. The [Agent][google.cloud.dialogflow.cx.v3beta1.Agent] to create an [Environment][google.cloud.dialogflow.cx.v3beta1.Environment] for.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The [Agent][google.cloud.dialogflow.cx.v3beta1.Agent] to create an [Environment][google.cloud.dialogflow.cx.v3beta1.Environment] for.
   * Format: `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agents/&lt;Agent ID&gt;`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ENVIRONMENT_FIELD_NUMBER = 2;
  // null means "not set"; getEnvironment() substitutes the default instance.
  private com.google.cloud.dialogflow.cx.v3beta1.Environment environment_;
  /**
   *
   *
   * <pre>
   * Required. The environment to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3beta1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the environment field is set.
   */
  @java.lang.Override
  public boolean hasEnvironment() {
    return environment_ != null;
  }
  /**
   *
   *
   * <pre>
   * Required. The environment to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3beta1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The environment.
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.Environment getEnvironment() {
    return environment_ == null
        ? com.google.cloud.dialogflow.cx.v3beta1.Environment.getDefaultInstance()
        : environment_;
  }
  /**
   *
   *
   * <pre>
   * Required. The environment to create.
   * </pre>
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3beta1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.EnvironmentOrBuilder getEnvironmentOrBuilder() {
    return getEnvironment();
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Always initialized (no required proto2 semantics to check here);
    // the result is memoized on first call.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize non-default fields only: parent (field 1) if non-empty,
    // environment (field 2) if set, then any unknown fields.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (environment_ != null) {
      output.writeMessage(2, getEnvironment());
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Computes the serialized byte size once and memoizes it.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (environment_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEnvironment());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Field-by-field equality, including unknown fields.
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest other =
        (com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasEnvironment() != other.hasEnvironment()) return false;
    if (hasEnvironment()) {
      if (!getEnvironment().equals(other.getEnvironment())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash mixes the descriptor, each set field, and unknown fields;
    // memoized after the first computation.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasEnvironment()) {
      hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER;
      hash = (53 * hash) + getEnvironment().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate
  // to PARSER (optionally via GeneratedMessageV3's IOException wrappers).
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a fresh builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated with the given prototype's fields. */
  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this
    // message's fields into the new builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request message for [Environments.CreateEnvironment][google.cloud.dialogflow.cx.v3beta1.Environments.CreateEnvironment].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest)
com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequestOrBuilder {
    /** Returns the protobuf descriptor for the message this builder builds. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.EnvironmentProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateEnvironmentRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3beta1.EnvironmentProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateEnvironmentRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.class,
              com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.Builder.class);
    }
    // Construct using com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No nested builders need eager initialization for this message.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    @java.lang.Override
    public Builder clear() {
      // Reset every field to its default: empty parent, no environment.
      super.clear();
      parent_ = "";
      if (environmentBuilder_ == null) {
        environment_ = null;
      } else {
        environment_ = null;
        environmentBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.EnvironmentProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_CreateEnvironmentRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest build() {
      // Like buildPartial(), but rejects uninitialized results.
      com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest buildPartial() {
      // Copies the builder's current state into a new message instance.
      com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest result =
          new com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest(this);
      result.parent_ = parent_;
      if (environmentBuilder_ == null) {
        result.environment_ = environment_;
      } else {
        result.environment_ = environmentBuilder_.build();
      }
      onBuilt();
      return result;
    }
// The overrides below delegate unchanged to GeneratedMessageV3.Builder; they are
// regenerated boilerplate and carry no request-specific behavior.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic-dispatch merge: routes to the typed overload when possible, else falls
// back to the reflective superclass merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: a non-empty parent overwrites ours; environment messages are
// merged recursively; unknown fields are accumulated.
public Builder mergeFrom(
com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest other) {
if (other
== com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
onChanged();
}
if (other.hasEnvironment()) {
mergeEnvironment(other.getEnvironment());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// proto3: no required fields, so a message is always considered initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from a wire-format stream. On a parse error the partially-read message is
// still merged into this builder (in the finally block) before the exception is rethrown.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Stored as either String or ByteString; lazily converted and cached as a String
// on first read (standard generated-code representation for proto string fields).
private java.lang.Object parent_ = "";
/**
 * Required. The [Agent][google.cloud.dialogflow.cx.v3beta1.Agent] to create an
 * [Environment][google.cloud.dialogflow.cx.v3beta1.Environment] for.
 * Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>`.
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * Required. See {@link #getParent()} for field semantics.
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * Required. See {@link #getParent()} for field semantics.
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
onChanged();
return this;
}
/**
 * Resets parent to its proto3 default (the empty string).
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
onChanged();
return this;
}
/**
 * Required. See {@link #getParent()} for field semantics.
 *
 * @param value The bytes for parent to set; must be valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
onChanged();
return this;
}
private com.google.cloud.dialogflow.cx.v3beta1.Environment environment_;
// Lazily-created single-field builder; once non-null, it (not environment_) holds
// the live value for this field.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.Environment,
com.google.cloud.dialogflow.cx.v3beta1.Environment.Builder,
com.google.cloud.dialogflow.cx.v3beta1.EnvironmentOrBuilder>
environmentBuilder_;
/**
 * Required. The environment to create.
 *
 * <code>.google.cloud.dialogflow.cx.v3beta1.Environment environment = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the environment field is set.
 */
public boolean hasEnvironment() {
return environmentBuilder_ != null || environment_ != null;
}
/**
 * Required. The environment to create.
 *
 * @return The environment, or the default instance when unset.
 */
public com.google.cloud.dialogflow.cx.v3beta1.Environment getEnvironment() {
if (environmentBuilder_ == null) {
return environment_ == null
? com.google.cloud.dialogflow.cx.v3beta1.Environment.getDefaultInstance()
: environment_;
} else {
return environmentBuilder_.getMessage();
}
}
/**
 * Required. The environment to create. Rejects null.
 */
public Builder setEnvironment(com.google.cloud.dialogflow.cx.v3beta1.Environment value) {
if (environmentBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
environment_ = value;
onChanged();
} else {
environmentBuilder_.setMessage(value);
}
return this;
}
/**
 * Required. The environment to create (from a sub-builder; built immediately).
 */
public Builder setEnvironment(
com.google.cloud.dialogflow.cx.v3beta1.Environment.Builder builderForValue) {
if (environmentBuilder_ == null) {
environment_ = builderForValue.build();
onChanged();
} else {
environmentBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * Merges {@code value} into the current environment; if none is set yet, simply
 * adopts {@code value}.
 */
public Builder mergeEnvironment(com.google.cloud.dialogflow.cx.v3beta1.Environment value) {
if (environmentBuilder_ == null) {
if (environment_ != null) {
environment_ =
com.google.cloud.dialogflow.cx.v3beta1.Environment.newBuilder(environment_)
.mergeFrom(value)
.buildPartial();
} else {
environment_ = value;
}
onChanged();
} else {
environmentBuilder_.mergeFrom(value);
}
return this;
}
/**
 * Clears the environment field (drops any live sub-builder as well).
 */
public Builder clearEnvironment() {
if (environmentBuilder_ == null) {
environment_ = null;
onChanged();
} else {
environment_ = null;
environmentBuilder_ = null;
}
return this;
}
/**
 * Returns a mutable sub-builder for the environment, creating it on demand.
 */
public com.google.cloud.dialogflow.cx.v3beta1.Environment.Builder getEnvironmentBuilder() {
onChanged();
return getEnvironmentFieldBuilder().getBuilder();
}
/**
 * Read-only view of the environment: the live sub-builder when one exists,
 * otherwise the stored message (or the default instance when unset).
 */
public com.google.cloud.dialogflow.cx.v3beta1.EnvironmentOrBuilder getEnvironmentOrBuilder() {
if (environmentBuilder_ != null) {
return environmentBuilder_.getMessageOrBuilder();
} else {
return environment_ == null
? com.google.cloud.dialogflow.cx.v3beta1.Environment.getDefaultInstance()
: environment_;
}
}
/**
 * Lazily creates the single-field builder, seeding it with the current value;
 * afterwards environment_ is nulled out because the builder owns the value.
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.Environment,
com.google.cloud.dialogflow.cx.v3beta1.Environment.Builder,
com.google.cloud.dialogflow.cx.v3beta1.EnvironmentOrBuilder>
getEnvironmentFieldBuilder() {
if (environmentBuilder_ == null) {
environmentBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.Environment,
com.google.cloud.dialogflow.cx.v3beta1.Environment.Builder,
com.google.cloud.dialogflow.cx.v3beta1.EnvironmentOrBuilder>(
getEnvironment(), getParentForChildren(), isClean());
environment_ = null;
}
return environmentBuilder_;
}
// Unknown-field handling delegates unchanged to the superclass (generated boilerplate).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest)
// Singleton default instance shared by all callers of getDefaultInstance().
private static final com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest();
}
public static com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless parser shared across instances; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<CreateEnvironmentRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateEnvironmentRequest>() {
@java.lang.Override
public CreateEnvironmentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CreateEnvironmentRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<CreateEnvironmentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateEnvironmentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.CreateEnvironmentRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright (C) 2011 lightcouch.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lightcouch;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.commons.codec.Charsets;
import org.apache.http.client.methods.HttpGet;
import org.lightcouch.ChangesResult.Row;

import com.google.gson.Gson;
/**
* <p>Contains the Change Notifications API, supports <i>normal</i> and <i>continuous</i> feed Changes.
* <h3>Usage Example:</h3>
* <pre>
* // feed type normal
* String since = dbClient.context().info().getUpdateSeq(); // latest update seq
* ChangesResult changeResult = dbClient.changes()
* .since(since)
* .limit(10)
* .filter("example/filter")
* .getChanges();
*
* for (ChangesResult.Row row : changeResult.getResults()) {
 * String docId = row.getId();
* JsonObject doc = row.getDoc();
* }
*
* // feed type continuous
* Changes changes = dbClient.changes()
* .includeDocs(true)
* .heartBeat(30000)
* .continuousChanges();
*
* while (changes.hasNext()) {
* ChangesResult.Row feed = changes.next();
* String docId = feed.getId();
* JsonObject doc = feed.getDoc();
* // changes.stop(); // stop continuous feed
* }
* </pre>
* @see ChangesResult
* @since 0.0.2
* @author Ahmed Yehia
*/
public class Changes {
    private BufferedReader reader;
    private HttpGet httpGet;
    private Row nextRow;
    // Set by stop(); volatile because stop() is typically invoked from a different
    // thread than the one blocked inside hasNext()/readNextRow().
    private volatile boolean stop;
    private CouchDbClientBase dbc;
    private Gson gson;
    private URIBuilder uriBuilder;

    Changes(CouchDbClientBase dbc) {
        this.dbc = dbc;
        this.gson = dbc.getGson();
        this.uriBuilder = URIBuilder.buildUri(dbc.getDBUri()).path("_changes");
    }

    /**
     * Requests Change notifications of feed type continuous.
     * <p>Feed notifications are accessed in an <i>iterator</i> style via
     * {@link #hasNext()} / {@link #next()}.
     * @return {@link Changes}
     */
    public Changes continuousChanges() {
        final URI uri = uriBuilder.query("feed", "continuous").build();
        httpGet = new HttpGet(uri);
        final InputStream in = dbc.get(httpGet);
        // Explicit UTF-8 (stdlib StandardCharsets instead of the deprecated
        // commons-codec Charsets): CouchDB emits JSON as UTF-8.
        final InputStreamReader is = new InputStreamReader(in, StandardCharsets.UTF_8);
        setReader(new BufferedReader(is));
        return this;
    }

    /**
     * Checks whether a feed is available in the continuous stream, blocking
     * until a feed is received, the feed is stopped, or the server closes the stream.
     * @return true If a feed is available
     */
    public boolean hasNext() {
        return readNextRow();
    }

    /**
     * @return The next feed in the stream (the row read by the preceding {@link #hasNext()}).
     */
    public Row next() {
        return getNextRow();
    }

    /**
     * Stops a running continuous feed; the current/next {@link #hasNext()} returns false.
     */
    public void stop() {
        stop = true;
    }

    /**
     * Requests Change notifications of feed type normal (one-shot, non-blocking).
     * @return {@link ChangesResult}
     */
    public ChangesResult getChanges() {
        final URI uri = uriBuilder.query("feed", "normal").build();
        return dbc.get(uri, ChangesResult.class);
    }

    // Query Params

    public Changes since(String since) {
        uriBuilder.query("since", since);
        return this;
    }

    public Changes limit(int limit) {
        uriBuilder.query("limit", limit);
        return this;
    }

    public Changes heartBeat(long heartBeat) {
        uriBuilder.query("heartbeat", heartBeat);
        return this;
    }

    public Changes timeout(long timeout) {
        uriBuilder.query("timeout", timeout);
        return this;
    }

    public Changes filter(String filter) {
        uriBuilder.query("filter", filter);
        return this;
    }

    public Changes includeDocs(boolean includeDocs) {
        uriBuilder.query("include_docs", includeDocs);
        return this;
    }

    public Changes style(String style) {
        uriBuilder.query("style", style);
        return this;
    }

    // Helper

    /**
     * Reads and sets the next feed in the stream.
     * @return true when a data row was read; false when stopped, on end of stream
     *         (readLine() returned null), or on the trailing last_seq row.
     */
    private boolean readNextRow() {
        boolean hasNext = false;
        try {
            if (!stop) {
                String row;
                // Skip empty heartbeat lines; BUGFIX: readLine() returns null when the
                // server closes the stream — the original dereferenced it and threw NPE.
                do {
                    row = getReader().readLine();
                } while (row != null && row.length() == 0);
                if (row != null && !row.startsWith("{\"last_seq\":")) {
                    setNextRow(gson.fromJson(row, Row.class));
                    hasNext = true;
                }
            }
        } catch (Exception e) {
            terminate();
            throw new CouchDbException("Error reading continuous stream.", e);
        }
        if (!hasNext)
            terminate();
        return hasNext;
    }

    private BufferedReader getReader() {
        return reader;
    }

    private void setReader(BufferedReader reader) {
        this.reader = reader;
    }

    private Row getNextRow() {
        return nextRow;
    }

    private void setNextRow(Row nextRow) {
        this.nextRow = nextRow;
    }

    /**
     * Aborts the in-flight request (if any) and closes the reader.
     */
    private void terminate() {
        if (httpGet != null) {  // guard: terminate() can run before continuousChanges()
            httpGet.abort();
        }
        CouchDbUtil.close(getReader());
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.containers.hash;
import org.junit.Assert;
import org.junit.Test;
import java.util.Iterator;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
/**
 * Tests for {@code com.intellij.util.containers.hash.LinkedHashMap}: basic put/get/remove,
 * iteration order, access-order (LRU) eviction via {@code removeEldestEntry}, and a few
 * disabled micro-benchmarks against {@code java.util.LinkedHashMap}.
 */
public class LinkedHashMapTest {
  @Test
  public void testPutGet() {
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 1000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(1000, tested.size());
    for (int i = 0; i < 1000; ++i) {
      assertEquals(Integer.toString(i), tested.get(i));
    }
    // put() on an existing key must return the previous value.
    for (int i = 0; i < 1000; ++i) {
      assertEquals(Integer.toString(i), tested.put(i, Integer.toString(i + 1)));
    }
    assertEquals(1000, tested.size());
    for (int i = 0; i < 1000; ++i) {
      assertEquals(Integer.toString(i + 1), tested.get(i));
    }
  }
  @Test
  public void testPutGet2() {
    // Same as testPutGet but with negative keys to exercise hashing of negatives.
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 1000; ++i) {
      tested.put(i - 500, Integer.toString(i));
    }
    assertEquals(1000, tested.size());
    for (int i = 0; i < 1000; ++i) {
      assertEquals(Integer.toString(i), tested.get(i - 500));
    }
    for (int i = 0; i < 1000; ++i) {
      assertEquals(Integer.toString(i), tested.put(i - 500, Integer.toString(i + 1)));
    }
    assertEquals(1000, tested.size());
    for (int i = 0; i < 1000; ++i) {
      assertEquals(Integer.toString(i + 1), tested.get(i - 500));
    }
  }
  @Test
  public void testPutGetRemove() {
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 1000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(1000, tested.size());
    // remove() must return the removed value; even keys are dropped.
    for (int i = 0; i < 1000; i += 2) {
      assertEquals(Integer.toString(i), tested.remove(i));
    }
    assertEquals(500, tested.size());
    for (int i = 0; i < 1000; ++i) {
      assertEquals((i % 2 == 0) ? null : Integer.toString(i), tested.get(i));
    }
  }
  @Test
  public void keySet() {
    // Default mode: keySet() iterates in insertion order.
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 10000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    int i = 0;
    for (Integer key : tested.keySet()) {
      assertEquals(i++, key.intValue());
    }
  }
  @Test
  public void keySet2() {
    // Iterator.remove() through keySet() must remove the entry from the map.
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 10000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    Iterator<Integer> it = tested.keySet().iterator();
    while (it.hasNext()) {
      final int i = it.next();
      if (i % 2 == 0) {
        it.remove();
      }
    }
    assertEquals(5000, tested.size());
    it = tested.keySet().iterator();
    for (int i = 1; i <= 9999; i += 2) {
      Assert.assertTrue(it.hasNext());
      assertEquals(i, it.next().intValue());
    }
  }
  @Test
  public void lru() {
    // Insertion-order eviction: capacity 500, so keys 0..499 are evicted.
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>() {
      @Override
      protected boolean removeEldestEntry(Map.Entry<Integer, String> eldest) {
        return size() > 500;
      }
    };
    for (int i = 0; i < 1000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(500, tested.size());
    for (int i = 0; i < 500; ++i) {
      Assert.assertNull(tested.remove(i));
    }
    assertEquals(500, tested.size());
    for (int i = 500; i < 1000; ++i) {
      assertEquals(Integer.toString(i), tested.remove(i));
    }
    assertEquals(0, tested.size());
  }
  @Test
  public void lru2() {
    // Access-order mode: get(0) refreshes key 0, so it survives eviction while
    // the untouched key 1000 is evicted when 2000 is inserted.
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>(0, true) {
      @Override
      protected boolean removeEldestEntry(Map.Entry<Integer, String> eldest) {
        return size() > 1000;
      }
    };
    for (int i = 0; i < 1000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(Integer.toString(0), tested.get(0));
    for (int i = 1000; i < 1999; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(Integer.toString(0), tested.get(0));
    tested.put(2000, Integer.toString(2000));
    Assert.assertNull(tested.get(1000));
  }
  @Test
  public void lru3() {
    // Like lru2 but with an explicit remove() in the middle of the access sequence.
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>(0, true) {
      @Override
      protected boolean removeEldestEntry(Map.Entry<Integer, String> eldest) {
        return size() > 1000;
      }
    };
    for (int i = 0; i < 1000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(Integer.toString(999), tested.remove(999));
    assertEquals(999, tested.size());
    assertEquals(Integer.toString(0), tested.get(0));
    for (int i = 1000; i < 1999; ++i) {
      tested.put(i, Integer.toString(i));
    }
    assertEquals(Integer.toString(0), tested.get(0));
    tested.put(2000, Integer.toString(2000));
    assertNull(tested.get(1000));
  }
  @Test
  public void valuesIteration() {
    Map<Integer, String> map = new LinkedHashMap<Integer, String>();
    map.put(1, "a");
    map.put(2, "b");
    map.put(3, "c");
    Iterator<String> iterator = map.values().iterator();
    assertEquals("a", iterator.next());
    assertEquals("b", iterator.next());
    assertEquals("c", iterator.next());
    assertFalse(iterator.hasNext());
  }
  @Test
  public void accessOrderValuesIteration() {
    // Without any get() calls, access-order iteration equals insertion order.
    Map<Integer, String> map = new LinkedHashMap<Integer, String>(0, true);
    map.put(1, "a");
    map.put(2, "b");
    map.put(3, "c");
    Iterator<String> iterator = map.values().iterator();
    assertEquals("a", iterator.next());
    assertEquals("b", iterator.next());
    assertEquals("c", iterator.next());
    assertFalse(iterator.hasNext());
  }
  @Test
  public void lastAddedKey() {
    // getLastKey()/getLastValue() track insertion order (get() does not refresh here),
    // and fall back to the previous entry after the last one is removed.
    LinkedHashMap<Integer, String> map = new LinkedHashMap<Integer, String>();
    map.put(1, "a");
    map.put(2, "b");
    map.put(3, "c");
    map.get(1);
    map.get(2);
    assertEquals(3, map.getLastKey().intValue());
    assertEquals("c", map.getLastValue());
    map.remove(2);
    assertEquals(3, map.getLastKey().intValue());
    assertEquals("c", map.getLastValue());
    map.remove(3);
    assertEquals(1, map.getLastKey().intValue());
    assertEquals("a", map.getLastValue());
    map.remove(1);
    assertNull(map.getLastKey());
    assertNull(map.getLastValue());
  }
  //@Test
  public void benchmarkGet() {
    long started;
    final Map<Integer, String> map = new java.util.LinkedHashMap<Integer, String>();
    for (int i = 0; i < 100000; ++i) {
      map.put(i, Integer.toString(i));
    }
    started = System.currentTimeMillis();
    for (int i = 0; i < 1000; ++i) {
      for (int j = 0; j < 100000; ++j) {
        map.get(j);
      }
    }
    System.out.println("100 000 000 lookups in java.util.LinkedHashMap took " + (System.currentTimeMillis() - started));
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 100000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    started = System.currentTimeMillis();
    for (int i = 0; i < 1000; ++i) {
      for (int j = 0; j < 100000; ++j) {
        tested.get(j);
      }
    }
    System.out.println("100 000 000 lookups in LinkedHashMap took " + (System.currentTimeMillis() - started));
  }
  //@Test
  public void benchmarkGetMissingKeys() {
    long started;
    final Map<Integer, String> map = new java.util.LinkedHashMap<Integer, String>();
    for (int i = 0; i < 100000; ++i) {
      map.put(i, Integer.toString(i));
    }
    started = System.currentTimeMillis();
    for (int i = 0; i < 1000; ++i) {
      for (int j = 0; j < 100000; ++j) {
        map.get(j + 1000000);
      }
    }
    System.out.println("100 000 000 lookups in java.util.LinkedHashMap took " + (System.currentTimeMillis() - started));
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 100000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    started = System.currentTimeMillis();
    for (int i = 0; i < 1000; ++i) {
      for (int j = 0; j < 100000; ++j) {
        tested.get(j + 1000000);
      }
    }
    System.out.println("100 000 000 lookups in LinkedHashMap took " + (System.currentTimeMillis() - started));
  }
  //@Test
  public void benchmarkLRU() {
    long started;
    final Map<Integer, String> map = new java.util.LinkedHashMap<Integer, String>();
    for (int i = 0; i < 100000; ++i) {
      map.put(i, Integer.toString(i));
    }
    started = System.currentTimeMillis();
    for (int i = 0; i < 200; ++i) {
      for (int j = 0; j < 100000; ++j) {
        final String v = map.remove(j);
        map.put(j, v);
      }
    }
    System.out.println("20 000 000 LRU lookups in java.util.LinkedHashMap took " + (System.currentTimeMillis() - started));
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<Integer, String>();
    for (int i = 0; i < 100000; ++i) {
      tested.put(i, Integer.toString(i));
    }
    started = System.currentTimeMillis();
    for (int i = 0; i < 200; ++i) {
      for (int j = 0; j < 100000; ++j) {
        // BUGFIX: mirror the remove+put mix timed for java.util.LinkedHashMap above;
        // the original timed plain get() here, making the printed comparison unfair.
        final String v = tested.remove(j);
        tested.put(j, v);
      }
    }
    System.out.println("20 000 000 LRU lookups in LinkedHashMap took " + (System.currentTimeMillis() - started));
  }
}
| |
package com.smartsheet.api.internal;
/*
* #[license]
* Smartsheet SDK for Java
* %%
* Copyright (C) 2014 Smartsheet
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* %[license]
*/
import com.smartsheet.api.*;
import com.smartsheet.api.internal.http.HttpEntity;
import com.smartsheet.api.internal.http.HttpMethod;
import com.smartsheet.api.internal.http.HttpRequest;
import com.smartsheet.api.internal.http.HttpResponse;
import com.smartsheet.api.internal.util.QueryUtil;
import com.smartsheet.api.internal.util.Util;
import com.smartsheet.api.models.*;
import com.smartsheet.api.models.enums.ListUserInclusion;
import com.smartsheet.api.models.enums.UserInclusion;
import java.io.*;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.*;
/**
* This is the implementation of the UserResources.
*
* Thread Safety: This class is thread safe because it is immutable and its base class is thread safe.
*/
public class UserResourcesImpl extends AbstractResources implements UserResources {
/**
 * Constructor.
 *
 * Exceptions: - IllegalArgumentException : if any argument is null
 *
 * @param smartsheet the SmartsheetImpl providing the HTTP client, serializer and base URI
 */
public UserResourcesImpl(SmartsheetImpl smartsheet) {
super(smartsheet);
}
/**
 * Lists every user in the organization, unfiltered and with default pagination.
 *
 * Mirrors the Smartsheet REST API method: GET /users
 *
 * @return the list of all users (never null; empty page if there are none)
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public PagedResult<User> listUsers() throws SmartsheetException {
    final String path = "users";
    return listResourcesWithWrapper(path, User.class);
}
/**
 * List users filtered by email address, with pagination.
 *
 * It mirrors to the following Smartsheet REST API method: GET /users
 *
 * Delegates to {@code listUsers(email, null, pagination)}, i.e. with no extra
 * response inclusions.
 *
 * Exceptions:
 * - InvalidRequestException : if there is any problem with the REST API request
 * - AuthorizationException : if there is any problem with the REST API authorization(access token)
 * - ServiceUnavailableException : if the REST API service is not available (possibly due to rate limiting)
 * - SmartsheetRestException : if there is any other REST API related error occurred during the operation
 * - SmartsheetException : if there is any other error occurred during the operation
 *
 * @param email the list of email addresses to filter on
 * @param pagination the object containing the pagination query parameters
 * @return all matching users (note that empty list will be returned if there is none)
 * @throws SmartsheetException the smartsheet exception
 */
public PagedResult<User> listUsers(Set<String> email, PaginationParameters pagination) throws SmartsheetException {
return this.listUsers(email, null, pagination);
}
/**
 * Lists users filtered by email address, with optional response inclusions and pagination.
 *
 * Mirrors the Smartsheet REST API method: GET /users
 *
 * Exceptions:
 * - InvalidRequestException : if there is any problem with the REST API request
 * - AuthorizationException : if there is any problem with the REST API authorization(access token)
 * - ServiceUnavailableException : if the REST API service is not available (possibly due to rate limiting)
 * - SmartsheetRestException : if there is any other REST API related error occurred during the operation
 * - SmartsheetException : if there is any other error occurred during the operation
 *
 * @param email the list of email addresses to filter on
 * @param includes elements to include in the response
 * @param pagination the object containing the pagination query parameters
 * @return all matching users (note that empty list will be returned if there is none)
 * @throws SmartsheetException the smartsheet exception
 */
public PagedResult<User> listUsers(Set<String> email, EnumSet<ListUserInclusion> includes,
        PaginationParameters pagination) throws SmartsheetException {
    // Seed query parameters from the pagination object when one was supplied.
    Map<String, Object> parameters =
            (pagination == null) ? new HashMap<String, Object>() : pagination.toHashMap();
    parameters.put("email", QueryUtil.generateCommaSeparatedList(email));
    parameters.put("include", QueryUtil.generateCommaSeparatedList(includes));
    final String path = "users" + QueryUtil.generateUrl(null, parameters);
    return listResourcesWithWrapper(path, User.class);
}
/**
 * Adds a user to the organization without sending a notification email.
 *
 * Mirrors the Smartsheet REST API method: POST /users
 *
 * Exceptions:
 * - IllegalArgumentException : if any argument is null
 * - InvalidRequestException : if there is any problem with the REST API request
 * - AuthorizationException : if there is any problem with the REST API authorization(access token)
 * - ResourceNotFoundException : if the resource can not be found
 * - ServiceUnavailableException : if the REST API service is not available (possibly due to rate limiting)
 * - SmartsheetRestException : if there is any other REST API related error occurred during the operation
 * - SmartsheetException : if there is any other error occurred during the operation
 *
 * @param user the user object limited to the following attributes: * admin * email * licensedSheetCreator
 * @return the created user
 * @throws SmartsheetException the smartsheet exception
 */
public User addUser(User user) throws SmartsheetException {
    final String path = "users";
    return createResource(path, User.class, user);
}
/**
 * Adds a user to the organization, optionally sending a notification email.
 *
 * Mirrors the Smartsheet REST API method: POST /users
 *
 * Exceptions:
 * - IllegalArgumentException : if any argument is null
 * - InvalidRequestException : if there is any problem with the REST API request
 * - AuthorizationException : if there is any problem with the REST API authorization(access token)
 * - ResourceNotFoundException : if the resource can not be found
 * - ServiceUnavailableException : if the REST API service is not available (possibly due to rate limiting)
 * - SmartsheetRestException : if there is any other REST API related error occurred during the operation
 * - SmartsheetException : if there is any other error occurred during the operation
 *
 * @param user the user object limited to the following attributes: * admin * email * licensedSheetCreator
 * @param sendEmail whether to send a notification email to the new user
 * @return the created user
 * @throws SmartsheetException the smartsheet exception
 */
public User addUser(User user, boolean sendEmail) throws SmartsheetException {
    final String path = "users?sendEmail=" + sendEmail;
    return createResource(path, User.class, user);
}
/**
 * Get a user by id.
 *
 * It mirrors to the following Smartsheet REST API method: GET /users/{userId}
 *
 * @param userId the user id
 * @return the user profile
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public UserProfile getUser(long userId) throws SmartsheetException {
return this.getResource("users/" + userId, UserProfile.class);
}
/**
 * Retrieve the profile of the currently authenticated user.
 *
 * It mirrors to the following Smartsheet REST API method: GET /users/me
 *
 * @return the current user's profile; if the resource does not exist this
 *         method throws ResourceNotFoundException rather than returning null
 * @throws SmartsheetException if the request fails for any reason (invalid
 *         request, authorization failure, resource not found, service
 *         unavailable, or any other REST API error)
 */
public UserProfile getCurrentUser() throws SmartsheetException {
    final String path = "users/me";
    return this.getResource(path, UserProfile.class);
}
/**
 * <p>Get the current user.</p>
 *
 * <p>It mirrors to the following Smartsheet REST API method: GET /users/me</p>
 *
 * @param includes used to specify the optional objects to include.
 * @return the current user
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public UserProfile getCurrentUser(EnumSet<UserInclusion> includes) throws SmartsheetException {
String path = "users/me";
HashMap<String, Object> parameters = new HashMap<String, Object>();
// Serialize the requested inclusions into a comma-separated "include" query parameter.
parameters.put("include", QueryUtil.generateCommaSeparatedList(includes));
path += QueryUtil.generateUrl(null, parameters);
return this.getResource(path, UserProfile.class);
}
/**
 * List all organisation sheets.
 *
 * It mirrors to the following Smartsheet REST API method: GET /users/sheets
 *
 * @param pagination the object containing the pagination query parameters
 * @param modifiedSince if not null, sent as the "modifiedSince" query parameter
 *        (formatted as an ISO-8601 timestamp) to filter the returned sheets
 * @return the list of all organisation sheets
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public PagedResult<Sheet> listOrgSheets(PaginationParameters pagination, Date modifiedSince) throws SmartsheetException {
String path = "users/sheets";
HashMap<String, Object> parameters = new HashMap<String, Object>();
if (pagination != null) {
parameters = pagination.toHashMap();
}
if (modifiedSince != null) {
// Format as ISO-8601 with a timezone offset, e.g. 2020-01-01T00:00:00+0000.
String isoDate = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").format(modifiedSince);
parameters.put("modifiedSince", isoDate);
}
path += QueryUtil.generateUrl(null, parameters);
return this.listResourcesWithWrapper(path, Sheet.class);
}
/**
 * List all organisation sheets.
 *
 * It mirrors to the following Smartsheet REST API method: GET /users/sheets
 *
 * Convenience overload of {@link #listOrgSheets(PaginationParameters, Date)}
 * with no modifiedSince filter.
 *
 * @param pagination the object containing the pagination query parameters
 * @return the list of all organisation sheets
 * @throws SmartsheetException if there is any error during the operation
 */
public PagedResult<Sheet> listOrgSheets(PaginationParameters pagination) throws SmartsheetException {
return this.listOrgSheets(pagination, null);
}
/**
 * List the alternate email addresses registered for a user.
 *
 * It mirrors to the following Smartsheet REST API method: GET /users/{userId}/alternateemails
 *
 * @param userId the id of the user
 * @param pagination the object containing the pagination query parameters; may be null
 * @return the paged list of the user's alternate emails
 * @throws SmartsheetException if the request fails (invalid request,
 *         authorization failure, resource not found, service unavailable,
 *         or any other REST API error)
 */
public PagedResult<AlternateEmail> listAlternateEmails(long userId, PaginationParameters pagination) throws SmartsheetException {
    final StringBuilder path = new StringBuilder("users/");
    path.append(userId).append("/alternateemails");
    if (pagination != null) {
        path.append(pagination.toQueryString());
    }
    return this.listResourcesWithWrapper(path.toString(), AlternateEmail.class);
}
/**
 * Fetch a single alternate email address belonging to a user.
 *
 * It mirrors to the following Smartsheet REST API method:
 * GET /users/{userId}/alternateemails/{alternateEmailId}
 *
 * @param userId the id of the user
 * @param altEmailId the id of the alternate email to retrieve
 * @return the alternate email; if the resource does not exist this method
 *         throws ResourceNotFoundException rather than returning null
 * @throws SmartsheetException if the request fails (invalid request,
 *         authorization failure, resource not found, service unavailable,
 *         or any other REST API error)
 */
public AlternateEmail getAlternateEmail(long userId, long altEmailId) throws SmartsheetException {
    final String path = "users/" + userId + "/alternateemails/" + altEmailId;
    return this.getResource(path, AlternateEmail.class);
}
/**
 * Add one or more alternate email addresses for a user.
 *
 * It mirrors to the following Smartsheet REST API method: POST /users/{userId}/alternateemails
 *
 * @param userId the id of the user
 * @param altEmails the alternate email addresses to add; an empty list is a
 *        no-op and is returned unchanged without issuing a request
 * @return the added alternate emails
 * @throws IllegalArgumentException if altEmails is null
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public List<AlternateEmail> addAlternateEmail(long userId, List<AlternateEmail> altEmails) throws SmartsheetException {
    Util.throwIfNull(altEmails);
    // Short-circuit: nothing to send for an empty list.
    if (altEmails.isEmpty()) {
        return altEmails;
    }
    return this.postAndReceiveList("users/" + userId + "/alternateemails", altEmails, AlternateEmail.class);
}
/**
 * Delete an alternate email.
 *
 * It mirrors to the following Smartsheet REST API method: DELETE /users/{userId}/alternateemails/{alternateEmailId}
 *
 * @param userId the id of the user
 * @param altEmailId the id of the alternate email to delete
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public void deleteAlternateEmail(long userId, long altEmailId) throws SmartsheetException {
this.deleteResource("users/" + userId + "/alternateemails/" + altEmailId, AlternateEmail.class);
}
/**
 * Promote an alternate email to be the user's primary email address.
 *
 * It mirrors to the following Smartsheet REST API method:
 * POST /users/{userId}/alternateemails/{alternateEmailId}/makeprimary
 *
 * @param userId id of the user
 * @param altEmailId alternate email id
 * @return the promoted alternate email (now primary)
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public AlternateEmail promoteAlternateEmail(long userId, long altEmailId) throws SmartsheetException {
// Manually build a bodiless POST to the makeprimary endpoint.
HttpRequest request = createHttpRequest(smartsheet.getBaseURI().resolve(
"users/" + userId + "/alternateemails/" + altEmailId + "/makeprimary"), HttpMethod.POST);
Object obj = null;
try {
HttpResponse response = this.smartsheet.getHttpClient().request(request);
switch (response.getStatusCode()) {
case 200:
// NOTE(review): unlike attachProfileImage below, the deserialized value is
// used directly without calling .getResult() — confirm deserializeResult
// returns the AlternateEmail itself here and not a Result wrapper.
obj = this.smartsheet.getJsonSerializer().deserializeResult(AlternateEmail.class,
response.getEntity().getContent());
break;
default:
// Any non-200 status is translated into the appropriate SmartsheetException.
handleError(response);
}
} finally {
// Always return the connection to the HTTP client, success or failure.
smartsheet.getHttpClient().releaseConnection();
}
return (AlternateEmail)obj;
}
/**
 * Uploads a profile image for the specified user.
 *
 * It mirrors to the following Smartsheet REST API method: POST /users/{userId}/profileimage
 *
 * @param userId id of the user
 * @param file path to the image file
 * @param fileType content type of the image file; if null, "application/octet-stream" is used
 * @return the updated user
 * @throws FileNotFoundException if the image file cannot be opened
 * @throws IllegalArgumentException if any argument is null or empty string
 * @throws InvalidRequestException if there is any problem with the REST API request
 * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
 * @throws ResourceNotFoundException if the resource cannot be found
 * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
 * @throws SmartsheetException if there is any other error during the operation
 */
public User addProfileImage(long userId, String file, String fileType) throws SmartsheetException, FileNotFoundException {
return attachProfileImage("users/" + userId + "/profileimage", file, fileType);
}
/**
 * Streams an image file to the given profile-image endpoint as a POST request.
 *
 * Fix over the original: the FileInputStream opened for the upload was never
 * closed, leaking a file handle per call; it is now closed in the finally block.
 *
 * @param path the relative resource path (e.g. "users/{userId}/profileimage")
 * @param file path to the image file; must not be null
 * @param contentType MIME type of the image; defaults to "application/octet-stream" when null
 * @return the updated user deserialized from the response, or null if an error
 *         path in handleError does not throw
 * @throws FileNotFoundException if the file cannot be opened
 * @throws SmartsheetException if the REST request fails
 * @throws IllegalArgumentException if file is null
 */
private User attachProfileImage(String path, String file, String contentType) throws SmartsheetException, FileNotFoundException {
    Util.throwIfNull(file);
    if (contentType == null) {
        contentType = "application/octet-stream";
    }
    HashMap<String, Object> parameters = new HashMap<String, Object>();
    path += QueryUtil.generateUrl(null, parameters);
    HttpRequest request = createHttpRequest(this.smartsheet.getBaseURI().resolve(path), HttpMethod.POST);
    try {
        request.getHeaders().put("Content-Disposition", "attachment; filename=\"" + URLEncoder.encode(file, "UTF-8") + "\"");
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is guaranteed by the JVM spec; this path should be unreachable.
        throw new RuntimeException(e);
    }
    File f = new File(file);
    InputStream is = new FileInputStream(f);
    HttpEntity entity = new HttpEntity();
    entity.setContentType(contentType);
    entity.setContent(is);
    entity.setContentLength(f.length());
    request.setEntity(entity);
    User obj = null;
    try {
        HttpResponse response = this.smartsheet.getHttpClient().request(request);
        switch (response.getStatusCode()) {
            case 200:
                obj = this.smartsheet.getJsonSerializer().deserializeResult(User.class,
                        response.getEntity().getContent()).getResult();
                break;
            default:
                handleError(response);
        }
    } finally {
        smartsheet.getHttpClient().releaseConnection();
        // Close the upload stream; previously it was leaked on every call.
        try {
            is.close();
        } catch (Exception ignored) {
            // best-effort close: the request has already completed or failed
        }
    }
    return obj;
}
/**
 * Update a user.
 *
 * Targets the resource at users/{userId}, where the id is taken from the
 * given user object.
 *
 * @param user the user to update; must not be null and must carry the id of
 *        the user being updated
 * @return the updated user
 * @throws IllegalArgumentException if user is null
 * @throws SmartsheetException if any error occurs during the operation
 */
@Override
public User updateUser(User user) throws SmartsheetException {
    // Fail fast with a clear error instead of an NPE on user.getId().
    Util.throwIfNull(user);
    return this.updateResource("users/" + user.getId(), User.class, user);
}
/**
 * Remove a user from the organization.
 *
 * Targets the resource at users/{userId}; optional query parameters (e.g.
 * ownership transfer) are appended when provided.
 *
 * @param userId the id of the user to delete
 * @param parameters optional deletion parameters; may be null
 * @throws SmartsheetException if any error occurs during the operation
 */
@Override
public void deleteUser(long userId, DeleteUserParameters parameters) throws SmartsheetException {
    final String base = "users/" + userId;
    final String path = (parameters == null) ? base : base + parameters.toQueryString();
    this.deleteResource(path, User.class);
}
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved.
*
* Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.
*
* The contents of this file are subject to the terms of either the GNU General
* Public License Version 2 only ("GPL") or the Common Development and Distribution
* License("CDDL") (collectively, the "License"). You may not use this file except in
* compliance with the License. You can obtain a copy of the License at
* http://www.netbeans.org/cddl-gplv2.html or nbbuild/licenses/CDDL-GPL-2-CP. See the
* License for the specific language governing permissions and limitations under the
* License. When distributing the software, include this License Header Notice in
* each file and include the License file at nbbuild/licenses/CDDL-GPL-2-CP. Oracle
* designates this particular file as subject to the "Classpath" exception as
* provided by Oracle in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License Header,
* with the fields enclosed by brackets [] replaced by your own identifying
* information: "Portions Copyrighted [year] [name of copyright owner]"
*
* Contributor(s):
*
* The Original Software is NetBeans. The Initial Developer of the Original Software
* is Sun Microsystems, Inc. Portions Copyright 1997-2007 Sun Microsystems, Inc. All
* Rights Reserved.
*
* If you wish your version of this file to be governed by only the CDDL or only the
* GPL Version 2, indicate your decision by adding "[Contributor] elects to include
* this software in this distribution under the [CDDL or GPL Version 2] license." If
* you do not indicate a single choice of license, a recipient has the option to
* distribute your version of this file under either the CDDL, the GPL Version 2 or
* to extend the choice of license to its licensees as provided above. However, if
* you add GPL Version 2 code and therefore, elected the GPL Version 2 license, then
* the option applies only if the new code is made subject to such option by the
* copyright holder.
*/
package org.mycompany.installer.wizard.components.panels;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.border.EmptyBorder;
import org.netbeans.installer.Installer;
import org.netbeans.installer.product.Registry;
import org.netbeans.installer.product.RegistryNode;
import org.netbeans.installer.product.RegistryType;
import org.netbeans.installer.product.components.Product;
import org.netbeans.installer.utils.ErrorManager;
import org.netbeans.installer.utils.FileUtils;
import org.netbeans.installer.utils.LogManager;
import org.netbeans.installer.utils.ResourceUtils;
import org.netbeans.installer.utils.StringUtils;
import org.netbeans.installer.utils.SystemUtils;
import org.mycompany.installer.utils.applications.NetBeansRCPUtils;
import org.netbeans.installer.utils.exceptions.NativeException;
import org.netbeans.installer.utils.helper.swing.NbiCheckBox;
import org.netbeans.installer.utils.helper.swing.NbiLabel;
import org.netbeans.installer.utils.helper.swing.NbiPanel;
import org.netbeans.installer.utils.helper.swing.NbiTextPane;
import org.netbeans.installer.wizard.components.panels.ErrorMessagePanel;
import org.netbeans.installer.wizard.components.panels.ErrorMessagePanel.ErrorMessagePanelSwingUi;
import org.netbeans.installer.wizard.components.panels.ErrorMessagePanel.ErrorMessagePanelUi;
import org.netbeans.installer.wizard.containers.SwingContainer;
import org.netbeans.installer.wizard.ui.SwingUi;
import org.netbeans.installer.wizard.ui.WizardUi;
/**
*
* @author Dmitry Lipin
*/
public class PreInstallSummaryPanel extends ErrorMessagePanel {
/////////////////////////////////////////////////////////////////////////////////
// Instance
/**
 * Creates the panel and seeds every display/error property with its default
 * (resource-bundle) value. Individual properties may be overridden later,
 * e.g. by initialize() for the uninstall-only case.
 */
public PreInstallSummaryPanel() {
// Display text properties.
setProperty(TITLE_PROPERTY,
DEFAULT_TITLE);
setProperty(DESCRIPTION_PROPERTY,
DEFAULT_DESCRIPTION);
setProperty(INSTALLATION_FOLDER_PROPERTY,
DEFAULT_INSTALLATION_FOLDER);
setProperty(UNINSTALL_LABEL_TEXT_PROPERTY,
DEFAULT_UNINSTALL_LABEL_TEXT);
setProperty(INSTALLATION_SIZE_PROPERTY,
DEFAULT_INSTALLATION_SIZE);
setProperty(DOWNLOAD_SIZE_PROPERTY,
DEFAULT_DOWNLOAD_SIZE);
setProperty(NEXT_BUTTON_TEXT_PROPERTY,
DEFAULT_NEXT_BUTTON_TEXT);
// Error message properties used by validateInput().
setProperty(ERROR_NOT_ENOUGH_SPACE_PROPERTY,
DEFAULT_ERROR_NOT_ENOUGH_SPACE);
setProperty(ERROR_CANNOT_CHECK_SPACE_PROPERTY,
DEFAULT_ERROR_CANNOT_CHECK_SPACE);
setProperty(ERROR_LOGIC_ACCESS_PROPERTY,
DEFAULT_ERROR_LOGIC_ACCESS);
setProperty(ERROR_FSROOTS_PROPERTY,
DEFAULT_ERROR_FSROOTS);
setProperty(ERROR_NON_EXISTENT_ROOT_PROPERTY,
DEFAULT_ERROR_NON_EXISTENT_ROOT);
setProperty(ERROR_CANNOT_WRITE_PROPERTY,
DEFAULT_ERROR_CANNOT_WRITE);
// "Remove application userdir" checkbox properties.
setProperty(REMOVE_APP_USERDIR_TEXT_PROPERTY,
DEFAULT_REMOVE_APP_USERDIR_TEXT);
setProperty(REMOVE_APP_USERDIR_CHECKBOX_PROPERTY,
DEFAULT_REMOVE_APP_USERDIR_CHECKBOX);
}
/**
 * Lazily creates and returns the wizard UI wrapper for this panel.
 * The same instance is returned on every subsequent call.
 */
@Override
public WizardUi getWizardUi() {
    if (wizardUi != null) {
        return wizardUi;
    }
    wizardUi = new PreInstallSummaryPanelUi(this);
    return wizardUi;
}
/**
 * Switches the panel's Next-button label and description depending on
 * whether this wizard run installs any products or only uninstalls them.
 */
@Override
public void initialize() {
    final List<Product> toInstall =
            Registry.getInstance().getProductsToInstall();
    if (!toInstall.isEmpty()) {
        // At least one product will be installed: use the install wording.
        setProperty(NEXT_BUTTON_TEXT_PROPERTY, DEFAULT_NEXT_BUTTON_TEXT);
        setProperty(DESCRIPTION_PROPERTY, DEFAULT_DESCRIPTION);
    } else {
        // Uninstall-only run: use the uninstall wording.
        setProperty(NEXT_BUTTON_TEXT_PROPERTY, DEFAULT_NEXT_BUTTON_TEXT_UNINSTALL);
        setProperty(DESCRIPTION_PROPERTY, DEFAULT_DESCRIPTION_UNINSTALL);
    }
}
/////////////////////////////////////////////////////////////////////////////////
// Inner Classes
/**
 * Wizard-UI wrapper that lazily creates the Swing UI for the panel.
 */
public static class PreInstallSummaryPanelUi extends ErrorMessagePanelUi {
protected PreInstallSummaryPanel component;
public PreInstallSummaryPanelUi(PreInstallSummaryPanel component) {
super(component);
this.component = component;
}
@Override
public SwingUi getSwingUi(SwingContainer container) {
// Create the Swing UI on first request, then defer to the superclass,
// which returns the (now non-null) swingUi after its own setup.
if (swingUi == null) {
swingUi = new PreInstallSummaryPanelSwingUi(component, container);
}
return super.getSwingUi(container);
}
}
/**
 * Swing UI for the pre-install summary page: shows install locations,
 * products to uninstall, required/download sizes, and an optional
 * "remove application userdir" checkbox.
 */
public static class PreInstallSummaryPanelSwingUi extends ErrorMessagePanelSwingUi {
protected PreInstallSummaryPanel component;
// Widgets composing the summary page (created in initComponents()):
private NbiTextPane locationsPane;
private NbiLabel uninstallListLabel;
private NbiTextPane uninstallListPane;
private NbiLabel installationSizeLabel;
private NbiLabel installationSizeValue;
private NbiLabel downloadSizeLabel;
private NbiLabel downloadSizeValue;
private NbiCheckBox removeUserdirCheckbox;
private NbiTextPane removeUserdirPane;
private NbiPanel spacer;
// Next free row index for the GridBagLayout, advanced as widgets are added.
private int gridy = 0;
/**
 * Creates the Swing UI and builds its widget tree immediately.
 *
 * @param component the panel this UI renders
 * @param container the wizard container hosting this UI
 */
public PreInstallSummaryPanelSwingUi(
final PreInstallSummaryPanel component,
final SwingContainer container) {
super(component, container);
this.component = component;
initComponents();
}
// protected ////////////////////////////////////////////////////////////////
@Override
protected void initializeContainer() {
super.initializeContainer();
// Relabel the wizard's Next button with this panel's configured text.
container.getNextButton().setText(
panel.getProperty(NEXT_BUTTON_TEXT_PROPERTY));
}
@Override
protected void initialize() {
// Populates all summary widgets from the current registry state and
// toggles their visibility to match what this run installs/uninstalls.
final Registry registry = Registry.getInstance();
final StringBuilder text = new StringBuilder();
long installationSize = 0;
long downloadSize = 0;
// Total required disk space and download size across all products to install.
for (Product product : registry.getProductsToInstall()) {
installationSize += product.getRequiredDiskSpace();
downloadSize += product.getDownloadSize();
}
// add top-level components like nb-base, glassfish, tomcat, jdk
for (Product product : registry.getProductsToInstall()) {
text.append(StringUtils.LF);
text.append(StringUtils.format(panel.getProperty(INSTALLATION_FOLDER_PROPERTY),
product.getDisplayName()));
text.append(StringUtils.LF);
text.append(" " + product.getInstallationLocation());
text.append(StringUtils.LF);
//normen-show only first item
break;
}
locationsPane.setText(text);
// The uninstall label names only the first product scheduled for removal;
// it is blank when nothing is being uninstalled.
List<Product> toUninstall = registry.getProductsToUninstall();
String uninstallLabelText = toUninstall.size() > 0 ? StringUtils.format(
panel.getProperty(UNINSTALL_LABEL_TEXT_PROPERTY),
toUninstall.get(0).getDisplayName()) : "";
uninstallListLabel.setText(uninstallLabelText);
installationSizeLabel.setText(
panel.getProperty(INSTALLATION_SIZE_PROPERTY));
installationSizeValue.setText(StringUtils.formatSize(
installationSize));
downloadSizeLabel.setText(
panel.getProperty(DOWNLOAD_SIZE_PROPERTY));
downloadSizeValue.setText(StringUtils.formatSize(
downloadSize));
// Hide install-related widgets when nothing is being installed.
if (registry.getProductsToInstall().size() == 0) {
locationsPane.setVisible(false);
installationSizeLabel.setVisible(false);
installationSizeValue.setVisible(false);
} else {
locationsPane.setVisible(true);
installationSizeLabel.setVisible(true);
installationSizeValue.setVisible(true);
}
// Hide uninstall-related widgets when nothing is being uninstalled.
if (registry.getProductsToUninstall().size() == 0) {
uninstallListLabel.setVisible(false);
uninstallListPane.setVisible(false);
} else {
uninstallListLabel.setVisible(true);
uninstallListPane.setVisible(true);
}
// Download size is only shown when at least one remote registry node is visible.
downloadSizeLabel.setVisible(false);
downloadSizeValue.setVisible(false);
for (RegistryNode remoteNode : registry.getNodes(RegistryType.REMOTE)) {
if (remoteNode.isVisible()) {
downloadSizeLabel.setVisible(true);
downloadSizeValue.setVisible(true);
}
}
// Restore the checkbox state from the system property set by its listener.
if (Boolean.getBoolean(REMOVE_APP_USERDIR_PROPERTY)) {
removeUserdirCheckbox.doClick();
}
removeUserdirCheckbox.setVisible(false);
removeUserdirPane.setVisible(false);
// Offer userdir removal for the first product to uninstall whose userdir
// exists and is writable; the unconditional break inspects only that product.
for (Product product : Registry.getInstance().getProductsToUninstall()) {
try {
File installLocation = product.getInstallationLocation();
LogManager.log("... product installation directory: " + installLocation);
File userDir = NetBeansRCPUtils.getApplicationUserDirFile(installLocation);
LogManager.log("... product userdir: " + userDir);
if (FileUtils.exists(userDir) && FileUtils.canWrite(userDir)) {
removeUserdirCheckbox.setText(
StringUtils.format(
panel.getProperty(REMOVE_APP_USERDIR_CHECKBOX_PROPERTY),
userDir.getAbsolutePath()));
removeUserdirCheckbox.setBorder(new EmptyBorder(0, 0, 0, 0));
removeUserdirCheckbox.setVisible(true);
removeUserdirPane.setVisible(true);
removeUserdirPane.setContentType("text/html");
removeUserdirPane.setText(
StringUtils.format(
panel.getProperty(REMOVE_APP_USERDIR_TEXT_PROPERTY),
product.getDisplayName()));
}
break;
} catch (IOException e) {
LogManager.log(e);
}
}
//if(productCheckboxList!=null) {
// for(Pair <Product, NbiCheckBox> pair : productCheckboxList) {
// pair.getSecond().doClick();
// }
//}
super.initialize();
}
/**
 * Validates that installation/uninstallation can proceed.
 *
 * Unless disabled via SystemUtils.NO_SPACE_CHECK_PROPERTY, verifies that each
 * file-system root receiving download data or installed files has enough free
 * space (plus a safety margin), and that each product scheduled for removal
 * has a writable installation directory.
 *
 * Fixes over the original: deprecated {@code new Long(...)} boxing replaced
 * with {@code Long.valueOf(...)}, and the per-root space check iterates the
 * map's entry set instead of keySet-plus-get.
 *
 * @return a localized error message describing the first problem found, or
 *         null when validation passes
 */
@Override
protected String validateInput() {
    try {
        if (!Boolean.getBoolean(SystemUtils.NO_SPACE_CHECK_PROPERTY)) {
            final List<File> roots =
                    SystemUtils.getFileSystemRoots();
            final List<Product> toInstall =
                    Registry.getInstance().getProductsToInstall();
            final Map<File, Long> spaceMap =
                    new HashMap<File, Long>();
            LogManager.log("Available roots : " + StringUtils.asString(roots));
            File downloadDataDirRoot = FileUtils.getRoot(
                    Installer.getInstance().getLocalDirectory(), roots);
            long downloadSize = 0;
            for (Product product : toInstall) {
                downloadSize += product.getDownloadSize();
            }
            // the critical check point - we download all the data
            spaceMap.put(downloadDataDirRoot, Long.valueOf(downloadSize));
            long lastDataSize = 0;
            for (Product product : toInstall) {
                final File installLocation = product.getInstallationLocation();
                final File root = FileUtils.getRoot(installLocation, roots);
                final long productSize = product.getRequiredDiskSpace();
                LogManager.log(" [" + root + "] <- " + installLocation);
                if (root != null) {
                    Long ddSize = spaceMap.get(downloadDataDirRoot);
                    // remove space that was freed after the removal of previous product data
                    spaceMap.put(downloadDataDirRoot,
                            Long.valueOf(ddSize - lastDataSize));
                    // add space required for next product installation
                    Long size = spaceMap.get(root);
                    size = Long.valueOf(
                            (size != null ? size.longValue() : 0L)
                            + productSize);
                    spaceMap.put(root, size);
                    lastDataSize = product.getDownloadSize();
                } else {
                    return StringUtils.format(
                            panel.getProperty(ERROR_NON_EXISTENT_ROOT_PROPERTY),
                            product, installLocation);
                }
            }
            // Verify each root has the required space plus the safety margin.
            for (Map.Entry<File, Long> entry : spaceMap.entrySet()) {
                final File root = entry.getKey();
                try {
                    final long availableSpace =
                            SystemUtils.getFreeSpace(root);
                    final long requiredSpace =
                            entry.getValue() + REQUIRED_SPACE_ADDITION;
                    if (availableSpace < requiredSpace) {
                        return StringUtils.format(
                                panel.getProperty(ERROR_NOT_ENOUGH_SPACE_PROPERTY),
                                root,
                                StringUtils.formatSize(requiredSpace - availableSpace));
                    }
                } catch (NativeException e) {
                    ErrorManager.notifyError(
                            panel.getProperty(ERROR_CANNOT_CHECK_SPACE_PROPERTY),
                            e);
                }
            }
        }
        // Uninstallation requires write access to each product's install dir.
        final List<Product> toUninstall =
                Registry.getInstance().getProductsToUninstall();
        for (Product product : toUninstall) {
            if (!FileUtils.canWrite(product.getInstallationLocation())) {
                return StringUtils.format(
                        panel.getProperty(ERROR_CANNOT_WRITE_PROPERTY),
                        product,
                        product.getInstallationLocation());
            }
        }
    } catch (IOException e) {
        ErrorManager.notifyError(
                panel.getProperty(ERROR_FSROOTS_PROPERTY), e);
    }
    return null;
}
// private //////////////////////////////////////////////////////////////////
// Builds every widget and lays them out in a single-column GridBagLayout;
// the gridy field tracks the next free row index.
private void initComponents() {
gridy = 0;
// locationsPane ////////////////////////////////////////////////////////
locationsPane = new NbiTextPane();
// uninstallListPane ////////////////////////////////////////////////////
uninstallListPane = new NbiTextPane();
// uninstallListLabel ///////////////////////////////////////////////////
uninstallListLabel = new NbiLabel();
uninstallListLabel.setLabelFor(uninstallListPane);
// installationSizeValue ////////////////////////////////////////////////
installationSizeValue = new NbiLabel();
//installationSizeValue.setFocusable(true);
// installationSizeLabel ////////////////////////////////////////////////
installationSizeLabel = new NbiLabel();
installationSizeLabel.setLabelFor(installationSizeValue);
// downloadSizeValue ////////////////////////////////////////////////////
downloadSizeValue = new NbiLabel();
//downloadSizeValue.setFocusable(true);
// downloadSizeLabel ////////////////////////////////////////////////////
downloadSizeLabel = new NbiLabel();
downloadSizeLabel.setLabelFor(downloadSizeValue);
// spacer ///////////////////////////////////////////////////////////////
spacer = new NbiPanel();
// this /////////////////////////////////////////////////////////////////
add(locationsPane, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.PAGE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(11, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
add(uninstallListLabel, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.PAGE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(11, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
add(uninstallListPane, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.PAGE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(0, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
// NOTE(review): gridy0 is computed but never used; the extra gridy++
// appears to reserve a row — confirm whether both lines are still needed.
int gridy0 = gridy;
gridy++;
removeUserdirPane = new NbiTextPane();
add(removeUserdirPane, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.PAGE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(11, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
removeUserdirCheckbox = new NbiCheckBox();
add(removeUserdirCheckbox, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.PAGE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(4, 20, 0, 11), // padding
0, 0)); // padx, pady - ???
// Persist the checkbox state in a system property so the install logic
// (and initialize()) can read it back.
removeUserdirCheckbox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
System.setProperty(REMOVE_APP_USERDIR_PROPERTY,
"" + removeUserdirCheckbox.isSelected());
}
});
add(installationSizeLabel, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.LINE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(22, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
add(installationSizeValue, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.LINE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(4, 22, 0, 11), // padding
0, 0)); // padx, pady - ???
add(downloadSizeLabel, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.LINE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(4, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
add(downloadSizeValue, new GridBagConstraints(
0, gridy++, // x, y
1, 1, // width, height
1.0, 0.0, // weight-x, weight-y
GridBagConstraints.LINE_START, // anchor
GridBagConstraints.HORIZONTAL, // fill
new Insets(4, 22, 0, 11), // padding
0, 0)); // padx, pady - ???
// The spacer absorbs all remaining vertical space (weight-y 1.0) so the
// content above stays pinned to the top of the panel.
add(spacer, new GridBagConstraints(
0, gridy + 10, // x, y
1, 1, // width, height
1.0, 1.0, // weight-x, weight-y
GridBagConstraints.CENTER, // anchor
GridBagConstraints.BOTH, // fill
new Insets(0, 11, 0, 11), // padding
0, 0)); // padx, pady - ???
}
}
/////////////////////////////////////////////////////////////////////////////////
// Constants
// Property keys used to look up/override the panel's texts and messages.
public static final String INSTALLATION_FOLDER_PROPERTY =
"installation.folder"; // NOI18N
public static final String UNINSTALL_LABEL_TEXT_PROPERTY =
"uninstall.list.label.text"; // NOI18N
public static final String INSTALLATION_SIZE_PROPERTY =
"installation.size"; // NOI18N
public static final String DOWNLOAD_SIZE_PROPERTY =
"download.size"; // NOI18N
public static final String ERROR_NOT_ENOUGH_SPACE_PROPERTY =
"error.not.enough.space"; // NOI18N
public static final String ERROR_CANNOT_CHECK_SPACE_PROPERTY =
"error.cannot.check.space"; // NOI18N
public static final String ERROR_LOGIC_ACCESS_PROPERTY =
"error.logic.access"; // NOI18N
public static final String ERROR_FSROOTS_PROPERTY =
"error.fsroots"; // NOI18N
public static final String ERROR_NON_EXISTENT_ROOT_PROPERTY =
"error.non.existent.root"; // NOI18N
public static final String ERROR_CANNOT_WRITE_PROPERTY =
"error.cannot.write"; // NOI18N
public static final String REMOVE_APP_USERDIR_PROPERTY =
"remove.app.userdir";
public static final String REMOVE_APP_USERDIR_TEXT_PROPERTY =
"remove.app.userdir.text";
public static final String REMOVE_APP_USERDIR_CHECKBOX_PROPERTY =
"remove.app.userdir.checkbox";
// Default (localized) values, resolved from the panel's resource bundle.
public static final String DEFAULT_TITLE =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.title"); // NOI18N
public static final String DEFAULT_DESCRIPTION =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.description"); // NOI18N
public static final String DEFAULT_DESCRIPTION_UNINSTALL =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.description.uninstall"); // NOI18N
public static final String DEFAULT_INSTALLATION_FOLDER =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.installation.folder"); // NOI18N
public static final String DEFAULT_UNINSTALL_LABEL_TEXT =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.uninstall.list.label.text"); // NOI18N
public static final String DEFAULT_INSTALLATION_SIZE =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.installation.size"); // NOI18N
public static final String DEFAULT_DOWNLOAD_SIZE =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.download.size"); // NOI18N
public static final String DEFAULT_NEXT_BUTTON_TEXT =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.next.button.text"); // NOI18N
public static final String DEFAULT_NEXT_BUTTON_TEXT_UNINSTALL =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.next.button.text.uninstall"); // NOI18N
public static final String DEFAULT_ERROR_NOT_ENOUGH_SPACE =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.error.not.enough.space"); // NOI18N
public static final String DEFAULT_ERROR_CANNOT_CHECK_SPACE =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.error.cannot.check.space");// NOI18N
public static final String DEFAULT_ERROR_LOGIC_ACCESS =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.error.logic.access");// NOI18N
public static final String DEFAULT_ERROR_FSROOTS =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.error.fsroots"); // NOI18N
public static final String DEFAULT_ERROR_NON_EXISTENT_ROOT =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.error.non.existent.root"); // NOI18N
public static final String DEFAULT_ERROR_CANNOT_WRITE =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.error.cannot.write"); // NOI18N
public static final String DEFAULT_REMOVE_APP_USERDIR_TEXT =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.remove.app.userdir.text"); // NOI18N
public static final String DEFAULT_REMOVE_APP_USERDIR_CHECKBOX =
ResourceUtils.getString(PreInstallSummaryPanel.class,
"PrISP.remove.app.userdir.checkbox"); // NOI18N
public static final long REQUIRED_SPACE_ADDITION =
10L * 1024L * 1024L; // 10MB
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<#-- FreeMarker template: generates one Drill${r"${entry.drillType}"}ObjectInspector
     class per entry in drillOI.map. Each generated class adapts a Drill value
     holder (required or nullable) to the corresponding Hive primitive
     ObjectInspector interface. -->
<@pp.dropOutputFile />
<#list drillOI.map as entry>
<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/Drill${entry.drillType}ObjectInspector.java" />
<#include "/@includes/license.ftl" />
package org.apache.drill.exec.expr.fn.impl.hive;
import org.apache.drill.exec.util.DecimalUtility;
import org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers;
import org.apache.drill.exec.expr.holders.*;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
/**
 * Hive ObjectInspector that reads its primitive value from a Drill
 * ${entry.drillType} holder. The nested {@code Required} and {@code Optional}
 * subclasses handle the non-nullable and nullable holder variants respectively.
 */
public abstract class Drill${entry.drillType}ObjectInspector extends AbstractDrillPrimitiveObjectInspector
    implements ${entry.hiveOI} {

  public Drill${entry.drillType}ObjectInspector() {
    super(TypeInfoFactory.${entry.hiveType?lower_case}TypeInfo);
  }
<#if entry.drillType == "VarChar">
  @Override
  public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
    HiveVarcharWritable valW = new HiveVarcharWritable();
    valW.set(getPrimitiveJavaObject(o));
    return valW;
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector {
    @Override
    public HiveVarchar getPrimitiveJavaObject(Object o) {
      VarCharHolder h = (VarCharHolder)o;
      // Holder bytes are UTF-8 encoded.
      String s = StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer);
      return new HiveVarchar(s, HiveVarchar.MAX_VARCHAR_LENGTH);
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector {
    @Override
    public HiveVarchar getPrimitiveJavaObject(Object o) {
      NullableVarCharHolder h = (NullableVarCharHolder)o;
      String s = h.isSet == 0 ? null : StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer);
      return new HiveVarchar(s, HiveVarchar.MAX_VARCHAR_LENGTH);
    }
  }
<#elseif entry.drillType == "Var16Char">
  @Override
  public Text getPrimitiveWritableObject(Object o) {
    // No writable representation is defined for UTF-16 varchar data.
    throw new UnsupportedOperationException();
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector {
    @Override
    public String getPrimitiveJavaObject(Object o){
      Var16CharHolder h = (Var16CharHolder)o;
      // Holder bytes are UTF-16 encoded.
      String s = StringFunctionHelpers.toStringFromUTF16(h.start, h.end, h.buffer);
      return s;
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector {
    @Override
    public String getPrimitiveJavaObject(Object o){
      NullableVar16CharHolder h = (NullableVar16CharHolder)o;
      String s = h.isSet == 0 ? null : StringFunctionHelpers.toStringFromUTF16(h.start, h.end, h.buffer);
      return s;
    }
  }
<#elseif entry.drillType == "VarBinary">
  @Override
  public BytesWritable getPrimitiveWritableObject(Object o) {
    return new BytesWritable(getPrimitiveJavaObject(o));
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector {
    @Override
    public byte[] getPrimitiveJavaObject(Object o) {
      VarBinaryHolder h = (VarBinaryHolder)o;
      // Copy the [start, end) slice of the holder buffer into a fresh array.
      byte[] buf = new byte[h.end-h.start];
      h.buffer.getBytes(h.start, buf, 0, h.end-h.start);
      return buf;
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector {
    @Override
    public byte[] getPrimitiveJavaObject(Object o) {
      NullableVarBinaryHolder h = (NullableVarBinaryHolder)o;
      byte[] buf = new byte[h.end-h.start];
      h.buffer.getBytes(h.start, buf, 0, h.end-h.start);
      return buf;
    }
  }
<#elseif entry.drillType == "Bit">
  public static class Required extends Drill${entry.drillType}ObjectInspector {
    @Override
    public boolean get(Object o) {
      // Drill encodes booleans as 0/1 ints.
      return ((BitHolder)o).value != 0;
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector {
    @Override
    public boolean get(Object o) {
      return ((NullableBitHolder)o).value != 0;
    }
  }

  @Override
  public BooleanWritable getPrimitiveWritableObject(Object o) {
    return new BooleanWritable(get(o));
  }

  @Override
  public Boolean getPrimitiveJavaObject(Object o) {
    // Boolean.valueOf avoids allocating via the deprecated Boolean(boolean) constructor.
    return Boolean.valueOf(get(o));
  }
<#elseif entry.drillType == "Decimal38Sparse">
  @Override
  public HiveDecimalWritable getPrimitiveWritableObject(Object o) {
    return new HiveDecimalWritable(getPrimitiveJavaObject(o));
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector{
    @Override
    public HiveDecimal getPrimitiveJavaObject(Object o){
      Decimal38SparseHolder h = (Decimal38SparseHolder) o;
      return HiveDecimal.create(DecimalUtility.getBigDecimalFromSparse(h.buffer, h.start, h.nDecimalDigits, h.scale));
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector{
    @Override
    public HiveDecimal getPrimitiveJavaObject(Object o){
      NullableDecimal38SparseHolder h = (NullableDecimal38SparseHolder) o;
      return HiveDecimal.create(DecimalUtility.getBigDecimalFromSparse(h.buffer, h.start, h.nDecimalDigits, h.scale));
    }
  }
<#elseif entry.drillType == "TimeStamp">
  @Override
  public TimestampWritable getPrimitiveWritableObject(Object o) {
    return new TimestampWritable(getPrimitiveJavaObject(o));
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector{
    @Override
    public java.sql.Timestamp getPrimitiveJavaObject(Object o){
      // Holder value is epoch milliseconds.
      return new java.sql.Timestamp(((TimeStampHolder)o).value);
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector{
    @Override
    public java.sql.Timestamp getPrimitiveJavaObject(Object o){
      return new java.sql.Timestamp(((NullableTimeStampHolder)o).value);
    }
  }
<#elseif entry.drillType == "Date">
  @Override
  public DateWritable getPrimitiveWritableObject(Object o) {
    return new DateWritable(getPrimitiveJavaObject(o));
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector{
    @Override
    public java.sql.Date getPrimitiveJavaObject(Object o){
      return new java.sql.Date(((DateHolder)o).value);
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector{
    @Override
    public java.sql.Date getPrimitiveJavaObject(Object o){
      return new java.sql.Date(((NullableDateHolder)o).value);
    }
  }
<#else>
<#-- Remaining numeric types share the pattern below; "Int" needs a special
     case because "int"?cap_first yields "Int", not the wrapper name "Integer". -->
<#if entry.drillType == "Int">
  @Override
  public Integer getPrimitiveJavaObject(Object o) {
    // valueOf uses the Integer cache instead of the deprecated boxing constructor.
    return Integer.valueOf(get(o));
  }
<#else>
  @Override
  public ${entry.javaType?cap_first} getPrimitiveJavaObject(Object o) {
    return ${entry.javaType?cap_first}.valueOf(get(o));
  }
</#if>

  @Override
  public ${entry.javaType?cap_first}Writable getPrimitiveWritableObject(Object o) {
    return new ${entry.javaType?cap_first}Writable(get(o));
  }

  public static class Required extends Drill${entry.drillType}ObjectInspector{
    @Override
    public ${entry.javaType} get(Object o){
      return((${entry.drillType}Holder)o).value;
    }
  }

  public static class Optional extends Drill${entry.drillType}ObjectInspector{
    @Override
    public ${entry.javaType} get(Object o){
      return((Nullable${entry.drillType}Holder)o).value;
    }
  }
</#if>
}
</#list>
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.BuiltinProvider;
import com.google.devtools.build.lib.packages.NativeInfo;
import com.google.devtools.build.lib.skylarkbuildapi.android.AndroidResourcesInfoApi;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.SkylarkNestedSet;
/**
 * A provider that supplies ResourceContainers from its transitive closure.
 *
 * <p>Immutable value object; all nested sets are taken as-is from the caller
 * and exposed unchanged through the getters below.
 */
@Immutable
public class AndroidResourcesInfo extends NativeInfo
    implements AndroidResourcesInfoApi<Artifact, ValidatedAndroidData, ProcessedAndroidManifest> {

  public static final String PROVIDER_NAME = "AndroidResourcesInfo";
  public static final Provider PROVIDER = new Provider();

  /*
   * Local information about the target that produced this provider, for tooling. These values will
   * be made available even if they should not be inherited (for example, if this target has
   * "neverlink" set) - do not inherit them directly.
   */
  // Lets us know where the provider came from
  private final Label label;
  // An updated manifest - resource processing sometimes does additional manifest processing
  // TODO(b/30817309): Remove this once resource processing no longer does manifest processing
  private final ProcessedAndroidManifest manifest;
  // An R.txt file containing a list of all transitive resources this target expected
  private final Artifact rTxt;

  /*
   * Transitive information used for resource processing
   */
  // All resource containers in the transitive closure, and the direct subset of them.
  private final NestedSet<ValidatedAndroidData> transitiveAndroidResources;
  private final NestedSet<ValidatedAndroidData> directAndroidResources;
  // Transitive artifact sets by kind (resources, assets, manifests, aapt2 outputs, ...).
  private final NestedSet<Artifact> transitiveResources;
  private final NestedSet<Artifact> transitiveAssets;
  private final NestedSet<Artifact> transitiveManifests;
  private final NestedSet<Artifact> transitiveAapt2RTxt;
  private final NestedSet<Artifact> transitiveSymbolsBin;
  private final NestedSet<Artifact> transitiveCompiledSymbols;
  private final NestedSet<Artifact> transitiveStaticLib;
  private final NestedSet<Artifact> transitiveRTxt;

  // Package-private: instances are created via the Provider or by trusted
  // callers in this package; the constructor just stores its arguments.
  AndroidResourcesInfo(
      Label label,
      ProcessedAndroidManifest manifest,
      Artifact rTxt,
      NestedSet<ValidatedAndroidData> transitiveAndroidResources,
      NestedSet<ValidatedAndroidData> directAndroidResources,
      NestedSet<Artifact> transitiveResources,
      NestedSet<Artifact> transitiveAssets,
      NestedSet<Artifact> transitiveManifests,
      NestedSet<Artifact> transitiveAapt2RTxt,
      NestedSet<Artifact> transitiveSymbolsBin,
      NestedSet<Artifact> transitiveCompiledSymbols,
      NestedSet<Artifact> transitiveStaticLib,
      NestedSet<Artifact> transitiveRTxt) {
    super(PROVIDER);
    this.label = label;
    this.manifest = manifest;
    this.rTxt = rTxt;
    this.transitiveAndroidResources = transitiveAndroidResources;
    this.directAndroidResources = directAndroidResources;
    this.transitiveResources = transitiveResources;
    this.transitiveAssets = transitiveAssets;
    this.transitiveManifests = transitiveManifests;
    this.transitiveAapt2RTxt = transitiveAapt2RTxt;
    this.transitiveSymbolsBin = transitiveSymbolsBin;
    this.transitiveCompiledSymbols = transitiveCompiledSymbols;
    this.transitiveStaticLib = transitiveStaticLib;
    this.transitiveRTxt = transitiveRTxt;
  }

  @Override
  public Label getLabel() {
    return label;
  }

  @Override
  public ProcessedAndroidManifest getManifest() {
    return manifest;
  }

  @Override
  public Artifact getRTxt() {
    return rTxt;
  }

  @Override
  public NestedSet<ValidatedAndroidData> getTransitiveAndroidResources() {
    return transitiveAndroidResources;
  }

  @Override
  public NestedSet<ValidatedAndroidData> getDirectAndroidResources() {
    return directAndroidResources;
  }

  @Override
  public NestedSet<Artifact> getTransitiveResources() {
    return transitiveResources;
  }

  /** @deprecated Assets are being decoupled from resources */
  @Deprecated
  public NestedSet<Artifact> getTransitiveAssets() {
    return transitiveAssets;
  }

  @Override
  public NestedSet<Artifact> getTransitiveManifests() {
    return transitiveManifests;
  }

  @Override
  public NestedSet<Artifact> getTransitiveAapt2RTxt() {
    return transitiveAapt2RTxt;
  }

  @Override
  public NestedSet<Artifact> getTransitiveSymbolsBin() {
    return transitiveSymbolsBin;
  }

  @Override
  public NestedSet<Artifact> getTransitiveCompiledSymbols() {
    return transitiveCompiledSymbols;
  }

  @Override
  public NestedSet<Artifact> getTransitiveStaticLib() {
    return transitiveStaticLib;
  }

  @Override
  public NestedSet<Artifact> getTransitiveRTxt() {
    return transitiveRTxt;
  }

  /** Provider for {@link AndroidResourcesInfo}. */
  public static class Provider extends BuiltinProvider<AndroidResourcesInfo>
      implements AndroidResourcesInfoApi.AndroidResourcesInfoApiProvider<
          Artifact, ValidatedAndroidData, ProcessedAndroidManifest> {

    private Provider() {
      super(PROVIDER_NAME, AndroidResourcesInfo.class);
    }

    // Skylark-facing factory: converts each SkylarkNestedSet to a typed
    // NestedSet before delegating to the constructor.
    @Override
    public AndroidResourcesInfo createInfo(
        Label label,
        ProcessedAndroidManifest manifest,
        Artifact rTxt,
        SkylarkNestedSet transitiveAndroidResources,
        SkylarkNestedSet directAndroidResources,
        SkylarkNestedSet transitiveResources,
        SkylarkNestedSet transitiveAssets,
        SkylarkNestedSet transitiveManifests,
        SkylarkNestedSet transitiveAapt2RTxt,
        SkylarkNestedSet transitiveSymbolsBin,
        SkylarkNestedSet transitiveCompiledSymbols,
        SkylarkNestedSet transitiveStaticLib,
        SkylarkNestedSet transitiveRTxt)
        throws EvalException {
      return new AndroidResourcesInfo(
          label,
          manifest,
          rTxt,
          nestedSet(transitiveAndroidResources, ValidatedAndroidData.class),
          nestedSet(directAndroidResources, ValidatedAndroidData.class),
          nestedSet(transitiveResources, Artifact.class),
          nestedSet(transitiveAssets, Artifact.class),
          nestedSet(transitiveManifests, Artifact.class),
          nestedSet(transitiveAapt2RTxt, Artifact.class),
          nestedSet(transitiveSymbolsBin, Artifact.class),
          nestedSet(transitiveCompiledSymbols, Artifact.class),
          nestedSet(transitiveStaticLib, Artifact.class),
          nestedSet(transitiveRTxt, Artifact.class));
    }

    // Wraps the Skylark set's contents in a fresh stable-order NestedSet of the
    // requested element type.
    private <T> NestedSet<T> nestedSet(SkylarkNestedSet from, Class<T> with) {
      return NestedSetBuilder.<T>stableOrder().addTransitive(from.getSet(with)).build();
    }
  }
}
| |
/*******************************************************************************
* Copyright (c) 2009 Luaj.org. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
******************************************************************************/
package org.luaj.vm2.lib;
import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import org.luaj.vm2.Buffer;
import org.luaj.vm2.Globals;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.Varargs;
/**
* Subclass of {@link LibFunction} which implements the standard lua {@code os} library.
* <p>
* It is a usable base with simplified stub functions
* for library functions that cannot be implemented uniformly
* on Jse and Jme.
* <p>
* This can be installed as-is on either platform, or extended
* and refined to be used in a complete Jse implementation.
* <p>
* Because the nature of the {@code os} library is to encapsulate
* os-specific features, the behavior of these functions varies considerably
* from their counterparts in the C platform.
* <p>
* The following functions have limited implementations of features
* that are not supported well on Jme:
* <ul>
* <li>{@code execute()}</li>
* <li>{@code remove()}</li>
* <li>{@code rename()}</li>
* <li>{@code tmpname()}</li>
* </ul>
* <p>
* Typically, this library is included as part of a call to either
* {@link org.luaj.vm2.lib.jse.JsePlatform#standardGlobals()} or {@link org.luaj.vm2.lib.jme.JmePlatform#standardGlobals()}
* <pre> {@code
* Globals globals = JsePlatform.standardGlobals();
* System.out.println( globals.get("os").get("time").call() );
* } </pre>
* In this example the platform-specific {@link org.luaj.vm2.lib.jse.JseOsLib} library will be loaded, which will include
* the base functionality provided by this class.
* <p>
* To instantiate and use it directly,
* link it into your globals table via {@link LuaValue#load(LuaValue)} using code such as:
* <pre> {@code
* Globals globals = new Globals();
* globals.load(new JseBaseLib());
* globals.load(new PackageLib());
* globals.load(new OsLib());
* System.out.println( globals.get("os").get("time").call() );
* } </pre>
* <p>
* @see LibFunction
* @see org.luaj.vm2.lib.jse.JseOsLib
* @see org.luaj.vm2.lib.jse.JsePlatform
* @see org.luaj.vm2.lib.jme.JmePlatform
* @see <a href="http://www.lua.org/manual/5.1/manual.html#5.8">http://www.lua.org/manual/5.1/manual.html#5.8</a>
*/
public class OsLib extends TwoArgFunction {
public static String TMP_PREFIX = ".luaj";
public static String TMP_SUFFIX = "tmp";
private static final int CLOCK = 0;
private static final int DATE = 1;
private static final int DIFFTIME = 2;
private static final int EXECUTE = 3;
private static final int EXIT = 4;
private static final int GETENV = 5;
private static final int REMOVE = 6;
private static final int RENAME = 7;
private static final int SETLOCALE = 8;
private static final int TIME = 9;
private static final int TMPNAME = 10;
private static final String[] NAMES = {
"clock",
"date",
"difftime",
"execute",
"exit",
"getenv",
"remove",
"rename",
"setlocale",
"time",
"tmpname",
};
private static final long t0 = System.currentTimeMillis();
private static long tmpnames = t0;
protected Globals globals;
/**
* Create and OsLib instance.
*/
public OsLib() {
}
/** Perform one-time initialization on the library by creating a table
* containing the library functions, adding that table to the supplied environment,
* adding the table to package.loaded, and returning table as the return value.
* @param modname the module name supplied if this is loaded via 'require'.
* @param env the environment to load into, typically a Globals instance.
*/
public LuaValue call(LuaValue modname, LuaValue env) {
globals = env.checkglobals();
LuaTable os = new LuaTable();
for (int i = 0; i < NAMES.length; ++i)
os.set(NAMES[i], new OsLibFunc(i, NAMES[i]));
env.set("os", os);
env.get("package").get("loaded").set("os", os);
return os;
}
class OsLibFunc extends VarArgFunction {
public OsLibFunc(int opcode, String name) {
this.opcode = opcode;
this.name = name;
}
public Varargs invoke(Varargs args) {
try {
switch ( opcode ) {
case CLOCK:
return valueOf(clock());
case DATE: {
String s = args.optjstring(1, "%c");
double t = args.isnumber(2)? args.todouble(2): time(null);
if (s.equals("*t")) {
Calendar d = Calendar.getInstance();
d.setTime(new Date((long)(t*1000)));
LuaTable tbl = LuaValue.tableOf();
tbl.set("year", LuaValue.valueOf(d.get(Calendar.YEAR)));
tbl.set("month", LuaValue.valueOf(d.get(Calendar.MONTH)+1));
tbl.set("day", LuaValue.valueOf(d.get(Calendar.DAY_OF_MONTH)));
tbl.set("hour", LuaValue.valueOf(d.get(Calendar.HOUR_OF_DAY)));
tbl.set("min", LuaValue.valueOf(d.get(Calendar.MINUTE)));
tbl.set("sec", LuaValue.valueOf(d.get(Calendar.SECOND)));
tbl.set("wday", LuaValue.valueOf(d.get(Calendar.DAY_OF_WEEK)));
tbl.set("yday", LuaValue.valueOf(d.get(0x6))); // Day of year
tbl.set("isdst", LuaValue.valueOf(isDaylightSavingsTime(d)));
return tbl;
}
return valueOf( date(s, t==-1? time(null): t) );
}
case DIFFTIME:
return valueOf(difftime(args.checkdouble(1),args.checkdouble(2)));
case EXECUTE:
return execute(args.optjstring(1, null));
case EXIT:
exit(args.optint(1, 0));
return NONE;
case GETENV: {
final String val = getenv(args.checkjstring(1));
return val!=null? valueOf(val): NIL;
}
case REMOVE:
remove(args.checkjstring(1));
return LuaValue.TRUE;
case RENAME:
rename(args.checkjstring(1), args.checkjstring(2));
return LuaValue.TRUE;
case SETLOCALE: {
String s = setlocale(args.optjstring(1,null), args.optjstring(2, "all"));
return s!=null? valueOf(s): NIL;
}
case TIME:
return valueOf(time(args.opttable(1, null)));
case TMPNAME:
return valueOf(tmpname());
}
return NONE;
} catch ( IOException e ) {
return varargsOf(NIL, valueOf(e.getMessage()));
}
}
}
/**
* @return an approximation of the amount in seconds of CPU time used by
* the program. For luaj this simple returns the elapsed time since the
* OsLib class was loaded.
*/
protected double clock() {
return (System.currentTimeMillis()-t0) / 1000.;
}
/**
* Returns the number of seconds from time t1 to time t2.
* In POSIX, Windows, and some other systems, this value is exactly t2-t1.
* @param t2
* @param t1
* @return diffeence in time values, in seconds
*/
protected double difftime(double t2, double t1) {
return t2 - t1;
}
/**
* If the time argument is present, this is the time to be formatted
* (see the os.time function for a description of this value).
* Otherwise, date formats the current time.
*
* Date returns the date as a string,
* formatted according to the same rules as ANSII strftime, but without
* support for %g, %G, or %V.
*
* When called without arguments, date returns a reasonable date and
* time representation that depends on the host system and on the
* current locale (that is, os.date() is equivalent to os.date("%c")).
*
* @param format
* @param time time since epoch, or -1 if not supplied
* @return a LString or a LTable containing date and time,
* formatted according to the given string format.
*/
public String date(String format, double time) {
Calendar d = Calendar.getInstance();
d.setTime(new Date((long)(time*1000)));
if (format.startsWith("!")) {
time -= timeZoneOffset(d);
d.setTime(new Date((long)(time*1000)));
format = format.substring(1);
}
byte[] fmt = format.getBytes();
final int n = fmt.length;
Buffer result = new Buffer(n);
byte c;
for ( int i = 0; i < n; ) {
switch ( c = fmt[i++ ] ) {
case '\n':
result.append( "\n" );
break;
default:
result.append( c );
break;
case '%':
if (i >= n) break;
switch ( c = fmt[i++ ] ) {
default:
LuaValue.argerror(1, "invalid conversion specifier '%"+c+"'");
break;
case '%':
result.append( (byte)'%' );
break;
case 'a':
result.append(WeekdayNameAbbrev[d.get(Calendar.DAY_OF_WEEK)-1]);
break;
case 'A':
result.append(WeekdayName[d.get(Calendar.DAY_OF_WEEK)-1]);
break;
case 'b':
result.append(MonthNameAbbrev[d.get(Calendar.MONTH)]);
break;
case 'B':
result.append(MonthName[d.get(Calendar.MONTH)]);
break;
case 'c':
result.append(date("%a %b %d %H:%M:%S %Y", time));
break;
case 'd':
result.append(String.valueOf(100+d.get(Calendar.DAY_OF_MONTH)).substring(1));
break;
case 'H':
result.append(String.valueOf(100+d.get(Calendar.HOUR_OF_DAY)).substring(1));
break;
case 'I':
result.append(String.valueOf(100+(d.get(Calendar.HOUR_OF_DAY)%12)).substring(1));
break;
case 'j': { // day of year.
Calendar y0 = beginningOfYear(d);
int dayOfYear = (int) ((d.getTime().getTime() - y0.getTime().getTime()) / (24 * 3600L * 1000L));
result.append(String.valueOf(1001+dayOfYear).substring(1));
break;
}
case 'm':
result.append(String.valueOf(101+d.get(Calendar.MONTH)).substring(1));
break;
case 'M':
result.append(String.valueOf(100+d.get(Calendar.MINUTE)).substring(1));
break;
case 'p':
result.append(d.get(Calendar.HOUR_OF_DAY) < 12? "AM": "PM");
break;
case 'S':
result.append(String.valueOf(100+d.get(Calendar.SECOND)).substring(1));
break;
case 'U':
result.append(String.valueOf(weekNumber(d, 0)));
break;
case 'w':
result.append(String.valueOf((d.get(Calendar.DAY_OF_WEEK)+6)%7));
break;
case 'W':
result.append(String.valueOf(weekNumber(d, 1)));
break;
case 'x':
result.append(date("%m/%d/%y", time));
break;
case 'X':
result.append(date("%H:%M:%S", time));
break;
case 'y':
result.append(String.valueOf(d.get(Calendar.YEAR)).substring(2));
break;
case 'Y':
result.append(String.valueOf(d.get(Calendar.YEAR)));
break;
case 'z': {
final int tzo = timeZoneOffset(d) / 60;
final int a = Math.abs(tzo);
final String h = String.valueOf(100 + a / 60).substring(1);
final String m = String.valueOf(100 + a % 60).substring(1);
result.append((tzo>=0? "+": "-") + h + m);
break;
}
}
}
}
return result.tojstring();
}
private static final String[] WeekdayNameAbbrev = { "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" };
private static final String[] WeekdayName = { "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" };
private static final String[] MonthNameAbbrev = { "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };
private static final String[] MonthName = { "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" };
private Calendar beginningOfYear(Calendar d) {
Calendar y0 = Calendar.getInstance();
y0.setTime(d.getTime());
y0.set(Calendar.MONTH, 0);
y0.set(Calendar.DAY_OF_MONTH, 1);
y0.set(Calendar.HOUR_OF_DAY, 0);
y0.set(Calendar.MINUTE, 0);
y0.set(Calendar.SECOND, 0);
y0.set(Calendar.MILLISECOND, 0);
return y0;
}
private int weekNumber(Calendar d, int startDay) {
Calendar y0 = beginningOfYear(d);
y0.set(Calendar.DAY_OF_MONTH, 1 + (startDay + 8 - y0.get(Calendar.DAY_OF_WEEK)) % 7);
if (y0.after(d)) {
y0.set(Calendar.YEAR, y0.get(Calendar.YEAR) - 1);
y0.set(Calendar.DAY_OF_MONTH, 1 + (startDay + 8 - y0.get(Calendar.DAY_OF_WEEK)) % 7);
}
long dt = d.getTime().getTime() - y0.getTime().getTime();
return 1 + (int) (dt / (7L * 24L * 3600L * 1000L));
}
private int timeZoneOffset(Calendar d) {
int localStandarTimeMillis = (
d.get(Calendar.HOUR_OF_DAY) * 3600 +
d.get(Calendar.MINUTE) * 60 +
d.get(Calendar.SECOND)) * 1000;
return d.getTimeZone().getOffset(
1,
d.get(Calendar.YEAR),
d.get(Calendar.MONTH),
d.get(Calendar.DAY_OF_MONTH),
d.get(Calendar.DAY_OF_WEEK),
localStandarTimeMillis) / 1000;
}
private boolean isDaylightSavingsTime(Calendar d) {
return timeZoneOffset(d) != d.getTimeZone().getRawOffset() / 1000;
}
/**
* This function is equivalent to the C function system.
* It passes command to be executed by an operating system shell.
* It returns a status code, which is system-dependent.
* If command is absent, then it returns nonzero if a shell
* is available and zero otherwise.
* @param command command to pass to the system
*/
protected Varargs execute(String command) {
return varargsOf(NIL, valueOf("exit"), ONE);
}
/**
* Calls the C function exit, with an optional code, to terminate the host program.
* @param code
*/
protected void exit(int code) {
System.exit(code);
}
/**
* Returns the value of the process environment variable varname,
* or the System property value for varname,
* or null if the variable is not defined in either environment.
*
* The default implementation, which is used by the JmePlatform,
* only queryies System.getProperty().
*
* The JsePlatform overrides this behavior and returns the
* environment variable value using System.getenv() if it exists,
* or the System property value if it does not.
*
* A SecurityException may be thrown if access is not allowed
* for 'varname'.
* @param varname
* @return String value, or null if not defined
*/
protected String getenv(String varname) {
return System.getProperty(varname);
}
/**
* Deletes the file or directory with the given name.
* Directories must be empty to be removed.
* If this function fails, it throws and IOException
*
* @param filename
* @throws IOException if it fails
*/
protected void remove(String filename) throws IOException {
throw new IOException( "not implemented" );
}
/**
* Renames file or directory named oldname to newname.
* If this function fails,it throws and IOException
*
* @param oldname old file name
* @param newname new file name
* @throws IOException if it fails
*/
protected void rename(String oldname, String newname) throws IOException {
throw new IOException( "not implemented" );
}
/**
* Sets the current locale of the program. locale is a string specifying
* a locale; category is an optional string describing which category to change:
* "all", "collate", "ctype", "monetary", "numeric", or "time"; the default category
* is "all".
*
* If locale is the empty string, the current locale is set to an implementation-
* defined native locale. If locale is the string "C", the current locale is set
* to the standard C locale.
*
* When called with null as the first argument, this function only returns the
* name of the current locale for the given category.
*
* @param locale
* @param category
* @return the name of the new locale, or null if the request
* cannot be honored.
*/
	protected String setlocale(String locale, String category) {
		// Locale switching is not supported by this base implementation; it
		// always reports the standard C locale, regardless of the arguments.
		return "C";
	}
/**
* Returns the current time when called without arguments,
* or a time representing the date and time specified by the given table.
* This table must have fields year, month, and day,
* and may have fields hour, min, sec, and isdst
* (for a description of these fields, see the os.date function).
* @param table
* @return long value for the time
*/
protected double time(LuaTable table) {
java.util.Date d;
if (table == null) {
d = new java.util.Date();
} else {
Calendar c = Calendar.getInstance();
c.set(Calendar.YEAR, table.get("year").checkint());
c.set(Calendar.MONTH, table.get("month").checkint()-1);
c.set(Calendar.DAY_OF_MONTH, table.get("day").checkint());
c.set(Calendar.HOUR_OF_DAY, table.get("hour").optint(12));
c.set(Calendar.MINUTE, table.get("min").optint(0));
c.set(Calendar.SECOND, table.get("sec").optint(0));
c.set(Calendar.MILLISECOND, 0);
d = c.getTime();
}
return d.getTime() / 1000.;
}
/**
* Returns a string with a file name that can be used for a temporary file.
* The file must be explicitly opened before its use and explicitly removed
* when no longer needed.
*
* On some systems (POSIX), this function also creates a file with that name,
* to avoid security risks. (Someone else might create the file with wrong
* permissions in the time between getting the name and creating the file.)
* You still have to open the file to use it and to remove it (even if you
* do not use it).
*
* @return String filename to use
*/
protected String tmpname() {
synchronized ( OsLib.class ) {
return TMP_PREFIX+(tmpnames++)+TMP_SUFFIX;
}
}
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.uif.element;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.krad.datadictionary.parse.BeanTag;
import org.kuali.rice.krad.datadictionary.parse.BeanTagAttribute;
import org.kuali.rice.krad.datadictionary.parse.BeanTags;
import org.kuali.rice.krad.uif.component.Component;
import org.kuali.rice.krad.uif.component.ListAware;
import org.kuali.rice.krad.uif.container.Group;
import org.kuali.rice.krad.uif.lifecycle.ViewLifecycleRestriction;
import org.kuali.rice.krad.uif.util.LifecycleElement;
/**
* Renders a toggle menu (aka sub menu, dropdown menu) of items.
*
* <p>The toggle menu component can be used to build context menus or full application menus. Essentially the
* component is configured by first setting the text that will appear as a link (optionally with a caret). When the
 * user clicks the link, the items ({@link #getMenuItems()}) will be presented.</p>
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
@BeanTags({@BeanTag(name = "dropdownMenu", parent = "Uif-DropdownToggleMenu"),
        @BeanTag(name = "sidebarMenu", parent = "Uif-SidebarToggleMenu")})
public class ToggleMenu extends ContentElementBase implements ListAware {
    private static final long serialVersionUID = -1759659012620124641L;

    // Shortcut text for the toggle link; copied into toggleMessage during apply model
    private String toggleText;
    private Message toggleMessage;

    private String toggleCaretClass;
    private String iconClass;

    private boolean renderToggleButton;
    private boolean renderedInList;

    private List<Component> menuItems;
    private Group menuGroup;

    public ToggleMenu() {
        super();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void performInitialization(Object model) {
        super.performInitialization(model);

        // Push the configured menu items into the backing group. Guard against a
        // missing group so a partially configured bean (items set, no group) does
        // not throw a NullPointerException during initialization
        if ((this.menuItems != null) && !this.menuItems.isEmpty() && (this.menuGroup != null)) {
            this.menuGroup.setItems(menuItems);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void performApplyModel(Object model, LifecycleElement parent) {
        super.performApplyModel(model, parent);

        // toggleText is a shortcut for toggleMessage.messageText; only copy it over
        // when a message component exists and does not already have its own text
        if (StringUtils.isNotBlank(toggleText) && (toggleMessage != null)
                && StringUtils.isBlank(toggleMessage.getMessageText())) {
            toggleMessage.setMessageText(toggleText);
        }
    }

    /**
     * Text to display as the toggle menu toggle link
     *
     * <p>
     * This text will appear as a link for the user to click on, which then will bring up the
     * toggle menu. This property is a shortcut for {@link #getToggleMessage()}
     * {@link Message#setMessageText(String) .setMessageText}. This text is not required, in which
     * case only the caret will render
     * </p>
     *
     * @return text to display for the toggle menu toggle link
     */
    @BeanTagAttribute
    public String getToggleText() {
        return toggleText;
    }

    /**
     * @see ToggleMenu#getToggleText()
     */
    public void setToggleText(String toggleText) {
        this.toggleText = toggleText;
    }

    /**
     * {@code Message} component that is associated with the toggle menu toggle text, can be used to adjust styling
     * and so forth
     *
     * @return Message instance for toggle text
     */
    @BeanTagAttribute
    public Message getToggleMessage() {
        return toggleMessage;
    }

    /**
     * @see ToggleMenu#getToggleMessage()
     */
    public void setToggleMessage(Message toggleMessage) {
        this.toggleMessage = toggleMessage;
    }

    /**
     * Css class to use when rendering a caret icon which will appear to the right of the toggleText
     *
     * @return the caret icon class
     */
    @BeanTagAttribute
    public String getToggleCaretClass() {
        return toggleCaretClass;
    }

    /**
     * @see org.kuali.rice.krad.uif.element.ToggleMenu#getToggleCaretClass()
     */
    public void setToggleCaretClass(String toggleCaretClass) {
        this.toggleCaretClass = toggleCaretClass;
    }

    /**
     * Css class for an icon that will appear to the left of the toggleText
     *
     * @return the css class for an icon
     */
    @BeanTagAttribute
    public String getIconClass() {
        return iconClass;
    }

    /**
     * @see org.kuali.rice.krad.uif.element.ToggleMenu#getIconClass()
     */
    public void setIconClass(String iconClass) {
        this.iconClass = iconClass;
    }

    /**
     * Indicates whether a caret button should be rendered to the right of the toggle text (if present)
     *
     * @return boolean true if caret button should be rendered, false if not
     */
    @BeanTagAttribute
    public boolean isRenderToggleButton() {
        return renderToggleButton;
    }

    /**
     * @see ToggleMenu#isRenderToggleButton()
     */
    public void setRenderToggleButton(boolean renderToggleButton) {
        this.renderToggleButton = renderToggleButton;
    }

    /**
     * @see org.kuali.rice.krad.uif.component.ListAware#setRenderedInList(boolean)
     */
    @BeanTagAttribute
    public boolean isRenderedInList() {
        return renderedInList;
    }

    /**
     * @see ToggleMenu#isRenderedInList()
     */
    public void setRenderedInList(boolean renderedInList) {
        this.renderedInList = renderedInList;
    }

    /**
     * List of components that should be rendered for the toggle menu.
     *
     * <p>Items for the menu are configured through this list. The order of the items within the list is
     * the order they will appear in the toggle menu</p>
     *
     * @return List of menu items for the toggle menu
     */
    @ViewLifecycleRestriction
    @BeanTagAttribute
    public List<Component> getMenuItems() {
        return menuItems;
    }

    /**
     * @see ToggleMenu#getMenuItems()
     */
    public void setMenuItems(List<Component> menuItems) {
        this.menuItems = menuItems;
    }

    /**
     * Group instance that is rendered when the toggle menu is toggled.
     *
     * <p>Note in most cases this group will be a simple list group. The component allows for the list group
     * to be initialized in a base bean, then child beans can simply define the item using
     * {@link ToggleMenu#getMenuItems()}</p>
     *
     * @return Group instance
     */
    @BeanTagAttribute
    public Group getMenuGroup() {
        return menuGroup;
    }

    /**
     * @see ToggleMenu#getMenuGroup()
     */
    public void setMenuGroup(Group menuGroup) {
        this.menuGroup = menuGroup;
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.support.model;
import java.io.Serializable;
/**
* <p>
* The five most recent communications associated with the case.
* </p>
*/
public class RecentCaseCommunications implements Serializable, Cloneable {

    /** The five most recent communications associated with the case. */
    private com.amazonaws.internal.SdkInternalList<Communication> communications;

    /** A resumption point for pagination. */
    private String nextToken;

    /**
     * Returns the five most recent communications associated with the case.
     * The backing list is created lazily, so this accessor never returns
     * {@code null} (though the field itself may be).
     *
     * @return The five most recent communications associated with the case.
     */
    public java.util.List<Communication> getCommunications() {
        if (communications != null) {
            return communications;
        }
        communications = new com.amazonaws.internal.SdkInternalList<Communication>();
        return communications;
    }

    /**
     * Replaces the communications associated with the case with a copy of the
     * given collection, or clears them when {@code null} is passed.
     *
     * @param communications
     *        The five most recent communications associated with the case.
     */
    public void setCommunications(
            java.util.Collection<Communication> communications) {
        this.communications = (communications == null) ? null
                : new com.amazonaws.internal.SdkInternalList<Communication>(
                        communications);
    }

    /**
     * Appends the given communications to any already present.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setCommunications(java.util.Collection)} or
     * {@link #withCommunications(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param communications
     *        The five most recent communications associated with the case.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RecentCaseCommunications withCommunications(
            Communication... communications) {
        if (this.communications == null) {
            // Presize to the number of incoming elements before appending.
            this.communications = new com.amazonaws.internal.SdkInternalList<Communication>(
                    communications.length);
        }
        for (Communication communication : communications) {
            this.communications.add(communication);
        }
        return this;
    }

    /**
     * Replaces the communications associated with the case.
     *
     * @param communications
     *        The five most recent communications associated with the case.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RecentCaseCommunications withCommunications(
            java.util.Collection<Communication> communications) {
        setCommunications(communications);
        return this;
    }

    /**
     * Sets the resumption point for pagination.
     *
     * @param nextToken
     *        A resumption point for pagination.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the resumption point for pagination.
     *
     * @return A resumption point for pagination.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the resumption point for pagination.
     *
     * @param nextToken
     *        A resumption point for pagination.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RecentCaseCommunications withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getCommunications() != null) {
            sb.append("Communications: " + getCommunications() + ",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: " + getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof RecentCaseCommunications)) {
            return false;
        }
        RecentCaseCommunications other = (RecentCaseCommunications) obj;
        // Each field matches when both sides are null, or both are non-null
        // and equal.
        if ((other.getCommunications() == null) != (this.getCommunications() == null)) {
            return false;
        }
        if (other.getCommunications() != null
                && !other.getCommunications().equals(this.getCommunications())) {
            return false;
        }
        if ((other.getNextToken() == null) != (this.getNextToken() == null)) {
            return false;
        }
        if (other.getNextToken() != null
                && !other.getNextToken().equals(this.getNextToken())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getCommunications() == null) ? 0 : getCommunications().hashCode());
        hashCode = prime * hashCode
                + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public RecentCaseCommunications clone() {
        try {
            return (RecentCaseCommunications) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
package com.evollu.react.fcm;
import android.app.Activity;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.content.BroadcastReceiver;
import android.content.Intent;
import android.content.IntentFilter;
import com.facebook.react.bridge.ActivityEventListener;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import com.google.firebase.iid.FirebaseInstanceId;
import com.google.firebase.messaging.FirebaseMessaging;
import com.google.firebase.messaging.RemoteMessage;
import com.google.firebase.messaging.RemoteMessage.Notification;
import android.app.Application;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.NotificationManagerCompat;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.content.Context;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import com.google.firebase.FirebaseApp;
import static android.content.Context.NOTIFICATION_SERVICE;
public class FIRMessagingModule extends ReactContextBaseJavaModule implements LifecycleEventListener, ActivityEventListener {
    private final static String TAG = FIRMessagingModule.class.getCanonicalName();
    // Helper that presents, schedules and cancels local notifications.
    private FIRLocalMessagingHelper mFIRLocalMessagingHelper;
    // Helper that reads and writes the launcher badge count.
    private BadgeHelper mBadgeHelper;
    public FIRMessagingModule(ReactApplicationContext reactContext) {
        super(reactContext);
        mFIRLocalMessagingHelper = new FIRLocalMessagingHelper((Application) reactContext.getApplicationContext());
        mBadgeHelper = new BadgeHelper(reactContext.getApplicationContext());
        getReactApplicationContext().addLifecycleEventListener(this);
        getReactApplicationContext().addActivityEventListener(this);
        // Register all broadcast receivers up front so no events are missed.
        registerTokenRefreshHandler();
        registerMessageHandler();
        registerLocalMessageHandler();
    }
    // Name under which this module is exposed to JavaScript.
    @Override
    public String getName() {
        return "RNFIRMessaging";
    }
    // Resolves with the notification intent that launched the app, or null
    // when there is no current activity (e.g. called from a headless context).
    @ReactMethod
    public void getInitialNotification(Promise promise){
        Activity activity = getCurrentActivity();
        if(activity == null){
            promise.resolve(null);
            return;
        }
        promise.resolve(parseIntent(activity.getIntent()));
    }
    // Resolves true when notifications are enabled; rejects otherwise.
    // Note: this checks the current state, it does not show a system prompt.
    @ReactMethod
    public void requestPermissions(Promise promise){
        if(NotificationManagerCompat.from(getReactApplicationContext()).areNotificationsEnabled()){
            promise.resolve(true);
        } else {
            promise.reject(null, "Notification disabled");
        }
    }
    // Creates a notification channel (Android O+ only; a no-op on older
    // versions). Resolves null whether the channel was created or existed.
    @ReactMethod
    public void createNotificationChannel(ReadableMap details, Promise promise){
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationManager mngr = (NotificationManager) getReactApplicationContext().getSystemService(NOTIFICATION_SERVICE);
            String id = details.getString("id");
            String name = details.getString("name");
            String priority = details.getString("priority");
            // Map the JS-side priority string to the platform importance constant.
            int importance;
            switch(priority) {
                case "min":
                    importance = NotificationManager.IMPORTANCE_MIN;
                    break;
                case "low":
                    importance = NotificationManager.IMPORTANCE_LOW;
                    break;
                case "high":
                    importance = NotificationManager.IMPORTANCE_HIGH;
                    break;
                case "max":
                    importance = NotificationManager.IMPORTANCE_MAX;
                    break;
                default:
                    importance = NotificationManager.IMPORTANCE_DEFAULT;
            }
            // Channels are immutable once created; bail out if it already exists.
            if (mngr.getNotificationChannel(id) != null) {
                promise.resolve(null);
                return;
            }
            NotificationChannel channel = new NotificationChannel(
                    id,
                    name,
                    importance);
            // Configure the notification channel.
            if(details.hasKey("description")){
                channel.setDescription(details.getString("description"));
            }
            mngr.createNotificationChannel(channel);
        }
        promise.resolve(null);
    }
    // Deletes a notification channel by id (Android O+ only).
    @ReactMethod
    public void deleteNotificationChannel(String id, Promise promise) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationManager mngr = (NotificationManager) getReactApplicationContext().getSystemService(NOTIFICATION_SERVICE);
            mngr.deleteNotificationChannel(id);
        }
        promise.resolve(null);
    }
    // Resolves the device's FCM registration token for the default sender.
    @ReactMethod
    public void getFCMToken(Promise promise) {
        try {
            Log.d(TAG, "Firebase token: " + FirebaseInstanceId.getInstance().getToken());
            promise.resolve(FirebaseInstanceId.getInstance().getToken());
        } catch (Throwable e) {
            e.printStackTrace();
            promise.reject(null,e.getMessage());
        }
    }
    // Resolves an FCM token scoped to the app's configured GCM sender id.
    @ReactMethod
    public void getEntityFCMToken(Promise promise) {
        try {
            String senderId = FirebaseApp.getInstance().getOptions().getGcmSenderId();
            String token = FirebaseInstanceId.getInstance().getToken(senderId, "FCM");
            Log.d(TAG, "Firebase token: " + token);
            promise.resolve(token);
        } catch (Throwable e) {
            e.printStackTrace();
            promise.reject(null,e.getMessage());
        }
    }
    // Revokes the FCM token scoped to the app's configured GCM sender id.
    @ReactMethod
    public void deleteEntityFCMToken(Promise promise) {
        try {
            String senderId = FirebaseApp.getInstance().getOptions().getGcmSenderId();
            FirebaseInstanceId.getInstance().deleteToken(senderId, "FCM");
            promise.resolve(null);
        } catch (Throwable e) {
            e.printStackTrace();
            promise.reject(null,e.getMessage());
        }
    }
    // Deletes the whole Firebase instance id (invalidates all tokens).
    @ReactMethod
    public void deleteInstanceId(Promise promise){
        try {
            FirebaseInstanceId.getInstance().deleteInstanceId();
            promise.resolve(null);
        } catch (Exception e) {
            e.printStackTrace();
            promise.reject(null,e.getMessage());
        }
    }
    // Shows a local notification immediately.
    @ReactMethod
    public void presentLocalNotification(ReadableMap details) {
        Bundle bundle = Arguments.toBundle(details);
        mFIRLocalMessagingHelper.sendNotification(bundle);
    }
    // Schedules a local notification for later delivery.
    @ReactMethod
    public void scheduleLocalNotification(ReadableMap details) {
        Bundle bundle = Arguments.toBundle(details);
        mFIRLocalMessagingHelper.sendNotificationScheduled(bundle);
    }
    @ReactMethod
    public void cancelLocalNotification(String notificationID) {
        mFIRLocalMessagingHelper.cancelLocalNotification(notificationID);
    }
    @ReactMethod
    public void cancelAllLocalNotifications() {
        mFIRLocalMessagingHelper.cancelAllLocalNotifications();
    }
    @ReactMethod
    public void removeDeliveredNotification(String notificationID) {
        mFIRLocalMessagingHelper.removeDeliveredNotification(notificationID);
    }
    @ReactMethod
    public void removeAllDeliveredNotifications(){
        mFIRLocalMessagingHelper.removeAllDeliveredNotifications();
    }
    @ReactMethod
    public void subscribeToTopic(String topic, Promise promise){
        try {
            FirebaseMessaging.getInstance().subscribeToTopic(topic);
            promise.resolve(null);
        } catch (Exception e) {
            e.printStackTrace();
            promise.reject(null,e.getMessage());
        }
    }
    @ReactMethod
    public void unsubscribeFromTopic(String topic, Promise promise){
        try {
            FirebaseMessaging.getInstance().unsubscribeFromTopic(topic);
            promise.resolve(null);
        } catch (Exception e) {
            e.printStackTrace();
            promise.reject(null,e.getMessage());
        }
    }
    // Resolves the list of scheduled local notifications as a JS array.
    @ReactMethod
    public void getScheduledLocalNotifications(Promise promise){
        ArrayList<Bundle> bundles = mFIRLocalMessagingHelper.getScheduledLocalNotifications();
        WritableArray array = Arguments.createArray();
        for(Bundle bundle:bundles){
            array.pushMap(Arguments.fromBundle(bundle));
        }
        promise.resolve(array);
    }
    @ReactMethod
    public void setBadgeNumber(int badgeNumber) {
        mBadgeHelper.setBadgeCount(badgeNumber);
    }
    @ReactMethod
    public void getBadgeNumber(Promise promise) {
        promise.resolve(mBadgeHelper.getBadgeCount());
    }
    // Emits a device event to the JS side.
    private void sendEvent(String eventName, Object params) {
        getReactApplicationContext()
                .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
                .emit(eventName, params);
    }
    // Forwards FCM token refresh broadcasts to JS as "FCMTokenRefreshed".
    // Events are only emitted while a catalyst (JS) instance is active.
    private void registerTokenRefreshHandler() {
        IntentFilter intentFilter = new IntentFilter("com.evollu.react.fcm.FCMRefreshToken");
        LocalBroadcastManager.getInstance(getReactApplicationContext()).registerReceiver(new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                if (getReactApplicationContext().hasActiveCatalystInstance()) {
                    String token = intent.getStringExtra("token");
                    sendEvent("FCMTokenRefreshed", token);
                }
            }
        }, intentFilter);
    }
    // Sends an upstream FCM message built from the given payload map.
    @ReactMethod
    public void send(String senderId, ReadableMap payload) throws Exception {
        FirebaseMessaging fm = FirebaseMessaging.getInstance();
        RemoteMessage.Builder message = new RemoteMessage.Builder(senderId + "@gcm.googleapis.com")
                .setMessageId(UUID.randomUUID().toString());
        ReadableMapKeySetIterator iterator = payload.keySetIterator();
        while (iterator.hasNextKey()) {
            String key = iterator.nextKey();
            String value = getStringFromReadableMap(payload, key);
            message.addData(key, value);
        }
        fm.send(message.build());
    }
    // Converts a ReadableMap entry to a String; FCM data values must be strings.
    // Numbers are tried as int first, falling back to double.
    private String getStringFromReadableMap(ReadableMap map, String key) throws Exception {
        switch (map.getType(key)) {
            case String:
                return map.getString(key);
            case Number:
                try {
                    return String.valueOf(map.getInt(key));
                } catch (Exception e) {
                    return String.valueOf(map.getDouble(key));
                }
            case Boolean:
                return String.valueOf(map.getBoolean(key));
            default:
                throw new Exception("Unknown data type: " + map.getType(key).name() + " for message key " + key );
        }
    }
    // Forwards received remote messages to JS as "FCMNotificationReceived",
    // flattening the notification fields under "fcm" and copying data entries
    // to the top level of the event payload.
    private void registerMessageHandler() {
        IntentFilter intentFilter = new IntentFilter("com.evollu.react.fcm.ReceiveNotification");
        LocalBroadcastManager.getInstance(getReactApplicationContext()).registerReceiver(new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                if (getReactApplicationContext().hasActiveCatalystInstance()) {
                    RemoteMessage message = intent.getParcelableExtra("data");
                    WritableMap params = Arguments.createMap();
                    WritableMap fcmData = Arguments.createMap();
                    if (message.getNotification() != null) {
                        Notification notification = message.getNotification();
                        fcmData.putString("title", notification.getTitle());
                        fcmData.putString("body", notification.getBody());
                        fcmData.putString("color", notification.getColor());
                        fcmData.putString("icon", notification.getIcon());
                        fcmData.putString("tag", notification.getTag());
                        fcmData.putString("action", notification.getClickAction());
                    }
                    params.putMap("fcm", fcmData);
                    params.putString("collapse_key", message.getCollapseKey());
                    params.putString("from", message.getFrom());
                    params.putString("google.message_id", message.getMessageId());
                    params.putDouble("google.sent_time", message.getSentTime());
                    if(message.getData() != null){
                        Map<String, String> data = message.getData();
                        Set<String> keysIterator = data.keySet();
                        for(String key: keysIterator){
                            params.putString(key, data.get(key));
                        }
                    }
                    sendEvent("FCMNotificationReceived", params);
                }
            }
        }, intentFilter);
    }
    // Forwards locally-generated notifications to JS under the same event name
    // as remote messages.
    private void registerLocalMessageHandler() {
        IntentFilter intentFilter = new IntentFilter("com.evollu.react.fcm.ReceiveLocalNotification");
        LocalBroadcastManager.getInstance(getReactApplicationContext()).registerReceiver(new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                if (getReactApplicationContext().hasActiveCatalystInstance()) {
                    sendEvent("FCMNotificationReceived", Arguments.fromBundle(intent.getExtras()));
                }
            }
        }, intentFilter);
    }
    // Converts a launch intent into the event payload shape JS expects:
    // intent extras at the top level, the intent action under "fcm.action",
    // and opened_from_tray set to flag tray-initiated launches.
    private WritableMap parseIntent(Intent intent){
        WritableMap params;
        Bundle extras = intent.getExtras();
        if (extras != null) {
            try {
                params = Arguments.fromBundle(extras);
            } catch (Exception e){
                Log.e(TAG, e.getMessage());
                params = Arguments.createMap();
            }
        } else {
            params = Arguments.createMap();
        }
        WritableMap fcm = Arguments.createMap();
        fcm.putString("action", intent.getAction());
        params.putMap("fcm", fcm);
        params.putInt("opened_from_tray", 1);
        return params;
    }
    // Track foreground state so the local helper can decide how to present
    // notifications.
    @Override
    public void onHostResume() {
        mFIRLocalMessagingHelper.setApplicationForeground(true);
    }
    @Override
    public void onHostPause() {
        mFIRLocalMessagingHelper.setApplicationForeground(false);
    }
    @Override
    public void onHostDestroy() {
    }
    @Override
    public void onActivityResult(Activity activity, int requestCode, int resultCode, Intent data) {
    }
    // A new intent while the app is running is treated as a tray-opened
    // notification and forwarded to JS.
    @Override
    public void onNewIntent(Intent intent){
        sendEvent("FCMNotificationReceived", parseIntent(intent));
    }
}
| |
package com.jayantkrish.jklol.ccg.supertag;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import com.jayantkrish.jklol.ccg.CcgExample;
import com.jayantkrish.jklol.ccg.HeadedSyntacticCategory;
import com.jayantkrish.jklol.ccg.SyntacticCategory;
import com.jayantkrish.jklol.ccg.cli.TrainSyntacticCcgParser;
import com.jayantkrish.jklol.cli.AbstractCli;
import com.jayantkrish.jklol.evaluation.Example;
import com.jayantkrish.jklol.models.DiscreteVariable;
import com.jayantkrish.jklol.models.TableFactor;
import com.jayantkrish.jklol.models.TableFactorBuilder;
import com.jayantkrish.jklol.models.VariableNumMap;
import com.jayantkrish.jklol.models.dynamic.DynamicAssignment;
import com.jayantkrish.jklol.models.parametric.ParametricFactorGraph;
import com.jayantkrish.jklol.pos.WordPrefixSuffixFeatureGenerator;
import com.jayantkrish.jklol.preprocessing.DictionaryFeatureVectorGenerator;
import com.jayantkrish.jklol.preprocessing.FeatureGenerator;
import com.jayantkrish.jklol.preprocessing.FeatureGenerators;
import com.jayantkrish.jklol.preprocessing.FeatureVectorGenerator;
import com.jayantkrish.jklol.sequence.ConvertingLocalContext;
import com.jayantkrish.jklol.sequence.FactorGraphSequenceTagger;
import com.jayantkrish.jklol.sequence.ListTaggedSequence;
import com.jayantkrish.jklol.sequence.LocalContext;
import com.jayantkrish.jklol.sequence.TaggedSequence;
import com.jayantkrish.jklol.sequence.TaggerUtils;
import com.jayantkrish.jklol.tensor.DenseTensor;
import com.jayantkrish.jklol.tensor.SparseTensorBuilder;
import com.jayantkrish.jklol.tensor.Tensor;
import com.jayantkrish.jklol.training.GradientOptimizer;
import com.jayantkrish.jklol.util.CountAccumulator;
import com.jayantkrish.jklol.util.IndexedList;
import com.jayantkrish.jklol.util.IoUtils;
import com.jayantkrish.jklol.util.PairCountAccumulator;
/**
* Trains a CCG supertagger. The supertagger takes as input a
* POS-tagged sentence, and predicts a sequence of CCG syntactic
* categories.
*
* @author jayantk
*/
public class TrainSupertagger extends AbstractCli {
  // Required CLI arguments: training data file, serialized model output path,
  // and the syntactic category mapping file.
  private OptionSpec<String> trainingFilename;
  private OptionSpec<String> modelOutput;
  private OptionSpec<String> syntaxMap;
  // Model construction options.
  private OptionSpec<Void> noTransitions;
  private OptionSpec<Void> locallyNormalized;
  private OptionSpec<Void> maxMargin;
  // Count thresholds that control feature and label pruning (see
  // initializeOptions for defaults and historical values).
  private OptionSpec<Integer> commonWordCountThreshold;
  private OptionSpec<Integer> labelRestrictionCountThreshold;
  private OptionSpec<Integer> posContextFeatureCountThreshold;
  private OptionSpec<Integer> prefixSuffixFeatureCountThreshold;
  // Optional file of pre-trained word embedding vectors.
  private OptionSpec<String> wordEmbeddingFeatures;
  private OptionSpec<Void> usePosWithEmbedding;
  // NOTE(review): these constants are not referenced in this chunk; presumably
  // used for unknown-word handling in feature generation — confirm below.
  private static final String UNK_PREFIX = "UNK-";
  private static final String EMBEDDING_UNKNOWN_WORD = "*UNKNOWN*";
  public TrainSupertagger() {
    // Enable the shared CLI option groups for gradient-based training and
    // map-reduce parallelism provided by AbstractCli.
    super(CommonOptions.STOCHASTIC_GRADIENT, CommonOptions.LBFGS, CommonOptions.MAP_REDUCE);
  }
  @Override
  public void initializeOptions(OptionParser parser) {
    // Required I/O arguments.
    trainingFilename = parser.accepts("training").withRequiredArg()
        .ofType(String.class).required();
    modelOutput = parser.accepts("output").withRequiredArg().ofType(String.class).required();
    syntaxMap = parser.accepts("syntaxMap").withRequiredArg().ofType(String.class).required();
    // Model structure flags.
    noTransitions = parser.accepts("noTransitions");
    locallyNormalized = parser.accepts("locallyNormalized");
    maxMargin = parser.accepts("maxMargin");
    // Pruning thresholds; current defaults keep everything (previously-used
    // values are noted inline).
    commonWordCountThreshold = parser.accepts("commonWordThreshold").withRequiredArg()
        .ofType(Integer.class).defaultsTo(0); // old value: 5
    labelRestrictionCountThreshold = parser.accepts("labelRestrictionThreshold").withRequiredArg()
        .ofType(Integer.class).defaultsTo(Integer.MAX_VALUE); // old value: 20
    posContextFeatureCountThreshold = parser.accepts("posContextFeatureCountThreshold").withRequiredArg()
        .ofType(Integer.class).defaultsTo(0); // old value: 30
    prefixSuffixFeatureCountThreshold = parser.accepts("prefixSuffixThreshold").withRequiredArg()
        .ofType(Integer.class).defaultsTo(0); // old values: 10, 35
    // Optional word embedding features.
    wordEmbeddingFeatures = parser.accepts("wordEmbeddingFeatures").withRequiredArg().ofType(String.class);
    usePosWithEmbedding = parser.accepts("usePosWithEmbedding");
  }
  @Override
  public void run(OptionSet options) {
    // Read in the training data as sentences, to use for
    // feature generation.
    System.out.println("Reading training data...");
    List<CcgExample> ccgExamples = TrainSyntacticCcgParser.readTrainingData(
        options.valueOf(trainingFilename), true, true, options.valueOf(syntaxMap));
    System.out.println("Reformatting training data...");
    List<TaggedSequence<WordAndPos, HeadedSyntacticCategory>> trainingData =
        reformatTrainingExamples(ccgExamples, true);
    // Optionally load pre-trained word embeddings used as extra features.
    Map<String, Tensor> wordEmbeddings = null;
    if (options.has(wordEmbeddingFeatures)) {
      wordEmbeddings = readWordVectors(options.valueOf(wordEmbeddingFeatures));
    }
    System.out.println("Generating features...");
    FeatureVectorGenerator<LocalContext<WordAndPos>> featureGen =
        buildFeatureVectorGenerator(TaggerUtils.extractContextsFromData(trainingData), wordEmbeddings,
            options.valueOf(commonWordCountThreshold), options.valueOf(posContextFeatureCountThreshold),
            options.valueOf(prefixSuffixFeatureCountThreshold), options.has(usePosWithEmbedding));
    System.out.println(featureGen.getNumberOfFeatures() + " features per CCG category.");
    System.out.println("Generating label restrictions...");
    // Locally-normalized models need an explicit synthetic start input/label.
    WordAndPos startInput = null;
    HeadedSyntacticCategory startLabel = null;
    if (options.has(locallyNormalized)) {
      startInput = new WordAndPos("<START>", "<START>");
      startLabel = HeadedSyntacticCategory.parseFrom("START{0}");
    }
    TableFactor labelRestrictions = getLabelRestrictions(trainingData, options.valueOf(labelRestrictionCountThreshold),
        startInput, startLabel);
    // Build the parametric sequence model over the input/label vocabularies
    // induced by the label restrictions.
    DiscreteVariable inputVariable = (DiscreteVariable) labelRestrictions.getVars().getVariable(0);
    DiscreteVariable labelVariable = (DiscreteVariable) labelRestrictions.getVars().getVariable(1);
    ParametricFactorGraph sequenceModelFamily = TaggerUtils.buildFeaturizedSequenceModel(
        inputVariable, labelVariable, featureGen.getFeatureDictionary(), labelRestrictions.getWeights(),
        options.has(noTransitions), options.has(locallyNormalized));
    GradientOptimizer trainer = createGradientOptimizer(trainingData.size());
    Function<LocalContext<WordAndPos>, String> inputGen = new WordAndPosToInput(inputVariable);
    // Reformat the training examples to be suitable for training
    // a factor graph.
    System.out.println("Reformatting training data...");
    List<Example<DynamicAssignment, DynamicAssignment>> examples = null;
    if (options.has(locallyNormalized)) {
      examples = TaggerUtils.reformatTrainingDataPerItem(trainingData, featureGen, inputGen,
          sequenceModelFamily.getVariables(), startInput, startLabel);
    } else {
      examples = TaggerUtils.reformatTrainingData(trainingData, featureGen, inputGen,
          sequenceModelFamily.getVariables(), startInput, startLabel);
    }
    FactorGraphSequenceTagger<WordAndPos, HeadedSyntacticCategory> tagger = TaggerUtils.trainSequenceModel(
        sequenceModelFamily, examples, HeadedSyntacticCategory.class, featureGen, inputGen, startInput,
        startLabel, trainer, options.has(maxMargin));
    // Save model to disk.
    System.out.println("Serializing trained model...");
    FactorGraphSupertagger supertagger = new FactorGraphSupertagger(tagger.getModelFamily(),
        tagger.getParameters(), tagger.getInstantiatedModel(), tagger.getFeatureGenerator(),
        tagger.getInputGenerator(), tagger.getMaxMarginalCalculator(), tagger.getMarginalCalculator(),
        tagger.getStartInput(), tagger.getStartLabel());
    IoUtils.serializeObjectToFile(supertagger, options.valueOf(modelOutput));
  }
/**
 * Reads word embedding vectors from {@code filename}. Each line is expected
 * to contain a word followed by whitespace-separated numeric values, e.g.
 * {@code "dog 0.12 -0.5 ..."}.
 *
 * @param filename file containing one embedding per line
 * @return map from each word to its embedding tensor
 */
private static Map<String, Tensor> readWordVectors(String filename) {
  Map<String, Tensor> tensorMap = Maps.newHashMap();
  // Embeddings are one-dimensional tensors over dimension 0.
  int[] dims = new int[] {0};
  int[] sizes = null;
  for (String line : IoUtils.readLines(filename)) {
    // Split on runs of whitespace ("\\s+" rather than "\\s") so multiple
    // spaces or tabs between values don't produce empty tokens, which
    // would throw NumberFormatException in the parse loop below.
    String[] parts = line.trim().split("\\s+");
    if (parts.length < 2) {
      // Skip blank or malformed lines instead of crashing (a word with no
      // values would also poison `sizes` with a zero dimensionality).
      continue;
    }
    String word = parts[0];
    double[] values = new double[parts.length - 1];
    if (sizes == null) {
      // Infer the embedding dimensionality from the first data line.
      sizes = new int[] {parts.length - 1};
    }
    for (int i = 1; i < parts.length; i++) {
      values[i - 1] = Double.parseDouble(parts[i]);
    }
    Tensor tensor = new DenseTensor(dims, sizes, values);
    tensorMap.put(word, tensor);
  }
  return tensorMap;
}
/**
 * Converts {@code ccgExamples} into word sequences tagged with headed
 * syntactic categories, suitable for training a supertagger.
 *
 * <p>Every example must have a syntactic parse. If a word in an example has
 * no headed syntactic category and {@code ignoreInvalid} is {@code true},
 * the whole sentence is discarded and a diagnostic is printed to stdout;
 * if {@code ignoreInvalid} is {@code false}, the sentence is retained with
 * {@code null} labels for such words.
 *
 * @param ccgExamples parsed CCG training examples; each must have a
 *     syntactic parse
 * @param ignoreInvalid whether to discard sentences containing words
 *     without a headed syntactic category
 * @return one tagged word/POS sequence per retained example
 */
public static List<TaggedSequence<WordAndPos, HeadedSyntacticCategory>> reformatTrainingExamples(
    Collection<CcgExample> ccgExamples, boolean ignoreInvalid) {
  List<TaggedSequence<WordAndPos, HeadedSyntacticCategory>> examples = Lists.newArrayList();
  for (CcgExample example : ccgExamples) {
    Preconditions.checkArgument(example.hasSyntacticParse());
    List<WordAndPos> taggedWords = WordAndPos.createExample(example.getSentence().getWords(),
        example.getSentence().getPosTags());
    List<HeadedSyntacticCategory> syntacticCategories = example.getSyntacticParse()
        .getAllSpannedHeadedSyntacticCategories();
    if (!ignoreInvalid || !syntacticCategories.contains(null)) {
      examples.add(new ListTaggedSequence<WordAndPos, HeadedSyntacticCategory>(taggedWords, syntacticCategories));
    } else {
      // Report exactly which words lacked a headed category so the
      // training data can be inspected.
      List<SyntacticCategory> unheadedCategories = example.getSyntacticParse().getAllSpannedLexiconEntries();
      System.out.println("Discarding sentence: " + taggedWords);
      for (int i = 0; i < taggedWords.size(); i++) {
        if (syntacticCategories.get(i) == null) {
          System.out.println("No headed syntactic category for: " + taggedWords.get(i) + " " + unheadedCategories.get(i));
        }
      }
    }
  }
  return examples;
}
/**
 * Builds the feature vector generator used for supertagging, combining
 * word/POS window features, POS context features, word prefix/suffix
 * features and (optionally) word embedding features. Each feature family
 * is count-filtered against its own threshold so rare features are
 * excluded from the dictionary.
 *
 * @param contexts local contexts extracted from the training data
 * @param wordEmbeddings map from word to embedding tensor; may be null to
 *     disable embedding features
 * @param commonWordCountThreshold minimum count for a word to be treated
 *     as common and for word/POS features to be kept
 * @param posContextCountThreshold minimum count for POS context features
 * @param prefixSuffixCountThreshold minimum count for prefix/suffix features
 * @param usePosWithEmbedding flag passed through to
 *     EmbeddingFeatureGenerator — presumably conjoins the POS tag with
 *     embedding features; TODO confirm against that class
 * @return dictionary-backed feature vector generator over local contexts
 */
public static FeatureVectorGenerator<LocalContext<WordAndPos>> buildFeatureVectorGenerator(
    List<LocalContext<WordAndPos>> contexts, Map<String, Tensor> wordEmbeddings,
    int commonWordCountThreshold, int posContextCountThreshold, int prefixSuffixCountThreshold,
    boolean usePosWithEmbedding) {
  // Count raw word occurrences to decide which words are "common".
  CountAccumulator<String> wordCounts = CountAccumulator.create();
  for (LocalContext<WordAndPos> context : contexts) {
    wordCounts.increment(context.getItem().getWord(), 1.0);
  }
  Set<String> commonWords = Sets.newHashSet(wordCounts.getKeysAboveCountThreshold(
      commonWordCountThreshold));
  // Build a dictionary of words and POS tags which occur frequently
  // enough in the data set.
  // Word/POS features drawn from a 5-token window around the current word.
  FeatureGenerator<LocalContext<WordAndPos>, String> wordGen = new
      WordAndPosFeatureGenerator(new int[] { -2, -1, 0, 1, 2 }, commonWords);
  CountAccumulator<String> wordPosFeatureCounts = FeatureGenerators.getFeatureCounts(wordGen, contexts);
  // POS features over various offset patterns (unigrams through trigrams,
  // plus skip patterns) around the current word.
  FeatureGenerator<LocalContext<WordAndPos>, String> posContextGen = new
      PosContextFeatureGenerator(new int[][] {{-2}, {-1}, {0}, {1}, {2}, {-1, 0}, {0, 1},
          {-2, -1, 0}, {-1, 0, 1}, {0, 1, 2}, {-2, 0}, {-1, 1}, {0, 2}});
  CountAccumulator<String> posContextFeatureCounts = FeatureGenerators.getFeatureCounts(posContextGen, contexts);
  // Generate prefix/suffix features for common prefixes and suffixes.
  // The converter strips the POS tag so the generator sees bare words.
  FeatureGenerator<LocalContext<WordAndPos>, String> prefixGen =
      FeatureGenerators.convertingFeatureGenerator(new WordPrefixSuffixFeatureGenerator(1, 1, 2, 5, commonWords),
          new WordAndPosContextToWordContext());
  // Count feature occurrences and discard infrequent features.
  CountAccumulator<String> prefixFeatureCounts = FeatureGenerators.getFeatureCounts(prefixGen, contexts);
  IndexedList<String> featureDictionary = IndexedList.create();
  List<FeatureGenerator<LocalContext<WordAndPos>, String>> featureGenerators = Lists.newArrayList();
  featureGenerators.add(wordGen);
  featureGenerators.add(posContextGen);
  featureGenerators.add(prefixGen);
  // NOTE(review): thresholds are passed as (threshold - 1), presumably
  // because getKeysAboveCountThreshold is strict, so features with
  // count == threshold are retained — confirm against CountAccumulator.
  Set<String> frequentWordFeatures = wordPosFeatureCounts.getKeysAboveCountThreshold(commonWordCountThreshold - 1);
  Set<String> frequentContextFeatures = posContextFeatureCounts.getKeysAboveCountThreshold(posContextCountThreshold - 1);
  Set<String> frequentPrefixFeatures = prefixFeatureCounts.getKeysAboveCountThreshold(prefixSuffixCountThreshold - 1);
  featureDictionary.addAll(frequentWordFeatures);
  featureDictionary.addAll(frequentContextFeatures);
  featureDictionary.addAll(frequentPrefixFeatures);
  System.out.println(frequentWordFeatures.size() + " word and POS features");
  System.out.println(frequentContextFeatures.size() + " POS context features");
  System.out.println(frequentPrefixFeatures.size() + " prefix/suffix features");
  if (wordEmbeddings != null) {
    // Embedding features are not count-thresholded: every observed
    // feature key is added to the dictionary.
    EmbeddingFeatureGenerator embeddingFeatureGenerator = new EmbeddingFeatureGenerator(
        wordEmbeddings, EMBEDDING_UNKNOWN_WORD, usePosWithEmbedding);
    featureGenerators.add(embeddingFeatureGenerator);
    CountAccumulator<String> embeddingFeatureCounts = FeatureGenerators.getFeatureCounts(embeddingFeatureGenerator, contexts);
    Set<String> embeddingFeatures = embeddingFeatureCounts.keySet();
    System.out.println(embeddingFeatures.size() + " word embedding features");
    featureDictionary.addAll(embeddingFeatures);
  }
  FeatureGenerator<LocalContext<WordAndPos>, String> featureGen = FeatureGenerators
      .<LocalContext<WordAndPos>, String>combinedFeatureGenerator(featureGenerators);
  return new DictionaryFeatureVectorGenerator<LocalContext<WordAndPos>, String>(
      featureDictionary, featureGen, true);
}
/**
 * Builds an indicator factor over (input, label) pairs restricting each
 * input symbol to the syntactic categories it was observed with in
 * {@code trainingData}.
 *
 * <p>Words occurring at least {@code minWordCount} times become inputs in
 * their own right; every POS tag additionally becomes a backoff input
 * named {@code UNK_PREFIX + pos} licensing all categories seen with that
 * tag.
 *
 * @param trainingData tagged training sequences
 * @param minWordCount minimum occurrence count for a word to get its own
 *     entry (rarer words back off to their POS tag)
 * @param startInput artificial start-of-sequence input, or null
 * @param startLabel artificial start-of-sequence label, or null
 * @return factor with weight 1.0 for each permitted (input, label) pair
 */
private static TableFactor getLabelRestrictions(List<TaggedSequence<WordAndPos, HeadedSyntacticCategory>> trainingData,
    int minWordCount, WordAndPos startInput, HeadedSyntacticCategory startLabel) {
  PairCountAccumulator<String, HeadedSyntacticCategory> wordCategoryCounts = PairCountAccumulator.create();
  PairCountAccumulator<String, HeadedSyntacticCategory> posCategoryCounts = PairCountAccumulator.create();
  Set<HeadedSyntacticCategory> validCategories = Sets.newHashSet();
  // Count cooccurrences between words/POS-tags and their labels.
  for (TaggedSequence<WordAndPos, HeadedSyntacticCategory> seq : trainingData) {
    List<WordAndPos> items = seq.getItems();
    List<HeadedSyntacticCategory> labels = seq.getLabels();
    for (int i = 0; i < items.size(); i++) {
      wordCategoryCounts.incrementOutcome(items.get(i).getWord(), labels.get(i), 1.0);
      posCategoryCounts.incrementOutcome(items.get(i).getPos(), labels.get(i), 1.0);
    }
    validCategories.addAll(labels);
  }
  if (startLabel != null) {
    validCategories.add(startLabel);
  }
  System.out.println(validCategories.size() + " CCG categories");
  // Inputs: frequent words, the optional start input, and one
  // UNK_PREFIX+pos backoff symbol per observed POS tag.
  Set<String> inputSet = Sets.newHashSet();
  for (String word : wordCategoryCounts.keySet()) {
    if (wordCategoryCounts.getTotalCount(word) >= minWordCount) {
      inputSet.add(word);
    }
  }
  if (startInput != null) {
    inputSet.add(startInput.getWord());
  }
  System.out.println(inputSet.size() + " words with count >= " + minWordCount);
  for (String pos : posCategoryCounts.keySet()) {
    inputSet.add(UNK_PREFIX + pos);
  }
  DiscreteVariable inputVariable = new DiscreteVariable("input", inputSet);
  DiscreteVariable labelVariable = new DiscreteVariable("labels", validCategories);
  VariableNumMap inputLabelVars = new VariableNumMap(Ints.asList(0, 1),
      Lists.newArrayList("input", "label"), Lists.newArrayList(inputVariable, labelVariable));
  TableFactorBuilder builder = new TableFactorBuilder(inputLabelVars, SparseTensorBuilder.getFactory());
  // Weight 1.0 marks each observed (input, label) combination.
  for (String word : wordCategoryCounts.keySet()) {
    if (wordCategoryCounts.getTotalCount(word) >= minWordCount) {
      for (HeadedSyntacticCategory cat : wordCategoryCounts.getValues(word)) {
        builder.setWeight(1.0, word, cat);
      }
    }
  }
  for (String pos : posCategoryCounts.keySet()) {
    for (HeadedSyntacticCategory cat : posCategoryCounts.getValues(pos)) {
      builder.setWeight(1.0, UNK_PREFIX + pos, cat);
    }
  }
  if (startInput != null) {
    builder.setWeight(1.0, startInput.getWord(), startLabel);
  }
  return builder.build();
}
/** Command-line entry point; delegates all work to {@code run}. */
public static void main(String[] args) {
  TrainSupertagger program = new TrainSupertagger();
  program.run(args);
}
/** Projects a {@link WordAndPos} onto its word, discarding the POS tag. */
private static class WordAndPosToWord implements Function<WordAndPos, String>, Serializable {
  private static final long serialVersionUID = 1L;

  @Override
  public String apply(WordAndPos item) {
    return item.getWord();
  }
}
/**
 * Maps a local context to its model input symbol: known words map to
 * themselves, everything else backs off to a POS-based unknown token.
 */
private static class WordAndPosToInput implements Function<LocalContext<WordAndPos>, String>, Serializable {
  private static final long serialVersionUID = 1L;

  private final DiscreteVariable inputVar;

  public WordAndPosToInput(DiscreteVariable inputVar) {
    this.inputVar = Preconditions.checkNotNull(inputVar);
  }

  @Override
  public String apply(LocalContext<WordAndPos> context) {
    WordAndPos item = context.getItem();
    return inputVar.canTakeValue(item.getWord())
        ? item.getWord()
        : UNK_PREFIX + item.getPos();
  }
}
/**
 * Adapts a word/POS local context into a word-only local context by
 * converting each item with {@link WordAndPosToWord}.
 */
private static class WordAndPosContextToWordContext implements Function<LocalContext<WordAndPos>, LocalContext<String>>, Serializable {
  private static final long serialVersionUID = 1L;

  private final Function<WordAndPos, String> toWord;

  public WordAndPosContextToWordContext() {
    toWord = new WordAndPosToWord();
  }

  @Override
  public LocalContext<String> apply(LocalContext<WordAndPos> original) {
    return new ConvertingLocalContext<WordAndPos, String>(original, toWord);
  }
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl.quickfix;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.controlFlow.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
public class DeferFinalAssignmentFix implements IntentionAction {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.quickfix.DeferFinalAssignmentFix");
private final PsiVariable variable;
private final PsiReferenceExpression expression;
public DeferFinalAssignmentFix(@NotNull PsiVariable variable, @NotNull PsiReferenceExpression expression) {
this.variable = variable;
this.expression = expression;
}
@Override
@NotNull
public String getFamilyName() {
return QuickFixBundle.message("defer.final.assignment.with.temp.family");
}
@Override
@NotNull
public String getText() {
return QuickFixBundle.message("defer.final.assignment.with.temp.text", variable.getName());
}
@Override
public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
if (!FileModificationService.getInstance().prepareFileForWrite(variable.getContainingFile())) return;
if (variable instanceof PsiField) {
deferField((PsiField)variable);
}
else {
deferLocalVariable((PsiLocalVariable)variable);
}
}
private void deferField(PsiField field) throws IncorrectOperationException {
PsiCodeBlock codeBlock = getEnclosingCodeBlock(field, expression);
if (codeBlock == null) return;
deferVariable(codeBlock, field, null);
}
private static PsiCodeBlock getEnclosingCodeBlock(PsiField field, PsiElement element) {
PsiClass aClass = field.getContainingClass();
if (aClass == null) return null;
PsiMethod[] constructors = aClass.getConstructors();
for (PsiMethod constructor : constructors) {
PsiCodeBlock body = constructor.getBody();
if (body == null) continue;
if (PsiTreeUtil.isAncestor(body, element, true)) return body;
}
//maybe inside class initalizer ?
PsiClassInitializer[] initializers = aClass.getInitializers();
for (PsiClassInitializer initializer : initializers) {
PsiCodeBlock body = initializer.getBody();
if (PsiTreeUtil.isAncestor(body, element, true)) return body;
}
return null;
}
private void deferLocalVariable(PsiLocalVariable variable) throws IncorrectOperationException {
PsiElement outerCodeBlock = PsiUtil.getVariableCodeBlock(variable, null);
deferVariable(outerCodeBlock, variable, variable.getParent());
}
private void deferVariable(PsiElement outerCodeBlock, PsiVariable variable, PsiElement tempDeclarationAnchor) throws IncorrectOperationException {
if (outerCodeBlock == null) return;
List<PsiReferenceExpression> outerReferences = new ArrayList<>();
collectReferences(outerCodeBlock, variable, outerReferences);
PsiElementFactory factory = JavaPsiFacade.getInstance(variable.getProject()).getElementFactory();
Project project = variable.getProject();
String tempName = suggestNewName(project, variable);
PsiDeclarationStatement tempVariableDeclaration = factory.createVariableDeclarationStatement(tempName, variable.getType(), null);
ControlFlow controlFlow;
try {
controlFlow = ControlFlowFactory.getInstance(project).getControlFlow(outerCodeBlock, LocalsOrMyInstanceFieldsControlFlowPolicy.getInstance(), false);
}
catch (AnalysisCanceledException e) {
return;
}
int minOffset = 0;
boolean writeReferenceOccurred = false;
PsiReferenceExpression writeReference = null;
for (int i = outerReferences.size()-1; i>=0; i--) {
PsiReferenceExpression reference = outerReferences.get(i);
if (!writeReferenceOccurred && !PsiUtil.isAccessedForWriting(reference)) {
// trailing read references need not be converted to temp var references
outerReferences.remove(i);
continue;
}
writeReferenceOccurred = true;
writeReference = reference;
PsiElement element = PsiUtil.getEnclosingStatement(reference);
int endOffset = element == null ? -1 : controlFlow.getEndOffset(element);
minOffset = Math.max(minOffset, endOffset);
}
LOG.assertTrue(writeReference != null);
PsiStatement finalAssignment = factory.createStatementFromText(writeReference.getText()+" = "+tempName+";", outerCodeBlock);
if (!insertToDefinitelyReachedPlace(outerCodeBlock, finalAssignment, controlFlow, minOffset, outerReferences)) return;
outerCodeBlock.addAfter(tempVariableDeclaration, tempDeclarationAnchor);
replaceReferences(outerReferences, factory.createExpressionFromText(tempName, outerCodeBlock));
}
private static boolean insertToDefinitelyReachedPlace(PsiElement codeBlock,
PsiStatement finalAssignment,
ControlFlow controlFlow,
int minOffset,
List references) throws IncorrectOperationException {
int offset = ControlFlowUtil.getMinDefinitelyReachedOffset(controlFlow, minOffset, references);
if (offset == controlFlow.getSize()) {
codeBlock.add(finalAssignment);
return true;
}
PsiElement element = null; //controlFlow.getEndOffset(codeBlock) == offset ? getEnclosingStatement(controlFlow.getElement(offset)) : null;
while (offset < controlFlow.getSize()) {
element = controlFlow.getElement(offset);
while (element != null) {
if (element.getParent() == codeBlock) break;
element = element.getParent();
}
int startOffset = controlFlow.getStartOffset(element);
if (startOffset != -1 && startOffset >= minOffset && element instanceof PsiStatement) break;
offset++;
}
if (!(offset < controlFlow.getSize())) return false;
// inside loop
if (ControlFlowUtil.isInstructionReachable(controlFlow, offset, offset)) return false;
codeBlock.addBefore(finalAssignment, element);
return true;
}
private static void replaceReferences(List references, PsiElement newExpression) throws IncorrectOperationException {
for (Object reference1 : references) {
PsiElement reference = (PsiElement)reference1;
reference.replace(newExpression);
}
}
private static void collectReferences(PsiElement context, final PsiVariable variable, final List<PsiReferenceExpression> references) {
context.accept(new JavaRecursiveElementWalkingVisitor() {
@Override public void visitReferenceExpression(PsiReferenceExpression expression) {
if (expression.resolve() == variable) references.add(expression);
super.visitReferenceExpression(expression);
}
});
}
private static String suggestNewName(Project project, PsiVariable variable) {
// new name should not conflict with another variable at the variable declaration level and usage level
String name = variable.getName();
// trim last digit to suggest variable names like i1,i2, i3...
if (name.length() > 1 && Character.isDigit(name.charAt(name.length()-1))) {
name = name.substring(0,name.length()-1);
}
return JavaCodeStyleManager.getInstance(project).suggestUniqueVariableName(name, variable, true);
}
@Override
public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
return
variable.isValid() &&
!(variable instanceof PsiParameter) &&
!(variable instanceof ImplicitVariable) &&
expression.isValid() &&
variable.getManager().isInProject(variable)
;
}
@Override
public boolean startInWriteAction() {
return true;
}
}
| |
package com.stripe.model;
import com.stripe.exception.APIConnectionException;
import com.stripe.exception.APIException;
import com.stripe.exception.AuthenticationException;
import com.stripe.exception.CardException;
import com.stripe.exception.InvalidRequestException;
import com.stripe.net.APIResource;
import com.stripe.net.RequestOptions;
import java.util.Map;
/**
 * Stripe Invoice resource.
 *
 * <p>The fields mirror the JSON attributes returned by the Stripe API; the
 * static and instance methods mirror the REST endpoints for creating,
 * retrieving, listing, previewing ({@code upcoming}), updating and paying
 * invoices. Overloads taking a raw API key are deprecated in favor of the
 * {@link RequestOptions} variants.
 */
public class Invoice extends APIResource implements MetadataStore<Invoice>{
  // NOTE(review): integer amounts are presumably in the currency's smallest
  // unit (e.g. cents), per Stripe convention — confirm against API docs.
  Integer subtotal;
  Integer total;
  Integer amountDue;
  Integer startingBalance;
  Integer endingBalance;
  String id;
  Long created;
  Long nextPaymentAttempt;
  Boolean attempted;
  String charge;
  String description;
  Boolean closed;
  String customer;
  Long date;
  Boolean paid;
  Long periodStart;
  Long periodEnd;
  Discount discount;
  InvoiceLineItemCollection lines;
  Boolean livemode;
  Integer attemptCount;
  String currency;
  String subscription;
  Long applicationFee;
  Map<String, String> metadata;
  Boolean forgiven;
  String statementDescriptor;
  Integer tax;
  Double taxPercent;

  // --- Simple accessors for the API attributes ---

  public Integer getSubtotal() { return subtotal; }
  public void setSubtotal(Integer subtotal) { this.subtotal = subtotal; }

  public Integer getTotal() { return total; }
  public void setTotal(Integer total) { this.total = total; }

  public Integer getAmountDue() { return amountDue; }
  public void setAmountDue(Integer amountDue) { this.amountDue = amountDue; }

  public Integer getStartingBalance() { return startingBalance; }
  public void setStartingBalance(Integer startingBalance) { this.startingBalance = startingBalance; }

  public Integer getEndingBalance() { return endingBalance; }
  public void setEndingBalance(Integer endingBalance) { this.endingBalance = endingBalance; }

  public String getId() { return id; }
  public void setId(String id) { this.id = id; }

  public Long getCreated() { return created; }
  public void setCreated(Long created) { this.created = created; }

  public Long getNextPaymentAttempt() { return nextPaymentAttempt; }
  public void setNextPaymentAttempt(Long nextPaymentAttempt) { this.nextPaymentAttempt = nextPaymentAttempt; }

  public Boolean getAttempted() { return attempted; }
  public void setAttempted(Boolean attempted) { this.attempted = attempted; }

  public String getCharge() { return charge; }
  public void setCharge(String charge) { this.charge = charge; }

  public String getDescription() { return description; }
  public void setDescription(String description) { this.description = description; }

  public Boolean getClosed() { return closed; }
  public void setClosed(Boolean closed) { this.closed = closed; }

  public String getCustomer() { return customer; }
  public void setCustomer(String customer) { this.customer = customer; }

  public Long getDate() { return date; }
  public void setDate(Long date) { this.date = date; }

  public Boolean getPaid() { return paid; }
  public void setPaid(Boolean paid) { this.paid = paid; }

  public Boolean getForgiven() { return forgiven; }
  public void setForgiven(Boolean forgiven) { this.forgiven = forgiven; }

  public Long getPeriodStart() { return periodStart; }
  public void setPeriodStart(Long periodStart) { this.periodStart = periodStart; }

  public Long getPeriodEnd() { return periodEnd; }
  public void setPeriodEnd(Long periodEnd) { this.periodEnd = periodEnd; }

  public Discount getDiscount() { return discount; }
  public void setDiscount(Discount discount) { this.discount = discount; }

  // Line items are read-only here: there is deliberately no setter.
  public InvoiceLineItemCollection getLines() { return lines; }

  public Boolean getLivemode() { return livemode; }
  public void setLivemode(Boolean livemode) { this.livemode = livemode; }

  public Integer getAttemptCount() { return attemptCount; }
  public void setAttemptCount(Integer attemptCount) { this.attemptCount = attemptCount; }

  public String getCurrency() { return currency; }
  public void setCurrency(String currency) { this.currency = currency; }

  public String getSubscription() { return subscription; }
  public void setSubscription(String subscription) { this.subscription = subscription; }

  public Long getApplicationFee() { return applicationFee; }
  public void setApplicationFee(Long applicationFee) { this.applicationFee = applicationFee; }

  public String getStatementDescriptor() { return statementDescriptor; }
  public void setStatementDescriptor(String statementDescriptor) { this.statementDescriptor = statementDescriptor; }

  public Integer getTax() { return tax; }
  public void setTax(Integer tax) { this.tax = tax; }

  public Double getTaxPercent() { return taxPercent; }
  public void setTaxPercent(Double taxPercent) { this.taxPercent = taxPercent; }

  // --- Convenience overloads using the default request options ---

  /** Retrieves the invoice with the given id using default options. */
  public static Invoice retrieve(String id) throws AuthenticationException,
      InvalidRequestException, APIConnectionException, CardException,
      APIException {
    return retrieve(id, (RequestOptions) null);
  }

  /** Creates an invoice using default options. */
  public static Invoice create(Map<String, Object> params)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return create(params, (RequestOptions) null);
  }

  /** Lists invoices using default options. */
  public static InvoiceCollection all(Map<String, Object> params)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return all(params, (RequestOptions) null);
  }

  /** Previews the upcoming invoice using default options. */
  public static Invoice upcoming(Map<String, Object> params)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return upcoming(params, (RequestOptions) null);
  }

  /** Pays this invoice using default options. */
  public Invoice pay() throws AuthenticationException,
      InvalidRequestException, APIConnectionException, CardException,
      APIException {
    return this.pay((RequestOptions) null);
  }

  /** Updates this invoice using default options. */
  public Invoice update(Map<String, Object> params)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return update(params, (RequestOptions) null);
  }

  // --- Endpoint implementations ---

  /** @deprecated use {@link #retrieve(String, RequestOptions)} instead. */
  @Deprecated
  public static Invoice retrieve(String id, String apiKey)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return retrieve(id, RequestOptions.builder().setApiKey(apiKey).build());
  }

  /** GET /v1/invoices/{id} */
  public static Invoice retrieve(String id, RequestOptions options)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return request(RequestMethod.GET, instanceURL(Invoice.class, id), null, Invoice.class, options);
  }

  /** @deprecated use {@link #create(Map, RequestOptions)} instead. */
  @Deprecated
  public static Invoice create(Map<String, Object> params, String apiKey)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return create(params, RequestOptions.builder().setApiKey(apiKey).build());
  }

  /** POST /v1/invoices */
  public static Invoice create(Map<String, Object> params, RequestOptions options)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return request(RequestMethod.POST, classURL(Invoice.class), params, Invoice.class, options);
  }

  /** @deprecated use {@link #upcoming(Map, RequestOptions)} instead. */
  @Deprecated
  public static Invoice upcoming(Map<String, Object> params, String apiKey)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return upcoming(params, RequestOptions.builder().setApiKey(apiKey).build());
  }

  /** GET /v1/invoices/upcoming */
  public static Invoice upcoming(Map<String, Object> params, RequestOptions options)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return request(RequestMethod.GET, String.format("%s/upcoming", classURL(Invoice.class)), params, Invoice.class, options);
  }

  /** @deprecated use {@link #all(Map, RequestOptions)} instead. */
  @Deprecated
  public static InvoiceCollection all(Map<String, Object> params,
      String apiKey) throws AuthenticationException,
      InvalidRequestException, APIConnectionException, CardException,
      APIException {
    return all(params, RequestOptions.builder().setApiKey(apiKey).build());
  }

  /** GET /v1/invoices */
  public static InvoiceCollection all(Map<String, Object> params,
      RequestOptions options) throws AuthenticationException,
      InvalidRequestException, APIConnectionException, CardException,
      APIException {
    return request(RequestMethod.GET, classURL(Invoice.class), params, InvoiceCollection.class, options);
  }

  /** @deprecated use {@link #update(Map, RequestOptions)} instead. */
  @Deprecated
  public Invoice update(Map<String, Object> params, String apiKey)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    return update(params, RequestOptions.builder().setApiKey(apiKey).build());
  }

  /** POST /v1/invoices/{id} */
  public Invoice update(Map<String, Object> params, RequestOptions options)
      throws AuthenticationException, InvalidRequestException,
      APIConnectionException, CardException, APIException {
    // Use getId() for consistency with pay() (was the bare `this.id` field).
    return request(RequestMethod.POST, instanceURL(Invoice.class, this.getId()), params, Invoice.class, options);
  }

  /** @deprecated use {@link #pay(RequestOptions)} instead. */
  @Deprecated
  public Invoice pay(String apiKey) throws AuthenticationException,
      InvalidRequestException, APIConnectionException, CardException,
      APIException {
    return pay(RequestOptions.builder().setApiKey(apiKey).build());
  }

  /** POST /v1/invoices/{id}/pay */
  public Invoice pay(RequestOptions options) throws AuthenticationException,
      InvalidRequestException, APIConnectionException, CardException,
      APIException {
    return request(RequestMethod.POST, String.format("%s/pay",
        instanceURL(Invoice.class, this.getId())), null, Invoice.class, options);
  }

  // --- MetadataStore implementation ---

  public Map<String, String> getMetadata() { return metadata; }

  public void setMetadata(Map<String, String> metadata) { this.metadata = metadata; }
}
| |
package com.pardot.rhombus;
import com.google.common.collect.Range;
import com.pardot.rhombus.cobject.shardingstrategy.*;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
/**
* Pardot, An ExactTarget Company
* User: robrighter
* Date: 4/16/13
*/
public class ShardStrategyTest extends TestCase {
/**
 * Creates the test case (JUnit 3 style).
 *
 * @param testName name of the test case
 */
public ShardStrategyTest(String testName) {
  super( testName );
}
/**
 * Builds the JUnit 3 suite containing every test in this class.
 *
 * @return the suite of tests being tested
 */
public static Test suite() {
  return new TestSuite( ShardStrategyTest.class );
}
/** Exercises hourly shard-key generation, offsets and key ranges. */
public void testShardingStrategyHourly() throws ShardStrategyException {
  // Shard ids are relative, so the absolute base doesn't matter — only
  // the spacing between keys does.
  ShardingStrategyHourly strategy = new ShardingStrategyHourly();
  DateTime date = new DateTime(2000, 1, 1, 0, 0, 0, DateTimeZone.UTC);
  long shardKey = strategy.getShardKey(date.getMillis());
  assertEquals("Should generate correct shard key for date", 0, shardKey);

  date = new DateTime(2006, 1, 1, 1, 0, 0, DateTimeZone.UTC);
  shardKey = strategy.getShardKey(date.getMillis());
  assertEquals("Should generate correct shard key for date given offset", 52609, shardKey);

  // A configured offset shifts every generated key.
  strategy = new ShardingStrategyHourly();
  strategy.setOffset(20);
  date = new DateTime(2000, 1, 10, 0, 0, 0, DateTimeZone.UTC);
  shardKey = strategy.getShardKey(date.getMillis());
  assertEquals("Should generate correct shard key for date", 236, shardKey);

  // Range with both endpoints supplied explicitly.
  strategy = new ShardingStrategyHourly();
  DateTime rangeStart = new DateTime(2013, 2, 22, 2, 0, 0, DateTimeZone.UTC); // shard 115226
  DateTime rangeEnd = new DateTime(2014, 2, 22, 2, 0, 0, DateTimeZone.UTC);   // shard 123986
  Range<Long> range = strategy.getShardKeyRange(rangeStart.getMillis(), rangeEnd.getMillis());
  assertEquals("Range should have appropriate start point", 115226L, range.lowerEndpoint().longValue());
  assertEquals("Range should have appropriate end point", 123986L, range.upperEndpoint().longValue());

  // Range with a null end: the strategy supplies its own upper bound.
  strategy = new ShardingStrategyHourly();
  rangeStart = new DateTime(2011, 2, 22, 2, 0, 0, DateTimeZone.UTC); // shard 97682
  range = strategy.getShardKeyRange(rangeStart.getMillis(), null);
  assertEquals("Range should have appropriate start point", 97682L, range.lowerEndpoint().longValue());
  assertTrue("Range should have an upper bound", range.hasUpperBound());
  assertTrue("Range should have a lower bound", range.hasLowerBound());
}
/** Exercises daily shard-key generation, offsets and key ranges. */
public void testShardingStrategyDaily() throws ShardStrategyException {
  ShardingStrategyDaily subject = new ShardingStrategyDaily();
  DateTime d = new DateTime(2000,1,1,0,0,0, DateTimeZone.UTC);
  long actual = subject.getShardKey(d.getMillis());
  // 0 based, but shard ids are relative so it doesn't really matter
  assertEquals("Should generate correct shard key for date", 0, actual);
  d = new DateTime(2006,1,1,1,0,0, DateTimeZone.UTC);
  actual = subject.getShardKey(d.getMillis());
  assertEquals("Should generate correct shard key for date given offset", 2192, actual);
  //test with offset
  subject = new ShardingStrategyDaily();
  subject.setOffset(20);
  d = new DateTime(2000,1,10,0,0,0, DateTimeZone.UTC);
  actual = subject.getShardKey(d.getMillis());
  // 0 based, doesn't really matter
  assertEquals("Should generate correct shard key for date", 29, actual);
  //test the range bounded
  subject = new ShardingStrategyDaily();
  DateTime d1 = new DateTime(2013,2,22,2,0,0, DateTimeZone.UTC); // shard 4801 (old comment "158" was stale)
  DateTime d2 = new DateTime(2014,2,22,2,0,0, DateTimeZone.UTC); // shard 5166 (old comment "170" was stale)
  Range<Long> range = subject.getShardKeyRange(d1.getMillis(),d2.getMillis());
  assertEquals("Range should have appropriate start point", 4801L, range.lowerEndpoint().longValue());
  assertEquals("Range should have appropriate end point", 5166L, range.upperEndpoint().longValue());
  //test range auto bounded
  subject = new ShardingStrategyDaily();
  d1 = new DateTime(2011,2,22,2,0,0, DateTimeZone.UTC); // shard 4070 (old comment "170" was stale)
  range = subject.getShardKeyRange(d1.getMillis(),null);
  assertEquals("Range should have appropriate start point", 4070L, range.lowerEndpoint().longValue());
  assertTrue("Range should have an upper bound", range.hasUpperBound());
  assertTrue("Range should have a lower bound",range.hasLowerBound());
}
/** Exercises weekly shard-key generation, offsets and key ranges. */
public void testShardingStrategyWeekly() throws ShardStrategyException {
  ShardingStrategyWeekly subject = new ShardingStrategyWeekly();
  DateTime d = new DateTime(2000,1,1,0,0,0, DateTimeZone.UTC);
  long actual = subject.getShardKey(d.getMillis());
  // 0 based, but shard ids are relative so it doesn't really matter
  assertEquals("Should generate correct shard key for date", 0, actual);
  // In the ISO week system, a year has 53 weeks every 5.6338 years, so we need to check between 5 and 6 years to make sure
  // we handle the "leap week" thing correctly, even though we're not using weekyears
  d = new DateTime(2005,12,31,1,0,0, DateTimeZone.UTC);
  actual = subject.getShardKey(d.getMillis());
  assertEquals("Should generate correct shard key for date given offset", 313,actual);
  // The year boundary must land in the same week-shard as the day before.
  d = new DateTime(2006,1,1,1,0,0, DateTimeZone.UTC);
  actual = subject.getShardKey(d.getMillis());
  assertEquals("Should generate correct shard key for date given offset", 313,actual);
  //test with offset
  subject = new ShardingStrategyWeekly();
  subject.setOffset(20);
  d = new DateTime(2000,10,1,0,0,0, DateTimeZone.UTC);
  actual = subject.getShardKey(d.getMillis());
  // 0 based, doesn't really matter
  assertEquals("Should generate correct shard key for date", 59, actual);
  //test the range bounded
  subject = new ShardingStrategyWeekly();
  DateTime d1 = new DateTime(2013,2,22,2,0,0, DateTimeZone.UTC); // shard 685 (old comment "158" was stale)
  DateTime d2 = new DateTime(2014,2,22,2,0,0, DateTimeZone.UTC); // shard 738 (old comment "170" was stale)
  Range<Long> range = subject.getShardKeyRange(d1.getMillis(),d2.getMillis());
  assertEquals("Range should have appropriate start point", 685L, range.lowerEndpoint().longValue());
  assertEquals("Range should have appropriate end point", 738L, range.upperEndpoint().longValue());
  //test range auto bounded
  subject = new ShardingStrategyWeekly();
  d1 = new DateTime(2011,2,22,2,0,0, DateTimeZone.UTC); // shard 581 (old comment "170" was stale)
  range = subject.getShardKeyRange(d1.getMillis(),null);
  assertEquals("Range should have appropriate start point", 581L, range.lowerEndpoint().longValue());
  assertTrue("Range should have an upper bound", range.hasUpperBound());
  assertTrue("Range should have a lower bound",range.hasLowerBound());
}
/**
 * Exercises {@link ShardingStrategyMonthly}: month-number key generation,
 * offset handling, and bounded/auto-bounded key ranges.
 */
public void testShardingStrategyMonthly() throws ShardStrategyException {
    // --- plain key generation
    ShardingStrategyMonthly strategy = new ShardingStrategyMonthly();
    DateTime date = new DateTime(2013, 2, 22, 1, 0, 0, DateTimeZone.UTC);
    assertEquals("Should generate correct shard key for date", 158, strategy.getShardKey(date.getMillis()));

    // --- key generation with an offset applied
    strategy = new ShardingStrategyMonthly();
    strategy.setOffset(20);
    date = new DateTime(2013, 2, 22, 1, 0, 0, DateTimeZone.UTC);
    assertEquals("Should generate correct shard key for date given offset", 178, strategy.getShardKey(date.getMillis()));

    // --- explicitly bounded key range (Feb 2013 = key 158, Feb 2014 = key 170)
    strategy = new ShardingStrategyMonthly();
    DateTime from = new DateTime(2013, 2, 22, 2, 0, 0, DateTimeZone.UTC);
    DateTime to = new DateTime(2014, 2, 22, 2, 0, 0, DateTimeZone.UTC);
    Range<Long> range = strategy.getShardKeyRange(from.getMillis(), to.getMillis());
    assertEquals("Range should have appropriate start point", 158L, range.lowerEndpoint().longValue());
    assertEquals("Range should have appropriate end point", 170L, range.upperEndpoint().longValue());

    // --- range with a null end; the strategy supplies the upper bound itself
    strategy = new ShardingStrategyMonthly();
    from = new DateTime(2011, 2, 22, 2, 0, 0, DateTimeZone.UTC);
    range = strategy.getShardKeyRange(from.getMillis(), null);
    assertEquals("Range should have appropriate start point", 134L, range.lowerEndpoint().longValue());
    assertTrue("Range should have an upper bound", range.hasUpperBound());
    assertTrue("Range should have a lower bound", range.hasLowerBound());
}
/**
 * Exercises {@link ShardingStrategyNone}: every date maps to the single shard
 * key 1 (plus any configured offset), and every key range collapses to [1, 1].
 *
 * Fixes: dropped the redundant {@code Long.valueOf(...)} wrapping (autoboxing
 * handles the long-to-Long conversion), and corrected the misleading
 * "test range unbounded" comment — the returned range is bounded on both
 * sides, exactly as the assertions below verify.
 */
public void testShardingStrategyNone() throws ShardStrategyException {
    ShardingStrategyNone subject = new ShardingStrategyNone();
    DateTime d = new DateTime(2013, 2, 22, 1, 0, 0, DateTimeZone.UTC);
    long actual = subject.getShardKey(d.getMillis());
    assertEquals("Should generate correct shard key for date", 1L, actual);
    // test with offset
    subject = new ShardingStrategyNone();
    subject.setOffset(20);
    d = new DateTime(2013, 2, 22, 1, 0, 0, DateTimeZone.UTC);
    actual = subject.getShardKey(d.getMillis());
    assertEquals("Should generate correct shard key for date given offset", 21L, actual);
    // test the range bounded
    subject = new ShardingStrategyNone();
    DateTime d1 = new DateTime(2013, 2, 22, 2, 0, 0, DateTimeZone.UTC);
    DateTime d2 = new DateTime(2014, 2, 22, 2, 0, 0, DateTimeZone.UTC);
    Range<Long> range = subject.getShardKeyRange(d1.getMillis(), d2.getMillis());
    assertEquals("Range should have appropriate start point", 1L, range.lowerEndpoint().longValue());
    assertEquals("Range should have appropriate end point", 1L, range.upperEndpoint().longValue());
    // test range with a null end date: still a bounded, degenerate [1, 1] range
    subject = new ShardingStrategyNone();
    d1 = new DateTime(2012, 2, 22, 2, 0, 0, DateTimeZone.UTC);
    range = subject.getShardKeyRange(d1.getMillis(), null);
    assertEquals("Range should have appropriate start point", 1L, range.lowerEndpoint().longValue());
    assertTrue("Range should be just 1", range.upperEndpoint().longValue() == 1L);
    assertTrue("Range should be just 1", range.lowerEndpoint().longValue() == range.upperEndpoint().longValue());
}
}
| |
/*
* Copyright (c) 2014 AsyncHttpClient Project. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.ning.http.client.uri;
import com.ning.http.util.MiscUtils;
import com.ning.http.util.StringUtils;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Objects;
/**
 * Immutable value object representing a parsed URL: scheme, optional user
 * info, host, port (-1 when unspecified), path and query.
 *
 * <p>Instances are created either directly via the constructor or by parsing a
 * string with {@link #create(Uri, String)}. {@code scheme} and {@code host}
 * are mandatory; all other components may be null.
 *
 * <p>Improvements over the previous revision: null checks, {@code equals} and
 * {@code hashCode} now use {@link java.util.Objects}. {@code Objects.hash} is
 * fed the fields in the same order (host, path, port, query, scheme, userInfo)
 * as the old hand-rolled prime-31 computation, so hash values are unchanged.
 */
public class Uri {

    /**
     * Parses {@code originalUrl} into a new {@link Uri} with no resolution context.
     *
     * @param originalUrl the URL string to parse
     * @return the parsed Uri
     */
    public static Uri create(String originalUrl) {
        return create(null, originalUrl);
    }

    /**
     * Parses {@code originalUrl}, resolving it against {@code context} when
     * context is non-null (relative-URL resolution is delegated to {@link UriParser}).
     *
     * @param context base Uri used for resolution, or null for absolute parsing
     * @param originalUrl the URL string to parse
     * @return the parsed Uri
     */
    public static Uri create(Uri context, final String originalUrl) {
        UriParser parser = new UriParser();
        parser.parse(context, originalUrl);
        return new Uri(parser.scheme,//
                parser.userInfo,//
                parser.host,//
                parser.port,//
                parser.path,//
                parser.query);
    }

    private final String scheme;
    private final String userInfo;
    private final String host;
    private final int port;
    private final String query;
    private final String path;
    // Lazily computed full-URL cache. Not volatile: a racy duplicate
    // computation is benign because the result is always identical.
    private String url;

    /**
     * @param scheme mandatory scheme, e.g. "http"
     * @param userInfo optional user-info component, may be null
     * @param host mandatory host
     * @param port port number, or -1 when none was specified
     * @param path optional path, may be null
     * @param query optional query string (without '?'), may be null
     * @throws NullPointerException if scheme or host is null
     */
    public Uri(String scheme,//
            String userInfo,//
            String host,//
            int port,//
            String path,//
            String query) {
        this.scheme = Objects.requireNonNull(scheme, "scheme");
        this.host = Objects.requireNonNull(host, "host");
        this.userInfo = userInfo;
        this.port = port;
        this.path = path;
        this.query = query;
    }

    public String getQuery() {
        return query;
    }

    public String getPath() {
        return path;
    }

    public String getUserInfo() {
        return userInfo;
    }

    public int getPort() {
        return port;
    }

    public String getScheme() {
        return scheme;
    }

    public String getHost() {
        return host;
    }

    /**
     * Converts this Uri into a {@link java.net.URI}.
     *
     * @throws URISyntaxException if the rendered URL violates RFC 2396 syntax
     */
    public URI toJavaNetURI() throws URISyntaxException {
        return new URI(toUrl());
    }

    /**
     * Renders the full URL (scheme://[userInfo@]host[:port][path][?query]).
     * The result is cached; subsequent calls return the same String.
     */
    public String toUrl() {
        if (url == null) {
            // NOTE(review): StringUtils.stringBuilder() presumably hands out a
            // shared/pooled builder — hence the setLength(0) reset afterwards.
            StringBuilder sb = StringUtils.stringBuilder();
            sb.append(scheme).append("://");
            if (userInfo != null)
                sb.append(userInfo).append('@');
            sb.append(host);
            if (port != -1)
                sb.append(':').append(port);
            if (path != null)
                sb.append(path);
            if (query != null)
                sb.append('?').append(query);
            url = sb.toString();
            sb.setLength(0);
        }
        return url;
    }

    /**
     * Renders only the path-and-query portion ("/" when the path is empty),
     * suitable for an HTTP request line.
     */
    public String toRelativeUrl() {
        StringBuilder sb = StringUtils.stringBuilder();
        if (MiscUtils.isNonEmpty(path))
            sb.append(path);
        else
            sb.append('/');
        if (query != null)
            sb.append('?').append(query);
        return sb.toString();
    }

    @Override
    public String toString() {
        // for now, but might change
        return toUrl();
    }

    /** Returns a copy of this Uri with the scheme replaced. */
    public Uri withNewScheme(String newScheme) {
        return new Uri(newScheme,//
                userInfo,//
                host,//
                port,//
                path,//
                query);
    }

    /** Returns a copy of this Uri with the query replaced. */
    public Uri withNewQuery(String newQuery) {
        return new Uri(scheme,//
                userInfo,//
                host,//
                port,//
                path,//
                newQuery);
    }

    @Override
    public int hashCode() {
        // Field order matches the previous hand-rolled prime-31 loop
        // (host, path, port, query, scheme, userInfo), so values are identical.
        return Objects.hash(host, path, port, query, scheme, userInfo);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        Uri other = (Uri) obj;
        return port == other.port //
                && Objects.equals(host, other.host) //
                && Objects.equals(path, other.path) //
                && Objects.equals(query, other.query) //
                && Objects.equals(scheme, other.scheme) //
                && Objects.equals(userInfo, other.userInfo);
    }
}
| |
/*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.systemtest.security.oauth;
import io.strimzi.api.kafka.model.KafkaConnectResources;
import io.strimzi.api.kafka.model.KafkaResources;
import io.strimzi.api.kafka.model.listener.KafkaListenerAuthenticationOAuth;
import io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener;
import io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder;
import io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType;
import io.strimzi.systemtest.Constants;
import io.strimzi.systemtest.annotations.IsolatedSuite;
import io.strimzi.systemtest.annotations.IsolatedTest;
import io.strimzi.systemtest.annotations.ParallelTest;
import io.strimzi.systemtest.resources.ResourceOperation;
import io.strimzi.systemtest.resources.crd.KafkaResource;
import io.strimzi.systemtest.resources.crd.kafkaclients.KafkaBasicExampleClients;
import io.strimzi.systemtest.templates.crd.KafkaClientsTemplates;
import io.strimzi.systemtest.templates.crd.KafkaConnectTemplates;
import io.strimzi.systemtest.templates.crd.KafkaTemplates;
import io.strimzi.systemtest.templates.crd.KafkaTopicTemplates;
import io.strimzi.systemtest.utils.ClientUtils;
import io.strimzi.systemtest.utils.StUtils;
import io.strimzi.systemtest.utils.kubeUtils.controllers.JobUtils;
import io.strimzi.systemtest.utils.kubeUtils.controllers.StatefulSetUtils;
import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils;
import io.strimzi.systemtest.utils.specific.KeycloakUtils;
import io.strimzi.test.k8s.KubeClusterResource;
import org.hamcrest.CoreMatchers;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.extension.ExtensionContext;
import java.rmi.UnexpectedException;
import java.util.List;
import java.util.stream.Collectors;
import static io.strimzi.systemtest.Constants.CONNECT;
import static io.strimzi.systemtest.Constants.INFRA_NAMESPACE;
import static io.strimzi.systemtest.Constants.OAUTH;
import static io.strimzi.systemtest.Constants.REGRESSION;
import static io.strimzi.systemtest.resources.ResourceManager.kubeClient;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Isolated system-test suite verifying OAuth {@code scope} / {@code clientScope}
 * handling on a Kafka listener secured by a Keycloak "scope-test" realm:
 * the listener's custom claim check requires a "test" scope in the JWT token,
 * and the tests probe both correctly and incorrectly configured clients.
 *
 * Improvement over the previous revision: unused local variables
 * (producer/consumer/topic names in the Connect tests, the Kafka-clients name
 * in the client-scope tests) have been removed.
 */
@Tag(OAUTH)
@Tag(REGRESSION)
@IsolatedSuite
public class OauthScopeST extends OauthAbstractST {

    private final String oauthClusterName = "oauth-cluster-scope-name";
    private final String scopeListener = "scopelist";
    private final String scopeListenerPort = "9098";
    // SASL/PLAIN settings handed to the example clients so they authenticate
    // against the plain-enabled OAuth listener with clientId/secret.
    private final String additionalOauthConfig =
        "sasl.mechanism = PLAIN\n" +
        "security.protocol = SASL_PLAINTEXT\n" +
        "sasl.jaas.config = org.apache.kafka.common.security.plain.PlainLoginModule required username=\"kafka-client\" password=\"kafka-client-secret\" ;";

    /**
     * KafkaConnect authenticates with {@code scope=null}; the listener's custom
     * claim check requires a "test" scope, so token validation must fail and the
     * failure must surface in the Connect pod log.
     */
    @ParallelTest
    @Tag(CONNECT)
    void testScopeKafkaConnectSetIncorrectly(ExtensionContext extensionContext) throws UnexpectedException {
        final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
        final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
        // SCOPE TESTING
        resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, kafkaClientsName).build());
        // second arg 'false': do not wait for readiness — the deployment is expected to fail validation
        resourceManager.createResource(extensionContext, false, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, clusterName, 1)
            .editMetadata()
                .withNamespace(INFRA_NAMESPACE)
            .endMetadata()
            .withNewSpec()
                .withReplicas(1)
                .withBootstrapServers(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + scopeListenerPort)
                .withConfig(connectorConfig)
                .addToConfig("key.converter.schemas.enable", false)
                .addToConfig("value.converter.schemas.enable", false)
                .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
                .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
                .withNewKafkaClientAuthenticationOAuth()
                    .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                    .withClientId("kafka-client")
                    .withNewClientSecret()
                        .withSecretName(OAUTH_KAFKA_CLIENT_SECRET)
                        .withKey(OAUTH_KEY)
                    .endClientSecret()
                    // scope set in-correctly regarding to the scope-test realm
                    .withScope(null)
                .endKafkaClientAuthenticationOAuth()
                .withTls(null)
            .endSpec()
            .build());
        String kafkaConnectPodName = PodUtils.getPodsByPrefixInNameWithDynamicWait(INFRA_NAMESPACE, KafkaConnectResources.deploymentName(clusterName)).get(0).getMetadata().getName();
        // we except that "Token validation failed: Custom claim check failed because we specify scope='null'"
        StUtils.waitUntilLogFromPodContainsString(INFRA_NAMESPACE, kafkaConnectPodName, KafkaConnectResources.deploymentName(clusterName), "30s", "Token validation failed: Custom claim check failed");
    }

    /**
     * KafkaConnect authenticates with {@code scope=test}, matching the listener's
     * custom claim check, so the deployment must pass validation; the broker log
     * is inspected for the successful token-validation trail.
     */
    @ParallelTest
    @Tag(CONNECT)
    void testScopeKafkaConnectSetCorrectly(ExtensionContext extensionContext) throws UnexpectedException {
        final String kafkaClientsName = mapWithKafkaClientNames.get(extensionContext.getDisplayName());
        final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
        // SCOPE TESTING
        resourceManager.createResource(extensionContext, KafkaClientsTemplates.kafkaClients(INFRA_NAMESPACE, false, kafkaClientsName).build());
        resourceManager.createResource(extensionContext, KafkaConnectTemplates.kafkaConnect(extensionContext, clusterName, clusterName, 1)
            .editMetadata()
                .withNamespace(INFRA_NAMESPACE)
            .endMetadata()
            .withNewSpec()
                .withReplicas(1)
                .withBootstrapServers(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + scopeListenerPort)
                .withConfig(connectorConfig)
                .addToConfig("key.converter.schemas.enable", false)
                .addToConfig("value.converter.schemas.enable", false)
                .addToConfig("key.converter", "org.apache.kafka.connect.storage.StringConverter")
                .addToConfig("value.converter", "org.apache.kafka.connect.storage.StringConverter")
                .withNewKafkaClientAuthenticationOAuth()
                    .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                    .withClientId("kafka-client")
                    .withNewClientSecret()
                        .withSecretName(OAUTH_KAFKA_CLIENT_SECRET)
                        .withKey(OAUTH_KEY)
                    .endClientSecret()
                    // scope set correctly regarding to the scope-test realm
                    .withScope("test")
                .endKafkaClientAuthenticationOAuth()
                .withTls(null)
            .endSpec()
            .build());
        // Kafka connect passed the validation process (implicit the KafkaConnect is up)
        // explicitly verifying also logs
        String kafkaPodName = kubeClient().listPodsByPrefixInName(INFRA_NAMESPACE, KafkaResources.kafkaPodName(oauthClusterName, 0)).get(0).getMetadata().getName();
        String kafkaLog = KubeClusterResource.cmdKubeClient(INFRA_NAMESPACE).execInCurrentNamespace(false, "logs", kafkaPodName, "--tail", "50").out();
        assertThat(kafkaLog, CoreMatchers.containsString("Access token expires at"));
        assertThat(kafkaLog, CoreMatchers.containsString("Evaluating path: $[*][?]"));
        assertThat(kafkaLog, CoreMatchers.containsString("Evaluating path: @['scope']"));
        assertThat(kafkaLog, CoreMatchers.containsString("User validated"));
        assertThat(kafkaLog, CoreMatchers.containsString("Set validated token on callback"));
    }

    /**
     * A SASL/PLAIN producer talks to the scope listener whose {@code clientScope}
     * is "test" (the default for this suite); the produced messages must succeed.
     */
    @ParallelTest
    void testClientScopeKafkaSetCorrectly(ExtensionContext extensionContext) throws UnexpectedException {
        final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
        final String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
        final String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
        final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
        KafkaBasicExampleClients oauthInternalClientChecksJob = new KafkaBasicExampleClients.Builder()
            .withNamespaceName(INFRA_NAMESPACE)
            .withProducerName(producerName)
            .withConsumerName(consumerName)
            .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + scopeListenerPort)
            .withTopicName(topicName)
            .withMessageCount(MESSAGE_COUNT)
            // configures SASL/PLAIN to be used
            .withAdditionalConfig(additionalOauthConfig)
            .build();
        // clientScope is set to 'test' by default
        // verification phase the KafkaClient to authenticate.
        resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
        resourceManager.createResource(extensionContext, oauthInternalClientChecksJob.producerStrimzi().build());
        // client should succeeded because we set to `clientScope=test` and also Kafka has `scope=test`
        ClientUtils.waitForClientSuccess(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
        JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
    }

    /**
     * Temporarily reconfigures the scope listener with {@code clientScope=null};
     * the producer's token then lacks the scope claim the custom claim check
     * requires, so the client must time out. The listener configuration is
     * rolled back at the end. Isolated because it mutates the shared cluster.
     */
    @IsolatedTest("Modification of shared Kafka cluster")
    void testClientScopeKafkaSetIncorrectly(ExtensionContext extensionContext) throws UnexpectedException {
        final String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
        final String producerName = OAUTH_PRODUCER_NAME + "-" + clusterName;
        final String consumerName = OAUTH_CONSUMER_NAME + "-" + clusterName;
        final String topicName = mapWithTestTopics.get(extensionContext.getDisplayName());
        KafkaBasicExampleClients oauthInternalClientChecksJob = new KafkaBasicExampleClients.Builder()
            .withNamespaceName(INFRA_NAMESPACE)
            .withProducerName(producerName)
            .withConsumerName(consumerName)
            .withBootstrapAddress(KafkaResources.bootstrapServiceName(oauthClusterName) + ":" + scopeListenerPort)
            .withTopicName(topicName)
            .withMessageCount(MESSAGE_COUNT)
            // configures SASL/PLAIN to be used
            .withAdditionalConfig(additionalOauthConfig)
            .build();
        // re-configuring Kafka listener to have client scope assigned to null
        KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
            List<GenericKafkaListener> scopeListeners = kafka.getSpec().getKafka().getListeners()
                .stream()
                .filter(listener -> listener.getName().equals(scopeListener))
                .collect(Collectors.toList());
            ((KafkaListenerAuthenticationOAuth) scopeListeners.get(0).getAuth()).setClientScope(null);
            kafka.getSpec().getKafka().getListeners().set(0, scopeListeners.get(0));
        }, INFRA_NAMESPACE);
        StatefulSetUtils.waitForAllStatefulSetPodsReady(INFRA_NAMESPACE, KafkaResources.kafkaStatefulSetName(oauthClusterName), 1, ResourceOperation.getTimeoutForResourceReadiness(Constants.STATEFUL_SET));
        // verification phase client should fail here because clientScope is set to 'null'
        resourceManager.createResource(extensionContext, KafkaTopicTemplates.topic(oauthClusterName, topicName, INFRA_NAMESPACE).build());
        resourceManager.createResource(extensionContext, oauthInternalClientChecksJob.producerStrimzi().build());
        // client should fail because the listener requires scope: 'test' in JWT token but was (the listener) temporarily
        // configured without clientScope resulting in a JWT token without the scope claim when using the clientId and
        // secret passed via SASL/PLAIN to obtain an access token in client's name.
        ClientUtils.waitForClientTimeout(producerName, INFRA_NAMESPACE, MESSAGE_COUNT);
        JobUtils.deleteJobWithWait(INFRA_NAMESPACE, producerName);
        // rollback previous configuration
        // re-configuring Kafka listener to have client scope assigned to 'test'
        KafkaResource.replaceKafkaResourceInSpecificNamespace(oauthClusterName, kafka -> {
            List<GenericKafkaListener> scopeListeners = kafka.getSpec().getKafka().getListeners()
                .stream()
                .filter(listener -> listener.getName().equals(scopeListener))
                .collect(Collectors.toList());
            ((KafkaListenerAuthenticationOAuth) scopeListeners.get(0).getAuth()).setClientScope("test");
            kafka.getSpec().getKafka().getListeners().set(0, scopeListeners.get(0));
        }, INFRA_NAMESPACE);
        StatefulSetUtils.waitForAllStatefulSetPodsReady(INFRA_NAMESPACE, KafkaResources.kafkaStatefulSetName(oauthClusterName), 1, ResourceOperation.getTimeoutForResourceReadiness(Constants.STATEFUL_SET));
    }

    /**
     * Deploys the cluster operator, Keycloak (realm "scope-test") and a
     * single-node Kafka whose sole listener enforces a custom claim check
     * requiring a "test" scope, with clientScope "test" and SASL/PLAIN enabled.
     */
    @BeforeAll
    void setUp(ExtensionContext extensionContext) {
        super.beforeAllMayOverride(extensionContext);
        // for namespace
        super.setupCoAndKeycloak(extensionContext, INFRA_NAMESPACE);
        keycloakInstance.setRealm("scope-test", false);
        resourceManager.createResource(extensionContext, KafkaTemplates.kafkaEphemeral(oauthClusterName, 1, 1)
            .editMetadata()
                .withNamespace(INFRA_NAMESPACE)
            .endMetadata()
            .editSpec()
                .editKafka()
                    .withListeners(
                        new GenericKafkaListenerBuilder()
                            .withName(scopeListener)
                            .withPort(Integer.parseInt(scopeListenerPort))
                            .withType(KafkaListenerType.INTERNAL)
                            .withTls(false)
                            .withNewKafkaListenerAuthenticationOAuth()
                                .withValidIssuerUri(keycloakInstance.getValidIssuerUri())
                                .withJwksExpirySeconds(keycloakInstance.getJwksExpireSeconds())
                                .withJwksRefreshSeconds(keycloakInstance.getJwksRefreshSeconds())
                                .withJwksEndpointUri(keycloakInstance.getJwksEndpointUri())
                                .withUserNameClaim(keycloakInstance.getUserNameClaim())
                                .withEnablePlain(true)
                                .withTokenEndpointUri(keycloakInstance.getOauthTokenEndpointUri())
                                .withCheckAudience(false)
                                .withCustomClaimCheck("@.scope =~ /.*test.*/")
                                .withClientScope("test")
                                .withClientId("kafka-component")
                            .endKafkaListenerAuthenticationOAuth()
                            .build())
                .endKafka()
            .endSpec()
            .build());
    }

    @AfterAll
    void tearDown(ExtensionContext extensionContext) throws Exception {
        // delete keycloak before namespace
        KeycloakUtils.deleteKeycloak(INFRA_NAMESPACE);
        // delete namespace etc.
        super.afterAllMayOverride(extensionContext);
    }
}
| |
package jeffaschenk.commons.frameworks.cnxidx.admin;
import jeffaschenk.commons.frameworks.cnxidx.utility.commandlinearguments.CommandLinePrincipalCredentials;
import jeffaschenk.commons.frameworks.cnxidx.utility.commandlinearguments.idxArgParser;
import jeffaschenk.commons.frameworks.cnxidx.utility.commandlinearguments.idxArgVerificationRules;
import jeffaschenk.commons.frameworks.cnxidx.utility.commandlinearguments.idxArgVerifier;
import jeffaschenk.commons.frameworks.cnxidx.utility.filtering.FilterString;
import jeffaschenk.commons.frameworks.cnxidx.utility.idxLogger;
import jeffaschenk.commons.frameworks.cnxidx.utility.ldap.idxCMDReturnCodes;
import jeffaschenk.commons.frameworks.cnxidx.utility.ldap.idxElapsedTime;
import jeffaschenk.commons.frameworks.cnxidx.utility.ldap.idxIRRException;
import jeffaschenk.commons.frameworks.cnxidx.utility.ldap.idxManageContext;
import java.util.*;
import java.util.regex.*;
import java.io.*;
import javax.naming.directory.*;
/**
* Java Command line utility, driven from properties and command
* line parameters to Drive the IRRChangeLogRestore Process.
* <br>
* <b>Usage:</b><br>
* IRRChangeLogRestoreDriver <Required Parameters> <Optional Parameters>
* <br>
* <b>Required Parameters are:</b>
* <pre>
* --hosturl
* Specify IRR(Directory) LDAP URL, ldap://hostname.acme.com
* --irrid
* Specify IRR(Directory) LDAP BIND DN, cn=irradmin,o=icosdsa
* --irrpw
* Specify IRR(Directory) LDAP BIND Password
* --idu
* Specify FRAMEWORK Keystore Alias to obtain IRRID and IRRPW.
* --inpath
* Specify Full Input file Path of Change Log Files to be read.
* --tagname
* Specify a Unique Name to Identify Files Processed to a Destination.
* </pre>
* <b>Optional Parameters are:</b>
* <pre>
* --changelogfiles
* Indicates that the input Files were created by the IRRChangeLogCollector.
* --version
* Display Version information and exit.
* --?
* This Display.
*
* </pre>
* <p/>
 * See <CODE>IRRChangeLogCollector</CODE> for
 * information regarding the contents and names of the files written by that utility.
*
* @author jeff.schenk
* @version 3.0 $Revision
* Developed 2003
*/
public class IRRChangeLogRestoreDriver implements idxCMDReturnCodes {
public static String VERSION = "Version: 3.1 2003-09-12, " +
"FRAMEWORK, Incorporated.";
// *******************************
// Common Logging Facility.
public static final String CLASSNAME = IRRChangeLogRestoreDriver.class.getName();
public static idxLogger IDXLOG = new idxLogger();
public static String MP = CLASSNAME + ": ";
// ****************************
// Filename Search Patterns
// pattern: ^IRRCHGLOG.\d{12}.\d{12}\056ldif$
public static final String IRRCHGLOG_FILENAME_PATTERN =
"^IRRCHGLOG.\\d{12}.\\d{12}\\056ldif$";
// pattern: ^\d{12}.\d{4}\056\d{3}$
public static final String DCLLDIFEXPORT_FILENAME_PATTERN =
"^\\d{12}.\\d{4}\\056\\d{3}$";
private String FILENAME_SEARCH_PATTERN = DCLLDIFEXPORT_FILENAME_PATTERN;
// ****************************
// Type of Session we are
// Driving, either from
// IRRChangeCollector or
// from DCL's LDIF Export
// Facility.
//
// MOD_CHANGE_LOG,
// TRUE = DCL LDIF Export
// FALSE = IRRChangeCollector.
//
private boolean MOD_CHANGE_LOG = true;
// ****************************
// Runtime Statistics
private int files_processed = 0;
private int entries_exceptions = 0;
private int entries_skipped = 0;
private int entries_processed = 0;
private int entries_modified = 0;
private int entries_renamed = 0;
private int entries_deleted = 0;
private int entries_added = 0;
/**
 * Prints the command-line usage display to stderr and terminates the JVM
 * with {@code EXIT_USAGE}. This method never returns.
 *
 * <p>Fix: the {@code --changelogfiles} optional parameter was documented in
 * the class Javadoc but missing from this display; it is now listed.
 */
static void Usage() {
    System.err.println(MP + "Usage:");
    System.err.println(MP + "IRRChangeLogRestoreDriver <Required Parameters> <Optional Parameters>");
    System.err.println("\n" + MP + "Required Parameters are:");
    System.err.println(MP + "--hosturl ");
    System.err.println("\tSpecify IRR(Directory) LDAP URL, ldap://hostname.acme.com");
    System.err.println(MP + "--irrid ");
    System.err.println("\tSpecify IRR(Directory) LDAP BIND DN, cn=irradmin,o=icosdsa");
    System.err.println(MP + "--irrpw ");
    System.err.println("\tSpecify IRR(Directory) LDAP BIND Password");
    System.err.println(MP + "--idu ");
    System.err.println("\tSpecify FRAMEWORK Keystore Alias to obtain IRRID and IRRPW.");
    System.err.println(MP + "--inpath ");
    System.err.println("\tSpecify Full Input file Path of Change Log Files to be read.");
    System.err.println(MP + "--tagname ");
    System.err.println("\tSpecify TagName for LogRestore State Files.");
    System.err.println("\n" + MP + "Optional Parameters are:");
    System.err.println(MP + "--changelogfiles");
    System.err.println("\tIndicates that the input Files were created by the IRRChangeLogCollector.");
    System.err.println(MP + "--version");
    System.err.println("\tDisplay Version information and exit.");
    System.err.println(MP + "--?");
    System.err.println("\tThe Above Display.");
    System.exit(EXIT_USAGE);
} // End of Subclass
/**
 * IRRChangeLogRestoreDriver Constructor driven from
 * Main or other Class Caller.
 *
 * <p>No-argument constructor: all configuration is applied afterwards via
 * the use*Files() methods and the perform() entry point.
 */
public IRRChangeLogRestoreDriver() {
} // End of Constructor for IRRChangeLogRestoreDriver.
/**
 * Resets every runtime statistic counter maintained by this utility class
 * back to zero.
 */
public void clearStats() {
    // All counters are plain int fields; a single chained assignment zeroes them.
    files_processed =
        entries_exceptions =
        entries_skipped =
        entries_processed =
        entries_modified =
        entries_renamed =
        entries_deleted =
        entries_added = 0;
} // End of clearStats Method.
/**
 * Writes the current processing statistics to the common logging facility.
 * When no files were processed, a single informational line is emitted instead.
 */
public void dumpStats() {
    String METHODNAME = "dumpStats";
    // Guard clause: nothing was processed, so there are no counters worth listing.
    if (files_processed == 0) {
        IDXLOG.info(CLASSNAME, METHODNAME, "No LDIF Files Available for Processing.");
        return;
    }
    IDXLOG.info(CLASSNAME, METHODNAME, "LDIF Log Files Processed: [" + files_processed + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entries Processed: [" + entries_processed + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entry Exceptions: [" + entries_exceptions + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entries Skipped: [" + entries_skipped + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entries Added: [" + entries_added + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entries Deleted: [" + entries_deleted + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entries Renamed: [" + entries_renamed + "]");
    IDXLOG.info(CLASSNAME, METHODNAME, "Entries Modified: [" + entries_modified + "]");
} // End of dumpStats Method.
/**
 * Indicates whether any LDIF log files have been processed, allowing the
 * caller to decide whether statistics should be dumped or not.
 *
 * <p>Fix: removed an unused local {@code METHODNAME} that was mis-labeled
 * {@code "dumpStats"} (copy/paste leftover), and returned the boolean
 * expression directly instead of via an if/return-true/return-false chain.
 *
 * @return true when at least one file has been processed.
 */
public boolean wereFilesProcessed() {
    return files_processed > 0;
} // End of wereFilesProcessed Method.
/**
 * Switches this driver to consume IRR Change Log Collector files
 * instead of DCL LDIF Export files (adjusts both the session-type flag
 * and the filename search pattern).
 */
public void useIRRChangeLogFiles() {
    FILENAME_SEARCH_PATTERN = IRRCHGLOG_FILENAME_PATTERN;
    MOD_CHANGE_LOG = false;
} // End of useIRRChangeLogFiles Method.
/**
 * Switches this driver to consume DCL LDIF Export files
 * instead of IRR Change Log Collector files (adjusts both the session-type
 * flag and the filename search pattern). This is the default mode.
 */
public void useDCLExportLDIFFiles() {
    FILENAME_SEARCH_PATTERN = DCLLDIFEXPORT_FILENAME_PATTERN;
    MOD_CHANGE_LOG = true;
} // End of useDCLExportLDIFFiles Method.
/**
 * perform Method performs the requested IRR Function Utility: iterates the
 * log files found under INPUT_PATH (matching the current filename search
 * pattern) and drives an LDIF restore into the destination directory context
 * for each file not yet marked PROCESSED or BLOCKED.
 *
 * <p>State tracking is file-system based: for each input file, marker files
 * named {@code <input>_PROCESSED_<tag>} and {@code <input>_BLOCKED_<tag>}
 * control whether it is skipped on subsequent runs.
 *
 * @param irrctx of Directory Destination.
 * @param INPUT_PATH Input File System Path of Directory where Log Restore Files Reside.
 * @param STATE_TAGNAME Tagname for our State file; trimmed and blanks replaced by '_'.
 * @throws idxIRRException for any specific IRR unrecoverable errors during function
 *         (non-IRR exceptions are wrapped into idxIRRException as well).
 */
public void perform(DirContext irrctx,
                    String INPUT_PATH,
                    String STATE_TAGNAME)
        throws idxIRRException {
    String METHODNAME = "perform";
    IDXLOG.fine(CLASSNAME, METHODNAME, "Entering to Drive Restore from Input Path:[" +
            INPUT_PATH + "], TAG Name:[" + STATE_TAGNAME + "].");
    // ****************************************
    // Note The Start Time.
    idxElapsedTime elt = new idxElapsedTime();
    // ************************************
    // Ensure STATE TagName is correct: no surrounding blanks, and embedded
    // blanks replaced so the tag is usable inside marker file names.
    STATE_TAGNAME = STATE_TAGNAME.trim();
    STATE_TAGNAME = STATE_TAGNAME.replace(' ', '_');
    // ************************************
    // Obtain the current list of LOG Files
    // that are contained within our
    // Input Path File System Directory.
    // (Raw TreeMap keyed so iteration yields files in sorted order.)
    //
    TreeMap LOGFILES = obtainDirListing(INPUT_PATH, FILENAME_SEARCH_PATTERN);
    // ************************************
    // Now simple Iterate Through the
    // Log Files and drive the Restore.
    Set mySet = LOGFILES.entrySet();
    Iterator itr = mySet.iterator();
    while (itr.hasNext()) {
        Map.Entry oit = (Map.Entry) itr.next();
        File _infile =
                (File) oit.getValue();
        // *******************************************
        // Verify the file has yet to be processed or
        // blocked already: a sibling marker file with
        // the PROCESSED or BLOCKED suffix means skip.
        File _processedfile =
                new File(_infile.getAbsolutePath() + "_" +
                        "PROCESSED" + "_" + STATE_TAGNAME);
        if (_processedfile.exists()) {
            continue;
        }
        File _blockedfile =
                new File(_infile.getAbsolutePath() + "_" +
                        "BLOCKED" + "_" + STATE_TAGNAME);
        if (_blockedfile.exists()) {
            continue;
        }
        // ***************************
        // Drive the Restore process
        // to the Destination
        // for this unprocessed
        // input file.
        //
        try {
            IDXLOG.fine(CLASSNAME, METHODNAME, "Processing File:[" +
                    _infile.getAbsolutePath() + "].");
            driveLDIFRestore(irrctx, _infile.getAbsolutePath(),
                    _processedfile.getAbsolutePath());
            IDXLOG.fine(CLASSNAME, METHODNAME, "Processed File:[" +
                    _infile.getAbsolutePath() + "].");
        } catch (idxIRRException ire) {
            // IRR-specific failures propagate unchanged.
            IDXLOG.severe(CLASSNAME, METHODNAME, "IRR Processing Exception " + ire);
            throw ire;
        } catch (Exception e) {
            // Anything else is logged and wrapped; note the original stack
            // trace is lost — only the message is preserved.
            IDXLOG.severe(CLASSNAME, METHODNAME, "Processing Exception " + e);
            throw new idxIRRException(e.getMessage());
        } // End of Exception Processing.
    } // End of While Loop.
    // ****************************************
    // Note The End Time.
    elt.setEnd();
    // ****************************************
    // Show Restore Timings.
    IDXLOG.fine(CLASSNAME, METHODNAME, "Driven Restore Complete, Elapsed Time: " + elt.getElapsed());
} // End of Perform Method.
/**
 * unprocessedFileCount, counts the input files that have not yet been
 * processed or blocked, so a caller can decide whether a Directory context
 * needs to be established at all.
 *
 * @param INPUT_PATH Input File System Path of Directory where Log Restore Files Reside.
 * @param STATE_TAGNAME Tagname for our State file.
 * @return number of input files with neither a PROCESSED nor a BLOCKED state file.
 */
public long unprocessedFileCount(String INPUT_PATH, String STATE_TAGNAME) {
    long count = 0;
    // Normalize the state tag name: no surrounding or embedded spaces,
    // since it is used to build state filenames.
    STATE_TAGNAME = STATE_TAGNAME.trim();
    STATE_TAGNAME = STATE_TAGNAME.replace(' ', '_');
    // Obtain the current list of log files contained within our input
    // path File System Directory.
    TreeMap<String, File> LOGFILES = obtainDirListing(INPUT_PATH, FILENAME_SEARCH_PATTERN);
    // A file counts as unprocessed when neither state marker exists.
    for (File _infile : LOGFILES.values()) {
        File _processedfile =
                new File(_infile.getAbsolutePath() + "_" +
                        "PROCESSED" + "_" + STATE_TAGNAME);
        File _blockedfile =
                new File(_infile.getAbsolutePath() + "_" +
                        "BLOCKED" + "_" + STATE_TAGNAME);
        if (!_processedfile.exists() && !_blockedfile.exists()) {
            count++;
        }
    } // End of per-file loop.
    return count;
} // End of unprocessedFileCount Method.
/**
 * Drive the LDIF Restore for a single input file and checkpoint a
 * PROCESSED state file (with per-file statistics) on success.
 *
 * @param irrctx of Directory Destination.
 * @param _infilename absolute path of the input LDIF/change-log file.
 * @param _processedfilename absolute path of the PROCESSED state file to create.
 * @throws idxIRRException for IRR-specific unrecoverable errors.
 * @throws Exception for any other unrecoverable error during function.
 */
private void driveLDIFRestore(DirContext irrctx,
                              String _infilename,
                              String _processedfilename)
        throws Exception, idxIRRException {
    // Per-file statistics returned by the restore object:
    // [0]=exceptions, [1]=skipped, [2]=processed, [3]=modified,
    // [4]=renamed, [5]=deleted, [6]=added.
    int stats[] = new int[7];
    // Perform incremental changes to the destination, using the
    // appropriate restore object based upon the current configuration.
    if (MOD_CHANGE_LOG) {
        IRRmodifyEntry RESTORE = new IRRmodifyEntry();
        RESTORE.perform(irrctx, _infilename);
        stats = RESTORE.obtainStats();
        RESTORE = null;
    } else {
        IRRChangeLogRestore RESTORE = new IRRChangeLogRestore();
        RESTORE.perform(irrctx, _infilename);
        stats = RESTORE.obtainStats();
        RESTORE = null;
    } // End of Else.
    // Accumulate the statistics.
    // BUG FIX: the original wrote "entries_exceptions = +stats[0];" etc. —
    // a plain assignment with a unary plus — which overwrote the running
    // totals on every file instead of accumulating them. The intent,
    // matching files_processed++, is clearly "+=".
    files_processed++;
    entries_exceptions += stats[0];
    entries_skipped += stats[1];
    entries_processed += stats[2];
    entries_modified += stats[3];
    entries_renamed += stats[4];
    entries_deleted += stats[5];
    entries_added += stats[6];
    // Checkpoint a state file indicating this file has been processed,
    // writing the stats to the state file for later review.
    IRRChangeLogRestoreStateFile STATEFILE =
            new IRRChangeLogRestoreStateFile(_processedfilename);
    STATEFILE.persistAsProcessed(_infilename, stats);
    // Clean-up.
    STATEFILE = null;
    return;
} // End of driveLDIFRestore.
/**
 * obtain a Directory Listing using a Pattern and return a TreeMap of the
 * matching plain files, sorted by filename. Returns an empty map when the
 * pattern is invalid, the path is not a directory, or the listing cannot
 * be read.
 */
private TreeMap<String, File> obtainDirListing(String _dirname, String _pattern) {
    String METHODNAME = "obtainDirListing";
    TreeMap<String, File> _tm = new TreeMap<>();
    // Compile our pattern; an invalid pattern yields an empty map.
    Pattern pattern;
    try {
        pattern = Pattern.compile(_pattern);
    } catch (PatternSyntaxException pse) {
        IDXLOG.severe(CLASSNAME, METHODNAME, "Error Using RegEx Pattern in obtainDirListing Method, " + pse.getMessage());
        return (_tm);
    } // End of Exception.
    // Obtain the directory listing.
    File ld = new File(_dirname);
    if (!ld.isDirectory()) {
        return (_tm);
    }
    String contents[] = ld.list();
    if (contents == null) {
        // BUG FIX: File.list() returns null on an I/O error; the original
        // would have thrown a NullPointerException here. Treat it as an
        // empty directory instead.
        return (_tm);
    }
    // Reuse a single Matcher across entries rather than compiling one per file.
    Matcher pm = null;
    for (String name : contents) {
        if (pm == null) {
            pm = pattern.matcher(name);
        } else {
            pm.reset(name);
        }
        // Keep only the matching contents that are plain files.
        if (pm.find()) {
            File DGD = new File(_dirname +
                    File.separator +
                    name);
            if (DGD.isDirectory()) {
                continue;
            }
            // Save to our TreeMap, keyed (and therefore sorted) by filename.
            _tm.put(name, DGD);
        } // End of find if.
    } // End of For Loop.
    return (_tm);
} // End of obtainDirListing Method
/**
 * filterFiles scans each unprocessed, unblocked input file, extracts its
 * "dn:changetype" strings and matches them against the ldif filter file.
 * When only some entries match, the change file is edited in place to
 * remove them; when every entry matches, a BLOCKED state file is
 * checkpointed so the whole input file is skipped by the restore.
 *
 * @param INPUT_PATH Input File System Path of Directory where Log Restore Files Reside.
 * @param STATE_TAGNAME Tagname for our State file.
 * @param LDIF_FILTER_FILE ldif filter file containing the dn:changetype strings to block.
 * @throws IRRLdifFilterException when the filter file or an input file cannot be read.
 */
public void filterFiles(String INPUT_PATH, String STATE_TAGNAME, String LDIF_FILTER_FILE)
        throws IRRLdifFilterException {
    String METHODNAME = "filterFiles";
    // Normalize the state tag name: no surrounding or embedded spaces.
    STATE_TAGNAME = STATE_TAGNAME.trim();
    STATE_TAGNAME = STATE_TAGNAME.replace(' ', '_');
    String dn = "";
    String changetype = "";
    boolean isReadFlag = false;
    boolean isMatch = false;
    String lineBuffer;
    String emptyString = "";
    String ldifFileString;
    FilterString myFilterStrings;
    // Open the ldif filter file and read any dn's to be blocked.
    try {
        myFilterStrings = new FilterString(new File(LDIF_FILTER_FILE));
    } catch (IOException e) {
        System.err.println(METHODNAME + "Opening the ldif filter file: (" + LDIF_FILTER_FILE + ") error: " + e);
        throw new IRRLdifFilterException(METHODNAME + "IO Problem with ldif filter file:" + LDIF_FILTER_FILE);
    } catch (Exception e) {
        System.err.println(METHODNAME + "Opening the ldif filter file: (" + LDIF_FILTER_FILE + ") error: " + e);
        throw new IRRLdifFilterException(METHODNAME + "General Problem with ldif filter file:" + LDIF_FILTER_FILE);
    }
    // Obtain the current list of log files contained within our input
    // path File System Directory.
    TreeMap<String, File> LOGFILES = obtainDirListing(INPUT_PATH, FILENAME_SEARCH_PATTERN);
    // Iterate through the log files, collecting each unprocessed file's
    // dn:changetype strings and matching them against the filter.
    HashSet<String> readDnSet = new HashSet<>();
    for (File _infile : LOGFILES.values()) {
        String filePath = _infile.getAbsolutePath();
        // Verify the file has yet to be processed or blocked already.
        File _processedfile = new File(filePath + "_" + "PROCESSED" + "_" + STATE_TAGNAME);
        File _blockedfile = new File(filePath + "_" + "BLOCKED" + "_" + STATE_TAGNAME);
        readDnSet.clear();
        if ((!_processedfile.exists()) && (!_blockedfile.exists())) {
            // Open the file and read each dn and changetype value.
            // BUG FIX: the reader is now managed by try-with-resources; the
            // original closed it only on the success path and leaked it
            // whenever a read threw.
            try (BufferedReader infileBufferReader = new BufferedReader(new FileReader(filePath))) {
                //TODO integrate with utility idxLDIFReader class
                while ((lineBuffer = infileBufferReader.readLine()) != null) {
                    if (lineBuffer.startsWith("dn: ")) {
                        dn = lineBuffer.replaceFirst("dn: ", emptyString);
                        // Accumulate folded (leading-space) continuation
                        // lines into the dn; the first non-continuation
                        // line is taken as the changetype line.
                        while (((lineBuffer = infileBufferReader.readLine()) != null) && (!isReadFlag)) {
                            if (lineBuffer.startsWith(" ")) {
                                dn = dn + lineBuffer.replaceFirst(" ", emptyString);
                            } else {
                                changetype = lineBuffer.replaceFirst("changetype: ", emptyString);
                                isReadFlag = true;
                            }
                        }
                        // Build the string to match with.
                        ldifFileString = dn + ":" + changetype;
                        readDnSet.add(ldifFileString);
                        isReadFlag = false;
                    }
                }
            } catch (IOException e) {
                System.err.println(MP + "Opening the ldif file for checking: (" + LDIF_FILTER_FILE + ") error: " + e);
                throw new IRRLdifFilterException(METHODNAME + "IO Problem with reading ldif update file: " + filePath);
            } catch (Exception e) {
                System.err.println(MP + "Opening the ldif file for checking: (" + LDIF_FILTER_FILE + ") error: " + e);
                throw new IRRLdifFilterException(METHODNAME + "General Problem with reading ldif update file: " + filePath);
            }
            // Count how many of this file's entries match the filter.
            int numberOfMatches = 0;
            isMatch = false;
            for (String nextldifFileString : readDnSet) {
                if (myFilterStrings.match(nextldifFileString)) {
                    isMatch = true;
                    numberOfMatches++;
                }
            }
            // Some or all of the ldif file needs to be blocked.
            if (isMatch) {
                if (numberOfMatches < readDnSet.size()) {
                    // Only some entries match: remove the blocked dn
                    // entries from the change file in place.
                    System.out.println(MP + "Calling edit file: " + filePath);
                    editChangeFile(filePath, myFilterStrings);
                } else {
                    // Every entry matches: checkpoint a BLOCKED state file
                    // so the whole input file is skipped by the restore.
                    String _blockedfilename = filePath + "_" + "BLOCKED" + "_" + STATE_TAGNAME;
                    IRRChangeLogRestoreStateFile STATEFILE =
                            new IRRChangeLogRestoreStateFile(_blockedfilename);
                    try {
                        STATEFILE.persistAsBlocked(_blockedfilename);
                        System.out.println(MP + "Block File created: " + _blockedfilename);
                    } catch (IOException e) {
                        System.err.println(MP + "Opening the ldif file for checking: (" + LDIF_FILTER_FILE + ") error: " + e);
                        throw new IRRLdifFilterException(METHODNAME + "IO Problem with creating ldif block file: " + _blockedfilename);
                    } catch (Exception e) {
                        System.err.println(MP + "Opening the ldif file for checking: (" + LDIF_FILTER_FILE + ") error: " + e);
                        throw new IRRLdifFilterException(METHODNAME + "General Problem with creating ldif block file: " + _blockedfilename);
                    }
                }
            }
        } // End of unprocessed-file check.
    } // End of per-file loop.
    return;
} // End of filterUnprocessedFiles Method.
/**
 * editChangeFile rewrites a change file in place: every dn block whose
 * "dn:changetype" string does NOT match the filter is copied into a temp
 * file, which then replaces the original, dropping the blocked entries.
 *
 * Relies on BufferedReader mark()/reset(): a mark is taken after each
 * copied line so a non-matching dn block can be rewound and re-read
 * verbatim after its dn/changetype have been scanned forward.
 *
 * @param filePath path of the change file to edit in place.
 * @param myFilterStrings filter holding all dn:changetype strings to be blocked.
 * @throws IRRLdifFilterException when the file cannot be read or rewritten.
 */
private void editChangeFile(String filePath, FilterString myFilterStrings)
        throws IRRLdifFilterException {
    String METHODNAME = "editChangeFile";
    String tempFileName = filePath + "TEMP";
    // Create temp change file to hold the new version of the change file.
    File tempFILE = new File(tempFileName);
    File originalFILE = new File(filePath);
    String lineBuffer;
    String dn;
    String changetype = "";
    String ldifFileString;
    String emptyString = "";
    try {
        BufferedReader infileBufferReader = new BufferedReader(new FileReader(filePath));
        BufferedWriter infileBufferWriter = new BufferedWriter(new FileWriter(tempFileName));
        while ((lineBuffer = infileBufferReader.readLine()) != null) {
            boolean isEndDnBlockFlag = false;
            boolean isMatch = false;
            if (lineBuffer.startsWith("dn: ")) {
                // Start of a dn block: scan forward collecting the
                // (possibly folded) dn and its changetype until the blank
                // line terminating the block, or EOF.
                dn = lineBuffer.replaceFirst("dn: ", emptyString);
                while (!isEndDnBlockFlag) {
                    if (lineBuffer.startsWith(" ")) {
                        // Folded continuation line: append to the dn.
                        dn = dn + lineBuffer.replaceFirst(" ", emptyString);
                    } else if (lineBuffer.startsWith("changetype")) {
                        changetype = lineBuffer.replaceFirst("changetype: ", emptyString);
                    }
                    if (lineBuffer.equals("")) {
                        isEndDnBlockFlag = true;
                    } else if ((lineBuffer = infileBufferReader.readLine()) == null) {
                        isEndDnBlockFlag = true;
                    }
                }
                // Build the string to match with.
                ldifFileString = dn + ":" + changetype;
                if (myFilterStrings.match(ldifFileString)) {
                    isMatch = true;
                }
                if (!isMatch) {
                    // Not blocked: rewind to the mark taken at the start of
                    // this dn block and copy the whole block verbatim to
                    // the temp file, stopping at the blank separator line.
                    infileBufferReader.reset();
                    while (((lineBuffer = infileBufferReader.readLine()) != null) && (lineBuffer.length() != 0)) {
                        infileBufferWriter.newLine();
                        infileBufferWriter.write(lineBuffer, 0, lineBuffer.length());
                    }
                    if (lineBuffer != null)
                        infileBufferWriter.newLine();
                }
            } else {
                // Non-dn line (e.g. a header): copy it through, preceded by
                // a newline except for the leading "version" line.
                if (!lineBuffer.startsWith("version"))
                    infileBufferWriter.newLine();
                infileBufferWriter.write(lineBuffer, 0, lineBuffer.length());
            }
            // Mark the current reader position (large read-ahead limit) so
            // the next dn block can be re-read via reset() if it is kept.
            infileBufferReader.mark(10000000);
        }// end of while
        infileBufferReader.close();
        infileBufferWriter.close();
        // Swap the temp file in for the original.
        originalFILE.delete();
        if (tempFILE.renameTo(originalFILE)) {
            tempFILE.delete();
        } else {
            System.err.println(METHODNAME + " Failed to rename file to origianl");
        }
    } catch (IOException e) {
        System.err.println(MP + "editFile error: " + e);
        throw new IRRLdifFilterException(METHODNAME + "IO Problem editing file: " + filePath);
    } catch (Exception e) {
        System.err.println(MP + "editFile error: " + e);
        throw new IRRLdifFilterException(METHODNAME + "General Problem editing change file: " + filePath);
    }
    return;
} // End of editChangeFile Method.
/**
 * Main: command-line driver that parses and verifies arguments, obtains
 * credentials, opens the destination directory context, optionally filters
 * blocked updates, and drives the change-log restore.
 *
 * @param args Incoming Argument Array.
 * @see jeffaschenk.commons.frameworks.cnxidx.admin.IRRChangeLogRestoreDriver
 * @see IRRChangeLogger
 */
public static void main(String[] args) {
    // Local objects and run-time arguments.
    idxManageContext IRRDest = null;
    String IRRHost = null;
    String IRRPrincipal = null;
    String IRRCredentials = null;
    String INPUT_PATH = null;
    String STATE_TAGNAME = null;
    String LDIF_FILTER_FILE = null;
    boolean VERBOSE = false;
    boolean CHANGELOGFILES = false;
    String METHODNAME = "main";
    // Send the greeting.
    System.out.println(MP + VERSION);
    // Parse the incoming arguments and create objects for each entity.
    idxArgParser Zin = new idxArgParser();
    Zin.parse(args);
    // Any unnamed values are unknown arguments: show usage and terminate.
    if (!Zin.IsUnNamedEmpty()) {
        System.out.println(MP + "Unknown Values Encountered, Terminating Process.");
        Zin.showUnNamed();
        Usage();
    } // End of If.
    // Was version info requested?
    if (Zin.doesNameExist("version")) {
        System.exit(EXIT_VERSION);
    }
    // Was help info requested?
    if (Zin.doesNameExist("?")) {
        Usage();
    }
    // Was verbosity requested?
    if (Zin.doesNameExist("verbose")) {
        VERBOSE = true;
    }
    // Show arguments if verbose selected.
    if (VERBOSE) {
        Zin.show();
    }
    // Build our verification rule set.
    // idxArgVerificationRules parameters are:
    //   String  argument name,
    //   Boolean required-argument indicator,
    //   Boolean string-object argument indicator.
    LinkedList<idxArgVerificationRules> VAR = new LinkedList<>();
    VAR.add(new idxArgVerificationRules("hosturl",
            true, true));
    VAR.add(new idxArgVerificationRules("irrid",
            false, true));
    VAR.add(new idxArgVerificationRules("irrpw",
            false, true));
    VAR.add(new idxArgVerificationRules("idu",
            false, true));
    VAR.add(new idxArgVerificationRules("inpath",
            true, true));
    VAR.add(new idxArgVerificationRules("tagname",
            true, true));
    VAR.add(new idxArgVerificationRules("changelogfiles",
            false, false));
    VAR.add(new idxArgVerificationRules("filterfile",
            false, true));
    VAR.add(new idxArgVerificationRules("verbose",
            false, false));
    // Run the verification rule set. If we do not have a positive return,
    // an invalid argument was detected, so show usage and die.
    idxArgVerifier AV = new idxArgVerifier();
    AV.setVerbose(VERBOSE);
    if (!AV.Verify(MP, Zin, VAR)) {
        Usage();
    }
    // Obtain authentication principal and credentials from the keystore
    // or the command line.
    CommandLinePrincipalCredentials clPC =
            new CommandLinePrincipalCredentials(Zin);
    // Load up the principal/credentials.
    if (clPC.wasObtained()) {
        IRRPrincipal = clPC.getPrincipal();
        System.out.println(MP + "IRR ID:[" + IRRPrincipal + "]");
        IRRCredentials = clPC.getCredentials();
        // Never echo the credentials.
    } else {
        System.out.println(MP + "Required Principal and Credentials not Specified, unable to continue.");
        Usage();
    } // End of Else.
    // Set any specified boolean indicators.
    if (Zin.doesNameExist("changelogfiles")) {
        CHANGELOGFILES = true;
    }
    // Load up the run-time arguments.
    IRRHost = (String) Zin.getValue("hosturl");
    System.out.println(MP + "IRR Host URL:[" + IRRHost + "]");
    INPUT_PATH = ((String) Zin.getValue("inpath")).trim();
    STATE_TAGNAME = ((String) Zin.getValue("tagname")).trim();
    // BUG FIX: "filterfile" is declared optional above (required=false),
    // but the original unconditionally called trim() on its value, which
    // threw a NullPointerException whenever the argument was omitted.
    // Guard for null here and skip the filtering step below when no
    // filter file was supplied.
    LDIF_FILTER_FILE = (String) Zin.getValue("filterfile");
    if (LDIF_FILTER_FILE != null) {
        LDIF_FILTER_FILE = LDIF_FILTER_FILE.trim();
    }
    // Note the start time.
    idxElapsedTime elt = new idxElapsedTime();
    // Now initiate a connection to the directory for an LDAP source context.
    System.out.println(MP + "Attempting Destination Directory Connection to Host URL:[" + IRRHost + "]");
    IRRDest = new idxManageContext(IRRHost,
            IRRPrincipal,
            IRRCredentials,
            "Restore Destination");
    // Exit on all exceptions.
    IRRDest.setExitOnException(true);
    // Now try to open and obtain the context.
    try {
        IRRDest.open();
    } catch (Exception e) {
        System.err.println(MP + "Error Opening Directory Context: " + e);
        System.exit(EXIT_IRR_UNABLE_TO_OBTAIN_CONTEXT);
    } // End of exception
    // Disable the factories.
    try {
        IRRDest.disableDSAEFactories();
    } catch (Exception e) {
        System.err.println(MP + "Error Disabling DSAE Factories: " + e);
        System.exit(EXIT_GENERIC_FAILURE);
    } // End of exception
    // Initialize the driver.
    IRRChangeLogRestoreDriver LRD = new IRRChangeLogRestoreDriver();
    // Set the type of files, if appropriate.
    if (CHANGELOGFILES) {
        LRD.useIRRChangeLogFiles();
    }
    // Check for files that contain updates that are to be blocked, but
    // only when a filter file was actually supplied on the command line.
    if (LDIF_FILTER_FILE != null) {
        try {
            LRD.filterFiles(INPUT_PATH, STATE_TAGNAME, LDIF_FILTER_FILE);
        } catch (IRRLdifFilterException e) {
            IDXLOG.severe(CLASSNAME, METHODNAME, "It was not possible to filter updates" + e);
        } catch (Exception e) {
            IDXLOG.severe(CLASSNAME, METHODNAME, "Unexpected exception occured. It was not possible to filter updates" + e);
        }
    }
    // Perform the restore function.
    try {
        LRD.perform(IRRDest.irrctx, INPUT_PATH, STATE_TAGNAME);
    } catch (idxIRRException ire) {
        System.err.println(MP + " " + ire.getMessage());
        System.exit(ire.getRC());
    } catch (Exception e) {
        System.err.println(MP + "IRR Exception Performing IRRChangeLogRestoreDriver.\n" + e);
        System.exit(EXIT_GENERIC_FAILURE);
    } // End of Exception.
    // Close up shop.
    System.out.println(MP + "Closing Destination Directory Context.");
    try {
        IRRDest.close();
    } catch (Exception e) {
        System.err.println(e);
        System.exit(EXIT_IRR_CLOSE_FAILURE);
    } // End of exception
    // Show the statistics.
    LRD.dumpStats();
    // Note the end time and exit.
    elt.setEnd();
    System.out.println(MP + "Done, Elapsed Time: " + elt.getElapsed());
    System.exit(EXIT_SUCCESSFUL);
} // End of Main
} // End of Class IRRChangeLogRestoreDriver
| |
/*
* @(#)RequestCtx.java
*
* Copyright 2003-2004 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistribution of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
* ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN")
* AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
* AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
* REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
* INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
* OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed or intended for use in
* the design, construction, operation or maintenance of any nuclear facility.
*/
package org.wso2.balana.ctx.xacml2;
import org.wso2.balana.DOMHelper;
import org.wso2.balana.ctx.Attribute;
import org.wso2.balana.Indenter;
import org.wso2.balana.ParsingException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URI;
import java.util.*;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.wso2.balana.XACMLConstants;
import org.wso2.balana.ctx.*;
import org.wso2.balana.xacml3.Attributes;
/**
 * Represents a XACML2 request made to the PDP. This is the class that contains all the data used to start
 * a policy evaluation.
 *
 * @since 1.0
 * @author Seth Proctor
 * @author Marco Barreno
 */
public class RequestCtx extends AbstractRequestCtx {

    /**
     * The optional, generic resource content
     */
    private String resourceContent;

    // There must be at least one subject
    private Set<Subject> subjects = null;

    // There must be exactly one resource
    private Set resource = null;

    // There must be exactly one action
    private Set action = null;

    // There may be any number of environment attributes
    private Set environment = null;

    /**
     * Constructor that creates a <code>RequestCtx</code> from components.
     *
     * @param attributesSet the attribute groups of this request
     * @param documentRoot the root node of the DOM tree for this request
     */
    public RequestCtx(Set<Attributes> attributesSet, Node documentRoot) {
        this(attributesSet, documentRoot, null);
    }

    /**
     * Constructor that creates a <code>RequestCtx</code> from components.
     *
     * @param attributesSet the attribute groups of this request
     * @param documentRoot the root node of the DOM tree for this request
     * @param version xacml version of the request.
     *        NOTE(review): this argument is currently ignored — the
     *        delegate constructor always sets XACML 2.0; confirm whether
     *        callers rely on passing a different version.
     */
    public RequestCtx(Set<Attributes> attributesSet, Node documentRoot, int version) {
        this(attributesSet, documentRoot, null);
    }

    /**
     * Constructor that creates a <code>RequestCtx</code> from components.
     *
     * @param attributesSet the attribute groups of this request
     * @param resourceContent a text-encoded version of the content, suitable for including in the
     *            RequestType, including the root <code>RequestContent</code> node
     */
    public RequestCtx(Set<Attributes> attributesSet, String resourceContent) {
        this(attributesSet, null, resourceContent);
    }

    /**
     * Constructor that creates a <code>RequestCtx</code> from components.
     *
     * @param attributesSet the attribute groups of this request
     * @param documentRoot the root node of the DOM tree for this request
     * @param resourceContent a text-encoded version of the content, suitable for including in the
     *            RequestType, including the root <code>RequestContent</code> node
     *
     * @throws IllegalArgumentException if the inputs are not well formed
     */
    public RequestCtx(Set<Attributes> attributesSet, Node documentRoot, String resourceContent)
            throws IllegalArgumentException {
        this.attributesSet = attributesSet;
        this.documentRoot = documentRoot;
        this.resourceContent = resourceContent;
        this.xacmlVersion = XACMLConstants.XACML_VERSION_2_0;
    }

    /**
     * Constructor that creates a <code>RequestCtx</code> from the four
     * XACML2 attribute sections.
     *
     * @param subjects the request's subjects (at least one expected)
     * @param resource the request's resource attributes
     * @param action the request's action attributes
     * @param environment the request's environment attributes
     * @throws IllegalArgumentException if the inputs are not well formed
     */
    public RequestCtx(Set<Subject> subjects, Set<Attribute> resource, Set<Attribute> action,
                      Set<Attribute> environment) throws IllegalArgumentException {
        this(null, null, subjects, resource, action, environment, null);
    }

    /**
     * Constructor that creates a <code>RequestCtx</code> from components.
     *
     * @param attributesSet the attribute groups of this request
     * @param documentRoot the root node of the DOM tree for this request
     * @param subjects the request's subjects
     * @param resource the request's resource attributes
     * @param action the request's action attributes
     * @param environment the request's environment attributes
     * @param resourceContent a text-encoded version of the content, suitable for including in the
     *            RequestType, including the root <code>RequestContent</code> node
     *
     * @throws IllegalArgumentException if the inputs are not well formed
     */
    public RequestCtx(Set<Attributes> attributesSet, Node documentRoot, Set<Subject> subjects,
                      Set<Attribute> resource, Set<Attribute> action, Set<Attribute> environment,
                      String resourceContent) throws IllegalArgumentException {
        this.attributesSet = attributesSet;
        this.documentRoot = documentRoot;
        this.subjects = subjects;
        this.resource = resource;
        this.action = action;
        this.environment = environment;
        this.resourceContent = resourceContent;
        this.xacmlVersion = XACMLConstants.XACML_VERSION_2_0;
    }

    /**
     * Create a new <code>RequestCtx</code> by parsing a node. This node should be created by
     * schema-verified parsing of an <code>XML</code> document.
     *
     * @param root the node to parse for the <code>RequestCtx</code>
     *
     * @return a new <code>RequestCtx</code> constructed by parsing
     *
     * @throws ParsingException if the DOM node is invalid
     */
    public static RequestCtx getInstance(Node root) throws ParsingException {
        Set<Subject> newSubjects = new HashSet<Subject>();
        Set<Attributes> attributesSet = new HashSet<Attributes>();
        Node content = null;
        Set<Attribute> newResource = null;
        Set<Attribute> newAction = null;
        Set<Attribute> newEnvironment = null;

        // First check to be sure the node passed is indeed a Request node.
        String tagName = DOMHelper.getLocalName(root);
        if (!tagName.equals("Request")) {
            throw new ParsingException("Request cannot be constructed using " + "type: "
                    + DOMHelper.getLocalName(root));
        }

        // Now go through its child nodes, finding Subject,
        // Resource, Action, and Environment data
        NodeList children = root.getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            Node node = children.item(i);
            String tag = DOMHelper.getLocalName(node);
            if (tag.equals("Subject")) {
                // see if there is a category
                Node catNode = node.getAttributes().getNamedItem("SubjectCategory");
                URI category = null;
                if (catNode != null) {
                    try {
                        category = new URI(catNode.getNodeValue());
                    } catch (Exception e) {
                        throw new ParsingException("Invalid Category URI", e);
                    }
                }
                // now we get the attributes
                Set<Attribute> attributes = parseAttributes(node);
                // add the subject and its attribute group
                newSubjects.add(new Subject(category, attributes));
                attributesSet.add(new Attributes(category, null, attributes, null));
            } else if (tag.equals("Resource")) {
                NodeList nodes = node.getChildNodes();
                for (int j = 0; j < nodes.getLength(); j++) {
                    Node child = nodes.item(j);
                    // BUG FIX: the original tested the enclosing Resource
                    // element ("node") here instead of the child being
                    // iterated, and stored "node" as the content, so a
                    // ResourceContent element could never be detected.
                    // Inspect — and keep — the child instead.
                    if (DOMHelper.getLocalName(child).equals(XACMLConstants.RESOURCE_CONTENT)) {
                        // only one ResourceContent element is allowed
                        if (content != null) {
                            throw new ParsingException("Too many resource content elements are defined.");
                        }
                        content = child;
                    }
                }
                // For now, this code doesn't parse the content, since it's
                // a set of anys with a set of anyAttributes, and therefore
                // no useful data can be gleaned from it anyway. The theory
                // here is that it's only useful in the instance doc, so
                // we won't bother parse it, but we may still want to go
                // back and provide some support at some point...
                newResource = parseAttributes(node);
                attributesSet.add(new Attributes(null, content, newResource, null));
            } else if (tag.equals("Action")) {
                newAction = parseAttributes(node);
                attributesSet.add(new Attributes(null, content, newAction, null));
            } else if (tag.equals("Environment")) {
                newEnvironment = parseAttributes(node);
                attributesSet.add(new Attributes(null, content, newEnvironment, null));
            }
        }

        // make sure that there is at least one Subject.
        // BUG FIX: this check previously sat inside the Subject branch,
        // immediately after an add(), where it could never fail; moved here
        // so a request with no Subject element is actually rejected.
        if (newSubjects.size() < 1) {
            throw new ParsingException("Request must a contain subject");
        }

        // if we didn't have an environment section, the only optional section
        // of the four, then create a new empty set for it
        if (newEnvironment == null) {
            newEnvironment = new HashSet<Attribute>();
            attributesSet.add(new Attributes(null, content, newEnvironment, null));
        }

        // Now create and return the RequestCtx from the information gathered
        return new RequestCtx(attributesSet, root, newSubjects, newResource,
                newAction, newEnvironment, null);
    }

    /*
     * Helper method that parses a set of Attribute types. The Subject, Action and Environment
     * sections all look like this.
     */
    private static Set<Attribute> parseAttributes(Node root) throws ParsingException {
        Set<Attribute> set = new HashSet<Attribute>();

        // the Environment section is just a list of Attributes
        NodeList nodes = root.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            if (DOMHelper.getLocalName(node).equals("Attribute"))
                set.add(Attribute.getInstance(node, XACMLConstants.XACML_VERSION_2_0));
        }
        return set;
    }

    /**
     * Returns a <code>Set</code> containing <code>Subject</code> objects.
     *
     * @return the request's subject attributes
     */
    public Set getSubjects() {
        return subjects;
    }

    /**
     * Returns a <code>Set</code> containing <code>Attribute</code> objects.
     *
     * @return the request's resource attributes
     */
    public Set getResource() {
        return resource;
    }

    /**
     * Returns a <code>Set</code> containing <code>Attribute</code> objects.
     *
     * @return the request's action attributes
     */
    public Set getAction() {
        return action;
    }

    /**
     * Returns a <code>Set</code> containing <code>Attribute</code> objects.
     *
     * @return the request's environment attributes
     */
    public Set getEnvironmentAttributes() {
        return environment;
    }

    /**
     * Returns the root DOM node of the document used to create this object, or null if this object
     * was created by hand (ie, not through the <code>getInstance</code> method) or if the root node
     * was not provided to the constructor.
     *
     * @return the root DOM node or null
     */
    public Node getDocumentRoot() {
        return documentRoot;
    }

    /**
     * Encodes this <code>AbstractRequestCtx</code> into its XML representation and writes this encoding to the given
     * <code>OutputStream</code>. No indentation is used.
     *
     * @param output a stream into which the XML-encoded data is written
     */
    public void encode(OutputStream output) {
        encode(output, new Indenter(0));
    }

    /**
     * Encodes this <code>AbstractRequestCtx</code> into its XML representation and writes this encoding to the given
     * <code>OutputStream</code> with indentation.
     *
     * NOTE(review): this method dereferences subjects/resource/action/
     * environment directly, so it assumes the instance was built with one
     * of the constructors that populates those sets (e.g. getInstance);
     * instances created from an attributesSet alone would NPE here.
     *
     * @param output a stream into which the XML-encoded data is written
     * @param indenter an object that creates indentation strings
     */
    public void encode(OutputStream output, Indenter indenter) {
        // Make a PrintStream for a nicer printing interface
        PrintStream out = new PrintStream(output);

        // Prepare the indentation string
        String topIndent = indenter.makeString();
        out.println(topIndent + "<Request xmlns=\"" + XACMLConstants.RESOURCE_SCOPE_2_0 + "\" >");

        // go in one more for next-level elements...
        indenter.in();
        String indent = indenter.makeString();

        // ...and go in again for everything else
        indenter.in();

        // first off, go through all subjects
        Iterator it = subjects.iterator();
        while (it.hasNext()) {
            Subject subject = (Subject) (it.next());
            out.print(indent + "<Subject SubjectCategory=\"" + subject.getCategory().toString()
                    + "\"");
            Set subjectAttrs = subject.getAttributes();
            if (subjectAttrs.size() == 0) {
                // there's nothing in this Subject, so just close the tag
                out.println("/>");
            } else {
                // there's content, so fill it in
                out.println(">");
                encodeAttributes(subjectAttrs, out, indenter);
                out.println(indent + "</Subject>");
            }
        }

        // next do the resource
        if ((resource.size() != 0) || (resourceContent != null)) {
            out.println(indent + "<Resource>");
            if (resourceContent != null)
                out.println(indenter.makeString() + "<ResourceContent>" + resourceContent
                        + "</ResourceContent>");
            encodeAttributes(resource, out, indenter);
            out.println(indent + "</Resource>");
        } else {
            out.println(indent + "<Resource/>");
        }

        // now the action
        if (action.size() != 0) {
            out.println(indent + "<Action>");
            encodeAttributes(action, out, indenter);
            out.println(indent + "</Action>");
        } else {
            out.println(indent + "<Action/>");
        }

        // finally the environment, if there are any attrs
        if (environment.size() != 0) {
            out.println(indent + "<Environment>");
            encodeAttributes(environment, out, indenter);
            out.println(indent + "</Environment>");
        }

        // we're back to the top
        indenter.out();
        indenter.out();
        out.println(topIndent + "</Request>");
    }

    /**
     * Private helper function to encode the attribute sets
     */
    private void encodeAttributes(Set attributes, PrintStream out, Indenter indenter) {
        indenter.in();
        Iterator it = attributes.iterator();
        while (it.hasNext()) {
            Attribute attr = (Attribute) (it.next());
            out.println(attr.encode());
        }
        indenter.out();
    }
}
| |
package cd4017be.indlog.util;
import cd4017be.lib.Gui.ITankContainer;
import cd4017be.lib.capability.AbstractInventory;
import cd4017be.lib.util.ItemFluidUtil.StackedFluidAccess;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.capability.FluidTankProperties;
import net.minecraftforge.fluids.capability.IFluidHandler;
import net.minecraftforge.fluids.capability.IFluidTankProperties;
/**
* IFluidHandler implementation for a single tank with integrated FluidContainer fill/drain mechanism.
* @author cd4017be
*/
public class AdvancedTank extends AbstractInventory implements IFluidHandler, ITankContainer {

    /** owning tile entity; notified via {@code markDirty()} on every content change */
    public final TileEntity tile;
    /** currently stored fluid; null when the tank is empty and not type-locked */
    public FluidStack fluid;
    /** item stack occupying the internal fluid-container slot */
    public ItemStack cont;
    /** whether the fluid type is (permanently) hard-fixed */
    public final boolean fixed;
    /** true = fill, false = drain fluid containers */
    public boolean output;
    /** whether the fluid type is currently locked */
    public boolean lock;
    /** [mB] capacity of the tank */
    public int cap;
    /**
     * [mB] fill state at which the held fluid container is transposed.
     * Sentinels: Integer.MAX_VALUE = never fill a container,
     * Integer.MIN_VALUE = never drain one.
     */
    private int need;

    /**
     * Creates a tank that allows any fluid type.
     *
     * @param tile owning tile entity
     * @param cap capacity in mB
     * @param out whether this is considered as output tank
     */
    public AdvancedTank(TileEntity tile, int cap, boolean out) {
        this(tile, cap, out, null);
    }

    /**
     * Creates a tank, optionally permanently locked to the given fluid type.
     *
     * @param tile owning tile entity
     * @param cap capacity in mB
     * @param out whether this is considered as output tank
     * @param type fluid type to lock to, or null for an unrestricted tank
     */
    public AdvancedTank(TileEntity tile, int cap, boolean out, Fluid type) {
        this.tile = tile;
        this.cap = cap;
        this.output = out;
        // start with the "no container transposition pending" sentinel for the direction
        this.need = out ? Integer.MAX_VALUE : Integer.MIN_VALUE;
        this.fixed = type != null;
        this.lock = fixed;
        // a fixed tank always carries a zero-amount stack of its type so the type survives emptiness
        this.fluid = fixed ? new FluidStack(type, 0) : null;
        this.cont = ItemStack.EMPTY;
    }

    /**
     * Changes the lock state during operation. Ignored for permanently fixed tanks.
     * Locking only sticks when a fluid is present; unlocking discards a zero-amount stack.
     *
     * @param lock the new fluid type lock state
     */
    public void setLock(boolean lock) {
        if (fixed) return;
        if (lock) this.lock = fluid != null;
        else {
            this.lock = false;
            if (fluid != null && fluid.amount == 0) fluid = null;
        }
    }

    /**
     * Changes the I/O direction during operation and immediately retries
     * container transposition in the new direction.
     *
     * @param out the new I/O direction for fluid containers
     */
    public void setOut(boolean out) {
        if (out != output) {
            output = out;
            if (out) fillContainer();
            else drainContainer();
        }
    }

    /**
     * Replaces the container slot content and re-evaluates container transposition.
     * Skipped client-side or before the tile is placed in a world.
     */
    @Override
    public void setStackInSlot(int slot, ItemStack stack) {
        cont = stack;
        if (!tile.hasWorld() || tile.getWorld().isRemote) return;
        if (output) fillContainer();
        else drainContainer();
        tile.markDirty();
    }

    /** @return 1 — this inventory exposes only the container slot */
    @Override
    public int getSlots() {
        return 1;
    }

    @Override
    public ItemStack getStackInSlot(int slot) {
        return cont;
    }

    /** @return 1 — a single internal tank */
    @Override
    public int getTanks() {
        return 1;
    }

    @Override
    public FluidStack getTank(int i) {
        return fluid;
    }

    @Override
    public int getCapacity(int i) {
        return cap;
    }

    @Override
    public void setTank(int i, FluidStack fluid) {
        this.fluid = fluid;
    }

    @Override
    public IFluidTankProperties[] getTankProperties() {
        return new IFluidTankProperties[] {new FluidTankProperties(fluid, cap)};
    }

    /**
     * Fills the tank from the given stack.
     * An empty tank accepts any type; otherwise the type must match.
     *
     * @param res offered fluid
     * @param doFill false to only simulate
     * @return [mB] amount accepted
     */
    @Override
    public int fill(FluidStack res, boolean doFill) {
        if (fluid == null) {
            int m = Math.min(res.amount, cap);
            if (doFill) {
                fluid = new FluidStack(res, m);
                tile.markDirty();
            }
            return m;
        } else if (fluid.isFluidEqual(res)) {
            int m = Math.min(res.amount, cap - fluid.amount);
            if (m != 0 && doFill) increment(m);
            return m;
        } else return 0;
    }

    /**
     * Drains up to the requested amount of a matching fluid type.
     *
     * @param res requested fluid (type + max amount)
     * @param doDrain false to only simulate
     * @return drained stack, or null when empty or the type mismatches
     */
    @Override
    public FluidStack drain(FluidStack res, boolean doDrain) {
        if (fluid == null || fluid.amount <= 0 || !fluid.isFluidEqual(res)) return null;
        int m = Math.min(res.amount, fluid.amount);
        FluidStack ret = new FluidStack(fluid, m);
        if (doDrain) decrement(m);
        return ret;
    }

    /**
     * Drains up to the requested amount of whatever fluid is stored.
     *
     * @param m maximum amount in mB
     * @param doDrain false to only simulate
     * @return drained stack, or null when empty
     */
    @Override
    public FluidStack drain(int m, boolean doDrain) {
        if (fluid == null || fluid.amount <= 0) return null;
        if (fluid.amount < m) m = fluid.amount;
        FluidStack ret = new FluidStack(fluid, m);
        if (doDrain) decrement(m);
        return ret;
    }

    /**
     * @return [mB] stored fluid amount
     */
    public int amount() {
        return fluid == null ? 0 : fluid.amount;
    }

    /**
     * @return [mB] remaining free capacity
     */
    public int free() {
        return fluid == null ? cap : cap - fluid.amount;
    }

    /**
     * WARNING: contained fluid must not be null!
     * Triggers container filling once the stored amount reaches {@link #need}.
     *
     * @param n [mB] amount to increment the contained fluid by
     */
    public void increment(int n) {
        // n is reused to hold the NEW total after the increment
        n = fluid.amount += n;
        if (output && n >= need) fillContainer();
        tile.markDirty();
    }

    /**
     * WARNING: contained fluid must not be null!
     * Drops the fluid reference when emptied (unless locked) and triggers container
     * draining once the stored amount falls to {@link #need}.
     *
     * @param n [mB] amount to decrement the contained fluid by
     */
    public void decrement(int n) {
        // n is reused to hold the NEW total after the decrement
        n = fluid.amount -= n;
        if (n <= 0 && !lock) fluid = null;
        if (!output && n <= need) drainContainer();
        tile.markDirty();
    }

    /**
     * Fills the currently held fluid container from the tank.<br>
     * This operation is automatically performed by the tank on changes.
     * Leaves {@link #need} at the amount required for the next transfer,
     * or Integer.MAX_VALUE when no container can accept fluid.
     */
    public void fillContainer() {
        need = Integer.MAX_VALUE;
        if (cont.getCount() == 0) return;
        if (fluid == null) {
            // empty tank: any incoming fluid should immediately retry the fill
            need = 0;
            return;
        }
        StackedFluidAccess acc = new StackedFluidAccess(cont);
        if (!acc.valid()) return;
        // move as much as possible into the container; n = remaining tank amount
        int n = fluid.amount -= acc.fill(fluid, true);
        // simulate a full-capacity fill to learn how much the container could still take
        int m = acc.fill(new FluidStack(fluid, cap), false);
        if (m > 0) need = m;
        if (n <= 0 && !lock) fluid = null;
        cont = acc.result();
    }

    /**
     * Drains the currently held fluid container into the tank.<br>
     * This operation is automatically performed by the tank on changes.
     * Leaves {@link #need} at the fill level below which the next drain should run,
     * or Integer.MIN_VALUE when no container can provide fluid.
     */
    public void drainContainer() {
        need = Integer.MIN_VALUE;
        if (cont.getCount() == 0) return;
        StackedFluidAccess acc = new StackedFluidAccess(cont);
        if (!acc.valid()) return;
        if (fluid == null) {
            // empty tank: take whatever type the container offers
            fluid = acc.drain(cap, true);
            if (fluid == null) return;
        } else {
            // top up with the matching type only
            FluidStack res = acc.drain(new FluidStack(fluid, cap - fluid.amount), true);
            if (res != null) fluid.amount += res.amount;
        }
        // simulate: if the container still holds fluid, record the tank level
        // that must be reached before it can be drained further
        FluidStack res = acc.drain(new FluidStack(fluid, cap), false);
        if (res != null) need = cap - res.amount;
        cont = acc.result();
    }

    /**
     * @return whether the tank currently tries to fill/drain a fluid container
     */
    public boolean transposing() {
        return output ? need <= cap : need >= 0;
    }

    /**
     * Restores tank state from NBT. For fixed tanks only the amount is read;
     * otherwise fluid type and lock flag are restored too. The container item
     * is read from the same compound.
     */
    public void readNBT(NBTTagCompound nbt) {
        if (fixed) fluid.amount = nbt.getInteger("Amount");
        else {
            fluid = FluidStack.loadFluidStackFromNBT(nbt);
            lock = fluid != null && nbt.getBoolean("lock");
        }
        cont = new ItemStack(nbt);
        // a present container forces a transposition attempt on next change
        if (cont.getCount() > 0) need = output ? 0 : cap;
    }

    /**
     * Serializes tank state into the given compound (fluid, container item, lock flag).
     * Stale tags are removed when fluid/container are absent.
     */
    public NBTTagCompound writeNBT(NBTTagCompound nbt) {
        if (fluid != null) fluid.writeToNBT(nbt);
        else nbt.removeTag("FluidName");
        if (!cont.isEmpty()) cont.writeToNBT(nbt);
        else nbt.removeTag("id");
        nbt.setBoolean("lock", lock);
        return nbt;
    }

    /**
     * @return redstone comparator value: 0 when empty, otherwise 1..15 scaled by fill level
     */
    public int getComparatorValue() {
        return fluid == null || fluid.amount <= 0 ? 0 : (int)((long)fluid.amount * 14 / cap) + 1;
    }
}
| |
package com.rallydev.rest;
import com.google.gson.JsonArray;
import com.rallydev.rest.client.ApiKeyClient;
import com.rallydev.rest.client.BasicAuthClient;
import com.rallydev.rest.client.HttpClient;
import com.rallydev.rest.request.CreateRequest;
import com.rallydev.rest.request.DeleteRequest;
import com.rallydev.rest.request.GetRequest;
import com.rallydev.rest.request.QueryRequest;
import com.rallydev.rest.request.UpdateRequest;
import com.rallydev.rest.response.CreateResponse;
import com.rallydev.rest.response.DeleteResponse;
import com.rallydev.rest.response.GetResponse;
import com.rallydev.rest.response.QueryResponse;
import com.rallydev.rest.response.UpdateResponse;
import java.io.Closeable;
import java.io.IOException;
import java.net.URI;
/**
* <p>The main interface to the Rest API.</p>
* <p>Provides CRUD and query operations.</p>
*/
public class RallyRestApi implements Closeable {
    protected HttpClient client;

    /**
     * Creates a new instance for the specified server using the specified credentials.
     *
     * @param server The server to connect to, e.g. {@code new URI("https://rally1.rallydev.com")}
     * @param userName The username to be used for authentication.
     * @param password The password to be used for authentication.
     * @deprecated Use the api key constructor instead.
     */
    public RallyRestApi(URI server, String userName, String password) {
        this(new BasicAuthClient(server, userName, password));
    }

    /**
     * Creates a new instance for the specified server using the specified API Key.
     *
     * @param server The server to connect to, e.g. {@code new URI("https://rally1.rallydev.com")}
     * @param apiKey The API Key to be used for authentication.
     */
    public RallyRestApi(URI server, String apiKey) {
        this(new ApiKeyClient(server, apiKey));
    }

    protected RallyRestApi(HttpClient httpClient) {
        this.client = httpClient;
    }

    /**
     * Set the unauthenticated proxy server to use. By default no proxy is configured.
     *
     * @param proxy The proxy server, e.g. {@code new URI("http://my.proxy.com:8000")}
     */
    public void setProxy(URI proxy) {
        client.setProxy(proxy);
    }

    /**
     * Set the authenticated proxy server to use. By default no proxy is configured.
     *
     * @param proxy The proxy server, e.g. {@code new URI("http://my.proxy.com:8000")}
     * @param userName The username to be used for authentication.
     * @param password The password to be used for authentication.
     */
    public void setProxy(URI proxy, String userName, String password) {
        client.setProxy(proxy, userName, password);
    }

    /**
     * Set the value of the X-RallyIntegrationVendor header included on all requests.
     * This should be set to your company name.
     *
     * @param value The vendor header to be included on all requests.
     */
    public void setApplicationVendor(String value) {
        client.setApplicationVendor(value);
    }

    /**
     * Set the value of the X-RallyIntegrationVersion header included on all requests.
     * This should be set to the version of your application.
     *
     * @param value The version header to be included on all requests.
     */
    public void setApplicationVersion(String value) {
        client.setApplicationVersion(value);
    }

    /**
     * Set the value of the X-RallyIntegrationName header included on all requests.
     * This should be set to the name of your application.
     *
     * @param value The name header to be included on all requests.
     */
    public void setApplicationName(String value) {
        client.setApplicationName(value);
    }

    /**
     * Get the current version of the WSAPI being targeted.
     * Defaults to v2.0
     *
     * @return the current WSAPI version.
     */
    public String getWsapiVersion() {
        return client.getWsapiVersion();
    }

    /**
     * Set the current version of the WSAPI being targeted.
     *
     * @param wsapiVersion the new version, e.g. {@code "1.30"}
     */
    public void setWsapiVersion(String wsapiVersion) {
        client.setWsapiVersion(wsapiVersion);
    }

    /**
     * Create the specified object.
     *
     * @param request the {@link CreateRequest} specifying the object to be created.
     * @return the resulting {@link CreateResponse}
     * @throws IOException if an error occurs during the creation.
     */
    public CreateResponse create(CreateRequest request) throws IOException {
        return new CreateResponse(client.doPost(request.toUrl(), request.getBody()));
    }

    /**
     * Update the specified object.
     *
     * @param request the {@link UpdateRequest} specifying the object to be updated.
     * @return the resulting {@link UpdateResponse}
     * @throws IOException if an error occurs during the update.
     */
    public UpdateResponse update(UpdateRequest request) throws IOException {
        return new UpdateResponse(client.doPost(request.toUrl(), request.getBody()));
    }

    /**
     * Delete the specified object.
     *
     * @param request the {@link DeleteRequest} specifying the object to be deleted.
     * @return the resulting {@link DeleteResponse}
     * @throws IOException if an error occurs during the deletion.
     */
    public DeleteResponse delete(DeleteRequest request) throws IOException {
        return new DeleteResponse(client.doDelete(request.toUrl()));
    }

    /**
     * Query for objects matching the specified request.
     * By default one page of data will be returned.
     * Paging will automatically be performed if a limit is set on the request.
     *
     * @param request the {@link QueryRequest} specifying the objects to be queried.
     * @return the resulting {@link QueryResponse}
     * @throws IOException if an error occurs during the query.
     */
    public QueryResponse query(QueryRequest request) throws IOException {
        QueryResponse queryResponse = new QueryResponse(client.doGet(request.toUrl()));
        if (queryResponse.wasSuccessful()) {
            // NOTE: assumes the first page returned a full page of results.
            int receivedRecords = request.getPageSize();
            while (receivedRecords < request.getLimit() &&
                    (receivedRecords + request.getStart() - 1) < queryResponse.getTotalResultCount()) {
                QueryRequest pageRequest = request.clone();
                pageRequest.setStart(receivedRecords + request.getStart());
                QueryResponse pageResponse = new QueryResponse(client.doGet(pageRequest.toUrl()));
                if (pageResponse.wasSuccessful()) {
                    JsonArray results = queryResponse.getResults();
                    results.addAll(pageResponse.getResults());
                    receivedRecords += pageRequest.getPageSize();
                } else {
                    // Bug fix: a failed page previously left receivedRecords unchanged,
                    // so the loop re-issued the same request forever. Stop paging on
                    // the first failure and return what was accumulated so far.
                    break;
                }
            }
        }
        return queryResponse;
    }

    /**
     * Get the specified object.
     *
     * @param request the {@link GetRequest} specifying the object to be retrieved.
     * @return the resulting {@link GetResponse}
     * @throws IOException if an error occurs during the retrieval.
     */
    public GetResponse get(GetRequest request) throws IOException {
        return new GetResponse(client.doGet(request.toUrl()));
    }

    /**
     * Release all resources associated with this instance.
     *
     * @throws IOException if an error occurs releasing resources
     */
    @Override
    public void close() throws IOException {
        client.close();
    }

    /**
     * Get the underlying http client implementation.
     * This is exposed with the intent of providing the ability to supply additional configuration to the client.
     * It should not be used to directly make i/o calls.
     *
     * @return the raw http client
     */
    public HttpClient getClient() {
        return client;
    }
}
| |
/**
* Copyright 2007-2008 University Of Southern California
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.isi.pegasus.planner.namespace;
import edu.isi.pegasus.planner.catalog.classes.Profiles;
import edu.isi.pegasus.planner.classes.Profile;
import edu.isi.pegasus.planner.common.PegasusProperties;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
/**
* An empty mechanical implementation for the namespace. At present we do not know what the meaning
* is. The meaning is is determined at the point of writing the submit files.
*
* @author Karan Vahi
* @author Gaurang Mehta
* @version $Revision$
*/
public class Hints extends Namespace {
    /** The name of the namespace that this class implements. */
    public static final String NAMESPACE_NAME = Profile.HINTS;

    /** The jobmanager universe key. */
    public static final String GRID_JOB_TYPE_KEY = Selector.GRID_JOB_TYPE_KEY;

    /** The deprecated execution site key */
    public static final String DEPRECATED_EXECUTION_SITE_KEY = "executionPool";

    /** The execution pool key */
    public static final String EXECUTION_SITE_KEY = Selector.EXECUTION_SITE_KEY;

    /** The deprecated pfnHint key */
    public static final String DEPRECATED_PFN_HINT_KEY = "pfnHint";

    /** The pfnHint key */
    public static final String PFN_HINT_KEY = Selector.PFN_HINT_KEY;

    /**
     * The table containing the mapping of the deprecated keys to the newer keys.
     * Lazily initialized by {@link #deprecatedTable()}.
     */
    protected static Map mDeprecatedTable = null;

    /**
     * The name of the implementing namespace. It should be one of the valid namespaces always.
     *
     * @see Namespace#isNamespaceValid(String)
     */
    protected String mNamespace;

    /**
     * The default constructor. Note that the map is not allocated memory at this stage. It is done
     * so in the overloaded construct function.
     */
    public Hints() {
        mProfileMap = null;
        mNamespace = NAMESPACE_NAME;
    }

    /**
     * The overloaded constructor.
     *
     * @param mp the map containing the profiles to be prepopulated with.
     */
    public Hints(Map mp) {
        mProfileMap = new TreeMap(mp);
        mNamespace = NAMESPACE_NAME;
    }

    /**
     * Returns the name of the namespace associated with the profile implementations.
     *
     * @return the namespace name.
     * @see #NAMESPACE_NAME
     */
    public String namespaceName() {
        return mNamespace;
    }

    /**
     * Provides an iterator to traverse the profiles by their keys.
     *
     * @return an iterator over the keys to walk the profile list.
     */
    public Iterator getProfileKeyIterator() {
        return (this.mProfileMap == null)
                ? new EmptyIterator()
                : this.mProfileMap.keySet().iterator();
    }

    /**
     * Constructs a new element of the format (key=value). It first checks if the map has been
     * initialised or not. If not then allocates memory first.
     *
     * @param key is the left-hand-side.
     * @param value is the right hand side.
     */
    public void construct(String key, String value) {
        if (mProfileMap == null) mProfileMap = new TreeMap();
        mProfileMap.put(key, value);
    }

    /**
     * Returns true if the namespace contains a mapping for the specified key. More formally,
     * returns true if and only if this map contains a mapping for a key k such that (key==null ?
     * k==null : key.equals(k)). (There can be at most one such mapping.) It also returns false if
     * the map does not exist.
     *
     * @param key The key that you want to search for in the namespace.
     * @return boolean
     */
    public boolean containsKey(Object key) {
        return (mProfileMap == null) ? false : mProfileMap.containsKey(key);
    }

    /**
     * This checks whether the key passed by the user is valid in the current namespace or not. At
     * present, for this namespace only a limited number of keys have been assigned semantics.
     *
     * @param key (left hand side)
     * @param value (right hand side)
     * @return Namespace.VALID_KEY, Namespace.DEPRECATED_KEY, Namespace.MALFORMED_KEY or
     *     Namespace.NOT_PERMITTED_KEY
     */
    public int checkKey(String key, String value) {
        // Sanity checks first. Bug fix: the previous implementation only recorded
        // MALFORMED_KEY and then fell through to the switch below, which both
        // dereferenced a possibly-null key (NPE on key.charAt(0)) and silently
        // overwrote the malformed result. Return immediately instead.
        if (key == null || key.length() < 2 || value == null || value.length() < 2) {
            return MALFORMED_KEY;
        }
        int res;
        switch (key.charAt(0)) {
            case 'e':
                if (key.compareTo(Hints.EXECUTION_SITE_KEY) == 0) {
                    res = VALID_KEY;
                } else if (key.compareTo(Hints.DEPRECATED_EXECUTION_SITE_KEY) == 0) {
                    res = DEPRECATED_KEY;
                } else {
                    res = NOT_PERMITTED_KEY;
                }
                break;
            case 'g':
                if (key.compareTo(Hints.GRID_JOB_TYPE_KEY) == 0) {
                    res = VALID_KEY;
                } else {
                    res = NOT_PERMITTED_KEY;
                }
                break;
            case 'p':
                if (key.compareTo(Hints.PFN_HINT_KEY) == 0) {
                    res = VALID_KEY;
                } else if (key.compareTo(Hints.DEPRECATED_PFN_HINT_KEY) == 0) {
                    res = DEPRECATED_KEY;
                } else {
                    res = NOT_PERMITTED_KEY;
                }
                break;
            default:
                res = NOT_PERMITTED_KEY;
        }
        return res;
    }

    /**
     * It puts in the namespace specific information specified in the properties file into the
     * namespace. The name of the pool is also passed, as many of the properties specified in the
     * properties file are on a per pool basis. An empty implementation for the timebeing.
     *
     * @param properties the <code>PegasusProperties</code> object containing all the properties
     *     that the user specified at various places (like .chimerarc, properties file, command
     *     line).
     * @param pool the pool name where the job is scheduled to run.
     */
    public void checkKeyInNS(PegasusProperties properties, String pool) {
        // retrieve the relevant profiles from properties
        // and merge them into the existing.
        this.assimilate(properties, Profiles.NAMESPACES.hints);
    }

    /**
     * Singleton access to the deprecated table that holds the deprecated keys, and the keys that
     * replace them.
     *
     * <p>NOTE(review): the lazy initialization is not thread-safe; acceptable only if
     * planner code touches this from a single thread — confirm before relying on it
     * concurrently.
     *
     * @return Map
     */
    public java.util.Map deprecatedTable() {
        if (mDeprecatedTable == null) {
            // only initialize once and only once, as needed.
            mDeprecatedTable = new java.util.HashMap();
            mDeprecatedTable.put(DEPRECATED_EXECUTION_SITE_KEY, EXECUTION_SITE_KEY);
            mDeprecatedTable.put(DEPRECATED_PFN_HINT_KEY, PFN_HINT_KEY);
        }
        return mDeprecatedTable;
    }

    /**
     * Merge the profiles in the namespace in a controlled manner. In case of intersection, the new
     * profile value overrides, the existing profile value.
     *
     * @param profiles the <code>Namespace</code> object containing the profiles.
     * @throws IllegalArgumentException if the argument is not a <code>Hints</code> instance.
     */
    public void merge(Namespace profiles) {
        // check if we are merging profiles of same type
        if (!(profiles instanceof Hints)) {
            // throw an error
            throw new IllegalArgumentException("Profiles mismatch while merging");
        }
        String key;
        for (Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ) {
            // construct directly. bypassing the checks!
            key = (String) it.next();
            this.construct(key, (String) profiles.get(key));
        }
    }

    /**
     * Converts the contents of the map into the string that can be put in the Condor file for
     * printing.
     *
     * @return String in the form {@code key = value\n} per profile, empty when no profiles exist
     */
    public String toCondor() {
        StringBuffer st = new StringBuffer();
        String key = null;
        String value = null;
        if (mProfileMap == null) return "";
        for (Iterator it = mProfileMap.keySet().iterator(); it.hasNext(); ) {
            key = (String) it.next();
            value = (String) mProfileMap.get(key);
            st.append(key).append(" = ").append(value).append("\n");
        }
        return st.toString();
    }

    /**
     * Returns a copy of the current namespace object.
     *
     * @return the Cloned object
     */
    public Object clone() {
        return (mProfileMap == null) ? new Hints() : new Hints(this.mProfileMap);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.taobao.weex.bridge;
import android.content.Context;
import android.os.Handler;
import android.os.Handler.Callback;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.taobao.weex.WXEnvironment;
import com.taobao.weex.WXRenderErrorCode;
import com.taobao.weex.WXSDKInstance;
import com.taobao.weex.WXSDKManager;
import com.taobao.weex.adapter.IWXJSExceptionAdapter;
import com.taobao.weex.adapter.IWXUserTrackAdapter;
import com.taobao.weex.common.IWXBridge;
import com.taobao.weex.common.IWXDebugProxy;
import com.taobao.weex.common.WXConfig;
import com.taobao.weex.common.WXErrorCode;
import com.taobao.weex.common.WXException;
import com.taobao.weex.common.WXJSBridgeMsgType;
import com.taobao.weex.common.WXJSExceptionInfo;
import com.taobao.weex.common.WXPerformance;
import com.taobao.weex.common.WXRefreshData;
import com.taobao.weex.common.WXRuntimeException;
import com.taobao.weex.common.WXThread;
import com.taobao.weex.dom.DOMAction;
import com.taobao.weex.dom.WXDomModule;
import com.taobao.weex.dom.action.Action;
import com.taobao.weex.dom.action.Actions;
import com.taobao.weex.utils.WXFileUtils;
import com.taobao.weex.utils.WXJsonUtils;
import com.taobao.weex.utils.WXLogUtils;
import com.taobao.weex.utils.WXUtils;
import com.taobao.weex.utils.WXViewUtils;
import com.taobao.weex.utils.batch.BactchExecutor;
import com.taobao.weex.utils.batch.Interceptor;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import static com.taobao.weex.bridge.WXModuleManager.getDomModule;
/**
* Manager class for communication between JavaScript and Android.
* <ol>
* <li>
* Handle Android to JavaScript call, can be one of the following
* <ul>
* <li>{@link #createInstance(String, String, Map, String)}</li>
* <li>{@link #destroyInstance(String)}</li>
* <li>{@link #refreshInstance(String, WXRefreshData)}</li>
* <li>{@link #registerModules(Map)}</li>
* <li>{@link #registerComponents(List)}</li>
* <li>{@link #invokeCallJSBatch(Message)}</li>
* </ul>
* </li>
* <li>
* Handle JavaScript to Android call
* </li>
* <li>
* Handle next tick of message.
* </li>
* </ol>
*/
public class WXBridgeManager implements Callback,BactchExecutor {
    // Names of the JS framework entry points invoked from native code.
    public static final String METHOD_CREATE_INSTANCE = "createInstance";
    public static final String METHOD_DESTROY_INSTANCE = "destroyInstance";
    public static final String METHOD_CALL_JS = "callJS";
    public static final String METHOD_SET_TIMEOUT = "setTimeoutCallback";
    public static final String METHOD_REGISTER_MODULES = "registerModules";
    public static final String METHOD_REGISTER_COMPONENTS = "registerComponents";
    public static final String METHOD_FIRE_EVENT = "fireEvent";
    public static final String METHOD_CALLBACK = "callback";
    public static final String METHOD_REFRESH_INSTANCE = "refreshInstance";
    public static final String METHOD_NOTIFY_TRIM_MEMORY = "notifyTrimMemory";
    public static final String METHOD_NOTIFY_SERIALIZE_CODE_CACHE =
            "notifySerializeCodeCache";

    // Keys of the JSON task envelope exchanged with the JS framework.
    public static final String KEY_METHOD = "method";
    public static final String KEY_ARGS = "args";

    // args
    public static final String COMPONENT = "component";
    public static final String REF = "ref";
    public static final String MODULE = "module";
    public static final String METHOD = "method";
    public static final String ARGS = "args";

    /** callback id value meaning "no callback registered" */
    private static final String NON_CALLBACK = "-1";
    private static final String UNDEFINED = "undefined";
    /** return code of a successful JS framework initialization */
    private static final int INIT_FRAMEWORK_OK = 1;
    // low-memory threshold; presumably in MB — TODO confirm against usage
    private static long LOW_MEM_VALUE = 120;

    /** singleton instance; volatile for the double-checked locking in getInstance() */
    static volatile WXBridgeManager mBridgeManager;

    /**
     * next tick tasks, can set priority
     */
    private WXHashMap<String, ArrayList<WXHashMap<String, Object>>> mNextTickTasks = new WXHashMap<>();

    /**
     * JSThread
     */
    private WXThread mJSThread;
    /** package **/ Handler mJSHandler;
    private IWXBridge mWXBridge;
    private IWXDebugProxy mWxDebugProxy;
    private boolean mMock = false;

    /**
     * Whether JS Framework(main.js) has been initialized.
     */
    private boolean mInit =false;

    private boolean isJSFrameworkInit(){
        return mInit;
    }

    // Registrations that failed (e.g. before the framework was ready) and are retried later.
    private List<Map<String, Object>> mRegisterComponentFailList = new ArrayList<>(8);
    private List<Map<String, Object>> mRegisterModuleFailList = new ArrayList<>(8);
    private List<String> mRegisterServiceFailList = new ArrayList<>(8);
    private List<String> mDestroyedInstanceId = new ArrayList<>();
    /** reusable buffer for debug log lines; cleared after each use */
    private StringBuilder mLodBuilder = new StringBuilder(50);
    private Interceptor mInterceptor;
    /**
     * Private singleton constructor: wires up the JS bridge (debug proxy or native)
     * and then starts the dedicated handler thread that serializes all JS traffic.
     */
    private WXBridgeManager() {
        initWXBridge(WXEnvironment.sRemoteDebugMode);
        mJSThread = new WXThread("WeexJSBridgeThread", this);
        mJSHandler = mJSThread.getHandler();
    }
public static WXBridgeManager getInstance() {
if (mBridgeManager == null) {
synchronized (WXBridgeManager.class) {
if (mBridgeManager == null) {
mBridgeManager = new WXBridgeManager();
}
}
}
return mBridgeManager;
}
    /**
     * (Re)creates the JS bridge implementation.
     *
     * <p>When remote debugging is requested on a debuggable APK, a
     * {@code DebugServerProxy} is located reflectively (it lives in the optional
     * devtools artifact) and its bridge is used; otherwise the native
     * {@link WXBridge} is used. Any previously running debug proxy is stopped first.
     *
     * @param remoteDebug whether remote debug mode is requested
     */
    private void initWXBridge(boolean remoteDebug) {
        if (remoteDebug && WXEnvironment.isApkDebugable()) {
            WXEnvironment.sDebugServerConnectable = true;
        }

        // tear down any previous debug proxy before rebuilding
        if (mWxDebugProxy != null) {
            mWxDebugProxy.stop(false);
        }
        if (WXEnvironment.sDebugServerConnectable && WXEnvironment.isApkDebugable()) {
            if (WXEnvironment.getApplication() != null) {
                try {
                    // reflective lookup: the devtools dependency is optional at runtime
                    Class clazz = Class.forName("com.taobao.weex.devtools.debug.DebugServerProxy");
                    if (clazz != null) {
                        Constructor constructor = clazz.getConstructor(Context.class, WXBridgeManager.class);
                        if (constructor != null) {
                            mWxDebugProxy = (IWXDebugProxy) constructor.newInstance(
                                    WXEnvironment.getApplication(), WXBridgeManager.this);
                            if (mWxDebugProxy != null) {
                                mWxDebugProxy.start();
                            }
                        }
                    }
                } catch (Throwable e) {
                    //Ignore, It will throw Exception on Release environment
                }
                WXServiceManager.execAllCacheJsService();
            } else {
                WXLogUtils.e("WXBridgeManager", "WXEnvironment.sApplication is null, skip init Inspector");
                WXLogUtils.w("WXBridgeManager", new Throwable("WXEnvironment.sApplication is null when init Inspector"));
            }
        }
        // fall back to the native bridge when no debug proxy is active
        if (remoteDebug && mWxDebugProxy != null) {
            mWXBridge = mWxDebugProxy.getWXBridge();
        } else {
            mWXBridge = new WXBridge();
        }
    }
public void stopRemoteDebug() {
if (mWxDebugProxy != null) {
mWxDebugProxy.stop(true);
}
}
public Object callModuleMethod(String instanceId, String moduleStr, String methodStr, JSONArray args) {
WXSDKInstance wxsdkInstance = WXSDKManager.getInstance()
.getSDKInstance(instanceId);
if (wxsdkInstance == null) {
return null;
}
if (wxsdkInstance.isNeedValidate()
&& WXSDKManager.getInstance().getValidateProcessor() != null) {
WXValidateProcessor.WXModuleValidateResult validateResult = WXSDKManager
.getInstance().getValidateProcessor()
.onModuleValidate(wxsdkInstance, moduleStr, methodStr, args);
if (validateResult == null) {
return null;
}
if (validateResult.isSuccess) {
return WXModuleManager.callModuleMethod(instanceId, moduleStr, methodStr,
args);
} else {
JSONObject validateInfo = validateResult.validateInfo;
WXLogUtils.e("[WXBridgeManager] module validate fail. >>> " + validateInfo.toJSONString());
return validateInfo;
}
}
return WXModuleManager.callModuleMethod(instanceId, moduleStr, methodStr, args);
}
    /**
     * Model switch. For now, debug model and release model are supported.
     * Marks the JS framework uninitialized and rebuilds the bridge according to the
     * current {@code WXEnvironment.sRemoteDebugMode} flag.
     */
    public void restart() {
        mInit = false;
        initWXBridge(WXEnvironment.sRemoteDebugMode);
    }
    /**
     * Set current Instance. The update is posted to the JS thread so that the
     * next-tick task queue is only mutated from that thread.
     *
     * @param instanceId {@link WXSDKInstance#mInstanceId}
     */
    public synchronized void setStackTopInstance(final String instanceId) {
        post(new Runnable() {

            @Override
            public void run() {
                mNextTickTasks.setStackTopInstance(instanceId);
            }
        }, instanceId);
    }
@Override
public void post(Runnable r){
if(mInterceptor != null && mInterceptor.take(r)){
//task is token by the interceptor
return;
}
if (mJSHandler == null){
return;
}
mJSHandler.post(WXThread.secure(r));
}
@Override
public void setInterceptor(Interceptor interceptor) {
mInterceptor = interceptor;
}
public void post(Runnable r, Object token) {
if (mJSHandler == null) {
return;
}
Message m = Message.obtain(mJSHandler, WXThread.secure(r));
m.obj = token;
m.sendToTarget();
}
/**
 * Schedules a JS setTimeout callback: a SET_TIMEOUT message is delivered to
 * the JS handler after {@code time} milliseconds.
 *
 * Fix: guard against a null {@code mJSHandler}, consistent with post(),
 * sendMessageDelayed() and removeMessage() — previously this threw an NPE
 * when the JS thread was not available.
 *
 * @param callbackId JS callback reference to fire when the timer elapses
 * @param time       delay in milliseconds, as a decimal string
 */
void setTimeout(String callbackId, String time) {
  if (mJSHandler == null) {
    return;
  }
  TimerInfo timerInfo = new TimerInfo();
  timerInfo.callbackId = callbackId;
  // JS supplies fractional milliseconds; truncate to a whole-ms delay.
  timerInfo.time = (long) Float.parseFloat(time);
  Message message = Message.obtain();
  message.what = WXJSBridgeMsgType.SET_TIMEOUT;
  message.obj = timerInfo;
  mJSHandler.sendMessageDelayed(message, timerInfo.time);
}
/**
 * Delivers a message to the JS handler after a delay, but only while the JS
 * thread is fully alive; otherwise the message is silently dropped.
 */
public void sendMessageDelayed(Message message, long delayMillis) {
  boolean deliverable = message != null
      && mJSHandler != null
      && mJSThread != null
      && mJSThread.isWXThreadAlive()
      && mJSThread.getLooper() != null;
  if (deliverable) {
    mJSHandler.sendMessageDelayed(message, delayMillis);
  }
}
/**
 * Removes pending messages matching {@code what}/{@code obj} from the JS
 * handler queue; a no-op when the JS thread is not fully alive.
 */
public void removeMessage(int what, Object obj) {
  boolean alive = mJSHandler != null
      && mJSThread != null
      && mJSThread.isWXThreadAlive()
      && mJSThread.getLooper() != null;
  if (alive) {
    mJSHandler.removeMessages(what, obj);
  }
}
/**
 * JS -> native module dispatch. DOM-module calls go straight to the
 * instance's WXDomModule; all other modules route through callModuleMethod
 * (which applies validation). Exceptions are reported to the alarm monitor
 * and swallowed, returning null.
 */
public Object callNativeModule(String instanceId, String module, String method, JSONArray arguments, Object options) {
if (WXEnvironment.isApkDebugable()) {
// mLodBuilder is a shared, reused builder — appended, dumped, then cleared.
mLodBuilder.append("[WXBridgeManager] callNativeModule >>>> instanceId:").append(instanceId)
.append(", module:").append(module).append(", method:").append(method).append(", arguments:").append(arguments);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
try {
if(WXDomModule.WXDOM.equals(module)){
WXDomModule dom = getDomModule(instanceId);
return dom.callDomMethod(method,arguments);
}else {
return callModuleMethod(instanceId, module,
method, arguments);
}
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callNative exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE, "[WXBridgeManager] callNativeModule exception " + e.getCause());
}
return null;
}
/**
 * JS -> native component method invocation, routed through the instance's
 * WXDomModule. Always returns null; failures are logged and reported to the
 * alarm monitor rather than propagated.
 */
public Object callNativeComponent(String instanceId, String componentRef, String method, JSONArray arguments, Object options) {
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callNativeComponent >>>> instanceId:").append(instanceId)
.append(", componentRef:").append(componentRef).append(", method:").append(method).append(", arguments:").append(arguments);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
try {
WXDomModule dom = getDomModule(instanceId);
dom.invokeMethod(componentRef, method, arguments);
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callNative exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE, "[WXBridgeManager] callNativeModule exception " + e.getCause());
}
return null;
}
/**
 * Dispatch the native task to be executed.
 *
 * Parses a JSON array of tasks and dispatches each one: tasks with a MODULE
 * key go to the DOM module or the module manager; tasks with a COMPONENT key
 * invoke a component method; anything else is an error. Finally schedules the
 * next-tick callback unless the callback id is the undefined/non-callback
 * sentinel.
 *
 * @param instanceId {@link WXSDKInstance#mInstanceId}
 * @param tasks tasks to be executed (JSON array string)
 * @param callback next tick id
 * @return an IWXBridge status code (DESTROY_INSTANCE when the instance has
 *         been torn down, INSTANCE_RENDERING_ERROR on bad input or no
 *         callback, INSTANCE_RENDERING otherwise)
 */
public int callNative(String instanceId, String tasks, String callback) {
if (TextUtils.isEmpty(tasks)) {
if (WXEnvironment.isApkDebugable()) {
WXLogUtils.e("[WXBridgeManager] callNative: call Native tasks is null");
}
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE,"[WXBridgeManager] callNative: call Native tasks is null");
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callNative >>>> instanceId:").append(instanceId)
.append(", tasks:").append(tasks).append(", callback:").append(callback);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
// Bail out early for instances that were destroyed while this call was queued.
if(mDestroyedInstanceId!=null &&mDestroyedInstanceId.contains(instanceId)){
return IWXBridge.DESTROY_INSTANCE;
}
// Record JSON parse cost against the instance's performance data.
long start = System.currentTimeMillis();
JSONArray array = JSON.parseArray(tasks);
if(WXSDKManager.getInstance().getSDKInstance(instanceId)!=null) {
WXSDKManager.getInstance().getSDKInstance(instanceId).jsonParseTime(System.currentTimeMillis() - start);
}
int size = array.size();
if (size > 0) {
try {
JSONObject task;
for (int i = 0; i < size; ++i) {
task = (JSONObject) array.get(i);
if (task != null && WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
Object target = task.get(MODULE);
if(target != null){
if(WXDomModule.WXDOM.equals(target)){
WXDomModule dom = getDomModule(instanceId);
dom.callDomMethod(task);
}else {
callModuleMethod(instanceId, (String) target,
(String) task.get(METHOD), (JSONArray) task.get(ARGS));
}
}else if(task.get(COMPONENT) != null){
//call component
WXDomModule dom = getDomModule(instanceId);
dom.invokeMethod((String) task.get(REF),(String) task.get(METHOD),(JSONArray) task.get(ARGS));
}else{
throw new IllegalArgumentException("unknown callNative");
}
}
}
} catch (Exception e) {
// One bad task aborts the remaining tasks in this batch.
WXLogUtils.e("[WXBridgeManager] callNative exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE,"[WXBridgeManager] callNative exception "+e.getCause());
}
}
if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
// get next tick
getNextTick(instanceId, callback);
return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native createBody: parses the root node JSON and posts a CreateBody
 * DOM action (createContext=true) to the instance's DOM module, then
 * schedules the next tick unless the callback is the undefined sentinel.
 */
public int callCreateBody(String instanceId, String tasks, String callback) {
if (TextUtils.isEmpty(tasks)) {
if (WXEnvironment.isApkDebugable()) {
WXLogUtils.e("[WXBridgeManager] callCreateBody: call CreateBody tasks is null");
}
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_CREATEBODY,"[WXBridgeManager] callCreateBody: call CreateBody tasks is null");
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callCreateBody >>>> instanceId:").append(instanceId)
.append(", tasks:").append(tasks).append(", callback:").append(callback);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
if(mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)){
return IWXBridge.DESTROY_INSTANCE;
}
try {
if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
JSONObject domObject = JSON.parseObject(tasks);
WXDomModule domModule = getDomModule(instanceId);
Action action = Actions.getCreateBody(domObject);
// true: body creation establishes the render context for the instance.
domModule.postAction((DOMAction)action, true);
}
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callCreateBody exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_CREATEBODY,"[WXBridgeManager] callCreateBody exception "+e.getCause());
}
if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
// get next tick
getNextTick(instanceId, callback);
return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native updateFinish signal: posts an UpdateFinish DOM action for the
 * instance, then schedules the next tick unless the callback id is the
 * undefined/non-callback sentinel.
 */
public int callUpdateFinish(String instanceId, String callback) {
  if (WXEnvironment.isApkDebugable()) {
    mLodBuilder.append("[WXBridgeManager] callUpdateFinish >>>> instanceId:").append(instanceId)
        .append(", callback:").append(callback);
    WXLogUtils.d(mLodBuilder.substring(0));
    mLodBuilder.setLength(0);
  }
  if (mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
    return IWXBridge.DESTROY_INSTANCE;
  }
  try {
    if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
      getDomModule(instanceId).postAction((DOMAction) Actions.getUpdateFinish(), false);
    }
  } catch (Exception e) {
    WXLogUtils.e("[WXBridgeManager] callUpdateFinish exception: ", e);
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE,"[WXBridgeManager] callUpdateFinish exception " + e.getCause());
  }
  if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
    return IWXBridge.INSTANCE_RENDERING_ERROR;
  }
  // Schedule the JS next-tick callback.
  getNextTick(instanceId, callback);
  return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native createFinish signal: posts a CreateFinish DOM action for the
 * instance, then schedules the next tick unless the callback id is the
 * undefined/non-callback sentinel.
 */
public int callCreateFinish(String instanceId, String callback) {
  if (WXEnvironment.isApkDebugable()) {
    mLodBuilder.append("[WXBridgeManager] callCreateFinish >>>> instanceId:").append(instanceId)
        .append(", callback:").append(callback);
    WXLogUtils.d(mLodBuilder.substring(0));
    mLodBuilder.setLength(0);
  }
  if (mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
    return IWXBridge.DESTROY_INSTANCE;
  }
  try {
    if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
      getDomModule(instanceId).postAction((DOMAction) Actions.getCreateFinish(), false);
    }
  } catch (Exception e) {
    WXLogUtils.e("[WXBridgeManager] callCreateFinish exception: ", e);
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERROR_DOM_CREATEFINISH,"[WXBridgeManager] callCreateFinish exception " + e.getCause());
  }
  if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
    return IWXBridge.INSTANCE_RENDERING_ERROR;
  }
  // Schedule the JS next-tick callback.
  getNextTick(instanceId, callback);
  return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native refreshFinish signal: posts a RefreshFinish DOM action for the
 * instance, then schedules the next tick unless the callback id is the
 * undefined/non-callback sentinel.
 */
public int callRefreshFinish(String instanceId, String callback) {
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callRefreshFinish >>>> instanceId:").append(instanceId)
.append(", callback:").append(callback);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
if(mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
return IWXBridge.DESTROY_INSTANCE;
}
try {
if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
WXDomModule domModule = getDomModule(instanceId);
Action action = Actions.getRefreshFinish();
domModule.postAction((DOMAction)action, false);
}
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callRefreshFinish exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERROR_DOM_REFRESHFINISH,"[WXBridgeManager] callRefreshFinish exception " + e.getCause());
}
if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
// get next tick
getNextTick(instanceId, callback);
return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native attribute update: parses the attribute JSON and posts an
 * UpdateAttrs DOM action for the node {@code ref}, then schedules the next
 * tick unless the callback id is the undefined/non-callback sentinel.
 */
public int callUpdateAttrs(String instanceId, String ref, String task, String callback) {
if (TextUtils.isEmpty(task)) {
if (WXEnvironment.isApkDebugable()) {
WXLogUtils.e("[WXBridgeManager] callUpdateAttrs: call UpdateAttrs tasks is null");
}
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_UPDATEATTRS,"[WXBridgeManager] callUpdateAttrs: call UpdateAttrs tasks is null");
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callUpdateAttrs >>>> instanceId:").append(instanceId)
.append(", ref:").append(ref)
.append(", task:").append(task)
.append(", callback:").append(callback);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
if(mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
return IWXBridge.DESTROY_INSTANCE;
}
try {
if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
WXDomModule domModule = getDomModule(instanceId);
JSONObject domObject = JSON.parseObject(task);
Action action = Actions.getUpdateAttrs(ref, domObject);
domModule.postAction((DOMAction)action, false);
}
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callUpdateAttrs exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_UPDATEATTRS,"[WXBridgeManager] callUpdateAttrs exception " + e.getCause());
}
if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
// get next tick
getNextTick(instanceId, callback);
return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native style update: parses the style JSON and posts an UpdateStyle
 * DOM action for the node {@code ref} (byPesudo=false), then schedules the
 * next tick unless the callback id is the undefined/non-callback sentinel.
 */
public int callUpdateStyle(String instanceId, String ref, String task, String callback) {
if (TextUtils.isEmpty(task)) {
if (WXEnvironment.isApkDebugable()) {
WXLogUtils.e("[WXBridgeManager] callUpdateStyle: call UpdateStyle tasks is null");
}
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_UPDATESTYLE,"[WXBridgeManager] callUpdateStyle: call UpdateStyle tasks is null");
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callUpdateStyle >>>> instanceId:").append(instanceId)
.append(", ref:").append(ref)
.append(", task:").append(task)
.append(", callback:").append(callback);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
if(mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
return IWXBridge.DESTROY_INSTANCE;
}
try {
if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
WXDomModule domModule = getDomModule(instanceId);
JSONObject domObject = JSON.parseObject(task);
// third arg false — meaning of this flag is defined in Actions.getUpdateStyle; TODO confirm
Action action = Actions.getUpdateStyle(ref, domObject, false);
domModule.postAction((DOMAction)action, false);
}
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callUpdateStyle exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_UPDATESTYLE,"[WXBridgeManager] callUpdateStyle exception " + e.getCause());
}
if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
// get next tick
getNextTick(instanceId, callback);
return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native element removal: posts a RemoveElement DOM action for the node
 * {@code ref}, then schedules the next tick unless the callback id is the
 * undefined/non-callback sentinel.
 * (Note: the previous header comment incorrectly said "callUpdateStyle".)
 */
public int callRemoveElement(String instanceId, String ref, String callback) {
  if (WXEnvironment.isApkDebugable()) {
    mLodBuilder.append("[WXBridgeManager] callRemoveElement >>>> instanceId:").append(instanceId)
        .append(", ref:").append(ref);
    WXLogUtils.d(mLodBuilder.substring(0));
    mLodBuilder.setLength(0);
  }
  if (mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
    return IWXBridge.DESTROY_INSTANCE;
  }
  try {
    if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
      getDomModule(instanceId).postAction((DOMAction) Actions.getRemoveElement(ref), false);
    }
  } catch (Exception e) {
    WXLogUtils.e("[WXBridgeManager] callRemoveElement exception: ", e);
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_REMOVEELEMENT,"[WXBridgeManager] callRemoveElement exception " + e.getCause());
  }
  if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
    return IWXBridge.INSTANCE_RENDERING_ERROR;
  }
  // Schedule the JS next-tick callback.
  getNextTick(instanceId, callback);
  return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native element move: posts a MoveElement DOM action relocating node
 * {@code ref} under {@code parentref} at position {@code index} (parsed from
 * its string form), then schedules the next tick unless the callback id is
 * the undefined/non-callback sentinel. A non-numeric index is caught by the
 * surrounding try/catch and reported to the alarm monitor.
 */
public int callMoveElement(String instanceId, String ref, String parentref, String index, String callback) {
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("[WXBridgeManager] callMoveElement >>>> instanceId:").append(instanceId)
.append(", parentref:").append(parentref)
.append(", index:").append(index)
.append(", ref:").append(ref);
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
if(mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
return IWXBridge.DESTROY_INSTANCE;
}
try {
if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
WXDomModule domModule = getDomModule(instanceId);
Action action = Actions.getMoveElement(ref, parentref, Integer.parseInt(index));
domModule.postAction((DOMAction)action, false);
}
} catch (Exception e) {
WXLogUtils.e("[WXBridgeManager] callMoveElement exception: ", e);
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_MOVEELEMENT,"[WXBridgeManager] callMoveElement exception " + e.getCause());
}
if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
return IWXBridge.INSTANCE_RENDERING_ERROR;
}
// get next tick
getNextTick(instanceId, callback);
return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native event registration: posts an AddEvent DOM action binding
 * {@code event} to node {@code ref}, then schedules the next tick unless the
 * callback id is the undefined/non-callback sentinel.
 */
public int callAddEvent(String instanceId, String ref, String event, String callback) {
  if (WXEnvironment.isApkDebugable()) {
    mLodBuilder.append("[WXBridgeManager] callAddEvent >>>> instanceId:").append(instanceId)
        .append(", ref:").append(ref)
        .append(", event:").append(event);
    WXLogUtils.d(mLodBuilder.substring(0));
    mLodBuilder.setLength(0);
  }
  if (mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
    return IWXBridge.DESTROY_INSTANCE;
  }
  try {
    if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
      getDomModule(instanceId).postAction((DOMAction) Actions.getAddEvent(ref, event), false);
    }
  } catch (Exception e) {
    WXLogUtils.e("[WXBridgeManager] callAddEvent exception: ", e);
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_ADDEVENT,"[WXBridgeManager] callAddEvent exception " + e.getCause());
  }
  if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
    return IWXBridge.INSTANCE_RENDERING_ERROR;
  }
  // Schedule the JS next-tick callback.
  getNextTick(instanceId, callback);
  return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native event removal: posts a RemoveEvent DOM action unbinding
 * {@code event} from node {@code ref}, then schedules the next tick unless
 * the callback id is the undefined/non-callback sentinel.
 */
public int callRemoveEvent(String instanceId, String ref, String event, String callback) {
  if (WXEnvironment.isApkDebugable()) {
    mLodBuilder.append("[WXBridgeManager] callRemoveEvent >>>> instanceId:").append(instanceId)
        .append(", ref:").append(ref)
        .append(", event:").append(event);
    WXLogUtils.d(mLodBuilder.substring(0));
    mLodBuilder.setLength(0);
  }
  if (mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
    return IWXBridge.DESTROY_INSTANCE;
  }
  try {
    if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
      getDomModule(instanceId).postAction((DOMAction) Actions.getRemoveEvent(ref, event), false);
    }
  } catch (Exception e) {
    WXLogUtils.e("[WXBridgeManager] callRemoveEvent exception: ", e);
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_DOM_REMOVEEVENT,"[WXBridgeManager] callRemoveEvent exception " + e.getCause());
  }
  if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
    return IWXBridge.INSTANCE_RENDERING_ERROR;
  }
  // Schedule the JS next-tick callback.
  getNextTick(instanceId, callback);
  return IWXBridge.INSTANCE_RENDERING;
}
/**
 * JS -> native element insertion: parses the node JSON and posts an
 * AddElement DOM action placing it under {@code ref} at {@code index}, then
 * schedules the next tick unless the callback id is the undefined sentinel.
 *
 * Fix: consistent with every sibling call* method, the parse/dispatch is now
 * wrapped in try/catch and failures (malformed JSON, non-numeric index) are
 * reported to the alarm monitor instead of propagating out of the bridge.
 */
public int callAddElement(String instanceId, String ref, String dom, String index, String callback) {
  if (WXEnvironment.isApkDebugable()) {
    mLodBuilder.append("[WXBridgeManager] callNative::callAddElement >>>> instanceId:").append(instanceId)
        .append(", ref:").append(ref).append(", dom:").append(dom).append(", callback:").append(callback);
    WXLogUtils.d(mLodBuilder.substring(0));
    mLodBuilder.setLength(0);
  }
  if (mDestroyedInstanceId != null && mDestroyedInstanceId.contains(instanceId)) {
    return IWXBridge.DESTROY_INSTANCE;
  }
  try {
    if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
      // Record JSON parse cost against the instance's performance data.
      long start = System.currentTimeMillis();
      JSONObject domObject = JSON.parseObject(dom);
      // Re-check: the instance may have been torn down while parsing.
      if (WXSDKManager.getInstance().getSDKInstance(instanceId) != null) {
        WXSDKManager.getInstance().getSDKInstance(instanceId).jsonParseTime(System.currentTimeMillis() - start);
      }
      WXDomModule domModule = getDomModule(instanceId);
      domModule.postAction(Actions.getAddElement(domObject, ref, Integer.parseInt(index)), false);
    }
  } catch (Exception e) {
    WXLogUtils.e("[WXBridgeManager] callAddElement exception: ", e);
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE,
        "[WXBridgeManager] callAddElement exception " + e.getCause());
  }
  if (UNDEFINED.equals(callback) || NON_CALLBACK.equals(callback)) {
    return IWXBridge.INSTANCE_RENDERING_ERROR;
  }
  // Schedule the JS next-tick callback.
  getNextTick(instanceId, callback);
  return IWXBridge.INSTANCE_RENDERING;
}
// Queues the JS next-tick callback (empty "{}" payload) and triggers a batch flush.
private void getNextTick(final String instanceId, final String callback) {
addJSTask(METHOD_CALLBACK,instanceId, callback, "{}");
sendMessage(instanceId, WXJSBridgeMsgType.CALL_JS_BATCH);
}
/**
 * Appends a {method, args} task to the instance's next-tick queue. The queue
 * mutation runs on the JS thread (via post) so mNextTickTasks is only touched
 * there. Silently ignores calls with no arguments.
 */
private void addJSTask(final String method, final String instanceId, final Object... args) {
  post(new Runnable() {
    @Override
    public void run() {
      if (args == null || args.length == 0) {
        return;
      }
      ArrayList<Object> argList = new ArrayList<>(args.length);
      for (int i = 0; i < args.length; i++) {
        argList.add(args[i]);
      }
      WXHashMap<String, Object> task = new WXHashMap<>();
      task.put(KEY_METHOD, method);
      task.put(KEY_ARGS, argList);
      ArrayList<WXHashMap<String, Object>> queue = mNextTickTasks.get(instanceId);
      if (queue == null) {
        // First task for this instance: populate a fresh queue before publishing it.
        queue = new ArrayList<>();
        queue.add(task);
        mNextTickTasks.put(instanceId, queue);
      } else {
        queue.add(task);
      }
    }
  });
}
/** Sends a message of type {@code what} to the JS handler, tagged with the instance id. */
private void sendMessage(String instanceId, int what) {
  Message msg = mJSHandler.obtainMessage(what);
  msg.obj = instanceId;
  msg.sendToTarget();
}
/**
 * Initialize JavaScript framework.
 * Queues an INIT_FRAMEWORK message carrying the framework source on the JS
 * handler; the actual initialization happens in handleMessage on the JS thread.
 *
 * @param framework String representation of the framework to be init.
 */
public synchronized void initScriptsFramework(String framework) {
  // obtainMessage(what) already targets mJSHandler, so no explicit setTarget needed.
  Message msg = mJSHandler.obtainMessage(WXJSBridgeMsgType.INIT_FRAMEWORK);
  msg.obj = framework;
  msg.sendToTarget();
}
// Deprecated convenience overload: delegates with null domChanges.
@Deprecated
public void fireEvent(final String instanceId, final String ref,
final String type, final Map<String, Object> data){
this.fireEvent(instanceId, ref, type, data, null);
}
/**
 * Do not direct invoke this method in Components, use {@link WXSDKInstance#fireEvent(String, String, Map, Map)} instead.
 * Deprecated pass-through to {@link #fireEventOnNode}.
 * @param instanceId id of the owning instance
 * @param ref node reference the event fires on
 * @param type event type name
 * @param data event payload
 * @param domChanges DOM changes accompanying the event (may be null)
 */
@Deprecated
public void fireEvent(final String instanceId, final String ref,
final String type, final Map<String, Object> data,final Map<String, Object> domChanges) {
fireEventOnNode(instanceId,ref,type,data,domChanges);
}
/**
 * Notify the JavaScript about the event happened on Android.
 * Must be called from the main thread; queues a FIRE_EVENT task and flushes
 * the JS batch. No-op when any required argument is empty or the JS handler
 * is absent.
 */
public void fireEventOnNode(final String instanceId, final String ref,
final String type, final Map<String, Object> data,final Map<String, Object> domChanges) {
if (TextUtils.isEmpty(instanceId) || TextUtils.isEmpty(ref)
|| TextUtils.isEmpty(type) || mJSHandler == null) {
return;
}
// Enforce the main-thread contract loudly rather than racing the task queue.
if (!checkMainThread()) {
throw new WXRuntimeException(
"fireEvent must be called by main thread");
}
addJSTask(METHOD_FIRE_EVENT, instanceId, ref, type, data,domChanges);
sendMessage(instanceId, WXJSBridgeMsgType.CALL_JS_BATCH);
}
// True when the calling thread is the Android main (UI) thread.
private boolean checkMainThread() {
return Looper.myLooper() == Looper.getMainLooper();
}
/**
 * Invoke JavaScript callback. Use {@link JSCallback} instead.
 * One-shot variant: the callback is not kept alive after firing.
 * @see #callback(String, String, String)
 */
@Deprecated
public void callback(String instanceId, String callback,String data) {
callback(instanceId, callback,data,false);
}
/**
 * Invoke JavaScript callback. Use {@link JSCallback} instead.
 * Map-payload, one-shot variant (keepAlive=false).
 */
@Deprecated
public void callback(final String instanceId, final String callback,
final Map<String, Object> data){
callback(instanceId,callback,data,false);
}
/**
 * Use {@link JSCallback} instead.
 * @param instanceId Weex Instance Id
 * @param callback callback referenece handle
 * @param data callback data
 * @param keepAlive if keep callback instance alive for later use
 */
@Deprecated
public void callback(final String instanceId, final String callback,
final Object data,boolean keepAlive) {
callbackJavascript(instanceId,callback,data,keepAlive);
}
/**
 * Callback to Javascript function: queues a CALLBACK task and flushes the JS
 * batch. No-op when the instance id or callback handle is empty, or the JS
 * handler is absent.
 *
 * @param instanceId Weex Instance Id
 * @param callback   callback reference handle
 * @param data       callback data
 * @param keepAlive  if keep callback instance alive for later use
 */
void callbackJavascript(final String instanceId, final String callback,
    final Object data, boolean keepAlive) {
  boolean unusable = mJSHandler == null
      || TextUtils.isEmpty(instanceId)
      || TextUtils.isEmpty(callback);
  if (unusable) {
    return;
  }
  addJSTask(METHOD_CALLBACK, instanceId, callback, data, keepAlive);
  sendMessage(instanceId, WXJSBridgeMsgType.CALL_JS_BATCH);
}
/**
 * Refresh instance.
 * Schedules the refresh on the JS thread (postDelayed with 0 delay); no-op
 * when the instance id is empty or there is no refresh data.
 */
public void refreshInstance(final String instanceId, final WXRefreshData jsonData) {
if (TextUtils.isEmpty(instanceId) || jsonData == null) {
return;
}
mJSHandler.postDelayed(WXThread.secure(new Runnable() {
@Override
public void run() {
invokeRefreshInstance(instanceId, jsonData);
}
}), 0);
}
/**
 * Executes the refresh on the JS thread: verifies the framework is
 * initialized, skips stale (dirty) refresh data, and invokes the JS
 * refreshInstance method with the instance id and data payload. All failures
 * are reported to the alarm monitor rather than propagated.
 */
private void invokeRefreshInstance(String instanceId, WXRefreshData refreshData) {
try {
if (!isJSFrameworkInit()) {
WXSDKInstance instance = WXSDKManager.getInstance().getSDKInstance(instanceId);
if (instance != null) {
instance.onRenderError(WXRenderErrorCode.WX_CREATE_INSTANCE_ERROR,
"createInstance failed!");
}
String err = "[WXBridgeManager] invokeRefreshInstance: framework.js uninitialized.";
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE,err);
WXLogUtils.e(err);
return;
}
long start = System.currentTimeMillis();
if (WXEnvironment.isApkDebugable()) {
WXLogUtils.d("refreshInstance >>>> instanceId:" + instanceId
+ ", data:" + refreshData.data + ", isDirty:" + refreshData.isDirty);
}
// Dirty data has been superseded by a newer refresh; skip it.
if (refreshData.isDirty) {
return;
}
WXJSObject instanceIdObj = new WXJSObject(WXJSObject.String,
instanceId);
WXJSObject dataObj = new WXJSObject(WXJSObject.JSON,
refreshData.data == null ? "{}" : refreshData.data);
WXJSObject[] args = {instanceIdObj, dataObj};
invokeExecJS(instanceId, null, METHOD_REFRESH_INSTANCE, args);
WXLogUtils.renderPerformanceLog("invokeRefreshInstance", System.currentTimeMillis() - start);
} catch (Throwable e) {
String err = "[WXBridgeManager] invokeRefreshInstance " + e.getCause();
commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE,err);
WXLogUtils.e(err);
}
}
/**
 * Reports a JS-bridge error for an instance to the user-track adapter.
 * Silently returns when the instance is unknown, the error code is null, or
 * no adapter is installed. Error details are logged only for non-success codes.
 */
public void commitJSBridgeAlarmMonitor(String instanceId, WXErrorCode errCode, String errMsg) {
WXSDKInstance instance = WXSDKManager.getInstance().getSDKInstance(instanceId);
if (instance == null || errCode == null) {
return;
}
// TODO: We should move WXPerformance and IWXUserTrackAdapter
// into a adapter level.
// comment out the line below to prevent commiting twice.
//instance.commitUTStab(WXConst.JS_BRIDGE, errCode, errMsg);
IWXUserTrackAdapter adapter = WXSDKManager.getInstance().getIWXUserTrackAdapter();
if (adapter == null) {
return;
}
WXPerformance performance = new WXPerformance();
performance.args=instance.getBundleUrl();
performance.errCode=errCode.getErrorCode();
if (errCode != WXErrorCode.WX_SUCCESS) {
// Fall back to the code's canonical message when no specific one was given.
performance.appendErrMsg(TextUtils.isEmpty(errMsg)?errCode.getErrorMsg():errMsg);
WXLogUtils.e("wx_monitor",performance.toString());
}
adapter.commit(WXEnvironment.getApplication(), null, IWXUserTrackAdapter.JS_BRIDGE, performance, instance.getUserTrackParams());
}
/**
 * Reports a framework-level (instance-independent) error: notifies the
 * statistics listener, then commits a performance record to the user-track
 * adapter under the given {@code type}. No-op when type/errorCode is missing
 * or no adapter is installed.
 */
public void commitJSFrameworkAlarmMonitor(final String type, final WXErrorCode errorCode, String errMsg) {
if (TextUtils.isEmpty(type) || errorCode == null) {
return;
}
if (WXSDKManager.getInstance().getWXStatisticsListener() != null) {
// "0" — instance id placeholder for framework-level errors, presumably; TODO confirm
WXSDKManager.getInstance().getWXStatisticsListener().onException("0",
errorCode.getErrorCode(),
TextUtils.isEmpty(errMsg) ? errorCode.getErrorMsg() : errMsg);
}
final IWXUserTrackAdapter userTrackAdapter = WXSDKManager.getInstance().getIWXUserTrackAdapter();
if (userTrackAdapter == null) {
return;
}
WXPerformance performance = new WXPerformance();
performance.errCode = errorCode.getErrorCode();
if (errorCode != WXErrorCode.WX_SUCCESS) {
performance.appendErrMsg(TextUtils.isEmpty(errMsg)?errorCode.getErrorMsg():errMsg);
WXLogUtils.e("wx_monitor",performance.toString());
}
userTrackAdapter.commit(WXEnvironment.getApplication(), null, type, performance, null);
}
/**
 * Create instance.
 * Validates arguments, creates the instance's DOM module, then performs the
 * actual creation on the JS thread and reports total creation time back to
 * the instance on the UI thread.
 */
public void createInstance(final String instanceId, final String template,
final Map<String, Object> options, final String data) {
final WXSDKInstance instance = WXSDKManager.getInstance().getSDKInstance(instanceId);
if(instance == null){
WXLogUtils.e("WXBridgeManager","createInstance failed, SDKInstance is not exist");
return;
}
if ( TextUtils.isEmpty(instanceId)
|| TextUtils.isEmpty(template) || mJSHandler == null) {
instance.onRenderError(WXRenderErrorCode.WX_CREATE_INSTANCE_ERROR, "createInstance fail!");
return;
}
WXModuleManager.createDomModule(instance);
post(new Runnable() {
@Override
public void run() {
long start = System.currentTimeMillis();
invokeCreateInstance(instance, template, options, data);
final long totalTime = System.currentTimeMillis() - start;
// Completion notification must run on the UI thread.
WXSDKManager.getInstance().postOnUiThread(new Runnable() {
@Override
public void run() {
instance.createInstanceFinished(totalTime);
}
}, 0);
}
}, instanceId);
}
/**
 * Runs on the JS thread: ensures the framework is initialized (lazy init with
 * the bundled asset), then invokes the JS createInstance method with the
 * instance id, template and JSON-encoded options/data. Failures notify the
 * instance's render-error callback and the alarm monitor.
 */
private void invokeCreateInstance(@NonNull WXSDKInstance instance, String template,
Map<String, Object> options, String data) {
// Lazy framework init; no-op if already initialized.
initFramework("");
if (mMock) {
mock(instance.getInstanceId());
} else {
if (!isJSFrameworkInit()) {
instance.onRenderError(WXRenderErrorCode.WX_CREATE_INSTANCE_ERROR, "createInstance "
+ "fail!");
String err = "[WXBridgeManager] invokeCreateInstance: framework.js uninitialized.";
commitJSBridgeAlarmMonitor(instance.getInstanceId(), WXErrorCode.WX_ERR_INVOKE_NATIVE,err);
WXLogUtils.e(err);
return;
}
try {
if (WXEnvironment.isApkDebugable()) {
WXLogUtils.d("createInstance >>>> instanceId:" + instance.getInstanceId()
+ ", options:"
+ WXJsonUtils.fromObjectToJSONString(options)
+ ", data:" + data);
}
WXJSObject instanceIdObj = new WXJSObject(WXJSObject.String,
instance.getInstanceId());
WXJSObject instanceObj = new WXJSObject(WXJSObject.String,
template);
// Missing options/data default to empty JSON objects.
WXJSObject optionsObj = new WXJSObject(WXJSObject.JSON,
options == null ? "{}"
: WXJsonUtils.fromObjectToJSONString(options));
WXJSObject dataObj = new WXJSObject(WXJSObject.JSON,
data == null ? "{}" : data);
WXJSObject[] args = {instanceIdObj, instanceObj, optionsObj,
dataObj};
invokeExecJS(instance.getInstanceId(), null, METHOD_CREATE_INSTANCE, args,false);
} catch (Throwable e) {
instance.onRenderError(WXRenderErrorCode.WX_CREATE_INSTANCE_ERROR,
"createInstance failed!");
String err = "[WXBridgeManager] invokeCreateInstance " + e.getCause();
commitJSBridgeAlarmMonitor(instance.getInstanceId(), WXErrorCode.WX_ERR_INVOKE_NATIVE,err);
WXLogUtils.e(err);
}
}
}
// Placeholder used when mMock is set; intentionally a no-op in this build.
private void mock(String instanceId) {
}
/**
 * Tears down an instance: records its id so in-flight native calls bail out
 * (see the mDestroyedInstanceId checks in the call* methods), drops all of
 * its queued messages, then removes its pending tasks and notifies JS on the
 * JS thread.
 */
public void destroyInstance(final String instanceId) {
if ( mJSHandler == null
|| TextUtils.isEmpty(instanceId)) {
return;
}
if(mDestroyedInstanceId!=null) {
mDestroyedInstanceId.add(instanceId);
}
// clear message with instanceId
mJSHandler.removeCallbacksAndMessages(instanceId);
post(new Runnable() {
@Override
public void run() {
removeTaskByInstance(instanceId);
invokeDestroyInstance(instanceId);
}
}, instanceId);
}
// Drops all queued next-tick tasks belonging to this instance.
private void removeTaskByInstance(String instanceId) {
mNextTickTasks.removeFromMapAndStack(instanceId);
}
/**
 * Runs on the JS thread: invokes the JS destroyInstance method for the given
 * instance. Any failure is reported to the alarm monitor rather than thrown.
 */
private void invokeDestroyInstance(String instanceId) {
  try {
    if (WXEnvironment.isApkDebugable()) {
      WXLogUtils.d("destroyInstance >>>> instanceId:" + instanceId);
    }
    WXJSObject[] args = {new WXJSObject(WXJSObject.String, instanceId)};
    invokeExecJS(instanceId, null, METHOD_DESTROY_INSTANCE, args);
  } catch (Throwable e) {
    String err = "[WXBridgeManager] invokeDestroyInstance " + e.getCause();
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_INVOKE_NATIVE, err);
    WXLogUtils.e(err);
  }
}
/**
 * JS-thread message pump: dispatches framework init, batched JS calls,
 * setTimeout firing and heap snapshots. Always returns false — presumably so
 * the Handler's own handleMessage still runs; TODO confirm against the
 * Handler.Callback contract used here.
 */
@Override
public boolean handleMessage(Message msg) {
if (msg == null) {
return false;
}
int what = msg.what;
switch (what) {
case WXJSBridgeMsgType.INIT_FRAMEWORK:
invokeInitFramework(msg);
break;
case WXJSBridgeMsgType.CALL_JS_BATCH:
invokeCallJSBatch(msg);
break;
case WXJSBridgeMsgType.SET_TIMEOUT:
TimerInfo timerInfo = (TimerInfo) msg.obj;
if(timerInfo == null){
break;
}
// Fire the JS-side timeout callback by its recorded id.
WXJSObject obj = new WXJSObject(WXJSObject.String, timerInfo.callbackId);
WXJSObject[] args = {obj};
invokeExecJS("", null, METHOD_SET_TIMEOUT, args);
break;
case WXJSBridgeMsgType.TAKE_HEAP_SNAPSHOT:
if (msg.obj != null) {
String filename = (String) msg.obj;
mWXBridge.takeHeapSnapshot(filename);
}
break;
default:
break;
}
return false;
}
// Convenience overload: executes JS with full task-detail logging enabled.
private void invokeExecJS(String instanceId, String namespace, String function, WXJSObject[] args) {
invokeExecJS(instanceId, namespace, function, args, true);
}
/**
 * Executes a JS function through the native bridge, optionally logging the
 * serialized argument list (logTaskDetail) in debug builds.
 */
public void invokeExecJS(String instanceId, String namespace, String function,
WXJSObject[] args,boolean logTaskDetail){
if (WXEnvironment.isApkDebugable()) {
mLodBuilder.append("callJS >>>> instanceId:").append(instanceId)
.append("function:").append(function);
if(logTaskDetail)
mLodBuilder.append(" tasks:").append(WXJsonUtils.fromObjectToJSONString(args));
WXLogUtils.d(mLodBuilder.substring(0));
mLodBuilder.setLength(0);
}
mWXBridge.execJS(instanceId, namespace, function, args);
}
/**
 * Handles the INIT_FRAMEWORK message: unpacks the framework source from the
 * message (empty string when absent) and initializes it, skipping the boot
 * entirely on low-memory devices.
 */
private void invokeInitFramework(Message msg) {
  String framework = (msg.obj == null) ? "" : (String) msg.obj;
  if (WXUtils.getAvailMemory(WXEnvironment.getApplication()) > LOW_MEM_VALUE) {
    initFramework(framework);
  }
}
/**
 * Initializes the JS framework exactly once. Falls back to the bundled
 * "main.js" asset when no script is supplied, records init timing, and
 * reports success or failure to the alarm monitor.
 */
private void initFramework(String framework) {
    if (isJSFrameworkInit()) {
        return; // already initialized; nothing to do
    }
    if (TextUtils.isEmpty(framework)) {
        if (WXEnvironment.isApkDebugable()) {
            WXLogUtils.d("weex JS framework from assets");
        }
        framework = WXFileUtils.loadAsset("main.js", WXEnvironment.getApplication());
    }
    if (TextUtils.isEmpty(framework)) {
        mInit = false;
        commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_ERR_JS_FRAMEWORK, "JS Framework is empty!");
        return;
    }
    try {
        if (WXSDKManager.getInstance().getWXStatisticsListener() != null) {
            WXSDKManager.getInstance().getWXStatisticsListener().onJsFrameworkStart();
        }
        long start = System.currentTimeMillis();
        if (mWXBridge.initFramework(framework, assembleDefaultOptions()) == INIT_FRAMEWORK_OK) {
            // Record framework and overall SDK init timing for perf logs.
            WXEnvironment.sJSLibInitTime = System.currentTimeMillis() - start;
            WXLogUtils.renderPerformanceLog("initFramework", WXEnvironment.sJSLibInitTime);
            WXEnvironment.sSDKInitTime = System.currentTimeMillis() - WXEnvironment.sSDKInitStart;
            WXLogUtils.renderPerformanceLog("SDKInitTime", WXEnvironment.sSDKInitTime);
            mInit = true;
            if (WXSDKManager.getInstance().getWXStatisticsListener() != null) {
                WXSDKManager.getInstance().getWXStatisticsListener().onJsFrameworkReady();
            }
            // Flush registrations that failed while the framework was down.
            execRegisterFailTask();
            WXEnvironment.JsFrameworkInit = true;
            registerDomModule();
            commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_SUCCESS, "success");
        } else {
            // Fix: the identical message literal was previously duplicated in
            // both the log call and the monitor call; build it once.
            String err = "[WXBridgeManager] invokeInitFramework ExecuteJavaScript fail";
            WXLogUtils.e(err);
            commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_ERR_JS_FRAMEWORK, err);
        }
    } catch (Throwable e) {
        WXLogUtils.e("[WXBridgeManager] invokeInitFramework ", e);
        String err = "[WXBridgeManager] invokeInitFramework exception!#" + e.toString();
        commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_ERR_JS_FRAMEWORK, err);
    }
}
@SuppressWarnings("unchecked")
private void invokeCallJSBatch(Message message) {
    // Drains the next-tick queue: finds the topmost instance with pending
    // tasks and forwards that batch to the JS framework via METHOD_CALL_JS.
    if (mNextTickTasks.isEmpty() || !isJSFrameworkInit()) {
        if (!isJSFrameworkInit()) {
            WXLogUtils.e("[WXBridgeManager] invokeCallJSBatch: framework.js uninitialized!! message:" + message.toString());
        }
        return;
    }
    try {
        Object instanceId = message.obj;
        Object task = null;
        Stack<String> instanceStack = mNextTickTasks.getInstanceStack();
        // Walk the instance stack top-down until a non-empty batch is found.
        for (int i = instanceStack.size() - 1; i >= 0; i--) {
            instanceId = instanceStack.get(i);
            task = mNextTickTasks.remove(instanceId);
            if (task != null && !((ArrayList) task).isEmpty()) {
                break;
            }
        }
        // Fix: the original unconditionally called ((ArrayList) task).toArray(),
        // which threw a NullPointerException (silently routed into the alarm
        // monitor by the catch below) whenever no instance had pending tasks.
        if (task != null && !((ArrayList) task).isEmpty()) {
            Object[] tasks = ((ArrayList) task).toArray();
            WXJSObject[] args = {
                    new WXJSObject(WXJSObject.String, instanceId),
                    new WXJSObject(WXJSObject.JSON,
                            WXJsonUtils.fromObjectToJSONString(tasks))};
            invokeExecJS(String.valueOf(instanceId), null, METHOD_CALL_JS, args);
        }
    } catch (Throwable e) {
        WXLogUtils.e("WXBridgeManager", e);
        String err = "invokeCallJSBatch#" + e.toString();
        commitJSBridgeAlarmMonitor(message.obj.toString(), WXErrorCode.WX_ERR_JS_EXECUTE, err);
    }
    // If tasks remain, loop until the queue is empty.
    if (!mNextTickTasks.isEmpty()) {
        mJSHandler.sendEmptyMessage(WXJSBridgeMsgType.CALL_JS_BATCH);
    }
}
private WXParams assembleDefaultOptions() {
    // Snapshots the current environment configuration into the parameter
    // object handed to the native bridge at framework-init time.
    Map<String, String> config = WXEnvironment.getConfig();
    WXParams params = new WXParams();
    params.setPlatform(config.get(WXConfig.os));
    params.setOsVersion(config.get(WXConfig.sysVersion));
    params.setAppVersion(config.get(WXConfig.appVersion));
    params.setWeexVersion(config.get(WXConfig.weexVersion));
    params.setDeviceModel(config.get(WXConfig.sysModel));
    params.setShouldInfoCollect(config.get("infoCollect"));
    params.setLogLevel(config.get(WXConfig.logLevel));
    String appName = config.get(WXConfig.appName);
    if (!TextUtils.isEmpty(appName)) {
        params.setAppName(appName);
    }
    // Fall back to measured screen size when the config does not pin a
    // device width/height explicitly.
    String width = config.get("deviceWidth");
    if (TextUtils.isEmpty(width)) {
        width = String.valueOf(WXViewUtils.getScreenWidth(WXEnvironment.sApplication));
    }
    params.setDeviceWidth(width);
    String height = config.get("deviceHeight");
    if (TextUtils.isEmpty(height)) {
        height = String.valueOf(WXViewUtils.getScreenHeight(WXEnvironment.sApplication));
    }
    params.setDeviceHeight(height);
    params.setOptions(WXEnvironment.getCustomOptions());
    params.setNeedInitV8(WXSDKManager.getInstance().needInitV8());
    return params;
}
private void execRegisterFailTask() {
    // Retries module/component/service registrations that previously failed
    // (typically because the JS framework was not yet initialized); anything
    // that fails again is re-queued for the next attempt.
    if (!mRegisterModuleFailList.isEmpty()) {
        List<Map<String, Object>> moduleReceiver = new ArrayList<>();
        for (Map<String, Object> modules : mRegisterModuleFailList) {
            invokeRegisterModules(modules, moduleReceiver);
        }
        mRegisterModuleFailList.clear();
        if (!moduleReceiver.isEmpty()) {
            mRegisterModuleFailList.addAll(moduleReceiver);
        }
    }
    if (!mRegisterComponentFailList.isEmpty()) {
        List<Map<String, Object>> componentReceiver = new ArrayList<>();
        invokeRegisterComponents(mRegisterComponentFailList, componentReceiver);
        mRegisterComponentFailList.clear();
        if (!componentReceiver.isEmpty()) {
            mRegisterComponentFailList.addAll(componentReceiver);
        }
    }
    if (!mRegisterServiceFailList.isEmpty()) {
        List<String> serviceReceiver = new ArrayList<>();
        for (String service : mRegisterServiceFailList) {
            invokeExecJSService(service, serviceReceiver);
        }
        mRegisterServiceFailList.clear();
        if (!serviceReceiver.isEmpty()) {
            mRegisterServiceFailList.addAll(serviceReceiver);
        }
    }
}
/**
 * Register Android modules with the JS framework.
 *
 * @param modules map of module name to its exposed method names, e.g.
 *                {'dom':['updateAttrs','updateStyle'],'event':['openUrl']}
 */
public void registerModules(final Map<String, Object> modules) {
    if (modules == null || modules.size() == 0) {
        return;
    }
    if (isJSThread()) {
        invokeRegisterModules(modules, mRegisterModuleFailList);
    } else {
        post(new Runnable() {
            @Override
            public void run() {
                // Fix: the posted branch previously passed
                // mRegisterComponentFailList, so failed MODULE registrations
                // were queued as components and retried incorrectly.
                invokeRegisterModules(modules, mRegisterModuleFailList);
            }
        }, null);
    }
}
/**
 * Registers components with the JS framework; the actual call is posted to
 * the JS thread. Failures are queued in mRegisterComponentFailList for retry.
 */
public void registerComponents(final List<Map<String, Object>> components) {
    if (mJSHandler == null || components == null || components.isEmpty()) {
        return;
    }
    post(new Runnable() {
        @Override
        public void run() {
            invokeRegisterComponents(components, mRegisterComponentFailList);
        }
    }, null);
}
// Runs a JS service script on the JS thread; a failed run is queued in
// mRegisterServiceFailList so it can be retried after framework init.
public void execJSService(final String service) {
    post(new Runnable() {
        @Override
        public void run() {
            invokeExecJSService(service, mRegisterServiceFailList);
        }
    });
}
private void invokeExecJSService(String service, List<String> receiver) {
    // Queue the script for retry when the framework is not ready yet.
    if (!isJSFrameworkInit()) {
        WXLogUtils.e("[WXBridgeManager] invoke execJSService: framework.js uninitialized.");
        receiver.add(service);
        return;
    }
    try {
        mWXBridge.execJSService(service);
    } catch (Throwable e) {
        WXLogUtils.e("[WXBridgeManager] invokeRegisterService:", e);
        commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_ERR_JS_EXECUTE, "invokeRegisterService");
    }
}
// True when the current thread is the dedicated JS handler thread.
private boolean isJSThread() {
    if (mJSThread == null) {
        return false;
    }
    return mJSThread.getId() == Thread.currentThread().getId();
}
private void invokeRegisterModules(Map<String, Object> modules, List<Map<String, Object>> failReceiver) {
    // Serializes the module table to JSON and registers it with the JS
    // framework; queues the map in failReceiver if the framework is down.
    if (modules == null) {
        // Fix: the original added the null map to failReceiver here, causing
        // a pointless null entry to be retried (and re-queued) forever.
        return;
    }
    if (!isJSFrameworkInit()) {
        WXLogUtils.e("[WXBridgeManager] invokeRegisterModules: framework.js uninitialized.");
        failReceiver.add(modules);
        return;
    }
    WXJSObject[] args = {new WXJSObject(WXJSObject.JSON,
            WXJsonUtils.fromObjectToJSONString(modules))};
    try {
        mWXBridge.execJS("", null, METHOD_REGISTER_MODULES, args);
    } catch (Throwable e) {
        WXLogUtils.e("[WXBridgeManager] invokeRegisterModules:", e);
        commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_ERR_JS_EXECUTE, "invokeRegisterModules");
    }
}
private void invokeRegisterComponents(List<Map<String, Object>> components, List<Map<String, Object>> failReceiver) {
    // Serializes the component list to JSON and registers it with the JS
    // framework; queues entries in failReceiver if the framework is down.
    if (components == failReceiver) {
        throw new RuntimeException("Fail receiver should not use source.");
    }
    if (components == null) {
        // Fix: the original performed this null check only AFTER iterating
        // the list in the uninitialized-framework branch, which could NPE.
        return;
    }
    if (!isJSFrameworkInit()) {
        WXLogUtils.e("[WXBridgeManager] invokeRegisterComponents: framework.js uninitialized.");
        for (Map<String, Object> comp : components) {
            failReceiver.add(comp);
        }
        return;
    }
    WXJSObject[] args = {new WXJSObject(WXJSObject.JSON,
            WXJsonUtils.fromObjectToJSONString(components))};
    try {
        mWXBridge.execJS("", null, METHOD_REGISTER_COMPONENTS, args);
    } catch (Throwable e) {
        WXLogUtils.e("[WXBridgeManager] invokeRegisterComponents ", e);
        commitJSFrameworkAlarmMonitor(IWXUserTrackAdapter.JS_FRAMEWORK, WXErrorCode.WX_ERR_JS_EXECUTE, "invokeRegisterComponents");
    }
}
// Tears down the JS thread, drops the singleton reference, and clears the
// destroyed-instance bookkeeping.
public void destroy() {
    if (mJSThread != null) {
        mJSThread.quit();
    }
    mBridgeManager = null;
    if (mDestroyedInstanceId != null) {
        mDestroyedInstanceId.clear();
    }
}
/**
 * Reports a JavaScript exception back to the owning instance, the alarm
 * monitor, and (when configured) the JS exception adapter.
 */
public void reportJSException(String instanceId, String function,
                              String exception) {
    if (WXEnvironment.isApkDebugable()) {
        WXLogUtils.e("reportJSException >>>> instanceId:" + instanceId
                + ", exception function:" + function + ", exception:"
                + exception);
    }
    if (instanceId == null) {
        return;
    }
    WXSDKInstance instance = WXSDKManager.getInstance().getSDKInstance(instanceId);
    if (instance == null) {
        return;
    }
    instance.onJSException(WXErrorCode.WX_ERR_JS_EXECUTE.getErrorCode(), function, exception);
    String err = "function:" + function + "#exception:" + exception;
    commitJSBridgeAlarmMonitor(instanceId, WXErrorCode.WX_ERR_JS_EXECUTE, err);
    IWXJSExceptionAdapter adapter = WXSDKManager.getInstance().getIWXJSExceptionAdapter();
    if (adapter == null) {
        return;
    }
    WXJSExceptionInfo jsException = new WXJSExceptionInfo(instanceId, instance.getBundleUrl(), WXErrorCode.WX_ERR_JS_EXECUTE.getErrorCode(), function, exception, null);
    adapter.onJSException(jsException);
    if (WXEnvironment.isApkDebugable()) {
        WXLogUtils.d(jsException.toString());
    }
}
// Plain data holder describing a pending JS setTimeout request; consumed by
// handleMessage (WXJSBridgeMsgType.SET_TIMEOUT).
public static class TimerInfo {
    // JS-side callback identifier fired when the timer elapses.
    public String callbackId;
    // Timer delay — presumably milliseconds; confirm at the call site that
    // populates this (not visible in this chunk).
    public long time;
    // Identifier of the Weex instance that scheduled the timer.
    public String instanceId;
}
private void registerDomModule() throws WXException {
    /** Tell Javascript Framework what methods you have. This is Required.**/
    Map<String, Object> dom = new HashMap<>();
    dom.put(WXDomModule.WXDOM, WXDomModule.METHODS);
    registerModules(dom);
}
//This method is deprecated because of performance issue.
@Deprecated
public void notifyTrimMemory() {
    // Intentionally a no-op: kept only so existing callers keep compiling.
}
// Looper of the dedicated JS thread, or null when the thread does not exist.
public
@Nullable
Looper getJSLooper() {
    return (mJSThread == null) ? null : mJSThread.getLooper();
}
// Asks the JS engine (on the JS thread) to persist its code cache, but only
// once the framework has been initialized.
public void notifySerializeCodeCache() {
    post(new Runnable() {
        @Override
        public void run() {
            if (isJSFrameworkInit()) {
                invokeExecJS("", null, METHOD_NOTIFY_SERIALIZE_CODE_CACHE, new WXJSObject[0]);
            }
        }
    });
}
// Posts a heap-snapshot request to the JS handler; serviced by
// handleMessage (WXJSBridgeMsgType.TAKE_HEAP_SNAPSHOT).
public void takeJSHeapSnapshot(String filename) {
    Message msg = mJSHandler.obtainMessage(WXJSBridgeMsgType.TAKE_HEAP_SNAPSHOT);
    msg.obj = filename;
    msg.setTarget(mJSHandler);
    msg.sendToTarget();
}
}
| |
package edu.harvard.iq.dataverse.export;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.export.spi.Exporter;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.json.JsonPrinter;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
/**
*
* @author skraffmi
*/
public class ExportService {
private static ExportService service;
private ServiceLoader<Exporter> loader;
static SettingsServiceBean settingsService;
private ExportService() {
loader = ServiceLoader.load(Exporter.class);
}
/**
* TODO: Audit all calls to this getInstance method that doesn't take a
* SettingsServiceBean as an argument to make sure nothing broke.
*/
public static synchronized ExportService getInstance() {
return getInstance(null);
}
public static synchronized ExportService getInstance(SettingsServiceBean settingsService) {
ExportService.settingsService = settingsService;
if (service == null) {
service = new ExportService();
} else{
service.loader.reload();
}
return service;
}
public List< String[]> getExportersLabels() {
List<String[]> retList = new ArrayList();
Iterator<Exporter> exporters = ExportService.getInstance().loader.iterator();
while (exporters.hasNext()) {
Exporter e = exporters.next();
String[] temp = new String[2];
temp[0] = e.getDisplayName();
temp[1] = e.getProviderName();
retList.add(temp);
}
return retList;
}
public InputStream getExport(Dataset dataset, String formatName) throws ExportException {
// first we will try to locate an already existing, cached export
// for this format:
InputStream exportInputStream = getCachedExportFormat(dataset, formatName);
if (exportInputStream != null) {
return exportInputStream;
}
// if it doesn't exist, we'll try to run the export:
exportFormat(dataset, formatName);
// and then try again:
exportInputStream = getCachedExportFormat(dataset, formatName);
if (exportInputStream != null) {
return exportInputStream;
}
// if there is no cached export still - we have to give up and throw
// an exception!
throw new ExportException("Failed to export the dataset as "+formatName);
}
public String getExportAsString(Dataset dataset, String formatName) {
try {
InputStream inputStream = getExport(dataset, formatName);
if (inputStream != null) {
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, "UTF8"));
StringBuilder sb = new StringBuilder();
String line;
while ((line = br.readLine()) != null) {
sb.append(line);
sb.append('\n');
}
br.close();
return sb.toString();
}
} catch (Exception ex) {
//ex.printStackTrace();
return null;
}
return null;
}
// This method goes through all the Exporters and calls
// the "chacheExport()" method that will save the produced output
// in a file in the dataset directory, on each Exporter available.
public void exportAllFormats (Dataset dataset) throws ExportException {
clearAllCachedFormats(dataset);
try {
DatasetVersion releasedVersion = dataset.getReleasedVersion();
if (releasedVersion == null) {
throw new ExportException("No released version for dataset "+dataset.getGlobalId());
}
JsonPrinter jsonPrinter = new JsonPrinter(settingsService);
final JsonObjectBuilder datasetAsJsonBuilder = jsonPrinter.jsonAsDatasetDto(releasedVersion);
JsonObject datasetAsJson = datasetAsJsonBuilder.build();
Iterator<Exporter> exporters = loader.iterator();
while ( exporters.hasNext()) {
Exporter e = exporters.next();
String formatName = e.getProviderName();
cacheExport(releasedVersion, formatName, datasetAsJson, e);
}
} catch (ServiceConfigurationError serviceError) {
throw new ExportException("Service configuration error during export. "+serviceError.getMessage());
}
// Finally, if we have been able to successfully export in all available
// formats, we'll increment the "last exported" time stamp:
dataset.setLastExportTime(new Timestamp(new Date().getTime()));
}
public void clearAllCachedFormats(Dataset dataset) {
Iterator<Exporter> exporters = loader.iterator();
while (exporters.hasNext()) {
Exporter e = exporters.next();
String formatName = e.getProviderName();
clearCachedExport(dataset, formatName);
}
dataset.setLastExportTime(null);
}
// This method finds the exporter for the format requested,
// then produces the dataset metadata as a JsonObject, then calls
// the "chacheExport()" method that will save the produced output
// in a file in the dataset directory.
public void exportFormat(Dataset dataset, String formatName) throws ExportException {
try {
Iterator<Exporter> exporters = loader.iterator();
while (exporters.hasNext()) {
Exporter e = exporters.next();
if (e.getProviderName().equals(formatName)) {
DatasetVersion releasedVersion = dataset.getReleasedVersion();
if (releasedVersion == null) {
throw new IllegalStateException("No Released Version");
}
JsonPrinter jsonPrinter = new JsonPrinter(settingsService);
final JsonObjectBuilder datasetAsJsonBuilder = jsonPrinter.jsonAsDatasetDto(releasedVersion);
cacheExport(releasedVersion, formatName, datasetAsJsonBuilder.build(), e);
}
}
} catch (ServiceConfigurationError serviceError) {
throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
} catch (IllegalStateException e) {
throw new ExportException("No published version found during export. " + dataset.getGlobalId());
}
}
public Exporter getExporter(String formatName) throws ExportException {
try {
Iterator<Exporter> exporters = loader.iterator();
while (exporters.hasNext()) {
Exporter e = exporters.next();
if (e.getProviderName().equals(formatName)) {
return e;
}
}
} catch (ServiceConfigurationError serviceError) {
throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
} catch (Exception ex) {
throw new ExportException("Could not find Exporter \""+formatName+"\", unknown exception");
}
throw new ExportException("No such Exporter: "+formatName);
}
// This method runs the selected metadata exporter, caching the output
// in a file in the dataset dirctory:
private void cacheExport(DatasetVersion version, String format, JsonObject datasetAsJson, Exporter exporter) throws ExportException {
try {
if (version.getDataset().getFileSystemDirectory() != null && !Files.exists(version.getDataset().getFileSystemDirectory())) {
/* Note that "createDirectories()" must be used - not
* "createDirectory()", to make sure all the parent
* directories that may not yet exist are created as well.
*/
Files.createDirectories(version.getDataset().getFileSystemDirectory());
}
Path cachedMetadataFilePath = Paths.get(version.getDataset().getFileSystemDirectory().toString(), "export_" + format + ".cached");
FileOutputStream cachedExportOutputStream = new FileOutputStream(cachedMetadataFilePath.toFile());
exporter.exportDataset(version, datasetAsJson, cachedExportOutputStream);
cachedExportOutputStream.flush();
cachedExportOutputStream.close();
} catch (IOException ioex) {
throw new ExportException("IO Exception thrown exporting as " + format);
}
}
private void clearCachedExport(Dataset dataset, String format) {
if (dataset != null && dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) {
Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + format + ".cached");
try {
Files.delete(cachedMetadataFilePath);
} catch (IOException ioex) {
}
}
}
// This method checks if the metadata has already been exported in this
// format and cached on disk. If it has, it'll open the file and retun
// the file input stream. If not, it'll return null.
private InputStream getCachedExportFormat(Dataset dataset, String formatName) {
try {
if (dataset.getFileSystemDirectory() != null) {
Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + formatName + ".cached");
if (Files.exists(cachedMetadataFilePath)) {
FileInputStream cachedExportInputStream = new FileInputStream(cachedMetadataFilePath.toFile());
return cachedExportInputStream;
}
}
} catch (IOException ioex) {
// don't do anything - we'll just return null
}
return null;
}
public Long getCachedExportSize(Dataset dataset, String formatName) {
try {
if (dataset.getFileSystemDirectory() != null) {
Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + formatName + ".cached");
if (Files.exists(cachedMetadataFilePath)) {
return cachedMetadataFilePath.toFile().length();
}
}
} catch (Exception ioex) {
// don't do anything - we'll just return null
}
return null;
}
public Boolean isXMLFormat(String provider){
try {
Iterator<Exporter> exporters = loader.iterator();
while (exporters.hasNext()) {
Exporter e = exporters.next();
if (e.getProviderName().equals(provider)) {
return e.isXMLFormat();
}
}
} catch (ServiceConfigurationError serviceError) {
serviceError.printStackTrace();
}
return null;
}
}
| |
package gov.nih.nci.cadsr.formloader.service.impl;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import gov.nih.nci.cadsr.formloader.domain.FormCollection;
import gov.nih.nci.cadsr.formloader.domain.FormDescriptor;
import gov.nih.nci.cadsr.formloader.domain.QuestionDescriptor;
import gov.nih.nci.cadsr.formloader.service.ContentValidationService;
import gov.nih.nci.cadsr.formloader.service.XmlValidationService;
import gov.nih.nci.cadsr.formloader.service.common.FormLoaderServiceException;
import gov.nih.nci.cadsr.formloader.service.common.StatusFormatter;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:/applicationContext.xml"})
public class LoadingServiceImplTest {

    @Autowired
    ContentValidationService contentValidationService;
    @Autowired
    XmlValidationService xmlValidator;
    @Autowired
    LoadingServiceImpl loadService;

    // Collection under test, populated by prepareCollectionToLoad().
    FormCollection aColl;

    @Before
    public void setUp() {
        //prepareCollectionToLoad();
    }

    @Test
    public void testLoadBadParams() {
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "3256357_v1_0_newform.xml");
        // Fix: the original put all three scenarios in ONE try block, so once
        // the first loadForms() threw, the remaining two scenarios (blank xml
        // file name, null form list) were never executed. Each scenario now
        // has its own try/catch.
        try {
            aColl.setCreatedBy("");
            aColl = this.loadService.loadForms(aColl);
            fail("Should throw exception because longedin user is not valid");
        } catch (FormLoaderServiceException fle) {
            //this is good
        }
        try {
            aColl.setCreatedBy("jjuser");
            aColl.setXmlFileName("");
            aColl = this.loadService.loadForms(aColl);
            fail("Should throw exception because xml file name is not valid");
        } catch (FormLoaderServiceException fle) {
            //this is good
        }
        try {
            aColl.setXmlFileName("dfaf"); //only checking valid string
            aColl.setForms(null);
            aColl = this.loadService.loadForms(aColl);
            fail("Should throw exception because form list is null");
        } catch (FormLoaderServiceException fle) {
            //this is good
        }
    }

    @Test
    public void testUserHasRight() {
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "3256357_v1_0_newform.xml");
        FormDescriptor form = aColl.getForms().get(0);
        boolean hasRight = this.loadService.userHasRight(form, "");
        assertTrue(hasRight); //form's createdBy is DWARZEL context is TEST
        form.setCreatedBy("dafa");
        hasRight = this.loadService.userHasRight(form, "");
        assertFalse(hasRight);
        hasRight = this.loadService.userHasRight(form, "FORMBUILDER");
        assertTrue(hasRight);
    }

    @Test
    public void testLoadNewForm() {
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "load_newform.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            form.setSelected(true);
            aColl.setCreatedBy("RAJAS");
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            String status = StatusFormatter.getStatusInXml(form);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\loading\\load_newform-jim.status.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    @Test
    public void testLoadNewFormRave() {
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "FourTheradexMedidataRaveFormsMinimumFields_01-11-2014.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            form.setSelected(true);
            aColl.setCreatedBy("YANGS");
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            String status = StatusFormatter.getStatusInXml(form);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\loading\\FourTheradexMedidataRaveFormsMinimumFields_01-11-2014.status.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    @Test
    public void testLoadNewFormWithQuestDefaultValue() {
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "load-newform-with-defaultVal.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            form.setSelected(true);
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            String status = StatusFormatter.getStatusInXml(form);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\loading\\load-newform-with-defaultVal.status.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    @Test
    public void testLoadBiggerNewForm() {
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "3256357_v1_0_newform.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            form.setSelected(true);
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            String status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\LoadService-creatednew-bigger.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    @Test
    public void testLoadNewVersion() {
        this.prepareCollectionToLoad(".\\.\\test\\data", "3256357_v1_0_newform-partial-newversion.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            form.setSelected(true);
            aColl.setCreatedBy("RAJAS");
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            String status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\LoadService-creatednew-version.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    //@Test
    public void testLoadUpdateForm() {
        //this.prepareCollectionToLoad(".\\test\\data", "update-form.xml");
        this.prepareCollectionToLoad(".\\test\\data", "update-form-with-xmlval-errors.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            assertTrue(form.getLoadType() == FormDescriptor.LOAD_TYPE_UPDATE_FORM);
            form.setSelected(true);
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            assertTrue(form.getLoadStatus() == FormDescriptor.STATUS_LOADED);
            String status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\update-form.status.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    //@Test
    public void testLoadUpdateFormWithRefdocs() {
        this.prepareCollectionToLoad(".\\.\\test\\data", "update-with-refdocs.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            assertTrue(form.getLoadType() == FormDescriptor.LOAD_TYPE_UPDATE_FORM);
            form.setSelected(true);
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            assertTrue(form.getLoadStatus() == FormDescriptor.STATUS_LOADED);
            String status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\update-with-refdocs.status.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    //@Test
    public void testLoadNewVersionFormWithRefdocs() {
        //new-version-3643954: has a forever non-existing version, thus making it
        //a new version load always.
        this.prepareCollectionToLoad(".\\.\\test\\data\\loading", "new-version-3643954.xml");
        try {
            FormDescriptor form = aColl.getForms().get(0);
            assertTrue(form.getLoadType() == FormDescriptor.LOAD_TYPE_NEW_VERSION);
            form.setSelected(true);
            aColl = this.loadService.loadForms(aColl);
            form = aColl.getForms().get(0);
            assertTrue(form.getLoadStatus() == FormDescriptor.STATUS_LOADED);
            String status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\loading\\new-version-3643954.status.xml");
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    /**
     * Runs the test file through XML validation and content validation,
     * leaving a fully validated collection in {@link #aColl}, ready to load.
     */
    protected void prepareCollectionToLoad(String filepath, String testfile) {
        assertNotNull(loadService);
        try {
            aColl = new FormCollection();
            aColl.setXmlPathOnServer(filepath);
            aColl.setXmlFileName(testfile);
            aColl = xmlValidator.validateXml(aColl);
            List<FormDescriptor> forms = aColl.getForms();
            assertNotNull(forms);
            assertTrue(forms.size() >= 1);
            FormDescriptor form = forms.get(0);
            String status = StatusFormatter.getStatusInXml(form);
            StatusFormatter.writeStatusToXml(status, filepath + "\\load-preparation-xml.xml");
            assertTrue(forms.get(0).getLoadStatus() == FormDescriptor.STATUS_XML_VALIDATED);
            aColl.setName("Testing Load 5 forms with FormLoaderv7.xsd");
            aColl.setCreatedBy("YANGS");
            aColl.setXmlFileName(testfile);
            aColl.setXmlPathOnServer(filepath);
            for (FormDescriptor f : forms)
                f.setSelected(true);
            assertNotNull(contentValidationService);
            aColl = contentValidationService.validateXmlContent(aColl);
            assertNotNull(aColl);
            forms = aColl.getForms();
            status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, filepath + "\\load-preparation-content.xml");
            assertTrue(forms.get(0).getLoadStatus() == FormDescriptor.STATUS_CONTENT_VALIDATED);
            form = forms.get(0);
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }

    @Test
    public void testLoad5Forms() {
        this.prepareCollectionToLoad(".\\test\\data\\loading", "load_forms-5.xml");
        try {
            List<FormDescriptor> forms = aColl.getForms();
            assertTrue(forms.get(0).getLoadType().equals(FormDescriptor.LOAD_TYPE_NEW));
            assertTrue(forms.get(1).getLoadType().equals(FormDescriptor.LOAD_TYPE_UNKNOWN));
            assertTrue(forms.get(2).getLoadType().equals(FormDescriptor.LOAD_TYPE_UNKNOWN));
            assertTrue(forms.get(4).getLoadType().equals(FormDescriptor.LOAD_TYPE_UNKNOWN));
            forms.get(0).setSelected(true);
            aColl = this.loadService.loadForms(aColl);
            String status = StatusFormatter.getStatusInXml(aColl);
            StatusFormatter.writeStatusToXml(status, ".\\test\\data\\loading\\load_forms-5.status.xml");
            FormDescriptor form = aColl.getForms().get(0);
            assertTrue(form.getLoadStatus() == FormDescriptor.STATUS_LOADED);
        } catch (FormLoaderServiceException fle) {
            fail("Got exception: " + fle.getMessage());
        }
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.candy1126xx.superrecorder.player;
import android.media.MediaCodec.CryptoException;
import android.os.SystemClock;
import android.util.Log;
import com.google.android.exoplayer.ExoPlayer;
import com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException;
import com.google.android.exoplayer.TimeRange;
import com.google.android.exoplayer.audio.AudioTrack;
import com.google.android.exoplayer.chunk.Format;
import com.google.android.exoplayer.util.VerboseLogUtil;
import java.io.IOException;
import java.text.NumberFormat;
import java.util.Locale;
/**
* Logs player events using {@link Log}.
*/
public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener,
DemoPlayer.InternalErrorListener {
private static final String TAG = "EventLogger";
// Shared formatter for elapsed-time strings, pinned to two fraction digits
// and US locale so log output is stable.
private static final NumberFormat TIME_FORMAT;
static {
    TIME_FORMAT = NumberFormat.getInstance(Locale.US);
    TIME_FORMAT.setMinimumFractionDigits(2);
    TIME_FORMAT.setMaximumFractionDigits(2);
}
// elapsedRealtime() captured at startSession(); basis for session-relative times.
private long sessionStartTimeMs;
// Per-renderer load start times, indexed by sourceId (see onLoadStarted).
private long[] loadStartTimeMs;
// NOTE(review): written/read elsewhere in the file — usage not visible in this chunk.
private long[] availableRangeValuesUs;
public EventLogger() {
    loadStartTimeMs = new long[DemoPlayer.RENDERER_COUNT];
}
public void startSession() {
sessionStartTimeMs = SystemClock.elapsedRealtime();
Log.d(TAG, "start [0]");
}
public void endSession() {
Log.d(TAG, "end [" + getSessionTimeString() + "]");
}
// DemoPlayer.Listener
@Override
public void onStateChanged(boolean playWhenReady, int state) {
Log.d(TAG, "state [" + getSessionTimeString() + ", " + playWhenReady + ", "
+ getStateString(state) + "]");
}
@Override
public void onError(Exception e) {
Log.e(TAG, "playerFailed [" + getSessionTimeString() + "]", e);
}
@Override
public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees,
float pixelWidthHeightRatio) {
Log.d(TAG, "videoSizeChanged [" + width + ", " + height + ", " + unappliedRotationDegrees
+ ", " + pixelWidthHeightRatio + "]");
}
// DemoPlayer.InfoListener
@Override
public void onBandwidthSample(int elapsedMs, long bytes, long bitrateEstimate) {
Log.d(TAG, "bandwidth [" + getSessionTimeString() + ", " + bytes + ", "
+ getTimeString(elapsedMs) + ", " + bitrateEstimate + "]");
}
@Override
public void onDroppedFrames(int count, long elapsed) {
Log.d(TAG, "droppedFrames [" + getSessionTimeString() + ", " + count + "]");
}
@Override
public void onLoadStarted(int sourceId, long length, int type, int trigger, Format format,
long mediaStartTimeMs, long mediaEndTimeMs) {
loadStartTimeMs[sourceId] = SystemClock.elapsedRealtime();
if (VerboseLogUtil.isTagEnabled(TAG)) {
Log.v(TAG, "loadStart [" + getSessionTimeString() + ", " + sourceId + ", " + type
+ ", " + mediaStartTimeMs + ", " + mediaEndTimeMs + "]");
}
}
@Override
public void onLoadCompleted(int sourceId, long bytesLoaded, int type, int trigger, Format format,
long mediaStartTimeMs, long mediaEndTimeMs, long elapsedRealtimeMs, long loadDurationMs) {
if (VerboseLogUtil.isTagEnabled(TAG)) {
long downloadTime = SystemClock.elapsedRealtime() - loadStartTimeMs[sourceId];
Log.v(TAG, "loadEnd [" + getSessionTimeString() + ", " + sourceId + ", " + downloadTime
+ "]");
}
}
@Override
public void onVideoFormatEnabled(Format format, int trigger, long mediaTimeMs) {
Log.d(TAG, "videoFormat [" + getSessionTimeString() + ", " + format.id + ", "
+ Integer.toString(trigger) + "]");
}
@Override
public void onAudioFormatEnabled(Format format, int trigger, long mediaTimeMs) {
Log.d(TAG, "audioFormat [" + getSessionTimeString() + ", " + format.id + ", "
+ Integer.toString(trigger) + "]");
}
// DemoPlayer.InternalErrorListener
@Override
public void onLoadError(int sourceId, IOException e) {
printInternalError("loadError", e);
}
@Override
public void onRendererInitializationError(Exception e) {
printInternalError("rendererInitError", e);
}
@Override
public void onDrmSessionManagerError(Exception e) {
printInternalError("drmSessionManagerError", e);
}
@Override
public void onDecoderInitializationError(DecoderInitializationException e) {
printInternalError("decoderInitializationError", e);
}
@Override
public void onAudioTrackInitializationError(AudioTrack.InitializationException e) {
printInternalError("audioTrackInitializationError", e);
}
@Override
public void onAudioTrackWriteError(AudioTrack.WriteException e) {
printInternalError("audioTrackWriteError", e);
}
@Override
public void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
printInternalError("audioTrackUnderrun [" + bufferSize + ", " + bufferSizeMs + ", "
+ elapsedSinceLastFeedMs + "]", null);
}
@Override
public void onCryptoError(CryptoException e) {
printInternalError("cryptoError", e);
}
@Override
public void onDecoderInitialized(String decoderName, long elapsedRealtimeMs,
long initializationDurationMs) {
Log.d(TAG, "decoderInitialized [" + getSessionTimeString() + ", " + decoderName + "]");
}
@Override
public void onAvailableRangeChanged(int sourceId, TimeRange availableRange) {
availableRangeValuesUs = availableRange.getCurrentBoundsUs(availableRangeValuesUs);
Log.d(TAG, "availableRange [" + availableRange.isStatic() + ", " + availableRangeValuesUs[0]
+ ", " + availableRangeValuesUs[1] + "]");
}
private void printInternalError(String type, Exception e) {
Log.e(TAG, "internalError [" + getSessionTimeString() + ", " + type + "]", e);
}
private String getStateString(int state) {
switch (state) {
case ExoPlayer.STATE_BUFFERING:
return "B";
case ExoPlayer.STATE_ENDED:
return "E";
case ExoPlayer.STATE_IDLE:
return "I";
case ExoPlayer.STATE_PREPARING:
return "P";
case ExoPlayer.STATE_READY:
return "R";
default:
return "?";
}
}
private String getSessionTimeString() {
return getTimeString(SystemClock.elapsedRealtime() - sessionStartTimeMs);
}
private String getTimeString(long timeMs) {
return TIME_FORMAT.format((timeMs) / 1000f);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.util;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * A test for MRAsyncDiskService.
 */
public class TestMRAsyncDiskService {

  public static final Logger LOG =
      LoggerFactory.getLogger(TestMRAsyncDiskService.class);

  // Root under which the per-test volumes ("/0", "/1") are created.
  private static final String TEST_ROOT_DIR = new Path(System.getProperty(
      "test.build.data", "/tmp")).toString();

  @Before
  public void setUp() {
    // Start each test from a clean slate.
    FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
  }

  /**
   * Given 'pathname', compute an equivalent path relative to the cwd.
   * @param pathname the path to a directory.
   * @return the path to that same directory, relative to ${user.dir}.
   */
  private String relativeToWorking(String pathname) {
    String cwd = System.getProperty("user.dir", "/");
    // normalize pathname and cwd into full directory paths.
    pathname = (new Path(pathname)).toUri().getPath();
    cwd = (new Path(cwd)).toUri().getPath();
    String[] cwdParts = cwd.split(Path.SEPARATOR);
    String[] pathParts = pathname.split(Path.SEPARATOR);
    // There are three possible cases:
    // 1) pathname and cwd are equal. Return '.'
    // 2) pathname is under cwd. Return the components that are under it.
    //    e.g., cwd = /a/b, path = /a/b/c, return 'c'
    // 3) pathname is outside of cwd. Find the common components, if any,
    //    and subtract them from the returned path, then return enough '..'
    //    components to "undo" the non-common components of cwd, then all
    //    the remaining parts of pathname.
    //    e.g., cwd = /a/b, path = /a/c, return '../c'
    if (cwd.equals(pathname)) {
      LOG.info("relative to working: " + pathname + " -> .");
      return "."; // They match exactly.
    }
    // Determine how many path components are in common between cwd and path.
    int common = 0;
    for (int i = 0; i < Math.min(cwdParts.length, pathParts.length); i++) {
      if (cwdParts[i].equals(pathParts[i])) {
        common++;
      } else {
        break;
      }
    }
    // output path stringbuilder.
    StringBuilder sb = new StringBuilder();
    // For everything in cwd that isn't in pathname, add a '..' to undo it.
    int parentDirsRequired = cwdParts.length - common;
    for (int i = 0; i < parentDirsRequired; i++) {
      sb.append("..");
      sb.append(Path.SEPARATOR);
    }
    // Then append all non-common parts of 'pathname' itself.
    for (int i = common; i < pathParts.length; i++) {
      sb.append(pathParts[i]);
      sb.append(Path.SEPARATOR);
    }
    // Don't end with a '/'.
    String s = sb.toString();
    if (s.endsWith(Path.SEPARATOR)) {
      s = s.substring(0, s.length() - 1);
    }
    LOG.info("relative to working: " + pathname + " -> " + s);
    return s;
  }

  @Test
  /** Test that the relativeToWorking() method above does what we expect. */
  public void testRelativeToWorking() {
    assertEquals(".", relativeToWorking(System.getProperty("user.dir", ".")));

    String cwd = System.getProperty("user.dir", ".");
    Path cwdPath = new Path(cwd);

    Path subdir = new Path(cwdPath, "foo");
    assertEquals("foo", relativeToWorking(subdir.toUri().getPath()));

    Path subsubdir = new Path(subdir, "bar");
    assertEquals("foo/bar", relativeToWorking(subsubdir.toUri().getPath()));

    Path parent = new Path(cwdPath, "..");
    assertEquals("..", relativeToWorking(parent.toUri().getPath()));

    Path sideways = new Path(parent, "baz");
    assertEquals("../baz", relativeToWorking(sideways.toUri().getPath()));
  }

  @Test
  /** Test that volumes specified as relative paths are handled properly
   * by MRAsyncDiskService (MAPREDUCE-1887).
   */
  public void testVolumeNormalization() throws Throwable {
    LOG.info("TEST_ROOT_DIR is " + TEST_ROOT_DIR);

    String relativeTestRoot = relativeToWorking(TEST_ROOT_DIR);

    FileSystem localFileSystem = FileSystem.getLocal(new Configuration());
    String[] vols = new String[] { relativeTestRoot + "/0",
        relativeTestRoot + "/1" };

    // Put a file in one of the volumes to be cleared on startup.
    Path delDir = new Path(vols[0], MRAsyncDiskService.TOBEDELETED);
    localFileSystem.mkdirs(delDir);
    localFileSystem.create(new Path(delDir, "foo")).close();

    MRAsyncDiskService service = new MRAsyncDiskService(
        localFileSystem, vols);
    makeSureCleanedUp(vols, service);
  }

  /**
   * This test creates some directories and then removes them through
   * MRAsyncDiskService.
   */
  @Test
  public void testMRAsyncDiskService() throws Throwable {
    FileSystem localFileSystem = FileSystem.getLocal(new Configuration());
    String[] vols = new String[]{TEST_ROOT_DIR + "/0",
        TEST_ROOT_DIR + "/1"};
    MRAsyncDiskService service = new MRAsyncDiskService(
        localFileSystem, vols);

    String a = "a";
    String b = "b";
    String c = "b/c";
    String d = "d";

    File fa = new File(vols[0], a);
    File fb = new File(vols[1], b);
    File fc = new File(vols[1], c);
    File fd = new File(vols[1], d);

    // Create the directories
    fa.mkdirs();
    fb.mkdirs();
    fc.mkdirs();
    fd.mkdirs();

    assertTrue(fa.exists());
    assertTrue(fb.exists());
    assertTrue(fc.exists());
    assertTrue(fd.exists());

    // Move and delete them
    service.moveAndDeleteRelativePath(vols[0], a);
    assertFalse(fa.exists());
    service.moveAndDeleteRelativePath(vols[1], b);
    assertFalse(fb.exists());
    assertFalse(fc.exists());

    assertFalse(service.moveAndDeleteRelativePath(vols[1], "not_exists"));

    // asyncDiskService is NOT able to delete files outside all volumes.
    IOException ee = null;
    try {
      service.moveAndDeleteAbsolutePath(TEST_ROOT_DIR + "/2");
    } catch (IOException e) {
      ee = e;
    }
    assertNotNull("asyncDiskService should not be able to delete files "
        + "outside all volumes", ee);
    // asyncDiskService is able to automatically find the file in one
    // of the volumes.
    assertTrue(service.moveAndDeleteAbsolutePath(vols[1] + Path.SEPARATOR_CHAR + d));

    // Make sure everything is cleaned up
    makeSureCleanedUp(vols, service);
  }

  /**
   * This test creates some directories inside the volume roots, and then
   * call asyncDiskService.MoveAndDeleteAllVolumes.
   * We should be able to delete all files/dirs inside the volumes except
   * the toBeDeleted directory.
   */
  @Test
  public void testMRAsyncDiskServiceMoveAndDeleteAllVolumes() throws Throwable {
    FileSystem localFileSystem = FileSystem.getLocal(new Configuration());
    String[] vols = new String[]{TEST_ROOT_DIR + "/0",
        TEST_ROOT_DIR + "/1"};
    MRAsyncDiskService service = new MRAsyncDiskService(
        localFileSystem, vols);

    String a = "a";
    String b = "b";
    String c = "b/c";
    String d = "d";

    File fa = new File(vols[0], a);
    File fb = new File(vols[1], b);
    File fc = new File(vols[1], c);
    File fd = new File(vols[1], d);

    // Create the directories
    fa.mkdirs();
    fb.mkdirs();
    fc.mkdirs();
    fd.mkdirs();

    assertTrue(fa.exists());
    assertTrue(fb.exists());
    assertTrue(fc.exists());
    assertTrue(fd.exists());

    // Delete all of them
    service.cleanupAllVolumes();

    assertFalse(fa.exists());
    assertFalse(fb.exists());
    assertFalse(fc.exists());
    assertFalse(fd.exists());

    // Make sure everything is cleaned up
    makeSureCleanedUp(vols, service);
  }

  /**
   * This test creates some directories inside the toBeDeleted directory and
   * then start the asyncDiskService.
   * AsyncDiskService will create tasks to delete the content inside the
   * toBeDeleted directories.
   */
  @Test
  public void testMRAsyncDiskServiceStartupCleaning() throws Throwable {
    FileSystem localFileSystem = FileSystem.getLocal(new Configuration());
    String[] vols = new String[]{TEST_ROOT_DIR + "/0",
        TEST_ROOT_DIR + "/1"};

    String a = "a";
    String b = "b";
    String c = "b/c";
    String d = "d";

    // Create directories inside SUBDIR
    String suffix = Path.SEPARATOR_CHAR + MRAsyncDiskService.TOBEDELETED;
    File fa = new File(vols[0] + suffix, a);
    File fb = new File(vols[1] + suffix, b);
    File fc = new File(vols[1] + suffix, c);
    File fd = new File(vols[1] + suffix, d);

    // Create the directories
    fa.mkdirs();
    fb.mkdirs();
    fc.mkdirs();
    fd.mkdirs();

    assertTrue(fa.exists());
    assertTrue(fb.exists());
    assertTrue(fc.exists());
    assertTrue(fd.exists());

    // Create the asyncDiskService which will delete all contents inside SUBDIR
    MRAsyncDiskService service = new MRAsyncDiskService(
        localFileSystem, vols);

    // Make sure everything is cleaned up
    makeSureCleanedUp(vols, service);
  }

  /**
   * Shuts down the service and verifies that, on every volume, only the empty
   * toBeDeleted directory remains.
   *
   * @param vols the volume roots that were passed to the service.
   * @param service the service under test; it is shut down by this method.
   */
  private void makeSureCleanedUp(String[] vols, MRAsyncDiskService service)
      throws Throwable {
    // Sleep at most 5 seconds to make sure the deleted items are all gone.
    service.shutdown();
    if (!service.awaitTermination(5000)) {
      fail("MRAsyncDiskService is still not shutdown in 5 seconds!");
    }

    // All contents should be gone by now.
    for (int i = 0; i < vols.length; i++) {
      // BUG FIX: this loop previously indexed vols[0] on both lines below, so
      // only the first volume was ever verified. Each volume must be checked.
      File subDir = new File(vols[i]);
      String[] subDirContent = subDir.list();
      assertEquals("Volume should contain a single child: "
          + MRAsyncDiskService.TOBEDELETED, 1, subDirContent.length);

      File toBeDeletedDir = new File(vols[i], MRAsyncDiskService.TOBEDELETED);
      String[] content = toBeDeletedDir.list();
      assertNotNull("Cannot find " + toBeDeletedDir, content);
      assertEquals("" + toBeDeletedDir + " should be empty now.", 0,
          content.length);
    }
  }

  @Test
  public void testToleratesSomeUnwritableVolumes() throws Throwable {
    FileSystem localFileSystem = FileSystem.getLocal(new Configuration());
    String[] vols = new String[]{TEST_ROOT_DIR + "/0",
        TEST_ROOT_DIR + "/1"};

    assertTrue(new File(vols[0]).mkdirs());
    assertEquals(0, FileUtil.chmod(vols[0], "400")); // read only
    try {
      new MRAsyncDiskService(localFileSystem, vols);
    } finally {
      FileUtil.chmod(vols[0], "755"); // make writable again
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.util.jndi.ExampleBean;
/**
 * Unit test for IntrospectionSupport
 *
 * <p>Exercises the reflective getter/setter discovery, property extraction and
 * property injection logic of {@code IntrospectionSupport}. The public inner
 * bean classes below are the reflection targets and must keep their names and
 * signatures stable.
 */
public class IntrospectionSupportTest extends ContextTestSupport {

    /** Overloaded setBean(String) should win when a String value is injected. */
    public void testOverloadSetterChooseStringSetter() throws Exception {
        MyOverloadedBean overloadedBean = new MyOverloadedBean();
        IntrospectionSupport.setProperty(context.getTypeConverter(), overloadedBean, "bean", "James");
        assertEquals("James", overloadedBean.getName());
    }

    /** Overloaded setBean(ExampleBean) should win when a bean instance is injected. */
    public void testOverloadSetterChooseBeanSetter() throws Exception {
        MyOverloadedBean overloadedBean = new MyOverloadedBean();
        ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        IntrospectionSupport.setProperty(context.getTypeConverter(), overloadedBean, "bean", bean);
        assertEquals("Claus", overloadedBean.getName());
    }

    /** A byte[] value should be converted via the type converter before injection. */
    public void testOverloadSetterChooseUsingTypeConverter() throws Exception {
        MyOverloadedBean overloadedBean = new MyOverloadedBean();
        Object value = "Willem".getBytes();
        // should use byte[] -> String type converter and call the setBean(String) setter method
        IntrospectionSupport.setProperty(context.getTypeConverter(), overloadedBean, "bean", value);
        assertEquals("Willem", overloadedBean.getName());
    }

    /** Properties whose names contain "password" are still set normally. */
    public void testPassword() throws Exception {
        MyPasswordBean passwordBean = new MyPasswordBean();
        IntrospectionSupport.setProperty(context.getTypeConverter(), passwordBean, "oldPassword", "Donald");
        IntrospectionSupport.setProperty(context.getTypeConverter(), passwordBean, "newPassword", "Duck");
        assertEquals("Donald", passwordBean.getOldPassword());
        assertEquals("Duck", passwordBean.getNewPassword());
    }

    /** Reflection target: plain bean with two String properties. */
    public class MyPasswordBean {
        private String oldPassword;
        private String newPassword;

        public String getOldPassword() {
            return oldPassword;
        }

        public void setOldPassword(String oldPassword) {
            this.oldPassword = oldPassword;
        }

        public String getNewPassword() {
            return newPassword;
        }

        public void setNewPassword(String newPassword) {
            this.newPassword = newPassword;
        }
    }

    /** Reflection target: bean with an overloaded setter ("bean" property). */
    public class MyOverloadedBean {
        private ExampleBean bean;

        public void setBean(ExampleBean bean) {
            this.bean = bean;
        }

        public void setBean(String name) {
            bean = new ExampleBean();
            bean.setName(name);
        }

        public String getName() {
            return bean.getName();
        }
    }

    /** Reflection target: builder-pattern setter returning this, not void. */
    public class MyBuilderBean {
        private String name;

        public String getName() {
            return name;
        }

        public MyBuilderBean setName(String name) {
            this.name = name;
            return this;
        }
    }

    /** Reflection target: inherits the builder-style setter unchanged. */
    public class MyOtherBuilderBean extends MyBuilderBean {
    }

    /** Reflection target: overrides the builder setter with a covariant return type. */
    public class MyOtherOtherBuilderBean extends MyOtherBuilderBean {
        public MyOtherOtherBuilderBean setName(String name) {
            super.setName(name);
            return this;
        }
    }

    /** Builder-style setters count as setters only when allowBuilderPattern is true. */
    public void testIsSetterBuilderPatternSupport() throws Exception {
        Method setter = MyBuilderBean.class.getMethod("setName", String.class);
        Method setter2 = MyOtherBuilderBean.class.getMethod("setName", String.class);
        Method setter3 = MyOtherOtherBuilderBean.class.getMethod("setName", String.class);

        assertFalse(IntrospectionSupport.isSetter(setter, false));
        assertTrue(IntrospectionSupport.isSetter(setter, true));

        assertFalse(IntrospectionSupport.isSetter(setter2, false));
        assertTrue(IntrospectionSupport.isSetter(setter2, true));

        assertFalse(IntrospectionSupport.isSetter(setter3, false));
        assertTrue(IntrospectionSupport.isSetter(setter3, true));
    }

    /** hasProperties matches on the optional prefix and ignores empty maps. */
    public void testHasProperties() throws Exception {
        Map<String, Object> empty = Collections.emptyMap();
        assertFalse(IntrospectionSupport.hasProperties(empty, null));
        assertFalse(IntrospectionSupport.hasProperties(empty, ""));
        assertFalse(IntrospectionSupport.hasProperties(empty, "foo."));

        Map<String, Object> param = new HashMap<String, Object>();
        assertFalse(IntrospectionSupport.hasProperties(param, null));
        assertFalse(IntrospectionSupport.hasProperties(param, ""));
        assertFalse(IntrospectionSupport.hasProperties(param, "foo."));

        param.put("name", "Claus");
        assertTrue(IntrospectionSupport.hasProperties(param, null));
        assertTrue(IntrospectionSupport.hasProperties(param, ""));
        assertFalse(IntrospectionSupport.hasProperties(param, "foo."));

        param.put("foo.name", "Hadrian");
        assertTrue(IntrospectionSupport.hasProperties(param, null));
        assertTrue(IntrospectionSupport.hasProperties(param, ""));
        assertTrue(IntrospectionSupport.hasProperties(param, "foo."));
    }

    /** getProperties collects all readable properties, including null-valued ones. */
    public void testGetProperties() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);

        Map<String, Object> map = new HashMap<String, Object>();
        IntrospectionSupport.getProperties(bean, map, null);
        assertEquals(3, map.size());

        assertEquals("Claus", map.get("name"));
        String price = map.get("price").toString();
        assertTrue(price.startsWith("10"));
        assertEquals(null, map.get("id"));
    }

    /** getProperties on a richer bean: collections and booleans come back as-is. */
    public void testAnotherGetProperties() throws Exception {
        AnotherExampleBean bean = new AnotherExampleBean();
        bean.setId("123");
        bean.setName("Claus");
        bean.setPrice(10.0);
        Date date = new Date(0);
        bean.setDate(date);
        bean.setGoldCustomer(true);
        bean.setLittle(true);
        Collection<?> children = new ArrayList<Object>();
        bean.setChildren(children);

        Map<String, Object> map = new HashMap<String, Object>();
        IntrospectionSupport.getProperties(bean, map, null);
        assertEquals(7, map.size());

        assertEquals("Claus", map.get("name"));
        String price = map.get("price").toString();
        assertTrue(price.startsWith("10"));
        assertSame(date, map.get("date"));
        assertSame(children, map.get("children"));
        assertEquals(Boolean.TRUE, map.get("goldCustomer"));
        assertEquals(Boolean.TRUE, map.get("little"));
        assertEquals("123", map.get("id"));
    }

    /** The optionPrefix argument is prepended to every key in the result map. */
    public void testGetPropertiesOptionPrefix() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);
        bean.setId("123");

        Map<String, Object> map = new HashMap<String, Object>();
        IntrospectionSupport.getProperties(bean, map, "bean.");
        assertEquals(3, map.size());

        assertEquals("Claus", map.get("bean.name"));
        String price = map.get("bean.price").toString();
        assertTrue(price.startsWith("10"));
        assertEquals("123", map.get("bean.id"));
    }

    /** With includeNull=false, null-valued properties are omitted from the result. */
    public void testGetPropertiesSkipNull() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);
        bean.setId(null);

        Map<String, Object> map = new HashMap<String, Object>();
        IntrospectionSupport.getProperties(bean, map, null, false);
        assertEquals(2, map.size());

        assertEquals("Claus", map.get("name"));
        String price = map.get("price").toString();
        assertTrue(price.startsWith("10"));
    }

    /** getProperty reads a single named property via its getter. */
    public void testGetProperty() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setId("123");
        bean.setName("Claus");
        bean.setPrice(10.0);

        Object name = IntrospectionSupport.getProperty(bean, "name");
        assertEquals("Claus", name);
    }

    /** setProperty writes a single named property via its setter. */
    public void testSetProperty() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setId("123");
        bean.setName("Claus");
        bean.setPrice(10.0);

        IntrospectionSupport.setProperty(bean, "name", "James");
        assertEquals("James", bean.getName());
    }

    /** getProperty also handles Date, Collection and boolean (is-getter) properties. */
    public void testAnotherGetProperty() throws Exception {
        AnotherExampleBean bean = new AnotherExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);
        Date date = new Date(0);
        bean.setDate(date);
        bean.setGoldCustomer(true);
        bean.setLittle(true);
        Collection<?> children = new ArrayList<Object>();
        bean.setChildren(children);

        Object name = IntrospectionSupport.getProperty(bean, "name");
        assertEquals("Claus", name);
        assertSame(date, IntrospectionSupport.getProperty(bean, "date"));
        assertSame(children, IntrospectionSupport.getProperty(bean, "children"));
        assertEquals(Boolean.TRUE, IntrospectionSupport.getProperty(bean, "goldCustomer"));
        assertEquals(Boolean.TRUE, IntrospectionSupport.getProperty(bean, "little"));
    }

    /**
     * Property lookup must not depend on the default locale. Turkish is the
     * classic stress test: its dotless-i casing rules break naive
     * toUpperCase/toLowerCase-based name matching.
     */
    public void testGetPropertyLocaleIndependent() throws Exception {
        Locale oldLocale = Locale.getDefault();
        Locale.setDefault(new Locale("tr", "TR"));

        try {
            ExampleBean bean = new ExampleBean();
            bean.setName("Claus");
            bean.setPrice(10.0);
            bean.setId("1");

            Object name = IntrospectionSupport.getProperty(bean, "name");
            Object id = IntrospectionSupport.getProperty(bean, "id");
            Object price = IntrospectionSupport.getProperty(bean, "price");

            assertEquals("Claus", name);
            assertEquals(10.0, price);
            assertEquals("1", id);
        } finally {
            // Always restore the default locale so other tests are unaffected.
            Locale.setDefault(oldLocale);
        }
    }

    /** getPropertyGetter resolves the Method, and reports the missing name on failure. */
    public void testGetPropertyGetter() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);

        Method name = IntrospectionSupport.getPropertyGetter(ExampleBean.class, "name");
        assertEquals("getName", name.getName());

        try {
            IntrospectionSupport.getPropertyGetter(ExampleBean.class, "xxx");
            fail("Should have thrown exception");
        } catch (NoSuchMethodException e) {
            assertEquals("org.apache.camel.util.jndi.ExampleBean.getXxx()", e.getMessage());
        }
    }

    /** getPropertySetter resolves the Method, and reports the missing name on failure. */
    public void testGetPropertySetter() throws Exception {
        ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);

        Method name = IntrospectionSupport.getPropertySetter(ExampleBean.class, "name");
        assertEquals("setName", name.getName());

        try {
            IntrospectionSupport.getPropertySetter(ExampleBean.class, "xxx");
            fail("Should have thrown exception");
        } catch (NoSuchMethodException e) {
            assertEquals("org.apache.camel.util.jndi.ExampleBean.setXxx", e.getMessage());
        }
    }

    /** getX() methods are getters and never setters. */
    public void testIsGetter() throws Exception {
        ExampleBean bean = new ExampleBean();

        Method name = bean.getClass().getMethod("getName", (Class<?>[]) null);
        assertEquals(true, IntrospectionSupport.isGetter(name));
        assertEquals(false, IntrospectionSupport.isSetter(name));

        Method price = bean.getClass().getMethod("getPrice", (Class<?>[]) null);
        assertEquals(true, IntrospectionSupport.isGetter(price));
        assertEquals(false, IntrospectionSupport.isSetter(price));
    }

    /** setX(value) methods are setters and never getters. */
    public void testIsSetter() throws Exception {
        ExampleBean bean = new ExampleBean();

        Method name = bean.getClass().getMethod("setName", String.class);
        assertEquals(false, IntrospectionSupport.isGetter(name));
        assertEquals(true, IntrospectionSupport.isSetter(name));

        Method price = bean.getClass().getMethod("setPrice", double.class);
        assertEquals(false, IntrospectionSupport.isGetter(price));
        assertEquals(true, IntrospectionSupport.isSetter(price));
    }

    /** Covers isX() boolean getters and rejects setupX() as a pseudo-setter/getter. */
    public void testOtherIsGetter() throws Exception {
        OtherExampleBean bean = new OtherExampleBean();

        Method customerId = bean.getClass().getMethod("getCustomerId", (Class<?>[]) null);
        assertEquals(true, IntrospectionSupport.isGetter(customerId));
        assertEquals(false, IntrospectionSupport.isSetter(customerId));

        Method goldCustomer = bean.getClass().getMethod("isGoldCustomer", (Class<?>[]) null);
        assertEquals(true, IntrospectionSupport.isGetter(goldCustomer));
        assertEquals(false, IntrospectionSupport.isSetter(goldCustomer));

        Method silverCustomer = bean.getClass().getMethod("isSilverCustomer", (Class<?>[]) null);
        assertEquals(true, IntrospectionSupport.isGetter(silverCustomer));
        assertEquals(false, IntrospectionSupport.isSetter(silverCustomer));

        Method company = bean.getClass().getMethod("getCompany", (Class<?>[]) null);
        assertEquals(true, IntrospectionSupport.isGetter(company));
        assertEquals(false, IntrospectionSupport.isSetter(company));

        Method setupSomething = bean.getClass().getMethod("setupSomething", Object.class);
        assertEquals(false, IntrospectionSupport.isGetter(setupSomething));
        assertEquals(false, IntrospectionSupport.isSetter(setupSomething));
    }

    /** Covers primitive/wrapper-typed setters and rejects setupX() as a setter. */
    public void testOtherIsSetter() throws Exception {
        OtherExampleBean bean = new OtherExampleBean();

        Method customerId = bean.getClass().getMethod("setCustomerId", int.class);
        assertEquals(false, IntrospectionSupport.isGetter(customerId));
        assertEquals(true, IntrospectionSupport.isSetter(customerId));

        Method goldCustomer = bean.getClass().getMethod("setGoldCustomer", boolean.class);
        assertEquals(false, IntrospectionSupport.isGetter(goldCustomer));
        assertEquals(true, IntrospectionSupport.isSetter(goldCustomer));

        Method silverCustomer = bean.getClass().getMethod("setSilverCustomer", Boolean.class);
        assertEquals(false, IntrospectionSupport.isGetter(silverCustomer));
        assertEquals(true, IntrospectionSupport.isSetter(silverCustomer));

        Method company = bean.getClass().getMethod("setCompany", String.class);
        assertEquals(false, IntrospectionSupport.isGetter(company));
        assertEquals(true, IntrospectionSupport.isSetter(company));

        Method setupSomething = bean.getClass().getMethod("setupSomething", Object.class);
        assertEquals(false, IntrospectionSupport.isGetter(setupSomething));
        assertEquals(false, IntrospectionSupport.isSetter(setupSomething));
    }

    /** extractProperties strips the prefix, and removes matched entries from the source map. */
    public void testExtractProperties() throws Exception {
        Map<String, Object> params = new LinkedHashMap<String, Object>();
        params.put("foo.name", "Camel");
        params.put("foo.age", 5);
        params.put("bar", "yes");

        // extract all "foo." properties
        // and their keys should have the prefix removed
        Map<String, Object> foo = IntrospectionSupport.extractProperties(params, "foo.");
        assertEquals(2, foo.size());
        assertEquals("Camel", foo.get("name"));
        assertEquals(5, foo.get("age"));

        // the extracted properties should be removed from original
        assertEquals(1, params.size());
        assertEquals("yes", params.get("bar"));
    }

    /** Overloaded setters are returned ordered by parameter type (bean type before String). */
    public void testFindSetterMethodsOrderedByParameterType() throws Exception {
        List<Method> setters = IntrospectionSupport.findSetterMethodsOrderedByParameterType(MyOverloadedBean.class, "bean", false);
        assertNotNull(setters);
        assertEquals(2, setters.size());

        assertEquals(ExampleBean.class, setters.get(0).getParameterTypes()[0]);
        assertEquals(String.class, setters.get(1).getParameterTypes()[0]);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.parse;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* General IO stream manipulation utilities.
*/
/** package */ class ParseIOUtils {
private static final int EOF = -1;
/**
* The default buffer size ({@value}) to use for
* {@link #copyLarge(InputStream, OutputStream)}
*/
private static final int DEFAULT_BUFFER_SIZE = 1024 * 4;
/**
* The default buffer size to use for the skip() methods.
*/
private static final int SKIP_BUFFER_SIZE = 2048;
// Allocated in the relevant skip method if necessary.
/*
* N.B. no need to synchronize these because:
* - we don't care if the buffer is created multiple times (the data is ignored)
* - we always use the same size buffer, so if it it is recreated it will still be OK
* (if the buffer size were variable, we would need to synch. to ensure some other thread
* did not create a smaller one)
*/
private static byte[] SKIP_BYTE_BUFFER;
// read toByteArray
//-----------------------------------------------------------------------
/**
 * Reads the entire contents of an {@code InputStream} into a {@code byte[]}.
 * <p>
 * Buffering is handled internally, so the caller does not need to wrap the
 * stream in a {@code BufferedInputStream}. The stream is not closed.
 *
 * @param input the {@code InputStream} to drain
 * @return a byte array holding everything read from {@code input}
 * @throws NullPointerException if the input is null
 * @throws IOException if reading fails
 */
public static byte[] toByteArray(InputStream input) throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    copy(input, sink);
    return sink.toByteArray();
}
// copy from InputStream
//-----------------------------------------------------------------------
/**
* Copy bytes from an <code>InputStream</code> to an
* <code>OutputStream</code>.
* <p>
* This method buffers the input internally, so there is no need to use a
* <code>BufferedInputStream</code>.
* <p>
* Large streams (over 2GB) will return a bytes copied value of
* <code>-1</code> after the copy has completed since the correct
* number of bytes cannot be returned as an int. For large streams
* use the <code>copyLarge(InputStream, OutputStream)</code> method.
*
* @param input the <code>InputStream</code> to read from
* @param output the <code>OutputStream</code> to write to
* @return the number of bytes copied, or -1 if > Integer.MAX_VALUE
* @throws NullPointerException if the input or output is null
* @throws IOException if an I/O error occurs
* @since 1.1
*/
public static int copy(InputStream input, OutputStream output) throws IOException {
long count = copyLarge(input, output);
if (count > Integer.MAX_VALUE) {
return -1;
}
return (int) count;
}
/**
* Copy bytes from a large (over 2GB) <code>InputStream</code> to an
* <code>OutputStream</code>.
* <p>
* This method buffers the input internally, so there is no need to use a
* <code>BufferedInputStream</code>.
* <p>
* The buffer size is given by {@link #DEFAULT_BUFFER_SIZE}.
*
* @param input the <code>InputStream</code> to read from
* @param output the <code>OutputStream</code> to write to
* @return the number of bytes copied
* @throws NullPointerException if the input or output is null
* @throws IOException if an I/O error occurs
* @since 1.3
*/
public static long copyLarge(InputStream input, OutputStream output)
throws IOException {
return copyLarge(input, output, new byte[DEFAULT_BUFFER_SIZE]);
}
/**
* Copy bytes from a large (over 2GB) <code>InputStream</code> to an
* <code>OutputStream</code>.
* <p>
* This method uses the provided buffer, so there is no need to use a
* <code>BufferedInputStream</code>.
* <p>
*
* @param input the <code>InputStream</code> to read from
* @param output the <code>OutputStream</code> to write to
* @param buffer the buffer to use for the copy
* @return the number of bytes copied
* @throws NullPointerException if the input or output is null
* @throws IOException if an I/O error occurs
* @since 2.2
*/
public static long copyLarge(InputStream input, OutputStream output, byte[] buffer)
throws IOException {
long count = 0;
int n = 0;
while (EOF != (n = input.read(buffer))) {
output.write(buffer, 0, n);
count += n;
}
return count;
}
/**
* Copy some or all bytes from a large (over 2GB) <code>InputStream</code> to an
* <code>OutputStream</code>, optionally skipping input bytes.
* <p>
* This method buffers the input internally, so there is no need to use a
* <code>BufferedInputStream</code>.
* <p>
* The buffer size is given by {@link #DEFAULT_BUFFER_SIZE}.
*
* @param input the <code>InputStream</code> to read from
* @param output the <code>OutputStream</code> to write to
* @param inputOffset : number of bytes to skip from input before copying
* -ve values are ignored
* @param length : number of bytes to copy. -ve means all
* @return the number of bytes copied
* @throws NullPointerException if the input or output is null
* @throws IOException if an I/O error occurs
* @since 2.2
*/
public static long copyLarge(InputStream input, OutputStream output, long inputOffset, long length)
throws IOException {
return copyLarge(input, output, inputOffset, length, new byte[DEFAULT_BUFFER_SIZE]);
}
/**
* Skip bytes from an input byte stream.
* This implementation guarantees that it will read as many bytes
* as possible before giving up; this may not always be the case for
* subclasses of {@link java.io.Reader}.
*
* @param input byte stream to skip
* @param toSkip number of bytes to skip.
* @return number of bytes actually skipped.
*
* @see InputStream#skip(long)
*
* @throws IOException if there is a problem reading the file
* @throws IllegalArgumentException if toSkip is negative
* @since 2.0
*/
public static long skip(InputStream input, long toSkip) throws IOException {
if (toSkip < 0) {
throw new IllegalArgumentException("Skip count must be non-negative, actual: " + toSkip);
}
/*
* N.B. no need to synchronize this because: - we don't care if the buffer is created multiple times (the data
* is ignored) - we always use the same size buffer, so if it it is recreated it will still be OK (if the buffer
* size were variable, we would need to synch. to ensure some other thread did not create a smaller one)
*/
if (SKIP_BYTE_BUFFER == null) {
SKIP_BYTE_BUFFER = new byte[SKIP_BUFFER_SIZE];
}
long remain = toSkip;
while (remain > 0) {
long n = input.read(SKIP_BYTE_BUFFER, 0, (int) Math.min(remain, SKIP_BUFFER_SIZE));
if (n < 0) { // EOF
break;
}
remain -= n;
}
return toSkip - remain;
}
/**
* Copy some or all bytes from a large (over 2GB) <code>InputStream</code> to an
* <code>OutputStream</code>, optionally skipping input bytes.
* <p>
* This method uses the provided buffer, so there is no need to use a
* <code>BufferedInputStream</code>.
* <p>
*
* @param input the <code>InputStream</code> to read from
* @param output the <code>OutputStream</code> to write to
* @param inputOffset : number of bytes to skip from input before copying
* -ve values are ignored
* @param length : number of bytes to copy. -ve means all
* @param buffer the buffer to use for the copy
*
* @return the number of bytes copied
* @throws NullPointerException if the input or output is null
* @throws IOException if an I/O error occurs
* @since 2.2
*/
public static long copyLarge(InputStream input, OutputStream output,
final long inputOffset, final long length, byte[] buffer) throws IOException {
if (inputOffset > 0) {
skipFully(input, inputOffset);
}
if (length == 0) {
return 0;
}
final int bufferLength = buffer.length;
int bytesToRead = bufferLength;
if (length > 0 && length < bufferLength) {
bytesToRead = (int) length;
}
int read;
long totalRead = 0;
while (bytesToRead > 0 && EOF != (read = input.read(buffer, 0, bytesToRead))) {
output.write(buffer, 0, read);
totalRead += read;
if (length > 0) { // only adjust length if not reading to the end
// Note the cast must work because buffer.length is an integer
bytesToRead = (int) Math.min(length - totalRead, bufferLength);
}
}
return totalRead;
}
/**
* Skip the requested number of bytes or fail if there are not enough left.
* <p>
* This allows for the possibility that {@link InputStream#skip(long)} may
* not skip as many bytes as requested (most likely because of reaching EOF).
*
* @param input stream to skip
* @param toSkip the number of bytes to skip
* @see InputStream#skip(long)
*
* @throws IOException if there is a problem reading the file
* @throws IllegalArgumentException if toSkip is negative
* @throws EOFException if the number of bytes skipped was incorrect
* @since 2.0
*/
public static void skipFully(InputStream input, long toSkip) throws IOException {
if (toSkip < 0) {
throw new IllegalArgumentException("Bytes to skip must not be negative: " + toSkip);
}
long skipped = skip(input, toSkip);
if (skipped != toSkip) {
throw new EOFException("Bytes to skip: " + toSkip + " actual: " + skipped);
}
}
/**
* Unconditionally close an <code>InputStream</code>.
* <p>
* Equivalent to {@link InputStream#close()}, except any exceptions will be ignored.
* This is typically used in finally blocks.
*
* @param input the InputStream to close, may be null or already closed
*/
public static void closeQuietly(InputStream input) {
try {
if (input != null) {
input.close();
}
} catch (IOException ioe) {
// ignore
}
}
/**
* Unconditionally close an <code>OutputStream</code>.
* <p>
* Equivalent to {@link OutputStream#close()}, except any exceptions will be ignored.
* This is typically used in finally blocks.
*
* @param output the OutputStream to close, may be null or already closed
*/
public static void closeQuietly(OutputStream output) {
try {
if (output != null) {
output.close();
}
} catch (IOException ioe) {
// ignore
}
}
/**
* Closes a <code>Closeable</code> unconditionally.
* <p>
* Equivalent to {@link Closeable#close()}, except any exceptions will be ignored.
* This is typically used in finally blocks.
* <p>
* Example code:
* <pre>
* Closeable closeable = null;
* try {
* closeable = new FileReader("foo.txt");
* // process closeable
* closeable.close();
* } catch (Exception e) {
* // error handling
* } finally {
* IOUtils.closeQuietly(closeable);
* }
* </pre>
*
* @param closeable the object to close, may be null or already closed
* @since 2.0
*/
public static void closeQuietly(final Closeable closeable) {
try {
if (closeable != null) {
closeable.close();
}
} catch (final IOException ioe) {
// ignore
}
}
}
| |
/*
* Copyright (c) 2008, 2012 Oracle and/or its affiliates. All rights reserved. Use is subject to license terms. This file is available and licensed under the
* following license: Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are
* met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in
* binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution. - Neither the name of Oracle Corporation nor the names of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package supernovae.game.controller;
import java.net.URL;
import java.util.ResourceBundle;
import com.brainless.alchemist.model.EditorPlatform;
import com.brainless.alchemist.model.ECS.pipeline.PipelineManager;
import com.brainless.alchemist.model.state.DataState;
import com.brainless.alchemist.model.state.SceneSelectorState;
import com.brainless.alchemist.model.tempImport.RendererPlatform;
import com.brainless.alchemist.presentation.sceneView.SceneViewBehavior;
import com.brainless.alchemist.view.ViewPlatform;
import com.jme3.app.SimpleApplication;
import com.jme3.app.state.AppState;
import com.jme3.app.state.AppStateManager;
import com.jme3.post.FilterPostProcessor;
import com.simsilica.es.EntityData;
import com.simsilica.es.EntityId;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.concurrent.Task;
import javafx.concurrent.Worker;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Label;
import javafx.scene.control.ProgressBar;
import javafx.scene.layout.Pane;
import plugin.circleCollisionShapeInstrument.CircleCollisionShapeInstrument;
import plugin.planarStanceInstrument.PlanarStanceInstrument;
import supernovae.JmeImageView;
import supernovae.ProcessorManager;
import supernovae.SupernoveaApplication;
import supernovae.component.SupernovaeEntityData;
import supernovae.component.motion.PlanarStance;
import supernovae.game.SupernovaeGame;
import supernovae.state.SupernovaeCameraState;
import supernovae.world.RegionPager;
import util.LogUtil;
import util.geometry.geom2d.Point2D;
/**
* Profile Controller.
*/
public class LoadingGameController extends Pane implements Initializable {
    /** The game instance this loading screen boots; injected via {@link #setApp}. */
    protected SupernovaeGame application;
    @FXML
    protected ProgressBar progressBar;
    @FXML
    protected Label progressLbl;

    /**
     * Injects the game application that will be started once loading succeeds.
     *
     * @param application the game to start after the load task completes
     */
    public void setApp(SupernovaeGame application) {
        this.application = application;
    }

    /**
     * Wires the load task to the progress UI and runs it on a background thread.
     * On SUCCEEDED the game is started; on FAILED the error value is printed.
     */
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        Task<Object> copyWorker = createLoadTask();
        progressBar.progressProperty().unbind();
        progressBar.progressProperty().bind(copyWorker.progressProperty());
        // Mirror the task's status messages into the label.
        copyWorker.messageProperty().addListener(new ChangeListener<String>() {
            @Override
            public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
                progressLbl.setText(newValue);
            }
        });
        copyWorker.stateProperty().addListener(new ChangeListener<Worker.State>() {
            @Override
            public void changed(ObservableValue<? extends Worker.State> observableValue, Worker.State oldState, Worker.State newState) {
                System.out.println("NewState: " + newState);
                if (newState == Worker.State.SUCCEEDED) {
                    application.startGame();
                } else if (newState == Worker.State.FAILED) {
                    System.err.println("ERROR AT LOADING: " + observableValue.getValue());
                }
            }
        });
        new Thread(copyWorker).start();
    }

    /**
     * Disables (pauses) the given app state if it is attached to the current scene.
     *
     * @param stateClass the AppState subclass to look up and disable
     */
    protected <T extends AppState> void pauseState(Class<T> stateClass) {
        T state = ViewPlatform.getScene().getApp().getStateManager().getState(stateClass);
        if (state != null) {
            state.setEnabled(false);
        }
    }

    /**
     * Attaches the post-processor and the base data/selector states to the app.
     * Intended to be enqueued on the render thread.
     * NOTE(review): the {@code ed} parameter is unused; the entity data is taken
     * from {@code EditorPlatform.getEntityData()} instead — confirm intent.
     */
    static private boolean createScene(SimpleApplication app, EntityData ed) {
        // RendererPlatform.setApp(app);
        app.getViewPort().addProcessor(new FilterPostProcessor(app.getAssetManager()));
        AppStateManager stateManager = app.getStateManager();
        stateManager.attach(new DataState(EditorPlatform.getEntityData()));
        stateManager.attach(new SceneSelectorState());
        return true;
    }

    /**
     * Builds the background task that initializes the editor/render platforms,
     * scene, instruments and world paging, reporting progress in numbered steps.
     * (Progress message typos — "unitl", "Plattform" — fixed here.)
     */
    public Task<Object> createLoadTask() {
        int maxTasks = 13;
        return new Task<Object>() {
            @Override
            protected Object call() throws Exception {
                updateMessage("Prepare EditorPlatform");
                updateProgress(1, maxTasks);
                // copied from Alchemist
                LogUtil.init();
                // Model instanciation
                EditorPlatform.setEntityData(new SupernovaeEntityData());
                // EditorPlatform.setEntityData(new TraversableEntityData());
                // EditorPlatform.getUserComponentList().setValue(new UserComponentList());
                EditorPlatform.setPipelineManager(new PipelineManager());
                // copied from SceneView
                // presenter = new SceneViewPresenter(this);
                updateMessage("Prepare RenderPlatform");
                updateProgress(2, maxTasks);
                JmeImageView jme = new JmeImageView();
                ViewPlatform.setScene(jme);
                SupernoveaApplication app1 = (SupernoveaApplication) ViewPlatform.getScene().getApp();
                // Busy-wait until the JME application reports it is initialized.
                while (!app1.isInitialized()) {
                    Thread.sleep(100);
                }
                RendererPlatform.setApp(ViewPlatform.getScene().getApp());
                ViewPlatform.getScene().enqueue(app -> createScene(app, EditorPlatform.getEntityData()));
                updateMessage("Prepare LogicProcessors");
                updateProgress(3, maxTasks);
                ProcessorManager.addRendererSystems();
                updateMessage("Load Map with Blueprints");
                updateProgress(4, maxTasks);
                // adding scene view behavior to place blueprint in view with correct planar stance
                SceneViewBehavior.createEntityFunction = (blueprint, screenCoord) -> {
                    RendererPlatform.enqueue(app -> {
                        EntityData ed = app.getStateManager().getState(DataState.class).getEntityData();
                        EntityId newEntity = blueprint.createEntity(ed, null);
                        PlanarStance stance = ed.getComponent(newEntity, PlanarStance.class);
                        if (stance != null) {
                            // Re-anchor the new entity at the clicked in-plane coordinate.
                            Point2D planarCoord = app.getStateManager().getState(SceneSelectorState.class).getPointedCoordInPlan(screenCoord);
                            ed.setComponent(newEntity, new PlanarStance(planarCoord, stance.getOrientation(), stance.getElevation(), stance.getUpVector()));
                        }
                    });
                    return null;
                };
                updateMessage("Prepare Instruments");
                updateProgress(5, maxTasks);
                // adding instruments
                new PlanarStanceInstrument();
                new CircleCollisionShapeInstrument();
                updateMessage("Start LogicProcessors");
                updateProgress(6, maxTasks);
                // RendererPlatform.enqueue(() -> EditorPlatform.getPipelineManager().stopEditionPiplines());
                // RendererPlatform.enqueue(() -> EditorPlatform.getPipelineManager().runPipelines());
                EditorPlatform.getPipelineManager().runEditionPiplines();
                updateMessage("Pause the Game");
                updateProgress(7, maxTasks);
                // all States must be disabled => paused
                pauseState(SupernovaeCameraState.class);
                pauseState(SceneSelectorState.class);
                updateMessage("Wait until JME is initialized");
                updateProgress(8, maxTasks);
                while (!jme.getApp().isInitialized()) {
                    Thread.sleep(200);
                }
                // updateMessage("Wait until Models are loaded");
                // updateProgress(9, maxTasks);
                // while (!jme.getApp().isInitialized()) {
                //     Thread.sleep(200);
                // }
                updateMessage("Wait until Worldproc is ready");
                updateProgress(10, maxTasks);
                // WorldProc state = jme.getApp().getStateManager().getState(WorldProc.class);
                // while (state == null || !state.isEnabled() || !state.isInitialized()) {
                //     System.out.println("wait until world is ready " + state);
                //     Thread.sleep(200);
                // }
                updateMessage("Wait until RegionPager is ready");
                updateProgress(11, maxTasks);
                RegionPager pagerState = RendererPlatform.getStateManager().getState(RegionPager.class);
                while (pagerState == null || !pagerState.isEnabled() || !pagerState.isInitialized()) {
                    Thread.sleep(200);
                }
                updateMessage("Loading Maps");
                updateProgress(12, maxTasks);
                try {
                    pagerState.getRegionsAtOnce();
                } catch (Exception e) {
                    // Log before rethrowing so the stack trace survives even if the
                    // task's FAILED handler only prints the final value.
                    e.printStackTrace();
                    throw e;
                }
                EditorPlatform.getPipelineManager().stopEditionPiplines();
                EditorPlatform.getPipelineManager().runPipelines();
                return true;
            }
        };
    }
}
| |
/*
* Copyright 2013 Barzan Mozafari
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbseer.middleware.client;
import com.esotericsoftware.minlog.Log;
import dbseer.middleware.constant.MiddlewareConstants;
import dbseer.middleware.event.MiddlewareClientEvent;
import dbseer.middleware.packet.MiddlewarePacket;
import dbseer.middleware.packet.MiddlewarePacketDecoder;
import dbseer.middleware.packet.MiddlewarePacketEncoder;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.compression.ZlibCodecFactory;
import io.netty.handler.codec.compression.ZlibWrapper;
import io.netty.handler.timeout.IdleStateHandler;
import java.io.*;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* Created by Dong Young Yoon on 12/1/15.
*
* The client for the middleware.
*/
public class MiddlewareClient extends Observable implements Runnable
{
	/** Maximum number of {@link #startMonitoring()} attempts before giving up. */
	private static final int MAX_RETRY = 3;

	private String id;
	private String password;
	private String host;
	private int port;
	private int retry; // start-monitoring attempts so far; reset by stopMonitoring()
	private int reqId; // monotonically increasing id for statistics requests
	private boolean isMonitoring;
	private String logPath; // directory where tx/sys log files are written
	private Channel channel = null;
	private ExecutorService requesterExecutor = null;
	private ExecutorService heartbeatSenderExecutor = null;
	private MiddlewareClientHeartbeatSender heartbeatSender = null;
	private MiddlewareClientLogRequester txLogRequester = null;
	private Map<String, MiddlewareClientLogRequester> sysLogRequester = null;
	private Map<String, PrintWriter> logWriterMap = null; // tx log writer for dbseer: <transaction type, writer>
	private Map<Integer, String> statementMessageMap = null; // pending statistics payloads keyed by reqId
	private ArrayList<String> serverNameList = null;
	private ZipOutputStream txZipOutputStream = null;
	private PrintWriter txPrintWriter = null;
	private File txLogFileRaw = null; // null until startTxLogRequester() has run

	/**
	 * Creates a client that will connect to the middleware at host:port.
	 *
	 * @param host middleware host name or address
	 * @param id user id for authentication
	 * @param password password for authentication
	 * @param port middleware port
	 * @param logPath directory in which transaction/system logs are stored
	 */
	public MiddlewareClient(String host, String id, String password, int port, String logPath)
	{
		this.retry = 0;
		this.reqId = 0;
		this.id = id;
		this.password = password;
		this.host = host;
		this.port = port;
		this.logPath = logPath;
		this.isMonitoring = false;
		this.logWriterMap = new HashMap<>();
		this.sysLogRequester = new HashMap<>();
		this.statementMessageMap = new HashMap<>();
		this.serverNameList = new ArrayList<>();
	}

	/**
	 * Sets the minlog log level.
	 *
	 * @param level one of the com.esotericsoftware.minlog.Log level constants
	 */
	public void setLogLevel(int level)
	{
		Log.set(level);
	}

	/**
	 * Connects to the middleware and blocks until the channel closes.
	 * Performs a protocol version check immediately after connecting, then
	 * cleans up executors and the raw tx log file on exit.
	 */
	public void run()
	{
		// debug info
		Log.debug(String.format("host = %s", host));
		Log.debug(String.format("port = %d", port));
		Log.debug(String.format("log path = %s", logPath));
		// client needs to handle incoming messages from the middleware as well.
		EventLoopGroup group = new NioEventLoopGroup(4);
		try
		{
			// attach shutdown hook.
			MiddlewareClientShutdown shutdownThread = new MiddlewareClientShutdown(this);
			Runtime.getRuntime().addShutdownHook(shutdownThread);
			File logDir = new File(logPath);
			if (!logDir.exists())
			{
				logDir.mkdirs();
			}
			final MiddlewareClient client = this;
			Bootstrap b = new Bootstrap();
			b.group(group)
					.channel(NioSocketChannel.class)
					.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
					.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 5000)
					.handler(new ChannelInitializer<SocketChannel>()
					{
						@Override
						protected void initChannel(SocketChannel ch) throws Exception
						{
							// zlib compression around the packet codec; idle handler
							// drives the heartbeat (10s reader-idle timeout).
							ChannelPipeline p = ch.pipeline();
							p.addLast(new IdleStateHandler(10, 0, 0));
							p.addLast(ZlibCodecFactory.newZlibEncoder(ZlibWrapper.ZLIB));
							p.addLast(ZlibCodecFactory.newZlibDecoder(ZlibWrapper.ZLIB));
							p.addLast(new MiddlewarePacketDecoder());
							p.addLast(new MiddlewarePacketEncoder());
							p.addLast(new MiddlewareClientHandler(client));
						}
					});
			ChannelFuture f = b.connect(host, port).sync();
			channel = f.channel();
			Log.debug("Connected to the middleware.");
			MiddlewarePacket checkPacket = new MiddlewarePacket(MiddlewareConstants.PACKET_CHECK_VERSION, MiddlewareConstants.PROTOCOL_VERSION);
			channel.writeAndFlush(checkPacket);
			// Block until the connection is closed (by the server or by disconnect()).
			channel.closeFuture().sync();
		}
		catch (Exception e)
		{
			if (e instanceof InterruptedException)
			{
				// Expected during shutdown; restore the interrupt flag instead of
				// reporting an error to observers.
				Thread.currentThread().interrupt();
			}
			else
			{
				setChanged();
				notifyObservers(new MiddlewareClientEvent(MiddlewareClientEvent.ERROR, e));
			}
			Log.error(e.getMessage());
			e.printStackTrace();
		}
		finally
		{
			group.shutdownGracefully();
			this.stopExecutors();
			// FIX: txLogFileRaw stays null until startTxLogRequester() has run, so a
			// connection failure before monitoring started used to NPE here.
			if (txLogFileRaw != null && txLogFileRaw.exists())
			{
				txLogFileRaw.delete();
			}
			if (txZipOutputStream != null)
			{
				try
				{
					txZipOutputStream.closeEntry();
					txZipOutputStream.close();
				}
				catch (IOException e)
				{
					e.printStackTrace();
				}
				txZipOutputStream = null;
			}
		}
	}

	/** @return the Netty channel to the middleware, or null if not connected */
	public Channel getChannel()
	{
		return channel;
	}

	/** @return the transaction log requester, or null if not started */
	public MiddlewareClientLogRequester getTxLogRequester()
	{
		return txLogRequester;
	}

	/**
	 * @param server server name
	 * @return the system log requester for the given server, or null if absent
	 */
	public MiddlewareClientLogRequester getSysLogRequester(String server)
	{
		return sysLogRequester.get(server);
	}

	/**
	 * Sends the start-monitoring packet with the configured credentials.
	 *
	 * @throws Exception if MAX_RETRY attempts have already been made
	 */
	public void startMonitoring() throws Exception
	{
		if (retry >= MAX_RETRY)
		{
			throw new Exception(String.format("Middleware failed to start with %d retries", MAX_RETRY));
		}
		// clear server names.
		this.serverNameList.clear();
		if (channel != null)
		{
			String idPassword = this.id + "@" + this.password;
			MiddlewarePacket packet = new MiddlewarePacket(MiddlewareConstants.PACKET_START_MONITORING, idPassword);
			channel.writeAndFlush(packet);
		}
		Log.debug("Start monitoring packet sent.");
		retry++;
	}

	/**
	 * Sends the stop-monitoring packet, stops executors, deletes the raw tx log
	 * and closes the tx zip stream. Resets the retry counter.
	 *
	 * @throws Exception if closing the zip stream fails
	 */
	public void stopMonitoring() throws Exception
	{
		this.stopExecutors();
		if (channel != null)
		{
			MiddlewarePacket packet = new MiddlewarePacket(MiddlewareConstants.PACKET_STOP_MONITORING);
			channel.writeAndFlush(packet);
		}
		Log.debug("Stop monitoring packet sent.");
		if (txLogFileRaw != null && txLogFileRaw.exists())
		{
			txLogFileRaw.delete();
		}
		if (txZipOutputStream != null)
		{
			txZipOutputStream.closeEntry();
			txZipOutputStream.close();
			txZipOutputStream = null;
		}
		// reset retry count.
		retry = 0;
		isMonitoring = false;
	}

	/** @return the raw (uncompressed) transaction log file, or null if not started */
	public File getTxLogFileRaw()
	{
		return txLogFileRaw;
	}

	/**
	 * Requests the list of monitored servers from the middleware.
	 *
	 * @throws Exception declared for API compatibility; not thrown here
	 */
	public void requestServerList() throws Exception
	{
		if (channel != null)
		{
			MiddlewarePacket packet = new MiddlewarePacket(MiddlewareConstants.PACKET_REQUEST_SERVER_LIST);
			channel.writeAndFlush(packet);
		}
		Log.debug("Server list request packet sent.");
	}

	/**
	 * Requests query statistics for one statement. The statement header is stored
	 * under the current reqId and completed later by
	 * {@link #printQueryStatistics(String, int, int, String)}.
	 */
	public synchronized void requestStatistics(String serverName, int txId, int txType, int stId, long latency, int mode, Set<String> tables, String sql)
	{
		// Build the comma-separated header with StringBuilder instead of repeated
		// String concatenation in the loop.
		StringBuilder msg = new StringBuilder(
				String.format("%d,%d,%d,%d,%d,%d,", txType, txId, stId, latency, mode, tables.size()));
		for (String table : tables)
		{
			msg.append(table).append(",");
		}
		statementMessageMap.put(reqId, msg.toString());
		if (channel != null)
		{
			MiddlewarePacket packet = new MiddlewarePacket(MiddlewareConstants.PACKET_REQUEST_QUERY_STATISTICS, String.format("%s,%d,%d,%s", serverName, reqId, txType, sql));
			channel.writeAndFlush(packet);
		}
		++reqId;
		// FIX: was mislabeled "Table count request packet sent." (copy-paste from
		// requestTableCount); this sends a query statistics request.
		Log.debug("Query statistics request packet sent.");
	}

	/**
	 * Requests the row count of a table on the given server.
	 */
	public void requestTableCount(String serverName, String tableName)
	{
		if (channel != null)
		{
			MiddlewarePacket packet = new MiddlewarePacket(MiddlewareConstants.PACKET_REQUEST_TABLE_COUNT, String.format("%s,%s", serverName, tableName));
			channel.writeAndFlush(packet);
		}
		Log.debug("Table count request packet sent.");
	}

	/**
	 * Requests the number of rows accessed by the given SQL statement.
	 */
	public void requestNumRowAccessedByQuery(String serverName, int txType, String sql)
	{
		if (channel != null)
		{
			MiddlewarePacket packet = new MiddlewarePacket(MiddlewareConstants.PACKET_REQUEST_NUM_ROW_BY_SQL, String.format("%s,%d,%s", serverName, txType, sql));
			channel.writeAndFlush(packet);
		}
		Log.debug("Num row accessed by sql request packet sent.");
	}

	/**
	 * Writes a completed statistics line (stored header + middleware response)
	 * to the log writer registered for serverName + txType, then discards the
	 * stored header.
	 */
	public synchronized void printQueryStatistics(String serverName, int txType, int reqId, String msg)
	{
		PrintWriter writer = logWriterMap.get(serverName + txType);
		if (writer != null)
		{
			writer.print(statementMessageMap.get(reqId));
			writer.println(msg);
			writer.flush();
		}
		else
		{
			Log.error("Writer null");
		}
		statementMessageMap.remove(reqId);
	}

	/**
	 * Starts the transaction log requester thread and opens the raw/zipped
	 * transaction log files under logPath.
	 *
	 * @return the zip output stream receiving the transaction log
	 * @throws Exception if the log files cannot be opened
	 */
	public ZipOutputStream startTxLogRequester() throws Exception
	{
		if (requesterExecutor == null)
		{
			requesterExecutor = Executors.newCachedThreadPool();
		}
		txLogRequester =
				new MiddlewareClientLogRequester(channel, MiddlewareConstants.PACKET_REQUEST_TX_LOG);
		requesterExecutor.submit(txLogRequester);
		File dbLogFile = new File(logPath + File.separator + MiddlewareConstants.TX_LOG_ZIP);
		txLogFileRaw = new File(logPath + File.separator + MiddlewareConstants.TX_LOG_RAW);
		txPrintWriter = new PrintWriter(new FileWriter(txLogFileRaw, false));
		FileOutputStream fos = new FileOutputStream(dbLogFile);
		txZipOutputStream = new ZipOutputStream(new BufferedOutputStream(fos));
		try
		{
			txZipOutputStream.putNextEntry(new ZipEntry(MiddlewareConstants.TX_LOG_RAW));
		}
		catch (Exception e)
		{
			Log.error(e.getMessage());
			e.printStackTrace();
		}
		Log.debug("Tx Log requester launched.");
		return txZipOutputStream;
	}

	/** @return the writer for the raw transaction log, or null if not started */
	public PrintWriter getTxPrintWriter()
	{
		return txPrintWriter;
	}

	/**
	 * Starts one system log requester per server listed in serverStr and opens a
	 * log writer for each.
	 *
	 * @param serverStr server names joined by MiddlewareConstants.SERVER_STRING_DELIMITER
	 * @return map of server name to its system log writer
	 * @throws Exception if a log file cannot be opened
	 */
	public Map<String, PrintWriter> startSysLogRequester(String serverStr) throws Exception
	{
		if (requesterExecutor == null)
		{
			requesterExecutor = Executors.newCachedThreadPool();
		}
		Map<String, PrintWriter> writers = new HashMap<>();
		String[] servers = serverStr.split(MiddlewareConstants.SERVER_STRING_DELIMITER);
		for (String server : servers)
		{
			MiddlewareClientLogRequester logRequester =
					new MiddlewareClientLogRequester(channel, MiddlewareConstants.PACKET_REQUEST_SYS_LOG, server);
			requesterExecutor.submit(logRequester);
			sysLogRequester.put(server, logRequester);
			File sysLogFile = new File(logPath + File.separator + MiddlewareConstants.SYS_LOG_PREFIX + "." + server);
			PrintWriter writer = new PrintWriter(new FileWriter(sysLogFile, false));
			writers.put(server, writer);
			serverNameList.add(server);
		}
		Log.debug("Sys Log requesters launched.");
		return writers;
	}

	/**
	 * Starts the heartbeat sender on its own single-thread executor.
	 *
	 * @throws Exception declared for API compatibility; not thrown here
	 */
	public void startHeartbeatSender() throws Exception
	{
		heartbeatSender = new MiddlewareClientHeartbeatSender(channel);
		heartbeatSenderExecutor = Executors.newSingleThreadExecutor();
		heartbeatSenderExecutor.submit(heartbeatSender);
		Log.debug("heartbeat sender launched.");
	}

	/**
	 * Interrupts all requester and heartbeat threads and resets per-session state
	 * (tx requester, sys requester map, server name list).
	 */
	public void stopExecutors()
	{
		if (requesterExecutor != null)
		{
			requesterExecutor.shutdownNow();
		}
		if (heartbeatSenderExecutor != null)
		{
			heartbeatSenderExecutor.shutdownNow();
		}
		txLogRequester = null;
		sysLogRequester = new HashMap<>();
		// clear server names.
		this.serverNameList.clear();
	}

	/** @return the zip stream for the transaction log, or null if not started */
	public ZipOutputStream getTxZipOutputStream()
	{
		return txZipOutputStream;
	}

	/**
	 * Updates the monitoring flag and notifies observers with the corresponding
	 * IS_MONITORING / IS_NOT_MONITORING event.
	 */
	public void setMonitoring(boolean monitoring)
	{
		isMonitoring = monitoring;
		setChanged();
		MiddlewareClientEvent event;
		if (isMonitoring)
		{
			event = new MiddlewareClientEvent(MiddlewareClientEvent.IS_MONITORING);
		}
		else
		{
			event = new MiddlewareClientEvent(MiddlewareClientEvent.IS_NOT_MONITORING);
		}
		notifyObservers(event);
	}

	/**
	 * Same as {@link #setMonitoring(boolean)} but also attaches the raw server
	 * string to the event before notifying observers.
	 */
	public void setMonitoring(boolean monitoring, String serverStr)
	{
		isMonitoring = monitoring;
		setChanged();
		MiddlewareClientEvent event;
		if (isMonitoring)
		{
			event = new MiddlewareClientEvent(MiddlewareClientEvent.IS_MONITORING);
		}
		else
		{
			event = new MiddlewareClientEvent(MiddlewareClientEvent.IS_NOT_MONITORING);
		}
		event.serverStr = serverStr;
		notifyObservers(event);
	}

	/**
	 * Notifies observers of a table row count received from the middleware.
	 */
	public void setTableRowCount(String serverName, String tableName, long rowCount)
	{
		MiddlewareClientEvent event = new MiddlewareClientEvent(MiddlewareClientEvent.TABLE_ROW_COUNT, serverName, tableName, rowCount);
		setChanged();
		notifyObservers(event);
	}

	/** @return whether monitoring is currently active */
	public boolean isMonitoring()
	{
		return isMonitoring;
	}

	/**
	 * Registers the writer used by {@link #printQueryStatistics} for the given
	 * composite id (serverName + txType).
	 */
	public void registerLogWriter(String id, PrintWriter writer)
	{
		logWriterMap.put(id, writer);
	}

	/**
	 * Disconnects the channel if it is currently active.
	 *
	 * @throws Exception declared for API compatibility; not thrown here
	 */
	public void disconnect() throws Exception
	{
		if (channel != null && channel.isActive())
		{
			channel.disconnect();
		}
	}

	/**
	 * @return the internal (mutable) list of monitored server names; cleared by
	 *         stopExecutors()/startMonitoring() — callers should copy if needed
	 */
	public ArrayList<String> getServerNameList()
	{
		return serverNameList;
	}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* LiveStreamEventPage.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202105;
/**
* Captures a page of {@link LiveStreamEvent} objects.
*/
public class LiveStreamEventPage implements java.io.Serializable , Iterable<com.google.api.ads.admanager.axis.v202105.LiveStreamEvent>{
/* The size of the total result set to which this page belongs. */
private java.lang.Integer totalResultSetSize;
/* The absolute index in the total result set on which this page
* begins. */
private java.lang.Integer startIndex;
/* The collection of live stream events contained within this
* page. */
private com.google.api.ads.admanager.axis.v202105.LiveStreamEvent[] results;
    // Generated no-arg constructor; all fields start as null.
    public LiveStreamEventPage() {
    }
public LiveStreamEventPage(
java.lang.Integer totalResultSetSize,
java.lang.Integer startIndex,
com.google.api.ads.admanager.axis.v202105.LiveStreamEvent[] results) {
this.totalResultSetSize = totalResultSetSize;
this.startIndex = startIndex;
this.results = results;
}
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
// Only include length of results to avoid overly verbose output
.add("results.length", getResults() == null ? 0 : getResults().length)
.add("startIndex", getStartIndex())
.add("totalResultSetSize", getTotalResultSetSize())
.toString();
}
/**
* Gets the totalResultSetSize value for this LiveStreamEventPage.
*
* @return totalResultSetSize * The size of the total result set to which this page belongs.
*/
public java.lang.Integer getTotalResultSetSize() {
return totalResultSetSize;
}
/**
* Sets the totalResultSetSize value for this LiveStreamEventPage.
*
* @param totalResultSetSize * The size of the total result set to which this page belongs.
*/
public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
this.totalResultSetSize = totalResultSetSize;
}
/**
* Gets the startIndex value for this LiveStreamEventPage.
*
* @return startIndex * The absolute index in the total result set on which this page
* begins.
*/
public java.lang.Integer getStartIndex() {
return startIndex;
}
/**
* Sets the startIndex value for this LiveStreamEventPage.
*
* @param startIndex * The absolute index in the total result set on which this page
* begins.
*/
public void setStartIndex(java.lang.Integer startIndex) {
this.startIndex = startIndex;
}
/**
* Gets the results value for this LiveStreamEventPage.
*
* @return results * The collection of live stream events contained within this
* page.
*/
public com.google.api.ads.admanager.axis.v202105.LiveStreamEvent[] getResults() {
return results;
}
/**
* Sets the results value for this LiveStreamEventPage.
*
* @param results * The collection of live stream events contained within this
* page.
*/
public void setResults(com.google.api.ads.admanager.axis.v202105.LiveStreamEvent[] results) {
this.results = results;
}
public com.google.api.ads.admanager.axis.v202105.LiveStreamEvent getResults(int i) {
return this.results[i];
}
public void setResults(int i, com.google.api.ads.admanager.axis.v202105.LiveStreamEvent _value) {
this.results[i] = _value;
}
/**
* Returns an iterator over this page's {@code results} that:
* <ul>
* <li>Will not be {@code null}.</li>
* <li>Will not support {@link java.util.Iterator#remove()}.</li>
* </ul>
*
* @return a non-null iterator.
*/
@Override
public java.util.Iterator<com.google.api.ads.admanager.axis.v202105.LiveStreamEvent> iterator() {
if (results == null) {
return java.util.Collections.<com.google.api.ads.admanager.axis.v202105.LiveStreamEvent>emptyIterator();
}
return java.util.Arrays.<com.google.api.ads.admanager.axis.v202105.LiveStreamEvent>asList(results).iterator();
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof LiveStreamEventPage)) return false;
LiveStreamEventPage other = (LiveStreamEventPage) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
(this.totalResultSetSize!=null &&
this.totalResultSetSize.equals(other.getTotalResultSetSize()))) &&
((this.startIndex==null && other.getStartIndex()==null) ||
(this.startIndex!=null &&
this.startIndex.equals(other.getStartIndex()))) &&
((this.results==null && other.getResults()==null) ||
(this.results!=null &&
java.util.Arrays.equals(this.results, other.getResults())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getTotalResultSetSize() != null) {
_hashCode += getTotalResultSetSize().hashCode();
}
if (getStartIndex() != null) {
_hashCode += getStartIndex().hashCode();
}
if (getResults() != null) {
for (int i=0;
i<java.lang.reflect.Array.getLength(getResults());
i++) {
java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
if (obj != null &&
!obj.getClass().isArray()) {
_hashCode += obj.hashCode();
}
}
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(LiveStreamEventPage.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "LiveStreamEventPage"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("totalResultSetSize");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "totalResultSetSize"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("startIndex");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "startIndex"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("results");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "results"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "LiveStreamEvent"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
elemField.setMaxOccursUnbounded(true);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
| |
package com.youcruit.billogram.client;
import static com.youcruit.billogram.client.http.HttpClient.Method.GET;
import static com.youcruit.billogram.client.http.HttpClient.Method.POST;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URI;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.google.gson.annotations.SerializedName;
import com.youcruit.billogram.client.http.HttpClient;
import com.youcruit.billogram.objects.request.Search;
import com.youcruit.billogram.objects.request.billogram.BillogramFilterField;
import com.youcruit.billogram.objects.request.billogram.BillogramOrderField;
import com.youcruit.billogram.objects.request.billogram.PdfFileResponse;
import com.youcruit.billogram.objects.request.billogram.SendMethod;
import com.youcruit.billogram.objects.response.billogram.Billogram;
import com.youcruit.billogram.objects.response.billogram.BillogramResponse;
import com.youcruit.billogram.objects.response.billogram.BillogramSearchResponse;
import com.youcruit.billogram.objects.response.callback.CallbackPost;
import okio.ByteString;
/**
 * REST client for the Billogram invoice ("billogram") endpoint: CRUD, search,
 * lifecycle commands (sell/collect/writeoff, send/resend/remind), payments,
 * credits, messages, PDF attachment and retrieval, and callback parsing.
 */
public class BillogramClient extends AbstractRestClient<BillogramFilterField, BillogramOrderField, BillogramSearchResponse, BillogramResponse, Billogram> {

    /** Lifecycle commands that take no request body. */
    public enum Command {
        SELL("sell"), COLLECT("collect"), WRITEOFF("writeoff");
        public final String command;
        Command(String command) {
            this.command = command;
        }
    }

    /** Delivery commands that take a send method in the request body. */
    public enum SendCommand {
        SEND("send"), RESEND("resend"), REMIND("remind");
        public final String command;
        SendCommand(String command) {
            this.command = command;
        }
    }

    /** How much of the billogram to credit; serialized as the API's lowercase mode names. */
    public enum CreditMode {
        @SerializedName("full")FULL,
        @SerializedName("remaining") REMAINING,
        @SerializedName("amount") AMOUNT;
    }

    public BillogramClient(HttpClient httpClient) {
        super(httpClient, "billogram", BillogramSearchResponse.class, BillogramResponse.class);
    }

    @Override
    public BillogramResponse create(Billogram fullBillogram) throws IOException {
        return super.create(fullBillogram);
    }

    @Override
    public void createAsync(Billogram fullBillogram, BillogramCallback<BillogramResponse> callback) {
        super.createAsync(fullBillogram, callback);
    }

    @Override
    public BillogramResponse update(Billogram fullBillogram) throws IOException {
        return super.update(fullBillogram);
    }

    @Override
    public void updateAsync(Billogram fullBillogram, BillogramCallback<BillogramResponse> callback) {
        super.updateAsync(fullBillogram, callback);
    }

    @Override
    public BillogramResponse get(String id) throws IOException {
        return super.get(id);
    }

    @Override
    public void getAsync(String id, BillogramCallback<BillogramResponse> callback) {
        super.getAsync(id, callback);
    }

    @Override
    public BillogramSearchResponse search(Search<BillogramFilterField, BillogramOrderField> search) throws IOException {
        return super.search(search);
    }

    @Override
    public void searchAsync(Search<BillogramFilterField, BillogramOrderField> search, BillogramCallback<BillogramSearchResponse> callback) {
        super.searchAsync(search, callback);
    }

    /** Executes a body-less lifecycle command (sell/collect/writeoff) synchronously. */
    public BillogramResponse command(String billogramId, Command command) throws IOException {
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", command.command);
        return httpClient.sync(uri, null, POST, responseClass);
    }

    /** Asynchronous variant of {@link #command(String, Command)}. */
    public void commandAsync(String billogramId, Command command, BillogramCallback<BillogramResponse> callback) {
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", command.command);
        httpClient.async(uri, null, POST, callback, BillogramResponse.class);
    }

    /** Executes a delivery command (send/resend/remind) with the given send method, synchronously. */
    public BillogramResponse command(String billogramId, SendMethod resendMethod, SendCommand command) throws IOException {
        Map<String, SendMethod> resendRequest = Collections.singletonMap("method", resendMethod);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", command.command);
        return httpClient.sync(uri, resendRequest, POST, responseClass);
    }

    /** Asynchronous variant of {@link #command(String, SendMethod, SendCommand)}. */
    public void commandAsync(String billogramId, SendMethod resendMethod, SendCommand command, BillogramCallback<BillogramResponse> callback) {
        Map<String, SendMethod> resendRequest = Collections.singletonMap("method", resendMethod);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", command.command);
        httpClient.async(uri, resendRequest, POST, callback, BillogramResponse.class);
    }

    /** Registers a manual payment of {@code amount} on the billogram, synchronously. */
    public BillogramResponse commandPayment(String billogramId, BigDecimal amount) throws IOException {
        Map<String, BigDecimal> paymentRequest = Collections.singletonMap("amount", amount);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "payment");
        return httpClient.sync(uri, paymentRequest, POST, BillogramResponse.class);
    }

    /** Asynchronous variant of {@link #commandPayment(String, BigDecimal)}. */
    public void commandPaymentAsync(String billogramId, BigDecimal amount, BillogramCallback<BillogramResponse> callback) {
        Map<String, BigDecimal> paymentRequest = Collections.singletonMap("amount", amount);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "payment");
        httpClient.async(uri, paymentRequest, POST, callback, BillogramResponse.class);
    }

    /**
     * Credits the billogram, synchronously.
     * FIX: this previously built its URI with the hard-coded literal "billogram"
     * while the async overload and every other method use the inherited
     * {@code endpoint} field (set to "billogram" in the constructor); now
     * consistent with the rest of the class.
     *
     * @param amount the amount to credit (meaning depends on {@code creditMode})
     * @param creditMode how much of the billogram to credit
     */
    public BillogramResponse commandCredit(String billogramId, BigDecimal amount, CreditMode creditMode) throws IOException {
        Map<String, Object> creditRequest = new HashMap<>();
        creditRequest.put("mode", creditMode);
        creditRequest.put("amount", amount);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "credit");
        return httpClient.sync(uri, creditRequest, POST, BillogramResponse.class);
    }

    /**
     * Asynchronous variant of {@link #commandCredit(String, BigDecimal, CreditMode)}.
     * NOTE(review): lacks the "Async" suffix used by every other async method;
     * renaming would break callers, so the name is kept.
     */
    public void commandCredit(String billogramId, BigDecimal amount, CreditMode creditMode, BillogramCallback<BillogramResponse> callback) {
        Map<String, Object> creditRequest = new HashMap<>();
        creditRequest.put("mode", creditMode);
        creditRequest.put("amount", amount);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "credit");
        httpClient.async(uri, creditRequest, POST, callback, BillogramResponse.class);
    }

    /** Attaches a free-text message to the billogram, synchronously. */
    public BillogramResponse addMessage(String billogramId, String message) throws IOException {
        Map<String, String> body = Collections.singletonMap("message", message);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "message");
        return httpClient.sync(uri, body, POST, BillogramResponse.class);
    }

    /** Asynchronous variant of {@link #addMessage(String, String)}. */
    public void addMessageAsync(String billogramId, String message, BillogramCallback<BillogramResponse> callback) {
        Map<String, String> body = Collections.singletonMap("message", message);
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "message");
        httpClient.async(uri, body, POST, callback, BillogramResponse.class);
    }

    /** Attaches a PDF to the billogram; the content is sent base64-encoded. */
    public BillogramResponse addPdf(String billogramId, String fileName, byte[] pdfData) throws IOException {
        Map<String, String> body = new HashMap<>();
        body.put("filename", fileName);
        body.put("content", ByteString.of(pdfData).base64());
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "attach");
        return httpClient.sync(uri, body, POST, BillogramResponse.class);
    }

    /** Asynchronous variant of {@link #addPdf(String, String, byte[])}. */
    public void addPdfAsync(String billogramId, String fileName, byte[] pdfData, BillogramCallback<BillogramResponse> callback) {
        Map<String, String> body = new HashMap<>();
        body.put("filename", fileName);
        body.put("content", ByteString.of(pdfData).base64());
        final URI uri = httpClient.pathToUri(endpoint, billogramId, "command", "attach");
        httpClient.async(uri, body, POST, callback, BillogramResponse.class);
    }

    /** Fetches the billogram's invoice PDF; {@code invoiceNo} and {@code letterId} are optional filters. */
    public PdfFileResponse getPdf(String billogramId, String invoiceNo, String letterId) throws IOException {
        Map<String, String> queryParameters = createPdfQueryParameter(invoiceNo, letterId);
        final URI uri = httpClient.pathToUri(queryParameters, endpoint, billogramId + ".pdf");
        return httpClient.sync(uri, null, GET, PdfFileResponse.class);
    }

    /** Asynchronous variant of {@link #getPdf(String, String, String)}. */
    public void getPdf(String billogramId, String invoiceNo, String letterId, BillogramCallback<PdfFileResponse> callback) {
        Map<String, String> queryParameters = createPdfQueryParameter(invoiceNo, letterId);
        final URI uri = httpClient.pathToUri(queryParameters, endpoint, billogramId + ".pdf");
        httpClient.async(uri, null, GET, callback, PdfFileResponse.class);
    }

    /** Fetches a PDF attachment previously added via {@link #addPdf}. */
    public PdfFileResponse getPdfAttachment(String billogramId, String invoiceNo, String letterId) throws IOException {
        Map<String, String> queryParameters = createPdfQueryParameter(invoiceNo, letterId);
        final URI uri = httpClient.pathToUri(queryParameters, endpoint, billogramId + "/attachment.pdf");
        return httpClient.sync(uri, null, GET, PdfFileResponse.class);
    }

    /** Asynchronous variant of {@link #getPdfAttachment(String, String, String)}. */
    public void getPdfAttachment(String billogramId, String invoiceNo, String letterId, BillogramCallback<PdfFileResponse> callback) {
        Map<String, String> queryParameters = createPdfQueryParameter(invoiceNo, letterId);
        final URI uri = httpClient.pathToUri(queryParameters, endpoint, billogramId + "/attachment.pdf");
        httpClient.async(uri, null, GET, callback, PdfFileResponse.class);
    }

    /** Builds the optional query-parameter map shared by the PDF getters; null inputs are omitted. */
    private Map<String, String> createPdfQueryParameter(final String invoiceNo, final String letterId) {
        Map<String, String> queryParameters = new HashMap<>();
        if (invoiceNo != null) {
            queryParameters.put("invoice_no", invoiceNo);
        }
        if (letterId != null) {
            queryParameters.put("letter_id", letterId);
        }
        return queryParameters;
    }

    @Override
    public void delete(String id) throws IOException {
        super.delete(id);
    }

    @Override
    public void deleteAsync(String id, BillogramCallback<Void> callback) {
        super.deleteAsync(id, callback);
    }

    @Override
    protected String getId(Billogram fullBillogram) {
        return fullBillogram.getId();
    }

    /** Deserializes a webhook callback payload posted by Billogram. */
    public CallbackPost parseCallback(String json) {
        return httpClient.getMapper().fromJson(json, CallbackPost.class);
    }
}
| |
package net.ripe.db.whois.query.executor;
import net.ripe.db.whois.common.domain.CIString;
import net.ripe.db.whois.common.domain.ResponseObject;
import net.ripe.db.whois.common.rpsl.AttributeType;
import net.ripe.db.whois.common.source.IllegalSourceException;
import net.ripe.db.whois.common.source.Source;
import net.ripe.db.whois.common.source.SourceContext;
import net.ripe.db.whois.query.domain.MessageObject;
import net.ripe.db.whois.query.domain.QueryException;
import net.ripe.db.whois.query.domain.QueryMessages;
import net.ripe.db.whois.query.planner.RpslResponseDecorator;
import net.ripe.db.whois.query.query.Query;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.runners.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import java.util.Collections;
import static net.ripe.db.whois.common.domain.CIString.ciSet;
import static net.ripe.db.whois.common.domain.CIString.ciString;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for SearchQueryExecutor: query support detection (inverse queries)
 * and source selection/cleanup when executing against one or more sources.
 * All collaborators are Mockito mocks; the searcher is stubbed to return no
 * results and the decorator to pass results through unchanged.
 */
@RunWith(MockitoJUnitRunner.class)
public class SearchQueryExecutorTest {
@Mock SourceContext sourceContext;
@Mock RpslObjectSearcher rpslObjectSearcher;
@Mock RpslResponseDecorator rpslResponseDecorator;
@InjectMocks SearchQueryExecutor subject;
@Before
public void setUp() throws Exception {
// Searcher finds nothing by default; decorator echoes back its input Iterable
// (argument index 1) so responses reach the handler unmodified.
when(rpslObjectSearcher.search(any(Query.class))).thenReturn((Iterable)Collections.emptyList());
when(rpslResponseDecorator.getResponse(any(Query.class), any(Iterable.class))).thenAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
return invocationOnMock.getArguments()[1];
}
});
}
// Every implemented attribute type must be usable in an inverse (-i) query.
@Test
public void all_attributes_handled() {
for (final AttributeType attributeType : AttributeType.implementedValues()) {
assertTrue(subject.supports(Query.parse("-i " + attributeType.getName() + " query")));
}
}
// "-i" with no attribute list is invalid: parsing/support must throw.
// (The assertThat is never reached; the expected exception is the check.)
@Test(expected = QueryException.class)
public void test_supports_no_attributes() {
assertThat(subject.supports(Query.parse("-i")), is(false));
}
@Test
public void test_supports_inverse_with_filter() {
assertThat(subject.supports(Query.parse("-T inetnum -i mnt-by aardvark-mnt")), is(true));
}
@Test
public void test_supports_inverse_recursive() {
assertThat(subject.supports(Query.parse("-i mnt-by aardvark-mnt")), is(true));
}
@Test
public void test_supports_inverse() {
assertThat(subject.supports(Query.parse("-r -i mnt-by aardvark-mnt")), is(true));
}
@Test
public void test_supports_inverse_multiple() {
assertThat(subject.supports(Query.parse("-r -i mnt-by,mnt-ref aardvark-mnt")), is(true));
}
@Test
public void test_supports_inverse_multiple_unknown() {
assertThat(subject.supports(Query.parse("-r -i mnt-by,mnt-ref,mnt-lower aardvark-mnt")), is(true));
}
// An unknown source must produce exactly one error message and no search.
@Test
public void unknown_source() {
final Query query = Query.parse("-s UNKNOWN 10.0.0.0");
doThrow(IllegalSourceException.class).when(sourceContext).setCurrent(Source.slave("UNKNOWN"));
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(rpslObjectSearcher, never()).search(query);
assertThat(responseHandler.getResponseObjects(), hasItems((ResponseObject) new MessageObject(QueryMessages.unknownSource("UNKNOWN").toString() + "\n")));
assertThat(responseHandler.getResponseObjects(), hasSize(1)); // make sure that e.g. 'no results found' is not printed
}
// --all-sources: every known source is set, searched, and removed again.
@Test
public void query_all_sources() {
when(sourceContext.getAllSourceNames()).thenReturn(ciSet("APNIC-GRS", "ARIN-GRS"));
final Query query = Query.parse("--all-sources 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext, times(2)).removeCurrentSource();
verify(rpslObjectSearcher, times(2)).search(query);
}
// --sources with an explicit list: only the listed sources are searched.
@Test
public void query_sources() {
final Query query = Query.parse("--sources APNIC-GRS,ARIN-GRS 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext, times(2)).removeCurrentSource();
verify(rpslObjectSearcher, times(2)).search(query);
}
// --all-sources combined with --sources: the union of both is searched.
@Test
public void query_sources_and_additional() {
when(sourceContext.getAllSourceNames()).thenReturn(ciSet("APNIC-GRS", "ARIN-GRS"));
final Query query = Query.parse("--all-sources --sources RIPE 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext).setCurrent(Source.slave("RIPE"));
verify(sourceContext, times(3)).removeCurrentSource();
verify(rpslObjectSearcher, times(3)).search(query);
}
// --resource: only the GRS sources are searched.
@Test
public void query_resources() {
when(sourceContext.getGrsSourceNames()).thenReturn(ciSet("APNIC-GRS", "ARIN-GRS"));
final Query query = Query.parse("--resource 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext, times(2)).removeCurrentSource();
verify(rpslObjectSearcher, times(2)).search(query);
}
// Virtual sources (here RIPE-GRS) must be skipped by --all-sources.
@Test
public void query_all_sources_filters_virtual_sources() {
when(sourceContext.getAllSourceNames()).thenReturn(ciSet("RIPE", "RIPE-GRS", "APNIC-GRS", "ARIN-GRS"));
when(sourceContext.isVirtual(any(CIString.class))).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
final Object[] arguments = invocation.getArguments();
return (ciString("RIPE-GRS").equals(arguments[0]));
}
});
final Query query = Query.parse("--all-sources 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext, never()).setCurrent(Source.slave("RIPE-GRS"));
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext).setCurrent(Source.slave("RIPE"));
verify(sourceContext, times(3)).removeCurrentSource();
verify(rpslObjectSearcher, times(3)).search(query);
}
// With no source flag, the configured whois slave source is used.
@Test
public void query_no_source_specified() {
when(sourceContext.getWhoisSlaveSource()).thenReturn(Source.slave("RIPE"));
final Query query = Query.parse("10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("RIPE"));
verify(sourceContext).removeCurrentSource();
verify(rpslObjectSearcher).search(query);
}
// An empty search result yields the "no results" message for the source.
@Test
public void no_results_found_gives_message() {
final Query query = Query.parse("-s RIPE 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(rpslObjectSearcher).search(query);
verify(rpslResponseDecorator).getResponse(eq(query), any(Iterable.class));
assertThat(responseHandler.getResponseObjects(), contains((ResponseObject) new MessageObject(QueryMessages.noResults("RIPE").toString())));
}
// Configured additional sources are searched after the default source.
@Test
public void query_additional_sources() {
when(sourceContext.getAdditionalSourceNames()).thenReturn(ciSet("APNIC-GRS", "ARIN-GRS"));
when(sourceContext.getWhoisSlaveSource()).thenReturn(Source.slave("RIPE"));
final Query query = Query.parse("10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("RIPE"));
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext, times(3)).removeCurrentSource();
verify(rpslObjectSearcher, times(3)).search(query);
}
// Explicit --sources takes precedence: additional sources are NOT appended.
@Test
public void query_sources_not_additional() {
when(sourceContext.getAdditionalSourceNames()).thenReturn(ciSet("RIPE", "APNIC-GRS", "ARIN-GRS"));
final Query query = Query.parse("--sources APNIC-GRS,ARIN-GRS 10.0.0.0");
final CaptureResponseHandler responseHandler = new CaptureResponseHandler();
subject.execute(query, responseHandler);
verify(sourceContext).setCurrent(Source.slave("APNIC-GRS"));
verify(sourceContext).setCurrent(Source.slave("ARIN-GRS"));
verify(sourceContext, times(2)).removeCurrentSource();
verify(rpslObjectSearcher, times(2)).search(query);
}
}
| |
package com.ilscipio.scipio.cms.media;
import java.io.IOException;
import java.io.InputStream;
import java.util.Locale;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilHttp;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilProperties;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.common.image.ImageProfile;
import org.ofbiz.common.image.ImageVariantConfig;
import org.ofbiz.content.data.DataResourceWorker;
import com.ilscipio.scipio.content.image.ContentImageWorker;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.util.EntityQuery;
import org.ofbiz.entity.util.EntityUtil;
import org.ofbiz.entity.util.EntityUtilProperties;
import org.ofbiz.service.GenericServiceException;
import org.ofbiz.service.LocalDispatcher;
import org.ofbiz.service.ServiceUtil;
import org.ofbiz.webapp.WebAppUtil;
import com.ilscipio.scipio.cms.CmsUtil;
/**
* Servlet used to serve media files, which basically consists in getting the
* corresponding raw data from database and stream it in the response.
* FIXME?: 2017-08-08: currently there is little to no use of useCache...
*/
@SuppressWarnings("serial")
public class CmsMediaServlet extends HttpServlet {
// Logger keyed to this class.
private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass());
// Default name of the request parameter that toggles cache usage (see init()).
protected static final String USE_CACHE_PARAM_DEFAULT = "cache";
// Which field supplies the served filename: "origfn" prefers the original upload
// name (objectInfo), anything else prefers the data resource name; the other
// field acts as the fallback.
private static final String FN_SOURCE = UtilProperties.getPropertyValue("cms", "media.serve.filename.source", "name");
private static final String fnSrcFieldName = "origfn".equals(FN_SOURCE) ? "objectInfo" : "dataResourceName";
private static final String fnSrcFieldNameFallback = "origfn".equals(FN_SOURCE) ? "dataResourceName" : "objectInfo";
// Global switch for image-variant (resized media) support, from cms.properties.
private static final boolean variantsEnabled = UtilProperties.getPropertyAsBoolean("cms", "media.variants.enabled", true);
// Per-servlet cache settings; both may be overridden by init parameters in init().
private boolean useCacheDefault = true;
private String useCacheParam = USE_CACHE_PARAM_DEFAULT;
/**
 * Reads servlet init parameters:
 * "useCache" sets the default cache flag (defaults to true); "useCacheParam"
 * names the request parameter that can override caching per request — the
 * value "true" keeps the default parameter name, an empty value or "false"
 * disables the override entirely, and any other value becomes the parameter
 * name. Settings are logged at info level.
 */
@Override
public void init(ServletConfig config) throws ServletException {
    super.init(config);
    this.useCacheDefault = UtilMisc.booleanValue(config.getInitParameter("useCache"), true);
    String overrideName = config.getInitParameter("useCacheParam");
    if (overrideName != null && !"true".equals(overrideName)) {
        this.useCacheParam = (overrideName.isEmpty() || "false".equals(overrideName)) ? null : overrideName;
    }
    if (Debug.infoOn()) {
        Debug.logInfo("Cms: Media servlet settings for servlet '" + config.getServletName() + "' of webapp '"
                + config.getServletContext().getContextPath() + "': ["
                + "useCache=" + this.useCacheDefault + ", useCacheParam="
                + (this.useCacheParam != null ? this.useCacheParam : "(disabled)") + "]", module);
    }
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// POST is handled identically to GET: delegate directly.
doGet(request, response);
}
/**
* reference: {@link org.ofbiz.content.data.DataEvents#serveImage}
* <p>
* TODO: still missing an "auto" best-size selection based on width and height
* TODO: this isn't looking at the global debug flag yet for the error msgs
* WARN: autoVariant logic has severe limitations - see {@link CmsMediaWorker}.
* TODO: REVIEW: Should this method be enclosed in a transaction? Mitigated for now using useCache.
*/
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// SPECIAL: getDelegator/getDispatcher methods required so tenant db doesn't break (or breaks less)
Delegator delegator = WebAppUtil.getDelegatorFilterSafe(request);
LocalDispatcher dispatcher = WebAppUtil.getDispatcherFilterSafe(request, delegator);
Locale locale = UtilHttp.getLocale(request);
GenericValue userLogin = WebAppUtil.getUserLogin(request);
String contentId = request.getParameter("contentId");
String dataResourceId = request.getParameter("dataResourceId");
String variant = null; // this specifies an exact variant to use, by name
ImageVariantConfig.FitMode autoVariantMode = null; // this tries to find a best-fit variant
String widthStr = null;
String heightStr = null;
if (variantsEnabled) {
variant = request.getParameter("variant");
autoVariantMode = ImageVariantConfig.FitMode.fromStrNameParamSafe(request.getParameter("autoVariant"));
widthStr = request.getParameter("width");
heightStr = request.getParameter("height");
if (autoVariantMode == null && (UtilValidate.isNotEmpty(widthStr) || UtilValidate.isNotEmpty(heightStr))) {
autoVariantMode = ImageVariantConfig.FitMode.DEFAULT;
}
}
final boolean useCache = isUseCache(request);
GenericValue content;
GenericValue dataResource;
try {
String isPublic;
// AUTO VARIANT MODE - more difficult
if (autoVariantMode != null) {
if (UtilValidate.isNotEmpty(contentId)) {
/**
* Tries to find the best image variant name to use for the width & height.
* WARN/FIXME: this has serious limitations in current form - we are forced to use
* ImageProperties.xml to get the dimensions rather than the actual resized dims of the
* images - there is no point fixing this here currently because can't solve this
* problem for the file-based storage elsewhere yet.
*/
content = delegator.from("Content").where("contentId", contentId).cache().queryOne();
if (content == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND, "Media not found with contentId [" + contentId + "]");
return;
}
} else if (request.getPathInfo() != null && request.getPathInfo().length() > 1) {
String contentPath = request.getPathInfo().substring(1); // no slash
content = delegator.from("Content").where("contentPath", contentPath).cache().queryFirst();
if (content == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with contentPath [" + contentPath + "]");
return;
}
contentId = content.getString("contentId");
} else {
response.sendError(HttpServletResponse.SC_NOT_FOUND, "Media not found");
return;
}
ImageProfile imageProfile = ContentImageWorker.getContentImageProfileOrDefault(delegator, content, true, true);
if (imageProfile == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,"Invalid media with contentId [" + contentId + "]");
return;
}
ImageVariantConfig imgVariantCfg = imageProfile.getVariantConfig(); // Now cached by ImageProfile
if (imgVariantCfg != null) {
if (CmsUtil.verboseOn()) {
Debug.logInfo("Cms: Auto-selecting image variant [contentId: " + contentId + ", mode: " + autoVariantMode.getStrName() + "]", module);
}
ImageVariantConfig.VariantInfo variantInfo = imgVariantCfg.getCanvasBestFitVariant(autoVariantMode,
UtilValidate.isNotEmpty(widthStr) ? Integer.parseInt(widthStr) : null,
UtilValidate.isNotEmpty(heightStr) ? Integer.parseInt(heightStr) : null);
if (variantInfo != null) {
variant = variantInfo.getName();
if (CmsUtil.verboseOn()) {
Debug.logInfo("Cms: Auto-selected image variant [contentId: " + contentId + ", mode: " + autoVariantMode.getStrName() + ", variant: " + variant + "]", module);
}
} else {
if (CmsUtil.verboseOn()) {
Debug.logInfo("Cms: No best image variant available [contentId: " + contentId + ", mode: " + autoVariantMode.getStrName() + "]; defaulting to original", module);
}
}
} else {
if (CmsUtil.verboseOn()) {
Debug.logWarning("Cms: Cannot auto-select image variant - no image variant config (ImageProperties.xml) available for CMS", module);
}
}
}
if ((UtilValidate.isEmpty(variant) || "original".equals(variant))) {
// STANDARD CASE
if (UtilValidate.isNotEmpty(dataResourceId)) {
dataResource = delegator.from("DataResourceContentRequiredView").where("dataResourceId", dataResourceId).cache(useCache).queryFirst();
if (dataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with dataResourceId [" + dataResourceId + "]");
return;
}
contentId = dataResource.getString("coContentId");
} else if (UtilValidate.isNotEmpty(contentId)) {
dataResource = delegator.from("DataResourceContentRequiredView").where("coContentId", contentId).cache(useCache).queryFirst();
if (dataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with contentId [" + contentId + "]");
return;
}
} else if (request.getPathInfo() != null && request.getPathInfo().length() > 1) {
String contentPath = request.getPathInfo().substring(1); // no slash
dataResource = delegator.from("DataResourceContentRequiredView").where("coContentPath", contentPath).cache(useCache).queryFirst();
if (dataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with contentPath [" + contentPath + "]");
return;
}
contentId = dataResource.getString("coContentId");
} else {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Missing or invalid dataResourceId or contentId parameter - cannot determine media");
return;
}
if (CmsUtil.verboseOn()) {
Debug.logInfo("Cms: Serving media (original) [contentId: " + contentId + "]", module);
}
isPublic = dataResource.getString("isPublic");
} else {
GenericValue origDataResource = null;
if (UtilValidate.isNotEmpty(dataResourceId)) {
origDataResource = EntityUtil.getFirst(EntityQuery.use(delegator).from("DataResourceContentRequiredView").where("dataResourceId", dataResourceId).cache(useCache).queryList());
if (origDataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with dataResourceId [" + dataResourceId + "]");
return;
}
contentId = origDataResource.getString("coContentId");
} else if (UtilValidate.isNotEmpty(contentId)) {
// if contentId is passed, we can currently skip the original's lookup
// NOTE: this could change, but trying to avoid...
;
} else if (request.getPathInfo() != null && request.getPathInfo().length() > 1) {
String contentPath = request.getPathInfo().substring(1); // no slash
dataResource = delegator.from("DataResourceContentRequiredView").where("coContentPath", contentPath).cache(useCache).queryFirst();
if (dataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with contentPath [" + contentPath + "]");
return;
}
contentId = dataResource.getString("coContentId");
} else {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Missing or invalid dataResourceId or contentId parameter - cannot determine media");
return;
}
// this implies we're getting IMAGE_OBJECT type
GenericValue contentAssoc = EntityUtil.getFirst(EntityQuery.use(delegator).from("ContentAssoc").where("contentId", contentId,
"contentAssocTypeId", "IMGSZ_" + variant.toUpperCase()).cache(useCache).queryList());
if (contentAssoc != null) {
dataResource = EntityUtil.getFirst(EntityQuery.use(delegator).from("DataResourceContentRequiredView")
.where("coContentId", contentAssoc.getString("contentIdTo")).cache(useCache).queryList());
if (dataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with contentId [" + contentId + "]");
return;
}
if (CmsUtil.verboseOn()) {
Debug.logInfo("Cms: Serving image variant [contentId: " + contentId + ", variant: " + variant + ", variant contentId: " + dataResource.getString("coContentId") + "]", module);
}
} else {
if (Debug.verboseOn()) {
Debug.logInfo("Cms: No variant image found [contentId: " + contentId + ", variant: " + variant + "]; serving original", module);
}
if (origDataResource != null) {
dataResource = origDataResource;
} else {
dataResource = EntityUtil.getFirst(EntityQuery.use(delegator).from("DataResourceContentRequiredView").where("coContentId", contentId).cache(useCache).queryList());
if (dataResource == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND,
"Media not found with contentId [" + contentId + "]");
return;
}
}
}
// WARN: we are getting the isPublic from the RESIZED image here, NOT the original; this may allow
// faster lookup (and more exact), but it relies on the media services properly updating the resized
// images!!
isPublic = dataResource.getString("isPublic");
}
// SECURITY: absolutely must deny anything not marked as CMS media, otherwise this could be used to read sensitive internal documents!
if (dataResource.getString("coContentTypeId") == null || !dataResource.getString("coContentTypeId").startsWith("SCP_MEDIA")) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Media not found");
return;
}
// SECURITY: 2017-08-02: isPublic check; borrowed from DataEvents.serveObjectData
String permissionService = EntityUtilProperties.getPropertyValue("content", "stream.permission.service", "genericContentPermission", delegator);
// see if data resource is public or not
if (UtilValidate.isEmpty(isPublic)) {
isPublic = "N";
}
// not public check security
if (!"Y".equalsIgnoreCase(isPublic)) {
// 2021-01-13: if no userlogin, automatically deny otherwise below fails
if (userLogin == null) {
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal error"); // WARN: DO NOT send details, for security reasons
return;
}
// do security check
Map<String, ? extends Object> permSvcCtx = UtilMisc.toMap("userLogin", userLogin, "locale", locale, "mainAction", "VIEW", "contentId", contentId);
Map<String, Object> permSvcResp;
try {
permSvcResp = dispatcher.runSync(permissionService, permSvcCtx);
} catch (GenericServiceException e) {
Debug.logError(e, module);
//request.setAttribute("_ERROR_MESSAGE_", e.getMessage());
//return "error";
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal error"); // WARN: DO NOT send details, for security reasons
return;
}
if (ServiceUtil.isError(permSvcResp)) {
String errorMsg = ServiceUtil.getErrorMessage(permSvcResp);
Debug.logError(errorMsg, module);
//request.setAttribute("_ERROR_MESSAGE_", errorMsg);
//return "error";
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal error"); // WARN: DO NOT send details, for security reasons
return;
}
// no service errors; now check the actual response
Boolean hasPermission = (Boolean) permSvcResp.get("hasPermission");
if (hasPermission == null || !hasPermission.booleanValue()) {
String errorMsg = (String) permSvcResp.get("failMessage");
Debug.logError(errorMsg, module);
//request.setAttribute("_ERROR_MESSAGE_", errorMsg);
//return "error";
response.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized"); // WARN: DO NOT send details, for security reasons
return;
}
}
String fileName = (UtilValidate.isNotEmpty(dataResource.getString(fnSrcFieldName))) ? dataResource.getString(fnSrcFieldName)
: dataResource.getString(fnSrcFieldNameFallback);
// see org.ofbiz.content.data.DataEvents#serveImage for reference code
ServletContext application = request.getServletContext(); // SCIPIO: NOTE: no longer need getSession() for getServletContext(), since servlet API 3.0
Map<String, Object> streamResult = DataResourceWorker.getDataResourceStream(dataResource, "", application.getInitParameter("webSiteId"), locale, application.getRealPath("/"), useCache);
byte[] mediaData = (byte[]) streamResult.get("streamBytes");
InputStream mediaStream = (InputStream) streamResult.get("stream");
long mediaLength = (long) streamResult.get("length");
response.setContentType(dataResource.getString("mimeTypeId"));
response.setHeader("Content-Disposition", "inline; filename= " + fileName);
response.setContentLengthLong(mediaLength);
if (mediaData != null) {
response.getOutputStream().write(mediaData, 0, (int) mediaLength);
} else if (mediaStream != null) {
UtilHttp.streamContent(response.getOutputStream(), mediaStream, (int) mediaLength);
} else {
Debug.logError("Cms: Bad stream/bytes source [effective contentId: " + dataResource.getString("coContentId") + "]", module);
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal error"); // WARN: DO NOT send details, for security reasons
return;
}
} catch (Exception e) {
Debug.logError(e, module);
response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Internal error"); // WARN: DO NOT send details, for security reasons
return;
}
}
protected boolean isUseCache(HttpServletRequest request) {
if (useCacheParam != null) {
String value = request.getParameter(useCacheParam);
if (value != null) {
if ("Y".equals(value)) {
return true;
} else if ("N".equals(value)) {
return false;
} else {
; // ignore.
}
}
}
return useCacheDefault;
}
}
| |
/*
* Copyright 2014 Tariq Bugrara
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.progix.dropwizard.patch;
import com.fasterxml.jackson.core.JsonPointer;
import io.progix.dropwizard.patch.exception.InvalidPatchPathException;
import java.util.ArrayList;
import java.util.List;
/**
* Wrapper class for Jackson's {@link JsonPointer}
* <p/>
* Created to be more user-friendly for explicit patching. This class is used to determine what a String Json path as
* defined in RFC6901 points to
*/
/**
 * Wrapper class for Jackson's {@link JsonPointer}
 * <p/>
 * Created to be more user-friendly for explicit patching. This class is used to determine what a String Json path as
 * defined in RFC6901 points to.
 */
public class JsonPath {

    // Per-segment views of the pointer. For each segment, the list whose kind does
    // not apply holds an "empty" placeholder object whose exists() returns false.
    private List<JsonPathProperty> properties;
    private List<JsonPathElement> elements;

    // Human-readable rendering of the pointer, rebuilt segment by segment in the constructor.
    private String pathString;

    // The original Jackson pointer this path was built from.
    private JsonPointer jsonPointer;

    // Number of non-empty segments in the pointer.
    private int size;

    /**
     * Creates the path using a {@link JsonPointer} by iterating through the segments and creating {@link
     * JsonPathProperty} and {@link JsonPathElement} for each segment.
     * <p/>
     * If a given segment does not match as a String property, an empty {@link JsonPathProperty} is created.
     * <p/>
     * If a given segment does not match as a Integer index, an empty {@link JsonPathElement} is created.
     *
     * @param pointer the Jackson pointer to wrap; iteration consumes its tail() chain
     *
     * @see com.fasterxml.jackson.core.JsonPointer
     */
    public JsonPath(JsonPointer pointer) {
        this.jsonPointer = pointer;
        this.properties = new ArrayList<>();
        this.elements = new ArrayList<>();
        this.pathString = "";
        while (pointer != null) {
            if (pointer.getMatchingProperty() != null && !pointer.getMatchingProperty().isEmpty()) {
                size++;
            }
            // Keep in mind, Jackson's implementation of JsonPointer allows all segments to be properties
            if (pointer.mayMatchProperty() && !pointer.mayMatchElement() && !pointer.getMatchingProperty()
                    .isEmpty() && !pointer.getMatchingProperty().equals("-")) {
                properties.add(new JsonPathProperty(pointer.getMatchingProperty()));
                this.pathString += "/" + pointer.getMatchingProperty();
            } else if (pointer.getMatchingProperty().equals("-")) {
                // This character represents the last element in an array
                elements.add(new JsonPathElement(true));
                // NOTE(review): this appends "-" with a TRAILING slash and no leading one
                // (inconsistent with the other branches), and because the generic
                // mayMatchElement() check below still runs for this segment, a "-" segment
                // contributes TWO entries to 'elements' and none to 'properties', shifting
                // segment indices for any path with "-" in a non-terminal position.
                // Preserved as-is pending confirmation against callers — TODO confirm.
                this.pathString += pointer.getMatchingProperty() + "/";
            } else {
                properties.add(new JsonPathProperty());
            }
            if (pointer.mayMatchElement()) {
                elements.add(new JsonPathElement(pointer.getMatchingIndex()));
                this.pathString += "/" + pointer.getMatchingIndex();
            } else {
                elements.add(new JsonPathElement());
            }
            pointer = pointer.tail();
        }
    }

    /**
     * Convenience method to retrieve all {@link JsonPathProperty} for this path
     *
     * @return the list of {@link JsonPathProperty}
     */
    public List<JsonPathProperty> getProperties() {
        return properties;
    }

    /**
     * Convenience method to retrieve all {@link JsonPathElement} for this path
     *
     * @return the list of {@link JsonPathElement}
     */
    public List<JsonPathElement> getElements() {
        return elements;
    }

    /**
     * @return the number of segments in this path
     */
    public int size() {
        return size;
    }

    /**
     * Determines if this JsonPath is equivalent to the given String path.
     * <p/>
     * Note that there should be no leading slash in the given path.
     * <p/>
     * This method treats the string "#" as a special meaning to signify a numerical segment within the JsonPointer. If
     * you use the '#' character as a property name, use "~#" to properly treat that segment as a property and not an
     * array element.
     *
     * @param path String path to check
     *
     * @return true if the path is given is equivalent to this JsonPath, false otherwise.
     */
    public boolean is(String path) {
        if (path.charAt(0) != '/') {
            throw new IllegalArgumentException("Paths must start with a '/'");
        }
        // Remove first slash which is guaranteed
        path = path.substring(1, path.length());
        String[] tokens = path.split("/");
        if (!endsAt(tokens.length - 1)) {
            return false;
        }
        for (int i = 0; i < tokens.length; i++) {
            String token = tokens[i];
            if (property(i).exists()) {
                // Can't be true because a property exists at i and path is checking for an element
                if (token.equals("#")) {
                    return false;
                }
                // Convert special character to "#"
                if (token.equals("~#")) {
                    token = "#";
                }
                if (!token.equals(property(i).val())) {
                    return false;
                }
            } else if (element(i).exists()) {
                // BUGFIX: the previous implementation returned true as soon as a single
                // element segment matched (skipping all remaining segments), and silently
                // continued the loop when an element segment did NOT match. An element
                // mismatch must fail the comparison; a match only advances to the next
                // segment.
                boolean elementMatches = token.equals("#")
                        || (token.equals("-") && element(i).isEndOfArray())
                        || String.valueOf(element(i).val()).equals(token);
                if (!elementMatches) {
                    return false;
                }
            } else {
                // Neither a property nor an element exists at this index: the given path
                // is longer than (or misaligned with) this JsonPath.
                return false;
            }
        }
        return true;
    }

    /**
     * This method can be used to determine when an {@link InvalidPatchPathException} should be thrown. Uses this
     * exception and method provides useful information for the client when trying to patch a resource in a way the
     * server does not support
     * <p/>
     * Ex. If the path is "/a/b/c", endsAt(2) will return true while endsAt(1) and endsAt(3) will return false
     *
     * @param index The index to test if this path ends
     *
     * @return true if the index provided is the last segment to contain data, false otherwise
     */
    public boolean endsAt(int index) {
        int next = index + 1;
        return (property(index).exists() || element(index).exists()) && !property(next).exists() && !element(next)
                .exists();
    }

    /**
     * This method will never return null. If trying to access a {@link JsonPathProperty} for a segment that is not a
     * String property, will return a special object whose {@link JsonPathProperty#exists()} will return false
     *
     * @param index the segment index to retrieve
     *
     * @return a {@link JsonPathProperty} for this index
     */
    public JsonPathProperty property(int index) {
        if (index >= properties.size()) {
            return new JsonPathProperty();
        }
        return properties.get(index);
    }

    /**
     * This method will never return null. If trying to access a {@link JsonPathElement} for a segment that is not an
     * Integer property, will return a special object whose {@link JsonPathElement#exists()} will return false
     *
     * @param index the segment index to retrieve
     *
     * @return a {@link JsonPathElement} for this index
     */
    public JsonPathElement element(int index) {
        if (index >= elements.size()) {
            return new JsonPathElement();
        }
        return elements.get(index);
    }

    @Override
    public String toString() {
        return pathString;
    }

    /** @return the underlying Jackson pointer this path wraps */
    public JsonPointer getJsonPointer() {
        return jsonPointer;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.athena.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListPreparedStatements" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Request for listing the prepared statements of an Athena workgroup, with optional pagination.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListPreparedStatements" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListPreparedStatementsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The workgroup to list the prepared statements for. */
    private String workGroup;

    /**
     * Pagination token generated by the Athena service when a previous request was truncated; pass the
     * <code>NextToken</code> from the previous page's response to fetch the next set of pages.
     */
    private String nextToken;

    /** The maximum number of results to return in this request. */
    private Integer maxResults;

    /**
     * Sets the workgroup to list the prepared statements for.
     *
     * @param workGroup
     *        The workgroup to list the prepared statements for.
     */
    public void setWorkGroup(String workGroup) {
        this.workGroup = workGroup;
    }

    /**
     * @return The workgroup to list the prepared statements for.
     */
    public String getWorkGroup() {
        return this.workGroup;
    }

    /**
     * Fluent variant of {@link #setWorkGroup(String)}.
     *
     * @param workGroup
     *        The workgroup to list the prepared statements for.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPreparedStatementsRequest withWorkGroup(String workGroup) {
        setWorkGroup(workGroup);
        return this;
    }

    /**
     * Sets the pagination token from a previous truncated response.
     *
     * @param nextToken
     *        A token generated by the Athena service that specifies where to continue pagination if a previous request
     *        was truncated.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return The pagination token from a previous truncated response, if any.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        A token generated by the Athena service that specifies where to continue pagination.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPreparedStatementsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Sets the maximum number of results to return in this request.
     *
     * @param maxResults
     *        The maximum number of results to return in this request.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * @return The maximum number of results to return in this request.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(Integer)}.
     *
     * @param maxResults
     *        The maximum number of results to return in this request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListPreparedStatementsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        // Non-null fields are emitted in declaration order; getters are used so that
        // subclasses overriding accessors are rendered consistently.
        if (getWorkGroup() != null) {
            sb.append("WorkGroup: ").append(getWorkGroup()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getMaxResults() != null) {
            sb.append("MaxResults: ").append(getMaxResults());
        }
        return sb.append("}").toString();
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** Null-safe hash used by {@link #hashCode()} (null hashes to 0). */
    private static int fieldHash(Object value) {
        return (value == null) ? 0 : value.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListPreparedStatementsRequest)) {
            return false;
        }
        ListPreparedStatementsRequest other = (ListPreparedStatementsRequest) obj;
        return fieldEquals(getWorkGroup(), other.getWorkGroup())
                && fieldEquals(getNextToken(), other.getNextToken())
                && fieldEquals(getMaxResults(), other.getMaxResults());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + fieldHash(getWorkGroup());
        hashCode = prime * hashCode + fieldHash(getNextToken());
        hashCode = prime * hashCode + fieldHash(getMaxResults());
        return hashCode;
    }

    @Override
    public ListPreparedStatementsRequest clone() {
        return (ListPreparedStatementsRequest) super.clone();
    }
}
| |
package com.planet_ink.coffee_mud.Races;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2016-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * The Whale race: a huge air- and water-breathing sea mammal built on top of
 * {@link GiantFish}. Everything here is static racial data (sizes, body parts,
 * innate abilities/effects, natural weapon, butcherable resources) consumed by
 * the CoffeeMud engine.
 */
public class Whale extends GiantFish
{
	@Override
	public String ID()
	{
		return "Whale";
	}

	// Localized display name, resolved once at class-load time via the language library.
	private final static String localizedStaticName = CMLib.lang().L("Whale");

	@Override
	public String name()
	{
		return localizedStaticName;
	}

	// Minimum male height; presumably inches, per CoffeeMud convention — TODO confirm.
	@Override
	public int shortestMale()
	{
		return 360;
	}

	// Minimum female height; same value as males.
	@Override
	public int shortestFemale()
	{
		return 360;
	}

	// Random spread added on top of the minimum height.
	@Override
	public int heightVariance()
	{
		return 40;
	}

	// Minimum weight; presumably pounds, per CoffeeMud convention — TODO confirm.
	@Override
	public int lightestWeight()
	{
		return 21955;
	}

	// Random spread added on top of the minimum weight.
	@Override
	public int weightVariance()
	{
		return 2405;
	}

	// Bitmask of wear locations this race can NOT use: everything except the eyes.
	@Override
	public long forbiddenWornBits()
	{
		return ~(Wearable.WORN_EYES);
	}

	// Localized racial category, also resolved once at class-load time.
	private final static String localizedStaticRacialCat = CMLib.lang().L("Sea Mammal");

	@Override
	public String racialCategory()
	{
		return localizedStaticRacialCat;
	}

	// Shared, lazily-populated list of butcherable resources; guarded by
	// synchronized(resources) in myResources() below.
	private static Vector<RawMaterial> resources = new Vector<RawMaterial>();

	// Whales can breathe both air and water (array inherited from the fish hierarchy).
	@Override
	public int[] getBreathables()
	{
		return breatheAirWaterArray;
	}

	// Innate racial abilities: the Aquan language plus swimming skills, all granted
	// at level 1 with 100% proficiency and no qualification requirement.
	private final String[] racialAbilityNames = { "Aquan", "Skill_Swim", "Skill_AutoSwim" };
	private final int[] racialAbilityLevels = { 1,1,1 };
	private final int[] racialAbilityProficiencies = { 100,100,100 };
	private final boolean[] racialAbilityQuals = { false,false,false };
	private final String[] racialAbilityParms = { "", "", "" };

	@Override
	protected String[] racialAbilityNames()
	{
		return racialAbilityNames;
	}

	@Override
	protected int[] racialAbilityLevels()
	{
		return racialAbilityLevels;
	}

	@Override
	protected int[] racialAbilityProficiencies()
	{
		return racialAbilityProficiencies;
	}

	@Override
	protected boolean[] racialAbilityQuals()
	{
		return racialAbilityQuals;
	}

	@Override
	public String[] racialAbilityParms()
	{
		return racialAbilityParms;
	}

	// Permanent racial effect: Spell_BigMouth at level 1 with no parameters.
	private final String[] racialEffectNames = { "Spell_BigMouth"};
	private final int[] racialEffectLevels = { 1};
	private final String[] racialEffectParms = { "" };

	@Override
	protected String[] racialEffectNames()
	{
		return racialEffectNames;
	}

	@Override
	protected int[] racialEffectLevels()
	{
		return racialEffectLevels;
	}

	@Override
	protected String[] racialEffectParms()
	{
		return racialEffectParms;
	}

	// Body part counts, one slot per part type. The legend below abbreviates the part
	// names (column order fixed by the Race interface); non-zero entries here appear to
	// be: eyes=2, head=1, torso=1, "gi"=2, "mo"=1, "ta"=1 — presumably gills, mouth and
	// tail, TODO confirm against the Race body-part constants.
	// an ey ea he ne ar ha to le fo no gi mo wa ta wi
	private static final int[] parts={0 ,2 ,0 ,1 ,0 ,0 ,0 ,1 ,0 ,0 ,0 ,2 ,1 ,0 ,1 ,0 };

	@Override
	public int[] bodyMask()
	{
		return parts;
	}

	// Age category thresholds (in years) from infancy through ancient.
	private final int[] agingChart = { 0, 1, 3, 15, 35, 53, 70, 74, 78 };

	@Override
	public int[] getAgingChart()
	{
		return agingChart;
	}

	// Applies this race's fixed stat profile: low intelligence and dexterity,
	// very high strength. NOTE: the superclass call is deliberately left disabled
	// so GiantFish stat adjustments do not apply.
	@Override
	public void affectCharStats(final MOB affectedMOB, final CharStats affectableStats)
	{
		//super.affectCharStats(affectedMOB, affectableStats);
		affectableStats.setRacialStat(CharStats.STAT_INTELLIGENCE,1);
		affectableStats.setRacialStat(CharStats.STAT_STRENGTH,22);
		affectableStats.setRacialStat(CharStats.STAT_DEXTERITY,3);
	}

	// Reverses affectCharStats by restoring each touched stat (and its max-adjustment
	// counterpart) from the MOB's base character stats.
	@Override
	public void unaffectCharStats(final MOB affectedMOB, final CharStats affectableStats)
	{
		super.unaffectCharStats(affectedMOB, affectableStats);
		affectableStats.setStat(CharStats.STAT_INTELLIGENCE,affectedMOB.baseCharStats().getStat(CharStats.STAT_INTELLIGENCE));
		affectableStats.setStat(CharStats.STAT_MAX_INTELLIGENCE_ADJ,affectedMOB.baseCharStats().getStat(CharStats.STAT_MAX_INTELLIGENCE_ADJ));
		affectableStats.setStat(CharStats.STAT_STRENGTH,affectedMOB.baseCharStats().getStat(CharStats.STAT_STRENGTH));
		affectableStats.setStat(CharStats.STAT_MAX_STRENGTH_ADJ,affectedMOB.baseCharStats().getStat(CharStats.STAT_MAX_STRENGTH_ADJ));
		affectableStats.setStat(CharStats.STAT_DEXTERITY,affectedMOB.baseCharStats().getStat(CharStats.STAT_DEXTERITY));
		affectableStats.setStat(CharStats.STAT_MAX_DEXTERITY_ADJ,affectedMOB.baseCharStats().getStat(CharStats.STAT_MAX_DEXTERITY_ADJ));
	}

	// Movement message fragment shown when the whale enters a room.
	@Override
	public String arriveStr()
	{
		return "swims in";
	}

	// Movement message fragment shown when the whale leaves a room.
	@Override
	public String leaveStr()
	{
		return "swims";
	}

	// Lazily builds (and caches in the inherited naturalWeapon field) the race's
	// unarmed attack: a bashing bone "deadly maw".
	@Override
	public Weapon myNaturalWeapon()
	{
		if(naturalWeapon==null)
		{
			naturalWeapon=CMClass.getWeapon("StdWeapon");
			naturalWeapon.setName(L("a deadly maw"));
			naturalWeapon.setMaterial(RawMaterial.RESOURCE_BONE);
			naturalWeapon.setUsesRemaining(1000);
			naturalWeapon.setWeaponDamageType(Weapon.TYPE_BASHING);
		}
		return naturalWeapon;
	}

	// Resources yielded when butchered: 125 portions of fish meat, 115 hides, and
	// one blood resource. Populated once, under the lock, into the shared static
	// vector (so all Whale instances share the same list).
	@Override
	public List<RawMaterial> myResources()
	{
		synchronized(resources)
		{
			if(resources.size()==0)
			{
				for(int i=0;i<125;i++)
				{
					resources.addElement(makeResource
						(L("some @x1",name().toLowerCase()),RawMaterial.RESOURCE_FISH));
				}
				for(int i=0;i<115;i++)
				{
					resources.addElement(makeResource
						(L("a slippery @x1 hide",name().toLowerCase()),RawMaterial.RESOURCE_HIDE));
				}
				resources.addElement(makeResource
					(L("some @x1 blood",name().toLowerCase()),RawMaterial.RESOURCE_BLOOD));
			}
		}
		return resources;
	}
}
| |
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.samples.apps.iosched.service;
import android.app.AlarmManager;
import android.app.IntentService;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.TaskStackBuilder;
import android.util.Log;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;
import com.google.samples.apps.iosched.R;
import com.google.samples.apps.iosched.provider.ScheduleContract;
import com.google.samples.apps.iosched.ui.BaseMapActivity;
import com.google.samples.apps.iosched.ui.BrowseSessionsActivity;
import com.google.samples.apps.iosched.ui.MyScheduleActivity;
import com.google.samples.apps.iosched.util.FeedbackUtils;
import com.google.samples.apps.iosched.util.PrefUtils;
import com.google.samples.apps.iosched.util.UIUtils;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static com.google.samples.apps.iosched.util.LogUtils.LOGD;
import static com.google.samples.apps.iosched.util.LogUtils.LOGE;
import static com.google.samples.apps.iosched.util.LogUtils.makeLogTag;
/**
* Background service to handle scheduling of starred session notification via
* {@link android.app.AlarmManager}.
*/
/**
 * Background service to handle scheduling of starred session notifications via
 * {@link android.app.AlarmManager}, and to mirror session data to a paired Wear
 * device through the Wearable Data API.
 */
public class SessionAlarmService extends IntentService
        implements GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener {

    private static final String TAG = makeLogTag(SessionAlarmService.class);

    // Intent actions understood by onHandleIntent().
    public static final String ACTION_NOTIFY_SESSION =
            "com.google.samples.apps.iosched.action.NOTIFY_SESSION";
    public static final String ACTION_SCHEDULE_STARRED_BLOCK =
            "com.google.samples.apps.iosched.action.SCHEDULE_STARRED_BLOCK";
    public static final String ACTION_SCHEDULE_ALL_STARRED_BLOCKS =
            "com.google.samples.apps.iosched.action.SCHEDULE_ALL_STARRED_BLOCKS";

    // Intent extras carried alongside the actions above.
    public static final String EXTRA_SESSION_START =
            "com.google.samples.apps.iosched.extra.SESSION_START";
    public static final String EXTRA_SESSION_END =
            "com.google.samples.apps.iosched.extra.SESSION_END";
    public static final String EXTRA_SESSION_ALARM_OFFSET =
            "com.google.samples.apps.iosched.extra.SESSION_ALARM_OFFSET";
    public static final String EXTRA_SESSION_ID =
            "com.google.samples.apps.iosched.extra.SESSION_ID";
    public static final String EXTRA_SESSION_TITLE =
            "com.google.samples.apps.iosched.extra.SESSION_TITLE";
    public static final String EXTRA_SESSION_ROOM =
            "com.google.samples.apps.iosched.extra.SESSION_ROOM";
    public static final String EXTRA_SESSION_SPEAKERS =
            "com.google.samples.apps.iosched.extra.SESSION_SPEAKERS";

    public static final int NOTIFICATION_ID = 100;
    public static final int FEEDBACK_NOTIFICATION_ID = 101;

    // pulsate every 1 second, indicating a relatively high degree of urgency
    private static final int NOTIFICATION_LED_ON_MS = 100;
    private static final int NOTIFICATION_LED_OFF_MS = 1000;
    private static final int NOTIFICATION_ARGB_COLOR = 0xff0088ff; // cyan

    private static final long MILLI_TEN_MINUTES = 600000;
    private static final long MILLI_FIVE_MINUTES = 300000;
    private static final long MILLI_ONE_MINUTE = 60000;

    // Sentinel values meaning "extra was not supplied on the intent".
    private static final long UNDEFINED_ALARM_OFFSET = -1;
    private static final long UNDEFINED_VALUE = -1;

    public static final String ACTION_NOTIFICATION_DISMISSAL
            = "com.google.sample.apps.iosched.ACTION_NOTIFICATION_DISMISSAL";

    private GoogleApiClient mGoogleApiClient;

    // Keys for the DataMap payload sent to the Wear device.
    public static final String KEY_SESSION_ID = "session-id";
    private static final String KEY_SESSION_NAME = "session-name";
    private static final String KEY_SPEAKER_NAME = "speaker-name";
    private static final String KEY_SESSION_ROOM = "session-room";
    public static final String PATH_FEEDBACK = "/iowear/feedback";

    // special session ID that identifies a debug notification
    public static final String DEBUG_SESSION_ID = "debug-session-id";

    public SessionAlarmService() {
        super(TAG);
    }

    @Override
    public void onCreate() {
        super.onCreate();
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addApi(Wearable.API)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
    }

    /**
     * Dispatches on the intent action: either (re)schedules alarms for all
     * starred blocks, fires the "session about to start" notification, or
     * schedules the alarm for a single starred block.
     */
    @Override
    protected void onHandleIntent(Intent intent) {
        // Best-effort connect; setupNotificationOnWear() checks isConnected() before use.
        mGoogleApiClient.blockingConnect(2000, TimeUnit.MILLISECONDS);
        final String action = intent.getAction();
        LOGD(TAG, "SessionAlarmService handling " + action);
        if (ACTION_SCHEDULE_ALL_STARRED_BLOCKS.equals(action)) {
            LOGD(TAG, "Scheduling all starred blocks.");
            scheduleAllStarredBlocks();
            scheduleAllStarredSessionFeedbacks();
            return;
        }
        final long sessionEnd = intent.getLongExtra(SessionAlarmService.EXTRA_SESSION_END,
                UNDEFINED_VALUE);
        if (sessionEnd == UNDEFINED_VALUE) {
            LOGD(TAG, "IGNORING ACTION -- missing sessionEnd parameter");
            return;
        }
        final long sessionAlarmOffset =
                intent.getLongExtra(SessionAlarmService.EXTRA_SESSION_ALARM_OFFSET,
                        UNDEFINED_ALARM_OFFSET);
        LOGD(TAG, "Session alarm offset is: " + sessionAlarmOffset);
        final long sessionStart =
                intent.getLongExtra(SessionAlarmService.EXTRA_SESSION_START, UNDEFINED_VALUE);
        if (sessionStart == UNDEFINED_VALUE) {
            LOGD(TAG, "IGNORING ACTION -- no session start parameter.");
            return;
        }
        if (ACTION_NOTIFY_SESSION.equals(action)) {
            LOGD(TAG, "Notifying about sessions starting at " +
                    sessionStart + " = " + (new Date(sessionStart)).toString());
            LOGD(TAG, "-> Alarm offset: " + sessionAlarmOffset);
            notifySession(sessionStart, sessionAlarmOffset);
        } else if (ACTION_SCHEDULE_STARRED_BLOCK.equals(action)) {
            LOGD(TAG, "Scheduling session alarm.");
            LOGD(TAG, "-> Session start: " + sessionStart + " = " + (new Date(sessionStart))
                    .toString());
            LOGD(TAG, "-> Session end: " + sessionEnd + " = " + (new Date(sessionEnd)).toString());
            LOGD(TAG, "-> Alarm offset: " + sessionAlarmOffset);
            scheduleAlarm(sessionStart, sessionEnd, sessionAlarmOffset);
        }
    }

    /**
     * Computes and logs the feedback alarm time for a session.
     *
     * <p>NOTE(review): this method currently only logs the computed alarm time;
     * no alarm is actually registered with AlarmManager here. Confirm whether
     * that is intentional (e.g. feedback alarms disabled) or an omission.
     *
     * @param sessionEnd   session end time in epoch millis.
     * @param alarmOffset  {@link #UNDEFINED_ALARM_OFFSET} for the default
     *                     "5 minutes before end", or a delta from now (for testing).
     * @param sessionTitle title used for logging.
     */
    public void scheduleFeedbackAlarm(final long sessionEnd,
            final long alarmOffset, final String sessionTitle) {
        // By default, feedback alarms fire 5 minutes before session end time. If alarm offset is
        // provided, alarm is set to go off that much time from now (useful for testing).
        long alarmTime;
        if (alarmOffset == UNDEFINED_ALARM_OFFSET) {
            alarmTime = sessionEnd - MILLI_FIVE_MINUTES;
        } else {
            alarmTime = UIUtils.getCurrentTime(this) + alarmOffset;
        }
        LOGD(TAG, "Scheduling session feedback alarm for session '" + sessionTitle + "'");
        LOGD(TAG, " -> end time: " + sessionEnd + " = " + (new Date(sessionEnd)).toString());
        LOGD(TAG, " -> alarm time: " + alarmTime + " = " + (new Date(alarmTime)).toString());
    }

    /**
     * Registers an RTC_WAKEUP alarm that will fire {@link #ACTION_NOTIFY_SESSION}
     * for the given block. No-op if the session has already started.
     *
     * @param sessionStart session start time in epoch millis.
     * @param sessionEnd   session end time in epoch millis.
     * @param alarmOffset  {@link #UNDEFINED_ALARM_OFFSET} for the default
     *                     "10 minutes before start", or a delta from now.
     */
    private void scheduleAlarm(final long sessionStart,
            final long sessionEnd, final long alarmOffset) {
        NotificationManager nm =
                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        nm.cancel(NOTIFICATION_ID);
        final long currentTime = UIUtils.getCurrentTime(this);
        // If the session is already started, do not schedule system notification.
        if (currentTime > sessionStart) {
            LOGD(TAG, "Not scheduling alarm because target time is in the past: " + sessionStart);
            return;
        }
        // By default, sets alarm to go off at 10 minutes before session start time. If alarm
        // offset is provided, alarm is set to go off by that much time from now.
        long alarmTime;
        if (alarmOffset == UNDEFINED_ALARM_OFFSET) {
            alarmTime = sessionStart - MILLI_TEN_MINUTES;
        } else {
            alarmTime = currentTime + alarmOffset;
        }
        LOGD(TAG, "Scheduling alarm for " + alarmTime + " = " + (new Date(alarmTime)).toString());
        final Intent notifIntent = new Intent(
                ACTION_NOTIFY_SESSION,
                null,
                this,
                SessionAlarmService.class);
        // Setting data to ensure intent's uniqueness for different session start times.
        notifIntent.setData(
                new Uri.Builder().authority("com.google.samples.apps.iosched")
                        .path(String.valueOf(sessionStart)).build()
        );
        notifIntent.putExtra(SessionAlarmService.EXTRA_SESSION_START, sessionStart);
        LOGD(TAG, "-> Intent extra: session start " + sessionStart);
        notifIntent.putExtra(SessionAlarmService.EXTRA_SESSION_END, sessionEnd);
        LOGD(TAG, "-> Intent extra: session end " + sessionEnd);
        notifIntent.putExtra(SessionAlarmService.EXTRA_SESSION_ALARM_OFFSET, alarmOffset);
        LOGD(TAG, "-> Intent extra: session alarm offset " + alarmOffset);
        PendingIntent pi = PendingIntent.getService(this,
                0,
                notifIntent,
                PendingIntent.FLAG_CANCEL_CURRENT);
        final AlarmManager am = (AlarmManager) getSystemService(Context.ALARM_SERVICE);
        // Schedule an alarm to be fired to notify user of added sessions are about to begin.
        LOGD(TAG, "-> Scheduling RTC_WAKEUP alarm at " + alarmTime);
        am.set(AlarmManager.RTC_WAKEUP, alarmTime, pi);
    }

    /**
     * Builds corresponding notification for the Wear device that is paired to this handset. This
     * is done by adding a Data Item to the Data Store; the Wear device will be notified to build a
     * local notification.
     */
    private void setupNotificationOnWear(String sessionId, String sessionRoom, String sessionName,
            String speaker) {
        if (!mGoogleApiClient.isConnected()) {
            Log.e(TAG, "setupNotificationOnWear(): Failed to send data item since there was no "
                    + "connectivity to Google API Client");
            return;
        }
        PutDataMapRequest putDataMapRequest = PutDataMapRequest
                .create(FeedbackUtils.getFeedbackPath(sessionId));
        // "time" makes each request unique so repeated notifications are not deduplicated.
        putDataMapRequest.getDataMap().putLong("time", new Date().getTime());
        putDataMapRequest.getDataMap().putString(KEY_SESSION_ID, sessionId);
        putDataMapRequest.getDataMap().putString(KEY_SESSION_NAME, sessionName);
        putDataMapRequest.getDataMap().putString(KEY_SPEAKER_NAME, speaker);
        putDataMapRequest.getDataMap().putString(KEY_SESSION_ROOM, sessionRoom);
        PutDataRequest request = putDataMapRequest.asPutDataRequest();
        Wearable.DataApi.putDataItem(mGoogleApiClient, request)
                .setResultCallback(new ResultCallback<DataApi.DataItemResult>() {
                    @Override
                    public void onResult(DataApi.DataItemResult dataItemResult) {
                        LOGD(TAG, "setupNotificationOnWear(): Sending notification result success:"
                                + dataItemResult.getStatus().isSuccess()
                        );
                    }
                });
    }

    // Starred sessions are about to begin. Constructs and triggers system notification.
    private void notifySession(final long sessionStart, final long alarmOffset) {
        long currentTime = UIUtils.getCurrentTime(this);
        final long intervalEnd = sessionStart + MILLI_TEN_MINUTES;
        LOGD(TAG, "Considering notifying for time interval.");
        LOGD(TAG, "  Interval start: " + sessionStart + "=" + (new Date(sessionStart)).toString());
        LOGD(TAG, "  Interval end: " + intervalEnd + "=" + (new Date(intervalEnd)).toString());
        LOGD(TAG, "  Current time is: " + currentTime + "=" + (new Date(currentTime)).toString());
        if (sessionStart < currentTime) {
            LOGD(TAG, "Skipping session notification (too late -- time interval already started)");
            return;
        }
        if (!PrefUtils.shouldShowSessionReminders(this)) {
            // skip if disabled in settings
            LOGD(TAG, "Skipping session notification for sessions. Disabled in settings.");
            return;
        }
        // Avoid repeated notifications.
        if (alarmOffset == UNDEFINED_ALARM_OFFSET && UIUtils.isNotificationFiredForBlock(
                this, ScheduleContract.Blocks.generateBlockId(sessionStart, intervalEnd))) {
            LOGD(TAG, "Skipping session notification (already notified)");
            return;
        }
        final ContentResolver cr = getContentResolver();
        LOGD(TAG, "Looking for sessions in interval " + sessionStart + " - " + intervalEnd);
        Cursor c = cr.query(
                ScheduleContract.Sessions.CONTENT_MY_SCHEDULE_URI,
                SessionDetailQuery.PROJECTION,
                ScheduleContract.Sessions.STARTING_AT_TIME_INTERVAL_SELECTION,
                ScheduleContract.Sessions.buildAtTimeIntervalArgs(sessionStart, intervalEnd),
                null);
        // Guard against a failed provider query (matches the other query sites).
        if (c == null) {
            return;
        }
        int starredCount;
        String singleSessionId = null;
        String singleSessionRoomId = null;
        ArrayList<String> starredSessionTitles = new ArrayList<String>();
        // Close the cursor even if reading a row throws.
        try {
            starredCount = c.getCount();
            LOGD(TAG, "# starred sessions in that interval: " + starredCount);
            while (c.moveToNext()) {
                singleSessionId = c.getString(SessionDetailQuery.SESSION_ID);
                singleSessionRoomId = c.getString(SessionDetailQuery.ROOM_ID);
                starredSessionTitles.add(c.getString(SessionDetailQuery.SESSION_TITLE));
                LOGD(TAG, "-> Title: " + c.getString(SessionDetailQuery.SESSION_TITLE));
            }
        } finally {
            c.close();
        }
        if (starredCount < 1) {
            return;
        }
        // Generates the pending intent which gets fired when the user taps on the notification.
        // NOTE: Use TaskStackBuilder to comply with Android's design guidelines
        // related to navigation from notifications.
        Intent baseIntent = new Intent(this, MyScheduleActivity.class);
        baseIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
        TaskStackBuilder taskBuilder = TaskStackBuilder.create(this)
                .addNextIntent(baseIntent);
        // For a single session, tapping the notification should open the session details (b/15350787)
        if (starredCount == 1) {
            taskBuilder.addNextIntent(new Intent(Intent.ACTION_VIEW,
                    ScheduleContract.Sessions.buildSessionUri(singleSessionId)));
        }
        PendingIntent pi = taskBuilder.getPendingIntent(0, PendingIntent.FLAG_CANCEL_CURRENT);
        final Resources res = getResources();
        String contentText;
        // Round up to whole minutes (divide AFTER the cast-free long arithmetic;
        // the original cast the long difference to int before dividing).
        int minutesLeft = (int) ((sessionStart - currentTime + 59000) / 60000);
        if (minutesLeft < 1) {
            minutesLeft = 1;
        }
        if (starredCount == 1) {
            contentText = res.getString(R.string.session_notification_text_1, minutesLeft);
        } else {
            contentText = res.getQuantityString(R.plurals.session_notification_text,
                    starredCount - 1,
                    minutesLeft,
                    starredCount - 1);
        }
        NotificationCompat.Builder notifBuilder = new NotificationCompat.Builder(this)
                .setContentTitle(starredSessionTitles.get(0))
                .setContentText(contentText)
                .setColor(getResources().getColor(R.color.theme_primary))
                .setTicker(res.getQuantityString(R.plurals.session_notification_ticker,
                        starredCount,
                        starredCount))
                .setDefaults(Notification.DEFAULT_SOUND | Notification.DEFAULT_VIBRATE)
                .setLights(
                        SessionAlarmService.NOTIFICATION_ARGB_COLOR,
                        SessionAlarmService.NOTIFICATION_LED_ON_MS,
                        SessionAlarmService.NOTIFICATION_LED_OFF_MS)
                .setSmallIcon(R.drawable.ic_stat_notification)
                .setContentIntent(pi)
                .setPriority(Notification.PRIORITY_MAX)
                .setAutoCancel(true);
        if (minutesLeft > 5) {
            notifBuilder.addAction(R.drawable.ic_alarm_holo_dark,
                    String.format(res.getString(R.string.snooze_x_min), 5),
                    createSnoozeIntent(sessionStart, intervalEnd, 5));
        }
        if (starredCount == 1 && PrefUtils.isAttendeeAtVenue(this)) {
            notifBuilder.addAction(R.drawable.ic_map_holo_dark,
                    res.getString(R.string.title_map),
                    createRoomMapIntent(singleSessionRoomId));
        }
        String bigContentTitle;
        if (starredCount == 1 && starredSessionTitles.size() > 0) {
            bigContentTitle = starredSessionTitles.get(0);
        } else {
            bigContentTitle = res.getQuantityString(R.plurals.session_notification_title,
                    starredCount,
                    minutesLeft,
                    starredCount);
        }
        NotificationCompat.InboxStyle richNotification = new NotificationCompat.InboxStyle(
                notifBuilder)
                .setBigContentTitle(bigContentTitle);
        // Adds starred sessions starting at this time block to the notification.
        for (int i = 0; i < starredCount; i++) {
            richNotification.addLine(starredSessionTitles.get(i));
        }
        NotificationManager nm = (NotificationManager) getSystemService(
                Context.NOTIFICATION_SERVICE);
        LOGD(TAG, "Now showing notification.");
        nm.notify(NOTIFICATION_ID, richNotification.build());
    }

    /**
     * Builds a PendingIntent that re-schedules this block's alarm
     * {@code snoozeMinutes} from now.
     */
    private PendingIntent createSnoozeIntent(final long sessionStart, final long sessionEnd,
            final int snoozeMinutes) {
        Intent scheduleIntent = new Intent(
                SessionAlarmService.ACTION_SCHEDULE_STARRED_BLOCK,
                null, this, SessionAlarmService.class);
        scheduleIntent.putExtra(SessionAlarmService.EXTRA_SESSION_START, sessionStart);
        scheduleIntent.putExtra(SessionAlarmService.EXTRA_SESSION_END, sessionEnd);
        scheduleIntent.putExtra(SessionAlarmService.EXTRA_SESSION_ALARM_OFFSET,
                snoozeMinutes * MILLI_ONE_MINUTE);
        return PendingIntent.getService(this, 0, scheduleIntent,
                PendingIntent.FLAG_CANCEL_CURRENT);
    }

    /** Builds a PendingIntent that opens the venue map focused on the given room. */
    private PendingIntent createRoomMapIntent(final String roomId) {
        Intent mapIntent = new Intent(getApplicationContext(),
                UIUtils.getMapActivityClass(getApplicationContext()));
        mapIntent.putExtra(BaseMapActivity.EXTRA_ROOM, roomId);
        mapIntent.putExtra(BaseMapActivity.EXTRA_DETACHED_MODE, true);
        return TaskStackBuilder
                .create(getApplicationContext())
                .addNextIntent(new Intent(this, BrowseSessionsActivity.class))
                .addNextIntent(mapIntent)
                .getPendingIntent(0, PendingIntent.FLAG_CANCEL_CURRENT);
    }

    /** Schedules start-of-session alarms for every distinct starred block. */
    private void scheduleAllStarredBlocks() {
        final ContentResolver cr = getContentResolver();
        final Cursor c = cr.query(ScheduleContract.Sessions.CONTENT_MY_SCHEDULE_URI,
                new String[]{"distinct " + ScheduleContract.Sessions.SESSION_START,
                        ScheduleContract.Sessions.SESSION_END,
                        ScheduleContract.Sessions.SESSION_IN_MY_SCHEDULE},
                null,
                null,
                null
        );
        if (c == null) {
            return;
        }
        // Close the cursor even if scheduling throws.
        try {
            while (c.moveToNext()) {
                final long sessionStart = c.getLong(0);
                final long sessionEnd = c.getLong(1);
                scheduleAlarm(sessionStart, sessionEnd, UNDEFINED_ALARM_OFFSET);
            }
        } finally {
            c.close();
        }
    }

    // Schedules feedback alarms for all starred sessions.
    private void scheduleAllStarredSessionFeedbacks() {
        final ContentResolver cr = getContentResolver();
        // TODO: Should we also check that SESSION_IN_MY_SCHEDULE is true?
        final Cursor c = cr.query(ScheduleContract.Sessions.CONTENT_MY_SCHEDULE_URI,
                new String[]{
                        ScheduleContract.Sessions.SESSION_TITLE,
                        ScheduleContract.Sessions.SESSION_END,
                        ScheduleContract.Sessions.SESSION_IN_MY_SCHEDULE,
                },
                null,
                null,
                null
        );
        if (c == null) {
            return;
        }
        // Close the cursor even if scheduling throws.
        try {
            while (c.moveToNext()) {
                final String sessionTitle = c.getString(0);
                final long sessionEnd = c.getLong(1);
                scheduleFeedbackAlarm(sessionEnd, UNDEFINED_ALARM_OFFSET, sessionTitle);
            }
        } finally {
            c.close();
        }
    }

    /** Projection + column indices for the starred-session notification query. */
    public interface SessionDetailQuery {
        String[] PROJECTION = {
                ScheduleContract.Sessions.SESSION_ID,
                ScheduleContract.Sessions.SESSION_TITLE,
                ScheduleContract.Sessions.ROOM_ID,
                ScheduleContract.Sessions.SESSION_IN_MY_SCHEDULE
        };
        int SESSION_ID = 0;
        int SESSION_TITLE = 1;
        int ROOM_ID = 2;
    }

    /** Projection + column indices for sessions still awaiting feedback. */
    public interface SessionsNeedingFeedbackQuery {
        String[] PROJECTION = {
                ScheduleContract.Sessions.SESSION_ID,
                ScheduleContract.Sessions.SESSION_TITLE,
                ScheduleContract.Sessions.SESSION_IN_MY_SCHEDULE,
                ScheduleContract.Sessions.HAS_GIVEN_FEEDBACK,
        };
        int SESSION_ID = 0;
        int SESSION_TITLE = 1;
        public static final String WHERE_CLAUSE =
                ScheduleContract.Sessions.HAS_GIVEN_FEEDBACK + "=0";
    }

    @Override
    public void onConnected(Bundle connectionHint) {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "Connected to Google Api Service");
        }
    }

    @Override
    public void onConnectionSuspended(int cause) {
        // Ignore
    }

    @Override
    public void onConnectionFailed(ConnectionResult result) {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "Disconnected from Google Api Service");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.state;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.api.common.typeutils.UnloadableDummyTypeSerializer;
import org.apache.flink.util.Preconditions;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;
/**
* A {@link StateSerializerProvider} wraps logic on how to obtain serializers for registered state,
* either with the previous schema of state in checkpoints or the current schema of state.
*
* <p>A provider can be created from either a registered state serializer, or the snapshot
* of the previous state serializer. For the former case, if the state was restored and a
* snapshot of the previous state serializer was retrieved later on, the snapshot can be set
* on the provider which also additionally checks the compatibility of the initially registered
* serializer. Similarly for the latter case, if a new state serializer is registered later on,
* it can be set on the provider, which then also checks the compatibility of the new registered
* serializer.
*
* <p>Simply put, the provider works both directions - either creating it first with a registered
* serializer or the previous serializer's snapshot, and then setting the previous serializer's
* snapshot (if the provider was created with a registered serializer) or a new registered state
* serializer (if the provider was created with a serializer snapshot). Either way,
* the new registered serializer is checked for schema compatibility once both the new serializer
* and the previous serializer snapshot is present.
*
* @param <T> the type of the state.
*/
@Internal
@Internal
public abstract class StateSerializerProvider<T> {

    /**
     * The registered serializer for the state.
     *
     * <p>In the case that this provider was created from a restored serializer snapshot via
     * {@link #fromPreviousSerializerSnapshot(TypeSerializerSnapshot)}, but a new serializer was never registered
     * for the state (i.e., this is the case if a restored state was never accessed), this would be {@code null}.
     */
    @Nullable
    TypeSerializer<T> registeredSerializer;

    /**
     * The state's previous serializer's snapshot.
     *
     * <p>In the case that this provider was created from a registered state serializer instance via
     * {@link #fromNewRegisteredSerializer(TypeSerializer)}, but a serializer snapshot was never supplied to this
     * provider (i.e. because the registered serializer was for a new state, not a restored one), this
     * would be {@code null}.
     */
    @Nullable
    TypeSerializerSnapshot<T> previousSerializerSnapshot;

    /**
     * The restore serializer, lazily created only when the restore serializer is accessed.
     *
     * <p>NOTE: It is important to only create this lazily, so that off-heap
     * state do not fail eagerly when restoring state that has a
     * {@link UnloadableDummyTypeSerializer} as the previous serializer. This should
     * be relevant only for restores from Flink versions prior to 1.7.x.
     */
    @Nullable
    private TypeSerializer<T> cachedRestoredSerializer;

    // Set once a newly registered serializer is found to be schema-incompatible;
    // from then on currentSchemaSerializer() fails fast.
    private boolean isRegisteredWithIncompatibleSerializer = false;

    /**
     * Creates a {@link StateSerializerProvider} for restored state from the previous serializer's snapshot.
     *
     * <p>Once a new serializer is registered for the state, it should be provided via
     * the {@link #registerNewSerializerForRestoredState(TypeSerializer)} method.
     *
     * @param stateSerializerSnapshot the previous serializer's snapshot.
     * @param <T> the type of the state.
     *
     * @return a new {@link StateSerializerProvider}.
     */
    public static <T> StateSerializerProvider<T> fromPreviousSerializerSnapshot(TypeSerializerSnapshot<T> stateSerializerSnapshot) {
        return new LazilyRegisteredStateSerializerProvider<>(stateSerializerSnapshot);
    }

    /**
     * Creates a {@link StateSerializerProvider} from the registered state serializer.
     *
     * <p>If the state is a restored one, and the previous serializer's snapshot is
     * obtained later on, is should be supplied via the
     * {@link #setPreviousSerializerSnapshotForRestoredState(TypeSerializerSnapshot)} method.
     *
     * @param registeredStateSerializer the new state's registered serializer.
     * @param <T> the type of the state.
     *
     * @return a new {@link StateSerializerProvider}.
     */
    public static <T> StateSerializerProvider<T> fromNewRegisteredSerializer(TypeSerializer<T> registeredStateSerializer) {
        return new EagerlyRegisteredStateSerializerProvider<>(registeredStateSerializer);
    }

    private StateSerializerProvider(@Nonnull TypeSerializer<T> stateSerializer) {
        this.registeredSerializer = stateSerializer;
        this.previousSerializerSnapshot = null;
    }

    private StateSerializerProvider(@Nonnull TypeSerializerSnapshot<T> previousSerializerSnapshot) {
        this.previousSerializerSnapshot = previousSerializerSnapshot;
        this.registeredSerializer = null;
    }

    /**
     * Gets the serializer that recognizes the current serialization schema of the state.
     * This is the serializer that should be used for regular state serialization and
     * deserialization after state has been restored.
     *
     * <p>If this provider was created from a restored state's serializer snapshot, while a
     * new serializer (with a new schema) was not registered for the state (i.e., because
     * the state was never accessed after it was restored), then the schema of state remains
     * identical. Therefore, in this case, it is guaranteed that the serializer returned by
     * this method is the same as the one returned by {@link #previousSchemaSerializer()}.
     *
     * <p>If this provider was created from a serializer instance, then this always returns the
     * that same serializer instance. If later on a snapshot of the previous serializer is supplied
     * via {@link #setPreviousSerializerSnapshotForRestoredState(TypeSerializerSnapshot)}, then
     * the initially supplied serializer instance will be checked for compatibility.
     *
     * @return a serializer that reads and writes in the current schema of the state.
     */
    @Nonnull
    public final TypeSerializer<T> currentSchemaSerializer() {
        if (registeredSerializer != null) {
            checkState(
                !isRegisteredWithIncompatibleSerializer,
                "Unable to provide a serializer with the current schema, because the restored state was " +
                    "registered with a new serializer that has incompatible schema.");
            return registeredSerializer;
        }
        // if we are not yet registered with a new serializer,
        // we can just use the restore serializer to read / write the state.
        return previousSchemaSerializer();
    }

    /**
     * Gets the serializer that recognizes the previous serialization schema of the state.
     * This is the serializer that should be used for restoring the state, i.e. when the state
     * is still in the previous serialization schema.
     *
     * <p>This method only returns a serializer if this provider has the previous serializer's
     * snapshot. Otherwise, trying to access the previous schema serializer will fail
     * with an exception.
     *
     * @return a serializer that reads and writes in the previous schema of the state.
     */
    @Nonnull
    public final TypeSerializer<T> previousSchemaSerializer() {
        if (cachedRestoredSerializer != null) {
            return cachedRestoredSerializer;
        }
        if (previousSerializerSnapshot == null) {
            throw new UnsupportedOperationException(
                "This provider does not contain the state's previous serializer's snapshot. Cannot provide a serializer for previous schema.");
        }
        // Lazily restore and cache (see cachedRestoredSerializer's doc for why lazily).
        this.cachedRestoredSerializer = previousSerializerSnapshot.restoreSerializer();
        return cachedRestoredSerializer;
    }

    /**
     * Gets the previous serializer snapshot.
     *
     * @return The previous serializer snapshot, or null if registered serializer was for a new state, not a restored one.
     */
    @Nullable
    public final TypeSerializerSnapshot<T> getPreviousSerializerSnapshot() {
        return previousSerializerSnapshot;
    }

    /**
     * For restored state, register a new serializer that potentially has a new serialization schema.
     *
     * <p>Users are allowed to register serializers for state only once. Therefore, this method
     * is irrelevant if this provider was created with a serializer instance, since a state serializer had
     * been registered already.
     *
     * <p>For the case where this provider was created from a serializer snapshot, then this method should
     * be called at most once. The new serializer will be checked for its schema compatibility with the
     * previous serializer's schema, and returned to the caller. The caller is responsible for
     * checking the result and react appropriately to it, as follows:
     * <ul>
     *     <li>{@link TypeSerializerSchemaCompatibility#isCompatibleAsIs()}: nothing needs to be done.
     *     {@link #currentSchemaSerializer()} now returns the newly registered serializer.</li>
     *     <li>{@link TypeSerializerSchemaCompatibility#isCompatibleAfterMigration()}: state needs to be
     *     migrated before the serializer returned by {@link #currentSchemaSerializer()} can be used.
     *     The migration should be performed by reading the state with {@link #previousSchemaSerializer()},
     *     and then writing it again with {@link #currentSchemaSerializer()}.</li>
     *     <li>{@link TypeSerializerSchemaCompatibility#isIncompatible()}: the registered serializer is
     *     incompatible. {@link #currentSchemaSerializer()} can no longer return a serializer for
     *     the state, and therefore this provider shouldn't be used anymore.</li>
     * </ul>
     *
     * @return the schema compatibility of the new registered serializer, with respect to the previous serializer.
     */
    @Nonnull
    public abstract TypeSerializerSchemaCompatibility<T> registerNewSerializerForRestoredState(TypeSerializer<T> newSerializer);

    /**
     * For restored state, set the state's previous serializer's snapshot.
     *
     * <p>Users are allowed to set the previous serializer's snapshot once. Therefore, this method
     * is irrelevant if this provider was created with a serializer snapshot, since the serializer
     * snapshot had been set already.
     *
     * <p>For the case where this provider was created from a serializer instance, then this method should
     * be called at most once. The initially registered state serializer will be checked for its
     * schema compatibility with the previous serializer's schema, and returned to the caller.
     * The caller is responsible for checking the result and react appropriately to it, as follows:
     * <ul>
     *     <li>{@link TypeSerializerSchemaCompatibility#isCompatibleAsIs()}: nothing needs to be done.
     *     {@link #currentSchemaSerializer()} remains to return the initially registered serializer.</li>
     *     <li>{@link TypeSerializerSchemaCompatibility#isCompatibleAfterMigration()}: state needs to be
     *     migrated before the serializer returned by {@link #currentSchemaSerializer()} can be used.
     *     The migration should be performed by reading the state with {@link #previousSchemaSerializer()},
     *     and then writing it again with {@link #currentSchemaSerializer()}.</li>
     *     <li>{@link TypeSerializerSchemaCompatibility#isIncompatible()}: the registered serializer is
     *     incompatible. {@link #currentSchemaSerializer()} can no longer return a serializer for
     *     the state, and therefore this provider shouldn't be used anymore.</li>
     * </ul>
     *
     * @param previousSerializerSnapshot the state's previous serializer's snapshot
     *
     * @return the schema compatibility of the initially registered serializer, with respect to the previous serializer.
     */
    @Nonnull
    public abstract TypeSerializerSchemaCompatibility<T> setPreviousSerializerSnapshotForRestoredState(TypeSerializerSnapshot<T> previousSerializerSnapshot);

    /**
     * Invalidates access to the current schema serializer. This lets {@link #currentSchemaSerializer()}
     * fail when invoked.
     *
     * <p>Access to the current schema serializer should be invalidated by the methods
     * {@link #registerNewSerializerForRestoredState(TypeSerializer)} or
     * {@link #setPreviousSerializerSnapshotForRestoredState(TypeSerializerSnapshot)}
     * once the registered serializer is determined to be incompatible.
     */
    protected final void invalidateCurrentSchemaSerializerAccess() {
        this.isRegisteredWithIncompatibleSerializer = true;
    }

    /**
     * Implementation of the {@link StateSerializerProvider} for the case where a snapshot of the
     * previous serializer is obtained before a new state serializer is registered (hence, the naming "lazily" registered).
     */
    private static class LazilyRegisteredStateSerializerProvider<T> extends StateSerializerProvider<T> {

        LazilyRegisteredStateSerializerProvider(TypeSerializerSnapshot<T> previousSerializerSnapshot) {
            super(Preconditions.checkNotNull(previousSerializerSnapshot));
        }

        @Nonnull
        @Override
        @SuppressWarnings("ConstantConditions")
        public TypeSerializerSchemaCompatibility<T> registerNewSerializerForRestoredState(TypeSerializer<T> newSerializer) {
            checkNotNull(newSerializer);
            if (registeredSerializer != null) {
                throw new UnsupportedOperationException("A serializer has already been registered for the state; re-registration is not allowed.");
            }
            TypeSerializerSchemaCompatibility<T> result = previousSerializerSnapshot.resolveSchemaCompatibility(newSerializer);
            if (result.isIncompatible()) {
                invalidateCurrentSchemaSerializerAccess();
            }
            if (result.isCompatibleWithReconfiguredSerializer()) {
                this.registeredSerializer = result.getReconfiguredSerializer();
            } else {
                this.registeredSerializer = newSerializer;
            }
            return result;
        }

        @Nonnull
        @Override
        public TypeSerializerSchemaCompatibility<T> setPreviousSerializerSnapshotForRestoredState(
                TypeSerializerSnapshot<T> previousSerializerSnapshot) {
            throw new UnsupportedOperationException("The snapshot of the state's previous serializer has already been set; cannot reset.");
        }
    }

    /**
     * Implementation of the {@link StateSerializerProvider} for the case where a new state
     * serializer instance is registered first, before any snapshots of the previous state serializer
     * is obtained (hence, the naming "eagerly" registered).
     */
    private static class EagerlyRegisteredStateSerializerProvider<T> extends StateSerializerProvider<T> {

        EagerlyRegisteredStateSerializerProvider(TypeSerializer<T> registeredStateSerializer) {
            super(Preconditions.checkNotNull(registeredStateSerializer));
        }

        @Nonnull
        @Override
        public TypeSerializerSchemaCompatibility<T> registerNewSerializerForRestoredState(TypeSerializer<T> newSerializer) {
            throw new UnsupportedOperationException("A serializer has already been registered for the state; re-registration is not allowed.");
        }

        @Nonnull
        @Override
        public TypeSerializerSchemaCompatibility<T> setPreviousSerializerSnapshotForRestoredState(TypeSerializerSnapshot<T> previousSerializerSnapshot) {
            checkNotNull(previousSerializerSnapshot);
            if (this.previousSerializerSnapshot != null) {
                throw new UnsupportedOperationException("The snapshot of the state's previous serializer has already been set; cannot reset.");
            }
            this.previousSerializerSnapshot = previousSerializerSnapshot;
            TypeSerializerSchemaCompatibility<T> result = previousSerializerSnapshot.resolveSchemaCompatibility(registeredSerializer);
            if (result.isIncompatible()) {
                invalidateCurrentSchemaSerializerAccess();
            }
            if (result.isCompatibleWithReconfiguredSerializer()) {
                this.registeredSerializer = result.getReconfiguredSerializer();
            }
            return result;
        }
    }
}
| |
/*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.CancelException;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.SystemFailure;
import com.gemstone.gemfire.cache.CacheClosedException;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.LowMemoryException;
import com.gemstone.gemfire.cache.Operation;
import com.gemstone.gemfire.cache.RegionDestroyedException;
import com.gemstone.gemfire.distributed.DistributedSystemDisconnectedException;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.DirectReplyProcessor;
import com.gemstone.gemfire.distributed.internal.DistributionManager;
import com.gemstone.gemfire.distributed.internal.DistributionMessage;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.distributed.internal.MessageWithReply;
import com.gemstone.gemfire.distributed.internal.ReplyException;
import com.gemstone.gemfire.distributed.internal.ReplyMessage;
import com.gemstone.gemfire.distributed.internal.ReplyProcessor21;
import com.gemstone.gemfire.distributed.internal.membership.InternalDistributedMember;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.cache.partitioned.PutMessage;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;
import com.gemstone.gemfire.internal.logging.log4j.LogMarker;
/**
 * The base PartitionedRegion message type upon which other remote-operation messages
 * should be based. Subclasses implement {@link #operateOnRegion} to perform the actual
 * work on the receiving member; this class handles region lookup, transaction
 * masquerading, reply dispatch, and wire (de)serialization of the common fields.
 *
 * @author gregp
 * @since 6.5
 */
public abstract class RemoteOperationMessage extends DistributionMessage implements
  MessageWithReply, TransactionMessage
{
  private static final Logger logger = LogService.getLogger();

  /** default exception to ensure a false-positive response is never returned */
  static final ForceReattemptException UNHANDLED_EXCEPTION
    = (ForceReattemptException)new ForceReattemptException(LocalizedStrings.PartitionMessage_UNKNOWN_EXCEPTION.toLocalizedString()).fillInStackTrace();

  // Reply processor id registered on the sending member; 0 means no acknowledgement is expected.
  protected int processorId;

  /** the type of executor to use */
  protected int processorType;

  // Full path of the target region; resolved to a LocalRegion on the receiving side in process().
  protected String regionPath;

  /** The unique transaction Id on the sending member, used to construct a TXId on the receiving side */
  private int txUniqId = TXManagerImpl.NOTX;

  // Originating member of the transaction; only set when member-id forwarding is required.
  private InternalDistributedMember txMemberId = null;

  // Compressed presence flags written by toData/readable via setFlags; transient by design.
  protected transient short flags;

  /** No-arg constructor for deserialization. */
  public RemoteOperationMessage() {
  }

  /**
   * Builds a message addressed to a single recipient, capturing the current
   * transaction context (if any) from the sending thread.
   */
  public RemoteOperationMessage(InternalDistributedMember recipient, String regionPath,
      ReplyProcessor21 processor) {
    Assert.assertTrue(recipient != null, "RemoteMesssage recipient can not be null");
    setRecipient(recipient);
    this.regionPath = regionPath;
    this.processorId = processor==null? 0 : processor.getProcessorId();
    if (processor != null && this.isSevereAlertCompatible()) {
      processor.enableSevereAlertProcessing();
    }
    this.txUniqId = TXManagerImpl.getCurrentTXUniqueId();
    TXStateProxy txState = TXManagerImpl.getCurrentTXState();
    if(txState!=null && txState.isMemberIdForwardingRequired()) {
      this.txMemberId = txState.getOriginatingMember();
    }
  }

  /**
   * Builds a message addressed to multiple recipients, capturing the current
   * transaction context (if any) from the sending thread.
   */
  public RemoteOperationMessage(Set recipients, String regionPath, ReplyProcessor21 processor) {
    setRecipients(recipients);
    this.regionPath = regionPath;
    this.processorId = processor==null? 0 : processor.getProcessorId();
    if (processor != null && this.isSevereAlertCompatible()) {
      processor.enableSevereAlertProcessing();
    }
    this.txUniqId = TXManagerImpl.getCurrentTXUniqueId();
    TXStateProxy txState = TXManagerImpl.getCurrentTXState();
    if(txState!=null && txState.isMemberIdForwardingRequired()) {
      this.txMemberId = txState.getOriginatingMember();
    }
  }

  /**
   * Copy constructor that initializes the fields declared in this class
   * @param other the message whose common fields are copied
   */
  public RemoteOperationMessage(RemoteOperationMessage other) {
    this.regionPath = other.regionPath;
    this.processorId = other.processorId;
    this.txUniqId = other.getTXUniqId();
    this.txMemberId = other.getTXMemberId();
  }

  /**
   * Severe alert processing enables suspect processing at the ack-wait-threshold
   * and issuing of a severe alert at the end of the ack-severe-alert-threshold.
   * Some messages should not support this type of processing
   * (e.g., GII, or DLockRequests)
   * @return whether severe-alert processing may be performed on behalf
   * of this message
   */
  @Override
  public boolean isSevereAlertCompatible() {
    return true;
  }

  @Override
  public int getProcessorType() {
    return DistributionManager.PARTITIONED_REGION_EXECUTOR;
  }

  /**
   * @return the full path of the region
   */
  public final String getRegionPath()
  {
    return regionPath;
  }

  /**
   * @return the {@link ReplyProcessor21} id associated with the message, 0
   * if no acknowledgement is required.
   */
  @Override
  public final int getProcessorId()
  {
    return this.processorId;
  }

  /**
   * @param processorId1 the {@link
   * com.gemstone.gemfire.distributed.internal.ReplyProcessor21} id associated
   * with the message, 0 if no acknowledgement is required.
   */
  public final void registerProcessor(int processorId1)
  {
    this.processorId = processorId1;
  }

  public void setCacheOpRecipients(Collection cacheOpRecipients) {
    // TODO need to implement this for other remote ops
    assert this instanceof RemotePutMessage;
  }

  /**
   * check to see if the cache is closing
   */
  final public boolean checkCacheClosing(DistributionManager dm) {
    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
    // return (cache != null && cache.isClosed());
    return cache == null || cache.isClosed();
  }

  /**
   * check to see if the distributed system is closing
   * @return true if the distributed system is closing
   */
  final public boolean checkDSClosing(DistributionManager dm) {
    InternalDistributedSystem ds = dm.getSystem();
    return (ds == null || ds.isDisconnecting());
  }

  /**
   * Upon receipt of the message, both process the message and send an
   * acknowledgement, not necessarily in that order. Note: Any hang in this
   * message may cause a distributed deadlock for those threads waiting for an
   * acknowledgement.
   * <p>
   * A {@link RegionDestroyedException} is captured (not thrown) and sent back
   * in the reply if the region does not exist (typically, if it has been
   * destroyed) and {@link #failIfRegionMissing()} returns true.
   */
  @Override
  public void process(final DistributionManager dm)
  {
    Throwable thr = null;
    boolean sendReply = true;
    LocalRegion r = null;
    long startTime = 0;
    try {
      // Bail out quietly if this cache or distributed system is shutting down.
      if (checkCacheClosing(dm) || checkDSClosing(dm)) {
        thr = new CacheClosedException(LocalizedStrings.PartitionMessage_REMOTE_CACHE_IS_CLOSED_0.toLocalizedString(dm.getId()));
        return;
      }
      GemFireCacheImpl gfc = (GemFireCacheImpl)CacheFactory.getInstance(dm.getSystem());
      r = gfc.getRegionByPathForProcessing(this.regionPath);
      if (r == null && failIfRegionMissing()) {
        // if the distributed system is disconnecting, don't send a reply saying
        // the partitioned region can't be found (bug 36585)
        thr = new RegionDestroyedException(LocalizedStrings.RemoteOperationMessage_0_COULD_NOT_FIND_REGION_1
            .toLocalizedString(new Object[] {dm.getDistributionManagerId(), regionPath }), regionPath);
        return; // reply sent in finally block below
      }
      // Pre-arm thr so any unexpected exit path still sends a failure reply.
      thr = UNHANDLED_EXCEPTION;
      // [bruce] r might be null here, so we have to go to the cache instance to get the txmgr
      TXManagerImpl txMgr = GemFireCacheImpl.getInstance().getTxManager();
      TXStateProxy tx = null;
      try {
        // Execute the operation under the sender's transaction identity.
        tx = txMgr.masqueradeAs(this);
        sendReply = operateOnRegion(dm, r, startTime);
      } finally {
        txMgr.unmasquerade(tx);
      }
      thr = null;
    } catch (RemoteOperationException fre) {
      thr = fre;
    }
    catch (DistributedSystemDisconnectedException se) {
      // bug 37026: this is too noisy...
      // throw new CacheClosedException("remote system shutting down");
      // thr = se; cache is closed, no point trying to send a reply
      thr = null;
      sendReply = false;
      if (logger.isDebugEnabled()) {
        logger.debug("shutdown caught, abandoning message: {}", se.getMessage(), se);
      }
    }
    catch (RegionDestroyedException rde) {
      // [bruce] RDE does not always mean that the sender's region is also
      // destroyed, so we must send back an exception. If the sender's
      // region is also destroyed, who cares if we send it an exception
      //if (pr != null && pr.isClosed) {
      thr = new ForceReattemptException(LocalizedStrings.PartitionMessage_REGION_IS_DESTROYED_IN_0.toLocalizedString(dm.getDistributionManagerId()), rde);
      //}
    }
    catch (VirtualMachineError err) {
      SystemFailure.initiateFailure(err);
      // If this ever returns, rethrow the error. We're poisoned
      // now, so don't let this thread continue.
      throw err;
    }
    catch (Throwable t) {
      // Whenever you catch Error or Throwable, you must also
      // catch VirtualMachineError (see above). However, there is
      // _still_ a possibility that you are dealing with a cascading
      // error condition, so you also need to check to see if the JVM
      // is still usable:
      SystemFailure.checkFailure();
      // log the exception at fine level if there is no reply to the message
      thr = null;
      if (sendReply) {
        if (!checkDSClosing(dm)) {
          thr = t;
        }
        else {
          // don't pass arbitrary runtime exceptions and errors back if this
          // cache/vm is closing
          thr = new ForceReattemptException(LocalizedStrings.PartitionMessage_DISTRIBUTED_SYSTEM_IS_DISCONNECTING.toLocalizedString());
        }
      }
      if (logger.isTraceEnabled(LogMarker.DM) && (t instanceof RuntimeException)) {
        logger.trace(LogMarker.DM, "Exception caught while processing message", t);
      }
    }
    finally {
      if (sendReply) {
        ReplyException rex = null;
        if (thr != null) {
          // don't transmit the exception if this message was to a listener
          // and this listener is shutting down
          rex = new ReplyException(thr);
        }
        // Send the reply if the operateOnPartitionedRegion returned true
        sendReply(getSender(), this.processorId, dm, rex, r, startTime);
      }
    }
  }

  /** Send a generic ReplyMessage. This is in a method so that subclasses can override the reply message type
   * @param pr the Partitioned Region for the message whose statistics are incremented
   * @param startTime the start time of the operation in nanoseconds
   * @see PutMessage#sendReply
   */
  protected void sendReply(InternalDistributedMember member, int procId, DM dm, ReplyException ex, LocalRegion pr, long startTime) {
    // if (pr != null && startTime > 0) {
      //pr.getPrStats().endRemoteOperationMessagesProcessing(startTime);
    // }
    ReplyMessage.send(member, procId, ex, getReplySender(dm), pr != null && pr.isInternalRegion());
  }

  /**
   * Allow classes that over-ride to choose whether
   * a RegionDestroyException is thrown if no partitioned region is found (typically occurs if the message will be sent
   * before the PartitionedRegion has been fully constructed.
   * @return true if throwing a {@link RegionDestroyedException} is acceptable
   */
  protected boolean failIfRegionMissing() {
    return true;
  }

  /**
   * return a new reply processor for this class, for use in relaying a response.
   * This <b>must</b> be an instance method so subclasses can override it
   * properly.
   */
  RemoteOperationResponse createReplyProcessor(PartitionedRegion r, Set recipients) {
    return new RemoteOperationResponse(r.getSystem(), recipients);
  }

  // Subclass hook that performs the actual work; returning true causes a reply to be sent.
  protected abstract boolean operateOnRegion(DistributionManager dm,
      LocalRegion r,long startTime) throws RemoteOperationException;

  /**
   * Fill out this instance of the message using the <code>DataInput</code>
   * Required to be a {@link com.gemstone.gemfire.DataSerializable}Note: must
   * be symmetric with {@link #toData(DataOutput)}in what it reads
   */
  @Override
  public void fromData(DataInput in) throws IOException, ClassNotFoundException
  {
    super.fromData(in);
    this.flags = in.readShort();
    setFlags(this.flags, in);
    this.regionPath = DataSerializer.readString(in);
  }

  public InternalDistributedMember getTXOriginatorClient() {
    return this.txMemberId;
  }

  /**
   * Send the contents of this instance to the DataOutput Required to be a
   * {@link com.gemstone.gemfire.DataSerializable}Note: must be symmetric with
   * {@link #fromData(DataInput)}in what it writes
   */
  @Override
  public void toData(DataOutput out) throws IOException
  {
    super.toData(out);
    // Only fields whose presence bit is set in 'flags' are written; setFlags
    // reads them back in the same order.
    short flags = computeCompressedShort();
    out.writeShort(flags);
    if (this.processorId != 0) {
      out.writeInt(this.processorId);
    }
    if (this.processorType != 0) {
      out.writeByte(this.processorType);
    }
    if (this.getTXUniqId() != TXManagerImpl.NOTX) {
      out.writeInt(this.getTXUniqId());
    }
    if (this.getTXMemberId() != null) {
      DataSerializer.writeObject(this.getTXMemberId(),out);
    }
    DataSerializer.writeString(this.regionPath,out);
  }

  // Computes which optional fields are present, as a bitmask consumed by setFlags.
  protected short computeCompressedShort() {
    short flags = 0;
    if (this.processorId != 0) flags |= HAS_PROCESSOR_ID;
    if (this.processorType != 0) flags |= HAS_PROCESSOR_TYPE;
    if (this.getTXUniqId() != TXManagerImpl.NOTX) flags |= HAS_TX_ID;
    if (this.getTXMemberId() != null) flags |= HAS_TX_MEMBERID;
    return flags;
  }

  // Reads the optional fields indicated by 'flags'; must mirror toData's write order.
  protected void setFlags(short flags, DataInput in) throws IOException,
      ClassNotFoundException {
    if ((flags & HAS_PROCESSOR_ID) != 0) {
      this.processorId = in.readInt();
      ReplyProcessor21.setMessageRPId(this.processorId);
    }
    if ((flags & HAS_PROCESSOR_TYPE) != 0) {
      this.processorType = in.readByte();
    }
    if ((flags & HAS_TX_ID) != 0) {
      this.txUniqId = in.readInt();
    }
    if ((flags & HAS_TX_MEMBERID) != 0) {
      this.txMemberId = DataSerializer.readObject(in);
    }
  }

  protected final InternalDistributedMember getTXMemberId() {
    return txMemberId;
  }

  // Token used to strip the package prefix from the class name in toString().
  private final static String PN_TOKEN = ".cache.";

  @Override
  public String toString()
  {
    StringBuffer buff = new StringBuffer();
    String className = getClass().getName();
    // className.substring(className.lastIndexOf('.', className.lastIndexOf('.') - 1) + 1); // partition.<foo> more generic version
    buff.append(className.substring(className.indexOf(PN_TOKEN) + PN_TOKEN.length())); // partition.<foo>
    buff.append("(regionPath="); // make sure this is the first one
    buff.append(this.regionPath);
    appendFields(buff);
    buff.append(")");
    return buff.toString();
  }

  /**
   * Helper method for {@link #toString()}
   *
   * @param buff
   * buffer in which to append the state of this instance
   */
  protected void appendFields(StringBuffer buff)
  {
    buff.append("; sender=").append(getSender())
      .append("; recipients=[");
    InternalDistributedMember[] recips = getRecipients();
    for(int i=0; i<recips.length-1; i++) {
      buff.append(recips[i]).append(',');
    }
    if (recips.length > 0) {
      buff.append(recips[recips.length-1]);
    }
    buff.append("]; processorId=").append(this.processorId);
  }

  public InternalDistributedMember getRecipient() {
    return getRecipients()[0];
  }

  public void setOperation(Operation op) {
    // override in subclasses holding operations
  }

  /**
   * added to support old value to be written on wire.
   * @param value true or false
   * @since 6.5
   */
  public void setHasOldValue(boolean value) {
    // override in subclasses which need old value to be serialized.
    // overridden by classes like PutMessage, DestroyMessage.
  }

  /**
   * @return the txUniqId
   */
  public final int getTXUniqId() {
    return txUniqId;
  }

  // If no transaction originator was forwarded, the sender itself is the member to masquerade as.
  public final InternalDistributedMember getMemberToMasqueradeAs() {
    if(txMemberId==null) {
      return getSender();
    }
    return txMemberId;
  }

  public boolean canStartRemoteTransaction() {
    return true;
  }

  @Override
  public boolean canParticipateInTransaction() {
    return true;
  }

  /**
   * A processor on which to await a response from the {@link RemoteOperationMessage}
   * recipient, capturing any CacheException thrown by the recipient and handle
   * it as an expected exception.
   *
   * @author Greg Passmore
   * @since 6.5
   * @see #waitForCacheException()
   */
  public static class RemoteOperationResponse extends DirectReplyProcessor {
    /**
     * The exception thrown when the recipient does not reply
     */
    volatile ForceReattemptException prce;
    /**
     * Whether a response has been received
     */
    volatile boolean responseReceived;
    /**
     * whether a response is required
     */
    boolean responseRequired;

    public RemoteOperationResponse(InternalDistributedSystem dm, Collection initMembers) {
      this(dm, initMembers, true);
    }

    public RemoteOperationResponse(InternalDistributedSystem dm, Collection initMembers, boolean register) {
      super(dm, initMembers);
      if(register) {
        register();
      }
    }

    public RemoteOperationResponse(InternalDistributedSystem dm, InternalDistributedMember member) {
      this(dm, member, true);
    }

    public RemoteOperationResponse(InternalDistributedSystem dm, InternalDistributedMember member, boolean register) {
      super(dm, member);
      if(register) {
        register();
      }
    }

    /**
     * require a response message to be received
     */
    public void requireResponse() {
      this.responseRequired = true;
    }

    @Override
    public void memberDeparted(final InternalDistributedMember id, final boolean crashed) {
      if (id != null) {
        if (removeMember(id, true)) {
          this.prce = new ForceReattemptException(LocalizedStrings.PartitionMessage_PARTITIONRESPONSE_GOT_MEMBERDEPARTED_EVENT_FOR_0_CRASHED_1.toLocalizedString(new Object[] {id, Boolean.valueOf(crashed)}));
        }
        checkIfDone();
      } else {
        Exception e = new Exception(LocalizedStrings.PartitionMessage_MEMBERDEPARTED_GOT_NULL_MEMBERID.toLocalizedString());
        logger.info(LocalizedMessage.create(LocalizedStrings.PartitionMessage_MEMBERDEPARTED_GOT_NULL_MEMBERID_CRASHED_0, Boolean.valueOf(crashed)), e);
      }
    }

    /**
     * Waits for the response from the {@link RemoteOperationMessage}'s recipient
     * @throws CacheException if the recipient threw a cache exception during message processing
     * @throws ForceReattemptException if the recipient left the distributed system before the response
     * was received.
     * @throws PrimaryBucketException
     */
    final public void waitForCacheException()
        throws CacheException, RemoteOperationException, PrimaryBucketException {
      try {
        waitForRepliesUninterruptibly();
        // A departed member or a missing required response both surface as a reattempt.
        if (this.prce!=null || (this.responseRequired && !this.responseReceived)) {
          throw new RemoteOperationException(LocalizedStrings.PartitionMessage_ATTEMPT_FAILED.toLocalizedString(), this.prce);
        }
      }
      catch (ReplyException e) {
        // Unwrap the remote cause and rethrow it as the matching local exception type.
        Throwable t = e.getCause();
        if (t instanceof CacheException) {
          throw (CacheException)t;
        }
        else if (t instanceof RemoteOperationException) {
          RemoteOperationException ft = (RemoteOperationException)t;
          // See FetchEntriesMessage, which can marshal a ForceReattempt
          // across to the sender
          RemoteOperationException fre = new RemoteOperationException(LocalizedStrings.PartitionMessage_PEER_REQUESTS_REATTEMPT.toLocalizedString(), t);
          if (ft.hasHash()) {
            fre.setHash(ft.getHash());
          }
          throw fre;
        }
        else if (t instanceof PrimaryBucketException) {
          // See FetchEntryMessage, GetMessage, InvalidateMessage,
          // PutMessage
          // which can marshal a ForceReattemptacross to the sender
          throw new PrimaryBucketException(LocalizedStrings.PartitionMessage_PEER_FAILED_PRIMARY_TEST.toLocalizedString(), t);
        }
        else if (t instanceof RegionDestroyedException) {
          RegionDestroyedException rde = (RegionDestroyedException) t;
          throw rde;
        }
        else if (t instanceof CancelException) {
          if (logger.isDebugEnabled()) {
            logger.debug("RemoteOperationResponse got CacheClosedException from {}, throwing ForceReattemptException", e.getSender(), t);
          }
          throw new RemoteOperationException(LocalizedStrings.PartitionMessage_PARTITIONRESPONSE_GOT_REMOTE_CACHECLOSEDEXCEPTION.toLocalizedString(), t);
        }
        else if (t instanceof LowMemoryException) {
          if (logger.isDebugEnabled()) {
            logger.debug("RemoteOperationResponse re-throwing remote LowMemoryException from {}", e.getSender(), t);
          }
          throw (LowMemoryException) t;
        }
        e.handleAsUnexpected();
      }
    }

    /* overridden from ReplyProcessor21 */
    @Override
    public void process(DistributionMessage msg) {
      this.responseReceived = true;
      super.process(msg);
    }
  }
}
| |
/*******************************************************************************
* Copyright 2011, 2012 Chris Banes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.topface;
import android.content.Context;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.Adapter;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.ListAdapter;
import com.handmark.pulltorefresh.library.R;
import com.topface.internal.EmptyViewMethodAccessor;
import com.topface.internal.IndicatorLayout;
public abstract class PullToRefreshAdapterViewBase<T extends AbsListView> extends PullToRefreshBase<T> implements
OnScrollListener {
private static FrameLayout.LayoutParams convertEmptyViewLayoutParams(ViewGroup.LayoutParams lp) {
FrameLayout.LayoutParams newLp = null;
if (null != lp) {
newLp = new FrameLayout.LayoutParams(lp);
if (lp instanceof LinearLayout.LayoutParams) {
newLp.gravity = ((LinearLayout.LayoutParams) lp).gravity;
} else {
newLp.gravity = Gravity.CENTER;
}
}
return newLp;
}
private boolean mLastItemVisible;
private OnScrollListener mOnScrollListener;
private OnLastItemVisibleListener mOnLastItemVisibleListener;
private View mEmptyView;
private IndicatorLayout mIndicatorIvTop;
private IndicatorLayout mIndicatorIvBottom;
private boolean mShowIndicator;
private boolean mScrollEmptyView = true;
public PullToRefreshAdapterViewBase(Context context) {
super(context);
mRefreshableView.setOnScrollListener(this);
}
public PullToRefreshAdapterViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
mRefreshableView.setOnScrollListener(this);
}
public PullToRefreshAdapterViewBase(Context context, Mode mode) {
super(context, mode);
mRefreshableView.setOnScrollListener(this);
}
public PullToRefreshAdapterViewBase(Context context, Mode mode, AnimationStyle animStyle) {
super(context, mode, animStyle);
mRefreshableView.setOnScrollListener(this);
}
/**
* Gets whether an indicator graphic should be displayed when the View is in
* a state where a Pull-to-Refresh can happen. An example of this state is
* when the Adapter View is scrolled to the top and the mode is set to
* {@link Mode#PULL_FROM_START}. The default value is <var>true</var> if
* {@link PullToRefreshBase#isPullToRefreshOverScrollEnabled()
* isPullToRefreshOverScrollEnabled()} returns false.
*
* @return true if the indicators will be shown
*/
public boolean getShowIndicator() {
return mShowIndicator;
}
public final void onScroll(final AbsListView view, final int firstVisibleItem, final int visibleItemCount,
final int totalItemCount) {
if (DEBUG) {
Log.d(LOG_TAG, "First Visible: " + firstVisibleItem + ". Visible Count: " + visibleItemCount
+ ". Total Items:" + totalItemCount);
}
/**
* Set whether the Last Item is Visible. lastVisibleItemIndex is a
* zero-based index, so we minus one totalItemCount to check
*/
if (null != mOnLastItemVisibleListener) {
mLastItemVisible = (totalItemCount > 0) && (firstVisibleItem + visibleItemCount >= totalItemCount - 1);
}
// If we're showing the indicator, check positions...
if (getShowIndicatorInternal()) {
updateIndicatorViewsVisibility();
}
// Finally call OnScrollListener if we have one
if (null != mOnScrollListener) {
mOnScrollListener.onScroll(view, firstVisibleItem, visibleItemCount, totalItemCount);
}
}
public final void onScrollStateChanged(final AbsListView view, final int state) {
/**
* Check that the scrolling has stopped, and that the last item is
* visible.
*/
if (state == OnScrollListener.SCROLL_STATE_IDLE && null != mOnLastItemVisibleListener && mLastItemVisible) {
mOnLastItemVisibleListener.onLastItemVisible();
}
if (null != mOnScrollListener) {
mOnScrollListener.onScrollStateChanged(view, state);
}
}
/**
* Pass-through method for {@link PullToRefreshBase#getRefreshableView()
* getRefreshableView()}.
* {@link AdapterView#setAdapter(android.widget.Adapter)}
* setAdapter(adapter)}. This is just for convenience!
*
* @param adapter - Adapter to set
*/
public void setAdapter(ListAdapter adapter) {
((AdapterView<ListAdapter>) mRefreshableView).setAdapter(adapter);
}
/**
* Sets the Empty View to be used by the Adapter View.
* <p/>
* We need it handle it ourselves so that we can Pull-to-Refresh when the
* Empty View is shown.
* <p/>
* Please note, you do <strong>not</strong> usually need to call this method
* yourself. Calling setEmptyView on the AdapterView will automatically call
* this method and set everything up. This includes when the Android
* Framework automatically sets the Empty View based on it's ID.
*
* @param newEmptyView - Empty View to be used
*/
public final void setEmptyView(View newEmptyView) {
FrameLayout refreshableViewWrapper = getRefreshableViewWrapper();
if (null != newEmptyView) {
// New view needs to be clickable so that Android recognizes it as a
// target for Touch Events
newEmptyView.setClickable(true);
ViewParent newEmptyViewParent = newEmptyView.getParent();
if (null != newEmptyViewParent && newEmptyViewParent instanceof ViewGroup) {
((ViewGroup) newEmptyViewParent).removeView(newEmptyView);
}
// We need to convert any LayoutParams so that it works in our
// FrameLayout
FrameLayout.LayoutParams lp = convertEmptyViewLayoutParams(newEmptyView.getLayoutParams());
if (null != lp) {
refreshableViewWrapper.addView(newEmptyView, lp);
} else {
refreshableViewWrapper.addView(newEmptyView);
}
}
if (mRefreshableView instanceof EmptyViewMethodAccessor) {
((EmptyViewMethodAccessor) mRefreshableView).setEmptyViewInternal(newEmptyView);
} else {
mRefreshableView.setEmptyView(newEmptyView);
}
mEmptyView = newEmptyView;
}
/**
* Pass-through method for {@link PullToRefreshBase#getRefreshableView()
* getRefreshableView()}.
* {@link AdapterView#setOnItemClickListener(OnItemClickListener)
* setOnItemClickListener(listener)}. This is just for convenience!
*
* @param listener - OnItemClickListener to use
*/
public void setOnItemClickListener(OnItemClickListener listener) {
mRefreshableView.setOnItemClickListener(listener);
}
public final void setOnLastItemVisibleListener(OnLastItemVisibleListener listener) {
mOnLastItemVisibleListener = listener;
}
public final void setOnScrollListener(OnScrollListener listener) {
mOnScrollListener = listener;
}
public final void setScrollEmptyView(boolean doScroll) {
mScrollEmptyView = doScroll;
}
/**
* Sets whether an indicator graphic should be displayed when the View is in
* a state where a Pull-to-Refresh can happen. An example of this state is
* when the Adapter View is scrolled to the top and the mode is set to
* {@link Mode#PULL_FROM_START}
*
* @param showIndicator - true if the indicators should be shown.
*/
public void setShowIndicator(boolean showIndicator) {
mShowIndicator = showIndicator;
if (getShowIndicatorInternal()) {
// If we're set to Show Indicator, add/update them
addIndicatorViews();
} else {
// If not, then remove then
removeIndicatorViews();
}
}
;
@Override
protected void onPullToRefresh() {
super.onPullToRefresh();
if (getShowIndicatorInternal()) {
switch (getCurrentMode()) {
case PULL_FROM_END:
mIndicatorIvBottom.pullToRefresh();
break;
case PULL_FROM_START:
mIndicatorIvTop.pullToRefresh();
break;
default:
// NO-OP
break;
}
}
}
protected void onRefreshing(boolean doScroll) {
super.onRefreshing(doScroll);
if (getShowIndicatorInternal()) {
updateIndicatorViewsVisibility();
}
}
@Override
protected void onReleaseToRefresh() {
super.onReleaseToRefresh();
if (getShowIndicatorInternal()) {
switch (getCurrentMode()) {
case PULL_FROM_END:
mIndicatorIvBottom.releaseToRefresh();
break;
case PULL_FROM_START:
mIndicatorIvTop.releaseToRefresh();
break;
default:
// NO-OP
break;
}
}
}
@Override
protected void onReset() {
super.onReset();
if (getShowIndicatorInternal()) {
updateIndicatorViewsVisibility();
}
}
@Override
protected void handleStyledAttributes(TypedArray a) {
// Set Show Indicator to the XML value, or default value
mShowIndicator = a.getBoolean(R.styleable.PullToRefresh_ptrShowIndicator, !isPullToRefreshOverScrollEnabled());
}
protected boolean isReadyForPullStart() {
return isFirstItemVisible();
}
protected boolean isReadyForPullEnd() {
return isLastItemVisible();
}
@Override
protected void onScrollChanged(int l, int t, int oldl, int oldt) {
super.onScrollChanged(l, t, oldl, oldt);
if (null != mEmptyView && !mScrollEmptyView) {
mEmptyView.scrollTo(-l, -t);
}
}
@Override
protected void updateUIForMode() {
super.updateUIForMode();
// Check Indicator Views consistent with new Mode
if (getShowIndicatorInternal()) {
addIndicatorViews();
} else {
removeIndicatorViews();
}
}
private void addIndicatorViews() {
Mode mode = getMode();
FrameLayout refreshableViewWrapper = getRefreshableViewWrapper();
if (mode.showHeaderLoadingLayout() && null == mIndicatorIvTop) {
// If the mode can pull down, and we don't have one set already
mIndicatorIvTop = new IndicatorLayout(getContext(), Mode.PULL_FROM_START);
FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
params.rightMargin = getResources().getDimensionPixelSize(R.dimen.indicator_right_padding);
params.gravity = Gravity.TOP | Gravity.RIGHT;
refreshableViewWrapper.addView(mIndicatorIvTop, params);
} else if (!mode.showHeaderLoadingLayout() && null != mIndicatorIvTop) {
// If we can't pull down, but have a View then remove it
refreshableViewWrapper.removeView(mIndicatorIvTop);
mIndicatorIvTop = null;
}
if (mode.showFooterLoadingLayout() && null == mIndicatorIvBottom) {
// If the mode can pull down, and we don't have one set already
mIndicatorIvBottom = new IndicatorLayout(getContext(), Mode.PULL_FROM_END);
FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
params.rightMargin = getResources().getDimensionPixelSize(R.dimen.indicator_right_padding);
params.gravity = Gravity.BOTTOM | Gravity.RIGHT;
refreshableViewWrapper.addView(mIndicatorIvBottom, params);
} else if (!mode.showFooterLoadingLayout() && null != mIndicatorIvBottom) {
// If we can't pull down, but have a View then remove it
refreshableViewWrapper.removeView(mIndicatorIvBottom);
mIndicatorIvBottom = null;
}
}
private boolean getShowIndicatorInternal() {
return mShowIndicator && isPullToRefreshEnabled();
}
private boolean isFirstItemVisible() {
final Adapter adapter = mRefreshableView.getAdapter();
if (null == adapter || adapter.isEmpty()) {
if (DEBUG) {
Log.d(LOG_TAG, "isFirstItemVisible. Empty View.");
}
return true;
} else {
/**
* This check should really just be:
* mRefreshableView.getFirstVisiblePosition() == 0, but PtRListView
* internally use a HeaderView which messes the positions up. For
* now we'll just add one to account for it and rely on the inner
* condition which checks getTop().
*/
if (mRefreshableView.getFirstVisiblePosition() <= 1) {
final View firstVisibleChild = mRefreshableView.getChildAt(0);
if (firstVisibleChild != null) {
return firstVisibleChild.getTop() >= mRefreshableView.getTop();
}
}
}
return false;
}
private boolean isLastItemVisible() {
final Adapter adapter = mRefreshableView.getAdapter();
if (null == adapter || adapter.isEmpty()) {
if (DEBUG) {
Log.d(LOG_TAG, "isLastItemVisible. Empty View.");
}
return true;
} else {
final int lastItemPosition = mRefreshableView.getCount() - 1;
final int lastVisiblePosition = mRefreshableView.getLastVisiblePosition();
if (DEBUG) {
Log.d(LOG_TAG, "isLastItemVisible. Last Item Position: " + lastItemPosition + " Last Visible Pos: "
+ lastVisiblePosition);
}
/**
* This check should really just be: lastVisiblePosition ==
* lastItemPosition, but PtRListView internally uses a FooterView
* which messes the positions up. For me we'll just subtract one to
* account for it and rely on the inner condition which checks
* getBottom().
*/
if (lastVisiblePosition >= lastItemPosition - 1) {
final int childIndex = lastVisiblePosition - mRefreshableView.getFirstVisiblePosition();
final View lastVisibleChild = mRefreshableView.getChildAt(childIndex);
if (lastVisibleChild != null) {
return lastVisibleChild.getBottom() <= mRefreshableView.getBottom();
}
}
}
return false;
}
/**
 * Detaches both pull indicators (if attached) from the refreshable view
 * wrapper and clears the references so they can be recreated later.
 */
private void removeIndicatorViews() {
    if (mIndicatorIvTop != null) {
        getRefreshableViewWrapper().removeView(mIndicatorIvTop);
        mIndicatorIvTop = null;
    }

    if (mIndicatorIvBottom != null) {
        getRefreshableViewWrapper().removeView(mIndicatorIvBottom);
        mIndicatorIvBottom = null;
    }
}
/**
 * Syncs each indicator's visibility with the current state: an indicator is
 * shown only while no refresh is running AND its pull direction is ready.
 * show()/hide() are invoked only on an actual state change, matching the
 * original behavior (the indicator may animate on those calls).
 */
private void updateIndicatorViewsVisibility() {
    if (mIndicatorIvTop != null) {
        final boolean showTop = !isRefreshing() && isReadyForPullStart();
        if (showTop != mIndicatorIvTop.isVisible()) {
            if (showTop) {
                mIndicatorIvTop.show();
            } else {
                mIndicatorIvTop.hide();
            }
        }
    }

    if (mIndicatorIvBottom != null) {
        final boolean showBottom = !isRefreshing() && isReadyForPullEnd();
        if (showBottom != mIndicatorIvBottom.isVisible()) {
            if (showBottom) {
                mIndicatorIvBottom.show();
            } else {
                mIndicatorIvBottom.hide();
            }
        }
    }
}
}
| |
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.course.member;
import java.util.Collections;
import java.util.List;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.EscapeMode;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.table.ColumnDescriptor;
import org.olat.core.gui.components.table.CustomCellRenderer;
import org.olat.core.gui.components.table.CustomRenderColumnDescriptor;
import org.olat.core.gui.components.table.DefaultColumnDescriptor;
import org.olat.core.gui.components.table.StaticColumnDescriptor;
import org.olat.core.gui.components.table.Table;
import org.olat.core.gui.components.table.TableEvent;
import org.olat.core.gui.components.table.TableMultiSelectEvent;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.closablewrapper.CloseableModalController;
import org.olat.core.gui.control.generic.modal.DialogBoxController;
import org.olat.core.gui.control.generic.modal.DialogBoxUIFactory;
import org.olat.core.util.StringHelper;
import org.olat.group.BusinessGroup;
import org.olat.group.BusinessGroupManagedFlag;
import org.olat.group.model.BusinessGroupSelectionEvent;
import org.olat.group.model.SearchBusinessGroupParams;
import org.olat.group.ui.main.AbstractBusinessGroupListController;
import org.olat.group.ui.main.BGAccessControlledCellRenderer;
import org.olat.group.ui.main.BGTableItem;
import org.olat.group.ui.main.BusinessGroupNameColumnDescriptor;
import org.olat.group.ui.main.BusinessGroupTableModelWithType.Cols;
import org.olat.group.ui.main.BusinessGroupViewFilter;
import org.olat.group.ui.main.SelectBusinessGroupController;
import org.olat.group.ui.main.UnmanagedGroupFilter;
import org.olat.repository.RepositoryEntry;
import org.olat.repository.RepositoryEntryManagedFlag;
import org.olat.resource.OLATResource;
/**
*
* @author srosse, stephane.rosse@frentix.com, http://www.frentix.com
*/
public class CourseBusinessGroupListController extends AbstractBusinessGroupListController {
public static String TABLE_ACTION_UNLINK = "tblUnlink";
public static String TABLE_ACTION_MULTI_UNLINK = "tblMultiUnlink";
private final RepositoryEntry re;
private final Link createGroup;
private final Link addGroup;
private DialogBoxController confirmRemoveResource;
private DialogBoxController confirmRemoveMultiResource;
private SelectBusinessGroupController selectController;
public CourseBusinessGroupListController(UserRequest ureq, WindowControl wControl, RepositoryEntry re) {
super(ureq, wControl, "group_list", re);
this.re = re;
boolean managed = RepositoryEntryManagedFlag.isManaged(re, RepositoryEntryManagedFlag.groups);
createGroup = LinkFactory.createButton("group.create", mainVC, this);
createGroup.setVisible(!managed);
mainVC.put("createGroup", createGroup);
addGroup = LinkFactory.createButton("group.add", mainVC, this);
addGroup.setVisible(!managed);
mainVC.put("addGroup", addGroup);
}
@Override
protected void initButtons(UserRequest ureq) {
initButtons(ureq, true);
groupListCtr.setMultiSelect(true);
RepositoryEntry re = (RepositoryEntry)getUserObject();
boolean managed = RepositoryEntryManagedFlag.isManaged(re, RepositoryEntryManagedFlag.groups);
if(!managed) {
groupListCtr.addMultiSelectAction("table.duplicate", TABLE_ACTION_DUPLICATE);
groupListCtr.addMultiSelectAction("table.merge", TABLE_ACTION_MERGE);
}
groupListCtr.addMultiSelectAction("table.users.management", TABLE_ACTION_USERS);
groupListCtr.addMultiSelectAction("table.config", TABLE_ACTION_CONFIG);
groupListCtr.addMultiSelectAction("table.email", TABLE_ACTION_EMAIL);
if(!managed) {
groupListCtr.addMultiSelectAction("table.header.remove", TABLE_ACTION_MULTI_UNLINK);
}
}
@Override
protected int initColumns() {
RepositoryEntry re = (RepositoryEntry)getUserObject();
boolean managed = RepositoryEntryManagedFlag.isManaged(re, RepositoryEntryManagedFlag.groups);
groupListCtr.addColumnDescriptor(new BusinessGroupNameColumnDescriptor(TABLE_ACTION_LAUNCH, getLocale()));
groupListCtr.addColumnDescriptor(false, new DefaultColumnDescriptor(Cols.key.i18n(), Cols.key.ordinal(), null, getLocale()));
if(groupModule.isManagedBusinessGroups()) {
groupListCtr.addColumnDescriptor(false, new DefaultColumnDescriptor(Cols.externalId.i18n(), Cols.externalId.ordinal(), null, getLocale()));
}
groupListCtr.addColumnDescriptor(false, new DefaultColumnDescriptor(Cols.description.i18n(), Cols.description.ordinal(), null, getLocale()));
groupListCtr.addColumnDescriptor(new ResourcesColumnDescriptor(this, mainVC, getTranslator()));
groupListCtr.addColumnDescriptor(new DefaultColumnDescriptor(Cols.tutorsCount.i18n(), Cols.tutorsCount.ordinal(), null, getLocale()));
groupListCtr.addColumnDescriptor(new DefaultColumnDescriptor(Cols.participantsCount.i18n(), Cols.participantsCount.ordinal(), null, getLocale()));
DefaultColumnDescriptor freeplacesCol = new DefaultColumnDescriptor(Cols.freePlaces.i18n(), Cols.freePlaces.ordinal(), TABLE_ACTION_LAUNCH, getLocale());
freeplacesCol.setEscapeHtml(EscapeMode.none);
groupListCtr.addColumnDescriptor(freeplacesCol);
groupListCtr.addColumnDescriptor(new DefaultColumnDescriptor(Cols.waitingListCount.i18n(), Cols.waitingListCount.ordinal(), null, getLocale()));
CustomCellRenderer acRenderer = new BGAccessControlledCellRenderer();
groupListCtr.addColumnDescriptor(new CustomRenderColumnDescriptor(Cols.accessTypes.i18n(), Cols.accessTypes.ordinal(), null, getLocale(), ColumnDescriptor.ALIGNMENT_LEFT, acRenderer));
groupListCtr.addColumnDescriptor(new StaticColumnDescriptor(TABLE_ACTION_EDIT, "table.header.edit", translate("table.header.edit")));
if(!managed) {
groupListCtr.addColumnDescriptor(new RemoveActionColumnDescriptor("table.header.remove", Cols.wrapper.ordinal(), getTranslator()));
}
return 11;
}
@Override
protected void event(UserRequest ureq, Component source, Event event) {
if(source == createGroup) {
doCreate(ureq, getWindowControl(), re);
} else if (source == addGroup) {
doSelectGroups(ureq);
} else {
super.event(ureq, source, event);
}
}
@Override
protected void event(UserRequest ureq, Controller source, Event event) {
if(event instanceof BusinessGroupSelectionEvent) {
BusinessGroupSelectionEvent selectionEvent = (BusinessGroupSelectionEvent)event;
List<BusinessGroup> selectedGroups = selectionEvent.getGroups();
cmc.deactivate();
cleanUpPopups();
addGroupsToCourse(selectedGroups);
} else if (source == groupListCtr) {
if (event.getCommand().equals(Table.COMMANDLINK_ROWACTION_CLICKED)) {
TableEvent te = (TableEvent) event;
String actionid = te.getActionId();
if(TABLE_ACTION_UNLINK.equals(actionid)) {
Long businessGroupKey = groupListModel.getObject(te.getRowId()).getBusinessGroupKey();
BusinessGroup group = businessGroupService.loadBusinessGroup(businessGroupKey);
String text = getTranslator().translate("group.remove", new String[] {
StringHelper.escapeHtml(group.getName()),
StringHelper.escapeHtml(re.getDisplayname())
});
confirmRemoveResource = activateYesNoDialog(ureq, null, text, confirmRemoveResource);
confirmRemoveResource.setUserObject(group);
}
} else if (event instanceof TableMultiSelectEvent) {
TableMultiSelectEvent te = (TableMultiSelectEvent)event;
if(TABLE_ACTION_MULTI_UNLINK.equals(te.getAction())) {
List<BGTableItem> selectedItems = groupListModel.getObjects(te.getSelection());
if(selectedItems.isEmpty()) {
showWarning("error.select.one");
} else {
doConfirmRemove(ureq, selectedItems);
}
}
}
} else if (source == confirmRemoveResource) {
if (DialogBoxUIFactory.isYesEvent(event)) { // yes case
BusinessGroup group = (BusinessGroup)confirmRemoveResource.getUserObject();
doRemoveBusinessGroups(Collections.singletonList(group));
}
} else if (source == confirmRemoveMultiResource) {
if (DialogBoxUIFactory.isYesEvent(event)) { // yes case
@SuppressWarnings("unchecked")
List<BGTableItem> selectedItems = (List<BGTableItem>)confirmRemoveMultiResource.getUserObject();
List<BusinessGroup> groups = toBusinessGroups(ureq, selectedItems, false);
doRemoveBusinessGroups(groups);
}
}
super.event(ureq, source, event);
}
private void doConfirmRemove(UserRequest ureq, List<BGTableItem> selectedItems) {
StringBuilder sb = new StringBuilder();
StringBuilder managedSb = new StringBuilder();
for(BGTableItem item:selectedItems) {
String gname = item.getBusinessGroupName() == null ? "???" : StringHelper.escapeHtml(item.getBusinessGroupName());
if(BusinessGroupManagedFlag.isManaged(item.getManagedFlags(), BusinessGroupManagedFlag.resources)) {
if(managedSb.length() > 0) managedSb.append(", ");
managedSb.append(gname);
} else {
if(sb.length() > 0) sb.append(", ");
sb.append(gname);
}
}
if(managedSb.length() > 0) {
showWarning("error.managed.group", managedSb.toString());
} else {
String text = getTranslator().translate("group.remove", new String[] {
sb.toString(),
StringHelper.escapeHtml(re.getDisplayname())
});
confirmRemoveMultiResource = activateYesNoDialog(ureq, null, text, confirmRemoveResource);
confirmRemoveMultiResource.setUserObject(selectedItems);
}
}
@Override
protected void cleanUpPopups() {
super.cleanUpPopups();
removeAsListenerAndDispose(selectController);
selectController = null;
}
private void doRemoveBusinessGroups(List<BusinessGroup> groups) {
businessGroupService.removeResourceFrom(groups, re);
reloadModel();
}
protected void doSelectGroups(UserRequest ureq) {
removeAsListenerAndDispose(selectController);
BusinessGroupViewFilter filter = new UnmanagedGroupFilter(BusinessGroupManagedFlag.resources);
selectController = new SelectBusinessGroupController(ureq, getWindowControl(), filter);
listenTo(selectController);
cmc = new CloseableModalController(getWindowControl(), translate("close"),
selectController.getInitialComponent(), true, translate("select.group"));
cmc.activate();
listenTo(cmc);
}
protected void addGroupsToCourse(List<BusinessGroup> groups) {
List<RepositoryEntry> resources = Collections.singletonList(re);
businessGroupService.addResourcesTo(groups, resources);
reloadModel();
mainVC.setDirty(true);
}
@Override
protected void reloadModel() {
updateTableModel(new SearchBusinessGroupParams(), false);
}
@Override
protected OLATResource getResource() {
return re.getOlatResource();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/services/customer_user_access_invitation_service.proto
package com.google.ads.googleads.v8.services;
/**
* <pre>
* Request message for
* [CustomerUserAccessInvitation.GetCustomerUserAccessInvitation][]
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest}
*/
public final class GetCustomerUserAccessInvitationRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest)
    GetCustomerUserAccessInvitationRequestOrBuilder {
  // NOTE(review): compiler-generated protobuf code (file header says "DO NOT EDIT").
  // The comments added below are review annotations only; to change behavior,
  // regenerate from customer_user_access_invitation_service.proto.
  private static final long serialVersionUID = 0L;
  // Use GetCustomerUserAccessInvitationRequest.newBuilder() to construct.
  private GetCustomerUserAccessInvitationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: the single field starts out empty.
  private GetCustomerUserAccessInvitationRequest() {
    resourceName_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new GetCustomerUserAccessInvitationRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor used by PARSER below: reads tag/value pairs
  // until tag 0 (end of stream); field 1 (tag 10) is resource_name, everything
  // else is preserved in unknownFields.
  private GetCustomerUserAccessInvitationRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();
            resourceName_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v8.services.CustomerUserAccessInvitationServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerUserAccessInvitationRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v8.services.CustomerUserAccessInvitationServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerUserAccessInvitationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.class, com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.Builder.class);
  }
  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  // Lazy string field: holds either a String or a ByteString and caches the
  // most recently requested representation (standard protobuf handling).
  private volatile java.lang.Object resourceName_;
  /**
   * <pre>
   * Required. Resource name of the access invitation.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. Resource name of the access invitation.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest other = (com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest) obj;
    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads for every supported input form
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite.
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for
   * [CustomerUserAccessInvitation.GetCustomerUserAccessInvitation][]
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest)
      com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.services.CustomerUserAccessInvitationServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerUserAccessInvitationRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.services.CustomerUserAccessInvitationServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerUserAccessInvitationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.class, com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.Builder.class);
    }
    // Construct using com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceName_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.services.CustomerUserAccessInvitationServiceProto.internal_static_google_ads_googleads_v8_services_GetCustomerUserAccessInvitationRequest_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest build() {
      com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest buildPartial() {
      com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest result = new com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest(this);
      result.resourceName_ = resourceName_;
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest) {
        return mergeFrom((com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest other) {
      if (other == com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Required. Resource name of the access invitation.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. Resource name of the access invitation.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. Resource name of the access invitation.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      resourceName_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. Resource name of the access invitation.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. Resource name of the access invitation.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest)
  private static final com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest();
  }
  public static com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser singleton: delegates to the wire-format parsing constructor above.
  private static final com.google.protobuf.Parser<GetCustomerUserAccessInvitationRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetCustomerUserAccessInvitationRequest>() {
    @java.lang.Override
    public GetCustomerUserAccessInvitationRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetCustomerUserAccessInvitationRequest(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<GetCustomerUserAccessInvitationRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetCustomerUserAccessInvitationRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v8.services.GetCustomerUserAccessInvitationRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.sleuth.autoconfig.brave.instrument.web.client;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import brave.Span;
import brave.Tracer;
import brave.baggage.BaggagePropagation;
import brave.handler.SpanHandler;
import brave.propagation.B3Propagation;
import brave.sampler.Sampler;
import brave.test.TestSpanHandler;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.cassandra.CassandraAutoConfiguration;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.data.r2dbc.R2dbcDataAutoConfiguration;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.boot.autoconfigure.r2dbc.R2dbcAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.cloud.gateway.config.GatewayAutoConfiguration;
import org.springframework.cloud.gateway.config.GatewayClassPathWarningAutoConfiguration;
import org.springframework.cloud.sleuth.DisableSecurity;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.HttpHeaders;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import static brave.Span.Kind.CLIENT;
import static brave.propagation.B3Propagation.Format.SINGLE_NO_PARENT;
import static org.assertj.core.api.BDDAssertions.then;
// Integration test for Brave HTTP-client instrumentation: verifies that clients
// built from the auto-configured Apache HttpClientBuilder / HttpAsyncClientBuilder
// propagate the current trace id to the called service and report CLIENT spans.
@SpringBootTest(classes = WebClientTests.TestConfiguration.class,
        webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT,
        properties = { "spring.sleuth.web.servlet.enabled=false", "spring.application.name=fooservice",
                "spring.sleuth.web.client.skip-pattern=/skip.*" })
@DirtiesContext
public class WebClientTests {

    // Sleuth-instrumented builder beans (see issue #845) — injected, not built by hand.
    @Autowired
    HttpClientBuilder httpClientBuilder; // #845

    @Autowired
    HttpAsyncClientBuilder httpAsyncClientBuilder; // #845

    // Collects finished spans for assertions.
    @Autowired
    TestSpanHandler spans;

    @Autowired
    Tracer tracer;

    // Random port of the embedded server hosting FooController.
    @LocalServerPort
    int port;

    @Autowired
    FooController fooController;

    // Intentionally runs both before AND after each test so recorded spans and
    // controller state never leak between test methods.
    @AfterEach
    @BeforeEach
    public void close() {
        this.spans.clear();
        this.fooController.clear();
    }

    @Test
    @SuppressWarnings("unchecked")
    public void shouldAttachTraceIdWhenCallingAnotherServiceForHttpClient() throws Exception {
        then(this.spans).isEmpty();
        Span span = this.tracer.nextSpan().name("foo").start();
        try (Tracer.SpanInScope ws = this.tracer.withSpanInScope(span)) {
            String response = this.httpClientBuilder.build().execute(new HttpGet("http://localhost:" + this.port),
                    new BasicResponseHandler());
            then(response).isNotEmpty();
        }
        // Scope closed above: no span may remain attached to the current thread.
        then(this.tracer.currentSpan()).isNull();
        // Every reported span must carry the parent trace id, and at least one
        // of them must be a CLIENT span produced by the instrumented client.
        then(this.spans).isNotEmpty().extracting("traceId", String.class).containsOnly(span.context().traceIdString());
        then(this.spans.spans().stream().map(s -> s.kind().name()).collect(Collectors.toList())).contains("CLIENT");
    }

    @Test
    @SuppressWarnings("unchecked")
    public void shouldAttachTraceIdWhenCallingAnotherServiceForAsyncHttpClient() throws Exception {
        Span span = this.tracer.nextSpan().name("foo").start();
        CloseableHttpAsyncClient client = this.httpAsyncClientBuilder.build();
        try (Tracer.SpanInScope ws = this.tracer.withSpanInScope(span)) {
            client.start();
            Future<HttpResponse> future = client.execute(new HttpGet("http://localhost:" + this.port),
                    new FutureCallback<HttpResponse>() {
                        @Override
                        public void completed(HttpResponse result) {
                        }
                        @Override
                        public void failed(Exception ex) {
                        }
                        @Override
                        public void cancelled() {
                        }
                    });
            // Block until the async call completes so its span is reported.
            then(future.get()).isNotNull();
        }
        finally {
            client.close();
        }
        then(this.tracer.currentSpan()).isNull();
        then(this.spans).isNotEmpty().extracting("traceId", String.class).containsOnly(span.context().traceIdString());
        then(this.spans.spans().stream().map(s -> s.kind().name()).collect(Collectors.toList())).contains("CLIENT");
    }

    // Minimal application context: excludes heavy auto-configurations that are
    // irrelevant to HTTP-client tracing, samples every trace, and records spans
    // in-memory via TestSpanHandler.
    @Configuration(proxyBeanMethods = false)
    @EnableAutoConfiguration(exclude = { GatewayClassPathWarningAutoConfiguration.class, GatewayAutoConfiguration.class,
            R2dbcAutoConfiguration.class, R2dbcDataAutoConfiguration.class, RedisAutoConfiguration.class,
            CassandraAutoConfiguration.class, MongoAutoConfiguration.class, MongoDataAutoConfiguration.class })
    @DisableSecurity
    public static class TestConfiguration {

        @Bean
        BaggagePropagation.FactoryBuilder baggagePropagationFactoryBuilder() {
            // Use b3 single format as it is less verbose
            return BaggagePropagation.newFactoryBuilder(
                    B3Propagation.newFactoryBuilder().injectFormat(CLIENT, SINGLE_NO_PARENT).build());
        }

        @Bean
        FooController fooController() {
            return new FooController();
        }

        @Bean
        Sampler testSampler() {
            return Sampler.ALWAYS_SAMPLE;
        }

        @Bean
        SpanHandler testSpanHandler() {
            return new TestSpanHandler();
        }
    }

    // Target endpoint: echoes the received request headers so tests could inspect
    // propagated trace headers.
    @RestController
    public static class FooController {

        // NOTE(review): never assigned by this class — appears to be leftover
        // scaffolding kept for getSpan()/clear(); confirm before removing.
        Span span;

        @RequestMapping("/")
        public Map<String, String> home(@RequestHeader HttpHeaders headers) {
            Map<String, String> map = new HashMap<>();
            for (String key : headers.keySet()) {
                map.put(key, headers.getFirst(key));
            }
            return map;
        }

        public Span getSpan() {
            return this.span;
        }

        public void clear() {
            this.span = null;
        }
    }
}
| |
package com.fragmentime.markdownj.analyzer;
import com.fragmentime.markdownj.elements.Element;
import com.fragmentime.markdownj.elements.text.Text;
import com.fragmentime.markdownj.logger.Log;
import java.util.*;
/**
* Created by Beancan on 2016/11/2.
*/
/**
 * Analyzes a raw text run and builds an {@link Element} tree of inline markdown
 * elements (code block, image, link, bold, italic), filling the character
 * ranges between matches with plain-text nodes.
 */
public class TextAnalyser extends Analyzer {

    /** Text analysis is the fallback analyzer: it accepts every element. */
    public boolean belongsToAnalyzer(Element element) {
        return true;
    }

    protected int getWeight() {
        return ANALYZER_TEXT;
    }

    /**
     * Joins the element's data lines with single spaces, analyzes the joined
     * text and attaches the resulting inline-element tree as {@code root}'s
     * right child.
     *
     * @param root element whose data lines are analyzed; ignored when null/empty
     * @return always {@code false} so the analyzer chain continues —
     *         NOTE(review): confirm this is the intended contract of analyze()
     */
    public boolean analyze(Element root) {
        if (root == null || root.getData().size() == 0) {
            return false;
        }
        StringBuilder sb = new StringBuilder();
        Iterator<String> it = root.getData().iterator();
        while (it.hasNext()) {
            sb.append(it.next());
            if (it.hasNext()) {
                sb.append(" ");
            }
        }
        String text = sb.toString();
        if (text.trim().length() == 0) {
            return false;
        }
        Element e = new TextAnalyser(text).analyze();
        root.setRight(e);
        e.setParent(root);
        return false;
    }

    /**
     * A node of the range tree: a typed [start, end) span over the analyzed
     * text, linked to its parent, first child ({@code right}) and next sibling
     * ({@code left}).
     */
    private static class TextIndexer implements Comparable<TextIndexer> {
        private TextIndexer parent;
        private TextIndexer left;
        private TextIndexer right;
        private final String type;
        private final int start;
        private final int end;

        public TextIndexer(String type, int start, int end) {
            this.type = type;
            this.start = start;
            this.end = end;
        }

        /** Orders by start offset; Integer.compare avoids subtraction overflow. */
        public int compareTo(TextIndexer o) {
            return Integer.compare(this.start, o.start);
        }

        @Override
        public String toString() {
            return type + ": " + start + "-" + end;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof TextIndexer) {
                TextIndexer b = (TextIndexer) obj;
                return this.type.equals(b.type) && this.start == b.start && this.end == b.end;
            }
            return false;
        }

        /** Added to honor the equals/hashCode contract (equals was overridden alone). */
        @Override
        public int hashCode() {
            return Objects.hash(type, start, end);
        }
    }

    // Typed ranges found by the analysers; sorted by start before tree building.
    private List<TextIndexer> indexers = new ArrayList<>();
    private String text;

    public TextAnalyser(String text) {
        this.text = text;
    }

    public TextAnalyser() {
    }

    /** Unused stub — presumably a base-class hook; always returns null. TODO confirm. */
    public Element analyze(String text) {
        return null;
    }

    /** The inline analysers, in precedence order: block, image, link, bold, italic. */
    private static List<Analyser> defaultAnalysers() {
        return Arrays.asList(new BlockAnalyser(), new ImageAnalyser(), new LinkAnalyser(),
                new BoldAnalyser(), new ItalicAnalyser());
    }

    /**
     * Runs each analyser over the text (each pass masks its matches so later
     * analysers skip them), sorts the collected ranges, then builds the range
     * tree rooted at a TEXT node spanning the whole input and fills uncovered
     * gaps with plain-text ranges.
     *
     * @return root of the fully-populated range tree
     */
    private TextIndexer analyzeElementsTypes() {
        StringBuilder masked = new StringBuilder(this.text);
        for (Analyser item : defaultAnalysers()) {
            masked = analyzeAndMultiply(masked, item, this.indexers);
        }
        Collections.sort(this.indexers);
        for (TextIndexer item : this.indexers) {
            Log.log(item.type + ": " + item.start + "-" + item.end);
        }
        // Build render tree and fill blanks.
        ReactTree rt = new ReactTree(new TextIndexer(Element.TEXT, 0, this.text.length()), this.text);
        rt.buildReactTree(this.indexers);
        rt.fillBlanks();
        return rt.root;
    }

    /** Recursively converts the range tree into a {@link Text} element tree. */
    private Element buildElementTree(TextIndexer ti) {
        if (ti == null) {
            return null;
        }
        Text t = new Text();
        t.append(this.text.substring(ti.start, ti.end));
        t.setType(ti.type);
        t.setAcceptAnalyzed(false);
        t.setRight(buildElementTree(ti.right));
        t.setLeft(buildElementTree(ti.left));
        return t;
    }

    /** Analyzes {@link #text} and returns the resulting element tree. */
    public Element analyze() {
        TextIndexer reactTree = this.analyzeElementsTypes();
        Element tree = buildElementTree(reactTree);
        tree.setAcceptAnalyzed(false);
        return tree;
    }

    /** Returns {@code length} copies of 'A' ("" for length <= 0), used as mask filler. */
    private static String generateString(int length) {
        if (length <= 0) {
            return "";
        }
        char[] filler = new char[length];
        Arrays.fill(filler, 'A');
        return new String(filler);
    }

    /**
     * Splits {@code origin} with {@code analyser}, records a {@link TextIndexer}
     * for every matching fragment, and returns the text with each match replaced
     * by same-length 'A' filler so subsequent analysers cannot re-match it.
     * Bold fragments are additionally scanned for elements nested inside their
     * two-character delimiters, with nested offsets re-based to the full text.
     */
    private static StringBuilder analyzeAndMultiply(StringBuilder origin, Analyser analyser, List<TextIndexer> indexers) {
        String textA = origin.toString();
        if (!analyser.has(textA)) {
            return origin;
        }
        StringBuilder masked = new StringBuilder();
        List<String> txts = analyser.split(textA);
        int leftIndex = 0;
        for (String item : txts) {
            if (analyser.has(item)) {
                if (analyser instanceof BoldAnalyser) {
                    // Fill the dead zone of bold & italic: analyze the content
                    // between the 2-char "**" delimiters with every analyser.
                    StringBuilder boldString = new StringBuilder(origin.substring(leftIndex + 2, leftIndex + item.length() - 2));
                    List<TextIndexer> subBoldIndexers = new ArrayList<>();
                    for (Analyser subitem : defaultAnalysers()) {
                        boldString = analyzeAndMultiply(boldString, subitem, subBoldIndexers);
                    }
                    // Re-base nested ranges from bold-local to whole-text offsets.
                    for (TextIndexer tiItem : subBoldIndexers) {
                        indexers.add(new TextIndexer(tiItem.type, leftIndex + 2 + tiItem.start, leftIndex + 2 + tiItem.end));
                    }
                }
                indexers.add(new TextIndexer(analyser.getType(), leftIndex, leftIndex + item.length()));
                masked.append(generateString(item.length()));
            } else {
                masked.append(item);
            }
            leftIndex += item.length();
        }
        return masked;
    }

    /**
     * Range tree over {@code context}: {@code right} points at a node's first
     * child, {@code left} chains siblings. fillBlanks() inserts TEXT nodes for
     * uncovered character ranges.
     */
    private static class ReactTree {
        private TextIndexer root;
        private final String context;

        public ReactTree(TextIndexer root, String context) {
            this.root = root;
            this.context = context;
        }

        /** Inserts every collected range into the tree; returns the root. */
        private TextIndexer buildReactTree(List<TextIndexer> nodes) {
            if (nodes == null || nodes.size() == 0) {
                return null;
            }
            for (TextIndexer item : nodes) {
                insertTextIndexer(item);
            }
            return this.root;
        }

        private void fillBlanks() {
            fillBlanks(this.root);
        }

        /**
         * Inserts plain-TEXT filler nodes for the ranges of {@code node} not
         * covered by its children: between siblings, before the first child
         * (past the node's opening delimiter) and after the last covered offset
         * (before the node's closing delimiter; for links, up to ']').
         */
        private void fillBlanks(TextIndexer node) {
            if (node == null) {
                return;
            }
            if (node.right != null) {
                int minStart = node.right.start;
                int maxEnd = node.right.end;
                TextIndexer current = node.right, lastLeft = current;
                while (current != null) {
                    {
                        // Fill the gap between this child and its next sibling,
                        // tracking the maximum covered end offset.
                        int currentEnd = current.end;
                        TextIndexer ti = current.left;
                        if (ti == null) {
                            maxEnd = current.end;
                            break;
                        }
                        int nextStart = ti.start;
                        if (currentEnd + 1 < nextStart) {
                            TextIndexer filler = new TextIndexer(Element.TEXT, currentEnd + 1, nextStart - 1);
                            filler.parent = current;
                            filler.left = ti;
                            current.left = filler;
                            ti.parent = filler;
                            current = filler;
                        }
                    }
                    maxEnd = current.end;
                    lastLeft = current;
                    current = current.left;
                }
                if (node.start < minStart) {
                    // Fill from the node's content start to the first child.
                    int fillStartStart = node.start;
                    if (Text.TEXT_ITALIC.equals(node.type)) {
                        fillStartStart += 1;
                    } else if (Text.TEXT_BOLD.equals(node.type)) {
                        fillStartStart += 2;
                    } else if (Text.TEXT_LINK.equals(node.type)) {
                        fillStartStart += 1;
                    }
                    TextIndexer filler = new TextIndexer(Element.TEXT, fillStartStart, minStart);
                    filler.parent = node;
                    filler.left = node.right;
                    node.right.parent = filler;
                    node.right = filler;
                }
                if (maxEnd < node.end) {
                    // Fill from the last covered offset to the node's content end.
                    int fillEndEnd = node.end;
                    if (Text.TEXT_ITALIC.equals(node.type)) {
                        fillEndEnd -= 1;
                    } else if (Text.TEXT_BOLD.equals(node.type)) {
                        fillEndEnd -= 2;
                    } else if (Text.TEXT_LINK.equals(node.type)) {
                        String tmp = this.context.substring(maxEnd, node.end);
                        int linkTextEnd = tmp.indexOf(']');
                        fillEndEnd = maxEnd + linkTextEnd;
                    }
                    TextIndexer filler = new TextIndexer(Element.TEXT, maxEnd, fillEndEnd);
                    filler.parent = lastLeft;
                    lastLeft.left = filler;
                }
            }
            fillBlanks(node.right);
            fillBlanks(node.left);
        }

        /**
         * Inserts a node: descend into {@code right} (containment) while the new
         * node ends within the current range, otherwise along {@code left}
         * (sibling chain) until a free slot is found.
         */
        private void insertTextIndexer(TextIndexer node) {
            if (root == null) {
                root = node;
                return;
            }
            TextIndexer current = root;
            while (true) {
                if (node.end <= current.end) {
                    if (current.right == null) {
                        current.right = node;
                        node.parent = current;
                        break;
                    } else {
                        current = current.right;
                    }
                } else {
                    if (current.left == null) {
                        current.left = node;
                        node.parent = current;
                        break;
                    } else {
                        current = current.left;
                    }
                }
            }
        }
    }

    /** Strategy for one inline element type, backed by the static helpers on Text. */
    private interface Analyser {
        List<String> split(String text);
        boolean has(String text);
        String getType();
    }

    /** Inline code blocks. */
    private static class BlockAnalyser implements Analyser {
        public List<String> split(String text) {
            return Text.splitBlock(text);
        }
        public boolean has(String text) {
            return Text.hasBlock(text);
        }
        public String getType() {
            return Text.TEXT_BLOCK;
        }
    }

    /** Images. */
    private static class ImageAnalyser implements Analyser {
        public List<String> split(String text) {
            return Text.splitImage(text);
        }
        public boolean has(String text) {
            return Text.hasImage(text);
        }
        public String getType() {
            return Text.TEXT_IMAGE;
        }
    }

    /** Links. */
    private static class LinkAnalyser implements Analyser {
        public List<String> split(String text) {
            return Text.splitLink(text);
        }
        public boolean has(String text) {
            return Text.hasLink(text);
        }
        public String getType() {
            return Text.TEXT_LINK;
        }
    }

    /** Italic spans. */
    private static class ItalicAnalyser implements Analyser {
        public List<String> split(String text) {
            return Text.splitItalic(text);
        }
        public boolean has(String text) {
            return Text.hasItalic(text);
        }
        public String getType() {
            return Text.TEXT_ITALIC;
        }
    }

    /** Bold spans. */
    private static class BoldAnalyser implements Analyser {
        public List<String> split(String text) {
            return Text.splitBold(text);
        }
        public boolean has(String text) {
            return Text.hasBold(text);
        }
        public String getType() {
            return Text.TEXT_BOLD;
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/datacatalog.proto
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Request message for
* [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.CreateTagTemplateRequest}
*/
public final class CreateTagTemplateRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.CreateTagTemplateRequest)
CreateTagTemplateRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateTagTemplateRequest.newBuilder() to construct.
private CreateTagTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// proto3 defaults: string fields start empty, message field stays null.
private CreateTagTemplateRequest() {
  parent_ = "";
  tagTemplateId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateTagTemplateRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged fields from the stream until
// end-of-message, preserving unrecognized fields in unknownFields.
private CreateTagTemplateRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 10: // field 1 (parent), wire type 2
          {
            java.lang.String s = input.readStringRequireUtf8();
            parent_ = s;
            break;
          }
        case 18: // field 2 (tag_template message), wire type 2
          {
            com.google.cloud.datacatalog.v1.TagTemplate.Builder subBuilder = null;
            if (tagTemplate_ != null) {
              // A repeated occurrence merges into the previously parsed value.
              subBuilder = tagTemplate_.toBuilder();
            }
            tagTemplate_ =
                input.readMessage(
                    com.google.cloud.datacatalog.v1.TagTemplate.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(tagTemplate_);
              tagTemplate_ = subBuilder.buildPartial();
            }
            break;
          }
        case 26: // field 3 (tag_template_id), wire type 2
          {
            java.lang.String s = input.readStringRequireUtf8();
            tagTemplateId_ = s;
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor / reflection plumbing generated from datacatalog.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.datacatalog.v1.Datacatalog
      .internal_static_google_cloud_datacatalog_v1_CreateTagTemplateRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.datacatalog.v1.Datacatalog
      .internal_static_google_cloud_datacatalog_v1_CreateTagTemplateRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.class,
          com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Stored as Object: either a String or a ByteString; converted lazily and cached.
private volatile java.lang.Object parent_;
/**
 * Required. The name of the project and the template location
 * [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, ...];</code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s; // cache the decoded String
    return s;
  }
}
/**
 * Required. The name of the project and the template location
 * [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, ...];</code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b; // cache the encoded ByteString
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int TAG_TEMPLATE_ID_FIELD_NUMBER = 3;
// Stored as Object: either a String or a ByteString; converted lazily and cached.
private volatile java.lang.Object tagTemplateId_;
/**
 * Required. The ID of the tag template to create.
 * The ID must contain only lowercase letters (a-z), numbers (0-9),
 * or underscores (_), and must start with a letter or underscore.
 * The maximum size is 64 bytes when encoded in UTF-8.
 *
 * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The tagTemplateId.
 */
@java.lang.Override
public java.lang.String getTagTemplateId() {
  java.lang.Object ref = tagTemplateId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    tagTemplateId_ = s; // cache the decoded String
    return s;
  }
}
/**
 * Required. The ID of the tag template to create (see {@link #getTagTemplateId()}).
 *
 * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for tagTemplateId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getTagTemplateIdBytes() {
  java.lang.Object ref = tagTemplateId_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    tagTemplateId_ = b; // cache the encoded ByteString
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int TAG_TEMPLATE_FIELD_NUMBER = 2;
// null when unset; accessors substitute the default instance.
private com.google.cloud.datacatalog.v1.TagTemplate tagTemplate_;
/**
 * Required. The tag template to create.
 *
 * <code>.google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the tagTemplate field is set.
 */
@java.lang.Override
public boolean hasTagTemplate() {
  return tagTemplate_ != null;
}
/**
 * Required. The tag template to create.
 *
 * <code>.google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The tagTemplate, or the default instance if unset.
 */
@java.lang.Override
public com.google.cloud.datacatalog.v1.TagTemplate getTagTemplate() {
  return tagTemplate_ == null
      ? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
      : tagTemplate_;
}
/**
 * Required. The tag template to create.
 *
 * <code>.google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.cloud.datacatalog.v1.TagTemplateOrBuilder getTagTemplateOrBuilder() {
  return getTagTemplate();
}
// Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No proto2-style required fields, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // proto3 serialization: fields at their default values are skipped.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (tagTemplate_ != null) {
    output.writeMessage(2, getTagTemplate());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tagTemplateId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tagTemplateId_);
  }
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size; // cached from a previous call
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (tagTemplate_ != null) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTagTemplate());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tagTemplateId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tagTemplateId_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.datacatalog.v1.CreateTagTemplateRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.datacatalog.v1.CreateTagTemplateRequest other =
      (com.google.cloud.datacatalog.v1.CreateTagTemplateRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (!getTagTemplateId().equals(other.getTagTemplateId())) return false;
  if (hasTagTemplate() != other.hasTagTemplate()) return false;
  if (hasTagTemplate()) {
    if (!getTagTemplate().equals(other.getTagTemplate())) return false;
  }
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode; // computed once and cached
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + TAG_TEMPLATE_ID_FIELD_NUMBER;
  hash = (53 * hash) + getTagTemplateId().hashCode();
  if (hasTagTemplate()) {
    hash = (37 * hash) + TAG_TEMPLATE_FIELD_NUMBER;
    hash = (53 * hash) + getTagTemplate().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// --- Static parse helpers: all delegate to PARSER / GeneratedMessageV3. ---
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// --- Builder factories. ---
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.cloud.datacatalog.v1.CreateTagTemplateRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for
* [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.CreateTagTemplateRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.CreateTagTemplateRequest)
com.google.cloud.datacatalog.v1.CreateTagTemplateRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_CreateTagTemplateRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_CreateTagTemplateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.class,
com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
parent_ = "";
tagTemplateId_ = "";
if (tagTemplateBuilder_ == null) {
tagTemplate_ = null;
} else {
tagTemplate_ = null;
tagTemplateBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_CreateTagTemplateRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.CreateTagTemplateRequest getDefaultInstanceForType() {
return com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.CreateTagTemplateRequest build() {
com.google.cloud.datacatalog.v1.CreateTagTemplateRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.CreateTagTemplateRequest buildPartial() {
com.google.cloud.datacatalog.v1.CreateTagTemplateRequest result =
new com.google.cloud.datacatalog.v1.CreateTagTemplateRequest(this);
result.parent_ = parent_;
result.tagTemplateId_ = tagTemplateId_;
if (tagTemplateBuilder_ == null) {
result.tagTemplate_ = tagTemplate_;
} else {
result.tagTemplate_ = tagTemplateBuilder_.build();
}
onBuilt();
return result;
}
    // The following overrides delegate directly to GeneratedMessageV3.Builder; protoc
    // emits them so each call keeps the concrete Builder return type for chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the type-specific merge when possible; otherwise fall back to the
      // reflection-based merge in the superclass.
      if (other instanceof com.google.cloud.datacatalog.v1.CreateTagTemplateRequest) {
        return mergeFrom((com.google.cloud.datacatalog.v1.CreateTagTemplateRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: only set (non-default) fields of {@code other} overwrite
    // this builder's values, per standard protobuf merge semantics.
    public Builder mergeFrom(com.google.cloud.datacatalog.v1.CreateTagTemplateRequest other) {
      if (other == com.google.cloud.datacatalog.v1.CreateTagTemplateRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (!other.getTagTemplateId().isEmpty()) {
        tagTemplateId_ = other.tagTemplateId_;
        onChanged();
      }
      if (other.hasTagTemplate()) {
        // Message fields are merged recursively, not replaced wholesale.
        mergeTagTemplate(other.getTagTemplate());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 messages have no required fields, so they are always initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.datacatalog.v1.CreateTagTemplateRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure so the finally block can still
        // merge the partial message, then rethrow as an IOException.
        parsedMessage =
            (com.google.cloud.datacatalog.v1.CreateTagTemplateRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Holds either a decoded String or the raw ByteString from the wire; see getParent().
    private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the project and the template location
* [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      // Lazy decode: after parsing the field may hold a ByteString; cache the String.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The name of the project and the template location
* [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      // Mirror of getParent(): encode and cache the ByteString form when needed.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. The name of the project and the template location
* [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
    public Builder setParent(java.lang.String value) {
      // Proto3 string fields are null-hostile; use clearParent() to reset instead.
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The name of the project and the template location
* [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
    public Builder clearParent() {
      // Restores the field to its default (the empty string).
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The name of the project and the template location
* [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; rejects malformed byte sequences.
      checkByteStringIsUtf8(value);
      parent_ = value;
      onChanged();
      return this;
    }
    // Holds either a decoded String or the raw ByteString from the wire; see getTagTemplateId().
    private java.lang.Object tagTemplateId_ = "";
/**
*
*
* <pre>
* Required. The ID of the tag template to create.
* The ID must contain only lowercase letters (a-z), numbers (0-9),
* or underscores (_), and must start with a letter or underscore.
* The maximum size is 64 bytes when encoded in UTF-8.
* </pre>
*
* <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The tagTemplateId.
*/
    public java.lang.String getTagTemplateId() {
      java.lang.Object ref = tagTemplateId_;
      // Lazy decode: after parsing the field may hold a ByteString; cache the String.
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        tagTemplateId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The ID of the tag template to create.
* The ID must contain only lowercase letters (a-z), numbers (0-9),
* or underscores (_), and must start with a letter or underscore.
* The maximum size is 64 bytes when encoded in UTF-8.
* </pre>
*
* <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for tagTemplateId.
*/
    public com.google.protobuf.ByteString getTagTemplateIdBytes() {
      java.lang.Object ref = tagTemplateId_;
      // Mirror of getTagTemplateId(): encode and cache the ByteString form when needed.
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        tagTemplateId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. The ID of the tag template to create.
* The ID must contain only lowercase letters (a-z), numbers (0-9),
* or underscores (_), and must start with a letter or underscore.
* The maximum size is 64 bytes when encoded in UTF-8.
* </pre>
*
* <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The tagTemplateId to set.
* @return This builder for chaining.
*/
    public Builder setTagTemplateId(java.lang.String value) {
      // Null is rejected; use clearTagTemplateId() to reset the field.
      if (value == null) {
        throw new NullPointerException();
      }
      tagTemplateId_ = value;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The ID of the tag template to create.
* The ID must contain only lowercase letters (a-z), numbers (0-9),
* or underscores (_), and must start with a letter or underscore.
* The maximum size is 64 bytes when encoded in UTF-8.
* </pre>
*
* <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
    public Builder clearTagTemplateId() {
      // Restores the field to its default (the empty string).
      tagTemplateId_ = getDefaultInstance().getTagTemplateId();
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The ID of the tag template to create.
* The ID must contain only lowercase letters (a-z), numbers (0-9),
* or underscores (_), and must start with a letter or underscore.
* The maximum size is 64 bytes when encoded in UTF-8.
* </pre>
*
* <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for tagTemplateId to set.
* @return This builder for chaining.
*/
    public Builder setTagTemplateIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; rejects malformed byte sequences.
      checkByteStringIsUtf8(value);
      tagTemplateId_ = value;
      onChanged();
      return this;
    }
    private com.google.cloud.datacatalog.v1.TagTemplate tagTemplate_;
    // Once created (see getTagTemplateFieldBuilder()), the nested builder owns the field's
    // state and tagTemplate_ is set to null.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1.TagTemplate,
            com.google.cloud.datacatalog.v1.TagTemplate.Builder,
            com.google.cloud.datacatalog.v1.TagTemplateOrBuilder>
        tagTemplateBuilder_;
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tagTemplate field is set.
*/
    public boolean hasTagTemplate() {
      // Set if either the nested builder exists or a message has been assigned.
      return tagTemplateBuilder_ != null || tagTemplate_ != null;
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tagTemplate.
*/
    public com.google.cloud.datacatalog.v1.TagTemplate getTagTemplate() {
      // Prefer the nested builder's view when it exists; never returns null —
      // falls back to the default instance when the field is unset.
      if (tagTemplateBuilder_ == null) {
        return tagTemplate_ == null
            ? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
            : tagTemplate_;
      } else {
        return tagTemplateBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setTagTemplate(com.google.cloud.datacatalog.v1.TagTemplate value) {
      // Route through the builder when one exists; otherwise store the message directly.
      if (tagTemplateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        tagTemplate_ = value;
        onChanged();
      } else {
        tagTemplateBuilder_.setMessage(value);
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setTagTemplate(
        com.google.cloud.datacatalog.v1.TagTemplate.Builder builderForValue) {
      // Builds the provided sub-builder and stores the resulting message.
      if (tagTemplateBuilder_ == null) {
        tagTemplate_ = builderForValue.build();
        onChanged();
      } else {
        tagTemplateBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeTagTemplate(com.google.cloud.datacatalog.v1.TagTemplate value) {
      if (tagTemplateBuilder_ == null) {
        // Merge into the existing message when present; otherwise adopt the value.
        if (tagTemplate_ != null) {
          tagTemplate_ =
              com.google.cloud.datacatalog.v1.TagTemplate.newBuilder(tagTemplate_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          tagTemplate_ = value;
        }
        onChanged();
      } else {
        tagTemplateBuilder_.mergeFrom(value);
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearTagTemplate() {
      // Unsets the field; also discards the nested builder when one exists.
      if (tagTemplateBuilder_ == null) {
        tagTemplate_ = null;
        onChanged();
      } else {
        tagTemplate_ = null;
        tagTemplateBuilder_ = null;
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.datacatalog.v1.TagTemplate.Builder getTagTemplateBuilder() {
      // Marks the field changed up front since the caller may mutate the sub-builder.
      onChanged();
      return getTagTemplateFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.datacatalog.v1.TagTemplateOrBuilder getTagTemplateOrBuilder() {
      // Read-only view without forcing builder creation; defaults when unset.
      if (tagTemplateBuilder_ != null) {
        return tagTemplateBuilder_.getMessageOrBuilder();
      } else {
        return tagTemplate_ == null
            ? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
            : tagTemplate_;
      }
    }
/**
*
*
* <pre>
* Required. The tag template to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.v1.TagTemplate,
            com.google.cloud.datacatalog.v1.TagTemplate.Builder,
            com.google.cloud.datacatalog.v1.TagTemplateOrBuilder>
        getTagTemplateFieldBuilder() {
      // Lazily creates the nested builder; once created it becomes the single source of
      // truth for the field, so the plain message reference is cleared.
      if (tagTemplateBuilder_ == null) {
        tagTemplateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.datacatalog.v1.TagTemplate,
                com.google.cloud.datacatalog.v1.TagTemplate.Builder,
                com.google.cloud.datacatalog.v1.TagTemplateOrBuilder>(
                getTagTemplate(), getParentForChildren(), isClean());
        tagTemplate_ = null;
      }
      return tagTemplateBuilder_;
    }
    // Unknown-field handling delegates to the superclass; protoc emits the overrides so
    // each call keeps the concrete Builder return type for chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.CreateTagTemplateRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.CreateTagTemplateRequest)
  // Shared immutable instance representing the all-defaults message.
  private static final com.google.cloud.datacatalog.v1.CreateTagTemplateRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.CreateTagTemplateRequest();
  }
  public static com.google.cloud.datacatalog.v1.CreateTagTemplateRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by the protobuf runtime to decode serialized requests.
  private static final com.google.protobuf.Parser<CreateTagTemplateRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateTagTemplateRequest>() {
        @java.lang.Override
        public CreateTagTemplateRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new CreateTagTemplateRequest(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<CreateTagTemplateRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateTagTemplateRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.CreateTagTemplateRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sampletvinput.syncadapter;
import android.accounts.Account;
import android.content.AbstractThreadedSyncAdapter;
import android.content.ContentProviderClient;
import android.content.ContentProviderOperation;
import android.content.ContentUris;
import android.content.Context;
import android.content.OperationApplicationException;
import android.content.SyncResult;
import android.media.tv.TvContract;
import android.net.Uri;
import android.os.Bundle;
import android.os.RemoteException;
import android.util.Log;
import android.util.LongSparseArray;
import com.example.android.sampletvinput.TvContractUtils;
import com.example.android.sampletvinput.data.Program;
import com.example.android.sampletvinput.rich.RichFeedUtil;
import com.example.android.sampletvinput.xmltv.XmlTvParser;
import java.util.ArrayList;
import java.util.List;
/**
 * A {@link AbstractThreadedSyncAdapter} which periodically refreshes the TV program
 * guide: it fetches an XMLTV feed, computes the program schedule for each channel of
 * the given TV input, and reconciles it against the rows already stored in TvProvider.
 */
class SyncAdapter extends AbstractThreadedSyncAdapter {
    public static final String TAG = "SyncAdapter";

    /** Extras key: the TV input whose channels should be synced (required). */
    public static final String BUNDLE_KEY_INPUT_ID = "bundle_key_input_id";
    /** Extras key: when {@code true}, only sync a short window around "now". */
    public static final String BUNDLE_KEY_CURRENT_PROGRAM_ONLY = "bundle_key_current_program_only";

    public static final long FULL_SYNC_FREQUENCY_SEC = 60 * 60 * 24;  // daily
    private static final int FULL_SYNC_WINDOW_SEC = 60 * 60 * 24 * 14;  // 2 weeks
    private static final int SHORT_SYNC_WINDOW_SEC = 60 * 60;  // 1 hour
    // Flush the ContentProvider batch at this size to avoid TransactionTooLargeException.
    private static final int BATCH_OPERATION_COUNT = 100;

    private final Context mContext;

    public SyncAdapter(Context context, boolean autoInitialize) {
        super(context, autoInitialize);
        mContext = context;
    }

    public SyncAdapter(Context context, boolean autoInitialize, boolean allowParallelSyncs) {
        super(context, autoInitialize, allowParallelSyncs);
        mContext = context;
    }

    /**
     * Called periodically by the system in every {@code FULL_SYNC_FREQUENCY_SEC}.
     */
    @Override
    public void onPerformSync(Account account, Bundle extras, String authority,
            ContentProviderClient provider, SyncResult syncResult) {
        Log.d(TAG, "onPerformSync(" + account + ", " + authority + ", " + extras + ")");
        String inputId = extras.getString(SyncAdapter.BUNDLE_KEY_INPUT_ID);
        if (inputId == null) {
            // Nothing to sync without knowing which input's channels to update.
            return;
        }
        XmlTvParser.TvListing listings = RichFeedUtil.getRichTvListings(mContext);
        LongSparseArray<XmlTvParser.XmlTvChannel> channelMap = TvContractUtils.buildChannelMap(
                mContext.getContentResolver(), inputId, listings.channels);
        boolean currentProgramOnly = extras.getBoolean(
                SyncAdapter.BUNDLE_KEY_CURRENT_PROGRAM_ONLY, false);
        long startMs = System.currentTimeMillis();
        // Multiply with 1000L so a larger window constant cannot silently overflow int
        // (the previous int multiplication was within ~6% of Integer.MAX_VALUE).
        long endMs = startMs + FULL_SYNC_WINDOW_SEC * 1000L;
        if (currentProgramOnly) {
            // Requested from the setup activity: sync the current programs first so users
            // don't have to wait; the full sync happens later in the background.
            endMs = startMs + SHORT_SYNC_WINDOW_SEC * 1000L;
        }
        for (int i = 0; i < channelMap.size(); ++i) {
            Uri channelUri = TvContract.buildChannelUri(channelMap.keyAt(i));
            List<Program> programs = getPrograms(channelUri, channelMap.valueAt(i),
                    listings.programs, startMs, endMs);
            updatePrograms(channelUri, programs);
        }
    }

    /**
     * Returns a list of programs for the given time range.
     *
     * @param channelUri The channel where the program info will be added.
     * @param channel The {@link XmlTvParser.XmlTvChannel} for the programs to return.
     * @param programs The feed fetched from cloud.
     * @param startTimeMs The start time of the range requested.
     * @param endTimeMs The end time of the range requested.
     * @throws IllegalArgumentException if {@code startTimeMs > endTimeMs}.
     */
    private List<Program> getPrograms(Uri channelUri, XmlTvParser.XmlTvChannel channel,
            List<XmlTvParser.XmlTvProgram> programs, long startTimeMs, long endTimeMs) {
        if (startTimeMs > endTimeMs) {
            throw new IllegalArgumentException("startTimeMs must not exceed endTimeMs");
        }
        // Keep only the feed entries that belong to this channel.
        List<XmlTvParser.XmlTvProgram> channelPrograms = new ArrayList<>();
        for (XmlTvParser.XmlTvProgram program : programs) {
            if (program.channelId.equals(channel.id)) {
                channelPrograms.add(program);
            }
        }
        List<Program> programForGivenTime = new ArrayList<>();
        if (!channel.repeatPrograms) {
            // Non-repeating channel: include every program overlapping the window as-is.
            for (XmlTvParser.XmlTvProgram program : channelPrograms) {
                if (program.startTimeUtcMillis <= endTimeMs
                        && program.endTimeUtcMillis >= startTimeMs) {
                    programForGivenTime.add(buildProgram(channelUri, program,
                            program.startTimeUtcMillis, program.endTimeUtcMillis));
                }
            }
            return programForGivenTime;
        }
        // If repeat-programs is on, schedule the programs sequentially in a loop. To make every
        // device play the same program in a given channel and time, we assume the loop started
        // from the epoch time.
        long totalDurationMs = 0;
        for (XmlTvParser.XmlTvProgram program : channelPrograms) {
            totalDurationMs += program.getDurationMillis();
        }
        if (totalDurationMs <= 0) {
            // Guard: without this, the modulo below throws ArithmeticException when a
            // repeated channel has no programs, and the while loop below would never
            // advance for zero-length ones.
            return programForGivenTime;
        }
        long programStartTimeMs = startTimeMs - startTimeMs % totalDurationMs;
        int i = 0;
        final int programCount = channelPrograms.size();
        while (programStartTimeMs < endTimeMs) {
            XmlTvParser.XmlTvProgram programInfo = channelPrograms.get(i++ % programCount);
            long programEndTimeMs = programStartTimeMs + programInfo.getDurationMillis();
            if (programEndTimeMs < startTimeMs) {
                // Skip repetitions that ended before the requested window starts.
                programStartTimeMs = programEndTimeMs;
                continue;
            }
            programForGivenTime.add(buildProgram(channelUri, programInfo,
                    programStartTimeMs, programEndTimeMs));
            programStartTimeMs = programEndTimeMs;
        }
        return programForGivenTime;
    }

    /**
     * Builds a {@link Program} row for the given feed entry with explicit start/end times.
     * Extracted so the repeat and non-repeat paths share one construction site.
     */
    private Program buildProgram(Uri channelUri, XmlTvParser.XmlTvProgram programInfo,
            long startTimeMs, long endTimeMs) {
        return new Program.Builder()
                .setChannelId(ContentUris.parseId(channelUri))
                .setTitle(programInfo.title)
                .setDescription(programInfo.description)
                .setContentRatings(XmlTvParser.xmlTvRatingToTvContentRating(programInfo.rating))
                .setCanonicalGenres(programInfo.category)
                // NOTE(review): assumes programInfo.icon is always present in the feed —
                // confirm against the XMLTV source, otherwise this throws NPE.
                .setPosterArtUri(programInfo.icon.src)
                // COLUMN_INTERNAL_PROVIDER_DATA is a private field where TvInputService can
                // store anything it wants. Here we store the video type and URL so the
                // service can play the video later.
                .setInternalProviderData(TvContractUtils.convertVideoInfoToInternalProviderData(
                        programInfo.videoType, programInfo.videoSrc))
                .setStartTimeUtcMillis(startTimeMs)
                .setEndTimeUtcMillis(endTimeMs)
                .build();
    }

    /**
     * Updates the system database, TvProvider, with the given programs.
     *
     * <p>Overlapping existing programs are updated in place when they match the new
     * program's title (see {@link #needsUpdate}); otherwise they are deleted and the
     * new program is inserted.
     *
     * @param channelUri The channel where the program info will be added.
     * @param newPrograms A list of {@link Program} instances which includes program
     *        information.
     */
    private void updatePrograms(Uri channelUri, List<Program> newPrograms) {
        final int fetchedProgramsCount = newPrograms.size();
        if (fetchedProgramsCount == 0) {
            return;
        }
        List<Program> oldPrograms = TvContractUtils.getPrograms(mContext.getContentResolver(),
                channelUri);
        Program firstNewProgram = newPrograms.get(0);
        int oldProgramsIndex = 0;
        int newProgramsIndex = 0;
        // Skip the past programs. They will be automatically removed by the system.
        for (Program program : oldPrograms) {
            oldProgramsIndex++;
            if (program.getEndTimeUtcMillis() > firstNewProgram.getStartTimeUtcMillis()) {
                break;
            }
        }
        // Compare the new programs with old programs one by one and update/delete the old one
        // or insert a new program if there is no matching program in the database.
        ArrayList<ContentProviderOperation> ops = new ArrayList<>();
        while (newProgramsIndex < fetchedProgramsCount) {
            Program oldProgram = oldProgramsIndex < oldPrograms.size()
                    ? oldPrograms.get(oldProgramsIndex) : null;
            Program newProgram = newPrograms.get(newProgramsIndex);
            boolean addNewProgram = false;
            if (oldProgram != null) {
                if (oldProgram.equals(newProgram)) {
                    // Exact match. No need to update. Move on to the next programs.
                    oldProgramsIndex++;
                    newProgramsIndex++;
                } else if (needsUpdate(oldProgram, newProgram)) {
                    // Partial match. Update the old program with the new one.
                    // NOTE: Use 'update' in this case instead of 'insert' and 'delete'. There
                    // could be application specific settings which belong to the old program.
                    ops.add(ContentProviderOperation.newUpdate(
                            TvContract.buildProgramUri(oldProgram.getProgramId()))
                            .withValues(newProgram.toContentValues())
                            .build());
                    oldProgramsIndex++;
                    newProgramsIndex++;
                } else if (oldProgram.getEndTimeUtcMillis() < newProgram.getEndTimeUtcMillis()) {
                    // No match. Remove the old program first to see if the next program in
                    // {@code oldPrograms} partially matches the new program.
                    ops.add(ContentProviderOperation.newDelete(
                            TvContract.buildProgramUri(oldProgram.getProgramId()))
                            .build());
                    oldProgramsIndex++;
                } else {
                    // No match. The new program does not match any of the old programs. Insert
                    // it as a new program.
                    addNewProgram = true;
                    newProgramsIndex++;
                }
            } else {
                // No old programs. Just insert new programs.
                addNewProgram = true;
                newProgramsIndex++;
            }
            if (addNewProgram) {
                ops.add(ContentProviderOperation
                        .newInsert(TvContract.Programs.CONTENT_URI)
                        .withValues(newProgram.toContentValues())
                        .build());
            }
            // Throttle the batch operation not to cause TransactionTooLargeException.
            if (ops.size() > BATCH_OPERATION_COUNT
                    || newProgramsIndex >= fetchedProgramsCount) {
                try {
                    mContext.getContentResolver().applyBatch(TvContract.AUTHORITY, ops);
                } catch (RemoteException | OperationApplicationException e) {
                    Log.e(TAG, "Failed to insert programs.", e);
                    return;
                }
                ops.clear();
            }
        }
    }

    /**
     * Returns {@code true} if the {@code oldProgram} program needs to be updated with the
     * {@code newProgram} program.
     */
    private boolean needsUpdate(Program oldProgram, Program newProgram) {
        // NOTE: Here, we update the old program if it has the same title and overlaps with the
        // new program. The test logic is just an example and you can modify this. E.g. check
        // whether both programs have the same program ID if your EPG supports program IDs.
        return oldProgram.getTitle().equals(newProgram.getTitle())
                && oldProgram.getStartTimeUtcMillis() <= newProgram.getEndTimeUtcMillis()
                && newProgram.getStartTimeUtcMillis() <= oldProgram.getEndTimeUtcMillis();
    }
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.util;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.calendar.DateTimeUnit;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.IllegalInstantException;
import org.joda.time.LocalDate;
import org.joda.time.Months;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.ObjectArrays;
/**
* @author Lars Helge Overland
*/
public class DateUtils
{
public static final String ISO8601_NO_TZ_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS";
private static DateTimeFormatter ISO8601_NO_TZ = DateTimeFormat.forPattern( ISO8601_NO_TZ_PATTERN );
public static final String ISO8601_PATTERN = ISO8601_NO_TZ_PATTERN + "Z";
private static DateTimeFormatter ISO8601 = DateTimeFormat.forPattern( ISO8601_PATTERN );
private static final String DEFAULT_DATE_REGEX = "\\b(?<year>\\d{4})-(?<month>0[1-9]|1[0-2])-(?<day>0[1-9]|[1-2][0-9]|3[0-2])(?<time>.*)\\b";
private static final Pattern DEFAULT_DATE_REGEX_PATTERN = Pattern.compile( DEFAULT_DATE_REGEX );
private static final DateTimeParser[] SUPPORTED_DATE_ONLY_PARSERS = {
DateTimeFormat.forPattern( "yyyy-MM-dd" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM" ).getParser(),
DateTimeFormat.forPattern( "yyyy" ).getParser()
};
private static final DateTimeParser[] SUPPORTED_DATE_TIME_FORMAT_PARSERS = {
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSSZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSSSSZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSSSS" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSSZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSS" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSS" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ssZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mmZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HHZ" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH" ).getParser(),
DateTimeFormat.forPattern( "yyyy-MM-dd HH:mm:ssZ" ).getParser()
};
private static final DateTimeParser[] SUPPORTED_DATE_FORMAT_PARSERS = ObjectArrays
.concat( SUPPORTED_DATE_ONLY_PARSERS, SUPPORTED_DATE_TIME_FORMAT_PARSERS, DateTimeParser.class );
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder()
.append( null, SUPPORTED_DATE_FORMAT_PARSERS ).toFormatter();
private static final DateTimeFormatter DATE_TIME_FORMAT = new DateTimeFormatterBuilder()
.append( null, SUPPORTED_DATE_TIME_FORMAT_PARSERS ).toFormatter();
public static final PeriodFormatter DAY_SECOND_FORMAT = new PeriodFormatterBuilder()
.appendDays().appendSuffix( " d" ).appendSeparator( ", " )
.appendHours().appendSuffix( " h" ).appendSeparator( ", " )
.appendMinutes().appendSuffix( " m" ).appendSeparator( ", " )
.appendSeconds().appendSuffix( " s" ).appendSeparator( ", " ).toFormatter();
private static final DateTimeFormatter MEDIUM_DATE_FORMAT = DateTimeFormat.forPattern( "yyyy-MM-dd" );
private static final DateTimeFormatter LONG_DATE_FORMAT = DateTimeFormat.forPattern( "yyyy-MM-dd'T'HH:mm:ss" );
private static final DateTimeFormatter HTTP_DATE_FORMAT = DateTimeFormat
.forPattern( "EEE, dd MMM yyyy HH:mm:ss 'GMT'" ).withLocale( Locale.ENGLISH );
private static final DateTimeFormatter TIMESTAMP_UTC_TZ_FORMAT = DateTimeFormat
.forPattern( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ).withZoneUTC();
public static final double DAYS_IN_YEAR = 365.0;
private static final long MS_PER_DAY = 86400000;
private static final long MS_PER_S = 1000;
private static final Pattern DURATION_PATTERN = Pattern.compile( "^(\\d+)(d|h|m|s)$" );
private static final Map<String, ChronoUnit> TEMPORAL_MAP = ImmutableMap.of(
"d", ChronoUnit.DAYS, "h", ChronoUnit.HOURS, "m", ChronoUnit.MINUTES, "s", ChronoUnit.SECONDS );
/**
* Returns date formatted as ISO 8601
*/
public static String getIso8601( Date date )
{
return date != null ? ISO8601.print( new DateTime( date ) ) : null;
}
/**
 * Formats the given date as an ISO 8601 string, without any time zone info.
 *
 * @param date the date to format, may be null.
 * @return an ISO 8601 formatted string without TZ, or null if the given date is null.
 */
public static String getIso8601NoTz( Date date )
{
    if ( date == null )
    {
        return null;
    }
    return ISO8601_NO_TZ.print( new DateTime( date ) );
}
/**
 * Converts the given date to the UTC time zone and formats it on the
 * format yyyy-MM-dd'T'HH:mm:ss.SSSZ.
 *
 * @param date the date to format, may be null.
 * @return a formatted date string, or null if the given date is null.
 */
public static String getLongGmtDateString( Date date )
{
    if ( date == null )
    {
        return null;
    }
    return TIMESTAMP_UTC_TZ_FORMAT.print( new DateTime( date ) );
}
/**
 * Formats the given date on the format yyyy-MM-dd'T'HH:mm:ss.
 *
 * @param date the date to format, may be null.
 * @return a formatted date string, or null if the given date is null.
 */
public static String getLongDateString( Date date )
{
    if ( date == null )
    {
        return null;
    }
    return LONG_DATE_FORMAT.print( new DateTime( date ) );
}
/**
 * Formats the current date on the format yyyy-MM-dd'T'HH:mm:ss.
 *
 * @return a formatted date string for the current time.
 */
public static String getLongDateString()
{
    return getLongDateString( new Date() );
}
/**
 * Formats the given date on the format yyyy-MM-dd.
 *
 * @param date the date to format, may be null.
 * @return a formatted date string, or null if the given date is null.
 */
public static String getMediumDateString( Date date )
{
    if ( date == null )
    {
        return null;
    }
    return MEDIUM_DATE_FORMAT.print( new DateTime( date ) );
}
/**
 * Formats the current date on the format yyyy-MM-dd.
 *
 * @return a formatted date string for the current time.
 */
public static String getMediumDateString()
{
    return getMediumDateString( new Date() );
}
/**
 * Returns a date exactly one day (24 hours) after the given date.
 *
 * @param date the base date, must not be null.
 * @return the date one day after the given date.
 */
public static Date plusOneDay( Date date )
{
    Instant nextDay = date.toInstant().plus( 1, ChronoUnit.DAYS );
    return Date.from( nextDay );
}
/**
 * Formats the given date according to the HTTP specification standard
 * date format, e.g. "Sun, 06 Nov 1994 08:49:37 GMT".
 *
 * @param date the date to format, may be null.
 * @return a formatted string, or null if the given date is null.
 */
public static String getHttpDateString( Date date )
{
    if ( date == null )
    {
        return null;
    }
    return HTTP_DATE_FORMAT.print( new DateTime( date ) );
}
/**
 * Returns the latest of the two given dates.
 *
 * @param date1 the first date, can be null.
 * @param date2 the second date, can be null.
 * @return the latest of the two given dates, or null if both are null.
 */
public static Date max( Date date1, Date date2 )
{
    if ( date1 == null )
    {
        return date2;
    }
    if ( date2 == null )
    {
        return date1;
    }
    return date1.after( date2 ) ? date1 : date2;
}
/**
 * Returns the latest of the given dates, ignoring null entries.
 *
 * @param dates the collection of dates.
 * @return the latest of the given dates, or null if all entries are null.
 */
public static Date max( Collection<Date> dates )
{
    Date result = null;
    for ( Date candidate : dates )
    {
        result = max( candidate, result );
    }
    return result;
}
/**
 * Returns the earliest of the two given dates.
 *
 * @param date1 the first date, can be null.
 * @param date2 the second date, can be null.
 * @return the earliest of the two given dates, or null if both are null.
 */
public static Date min( Date date1, Date date2 )
{
    if ( date1 == null )
    {
        return date2;
    }
    if ( date2 == null )
    {
        return date1;
    }
    return date1.before( date2 ) ? date1 : date2;
}
/**
 * Returns the earliest of the given dates, ignoring null entries.
 *
 * @param dates the collection of dates.
 * @return the earliest of the given dates, or null if all entries are null.
 */
public static Date min( Collection<Date> dates )
{
    Date result = null;
    for ( Date candidate : dates )
    {
        result = min( candidate, result );
    }
    return result;
}
/**
 * Parses a date from a String on the format YYYY-MM-DD. Returns null if the
 * given string is null or empty (see safeParseDateTime).
 *
 * @param string the String to parse.
 * @return a Date based on the given String, or null for a null/empty input.
 * @throws IllegalArgumentException if the given string is invalid.
 */
public static Date getMediumDate( String string )
{
    return safeParseDateTime( string, MEDIUM_DATE_FORMAT );
}
/**
 * Tests if the given base date is between the given start date and end
 * date, including the dates themselves. Note that this method returns
 * false whenever the end date is not strictly after the start date,
 * even if the base date equals both.
 *
 * @param baseDate the date used as base for the test.
 * @param startDate the start date.
 * @param endDate the end date.
 * @return <code>true</code> if the base date is between the start date and
 *         end date, <code>false</code> otherwise.
 */
public static boolean between( Date baseDate, Date startDate, Date endDate )
{
    if ( startDate.equals( endDate ) || endDate.before( startDate ) )
    {
        return false;
    }
    boolean onOrAfterStart = startDate.before( baseDate ) || startDate.equals( baseDate );
    boolean onOrBeforeEnd = endDate.after( baseDate ) || endDate.equals( baseDate );
    return onOrAfterStart && onOrBeforeEnd;
}
/**
 * Tests if the given base date is strictly between the given start date
 * and end date (both endpoints excluded).
 *
 * @param baseDate the date used as base for the test.
 * @param startDate the start date.
 * @param endDate the end date.
 * @return <code>true</code> if the base date is strictly between the start
 *         date and end date, <code>false</code> otherwise.
 */
public static boolean strictlyBetween( Date baseDate, Date startDate, Date endDate )
{
    if ( startDate.equals( endDate ) || endDate.before( startDate ) )
    {
        return false;
    }
    return startDate.before( baseDate ) && endDate.after( baseDate );
}
/**
 * Returns the number of days since 01/01/1970. The value is truncated
 * toward zero (a true floor only for dates on or after Epoch) and does
 * not take daylight saving time into account.
 *
 * @param date the date.
 * @return number of days since Epoch.
 */
public static long getDays( Date date )
{
    return date.getTime() / MS_PER_DAY;
}
/**
 * Returns the number of days between the start date (inclusive) and end
 * date (exclusive). The value is truncated toward zero and does not take
 * daylight saving time into account.
 *
 * @param startDate the start-date.
 * @param endDate the end-date.
 * @return the number of days between the start and end-date.
 */
public static long getDays( Date startDate, Date endDate )
{
    long differenceMs = endDate.getTime() - startDate.getTime();
    return differenceMs / MS_PER_DAY;
}
/**
 * Returns the number of days between the start date (inclusive) and end
 * date (inclusive), i.e. getDays( startDate, endDate ) + 1. The value is
 * rounded off to the floor value and does not take daylight saving time
 * into account.
 *
 * @param startDate the start-date.
 * @param endDate the end-date.
 * @return the number of days between the start and end-date.
 */
public static long getDaysInclusive( Date startDate, Date endDate )
{
    return getDays( startDate, endDate ) + 1;
}
/**
 * Calculates the number of days between the start and end-date. Note this
 * method is taking daylight saving time into account and has a performance
 * overhead.
 *
 * @param startDate the start date.
 * @param endDate the end date.
 * @return the number of days between the start and end date.
 */
public static int daysBetween( Date startDate, Date endDate )
{
    return Days.daysBetween( new DateTime( startDate ), new DateTime( endDate ) ).getDays();
}
/**
 * Checks if the date provided in argument is today's date.
 *
 * @param date to check.
 * @return <code>true</code> if date is representing today's date,
 *         <code>false</code> otherwise.
 */
public static boolean isToday( Date date )
{
    return Days.daysBetween( new LocalDate( date ), new LocalDate() ).getDays() == 0;
}
/**
 * Calculates the number of months between the start and end-date. Note this
 * method is taking daylight saving time into account and has a performance
 * overhead.
 *
 * @param startDate the start date.
 * @param endDate the end date.
 * @return the number of months between the start and end date.
 */
public static int monthsBetween( Date startDate, Date endDate )
{
    return Months.monthsBetween( new DateTime( startDate ), new DateTime( endDate ) ).getMonths();
}
/**
 * Calculates the number of days between 01/01/1900 and the given date.
 *
 * @param date the date.
 * @return the number of days between 01/01/1900 and the given date.
 */
public static int daysSince1900( Date date )
{
    Calendar calendar = Calendar.getInstance();
    calendar.clear();
    calendar.set( 1900, Calendar.JANUARY, 1 );
    return daysBetween( calendar.getTime(), date );
}
/**
 * Returns the nearest date forward in time with the given hour of day, with
 * the minute, second and millisecond set to zero. If the hour equals the
 * current hour of day, the next following day is used.
 *
 * @param hourOfDay the hour of the day.
 * @param now the date representing the current time; if null, the current
 *        time is used.
 * @return the nearest date forward in time with the given hour of day.
 */
public static Date getNextDate( int hourOfDay, Date now )
{
    // Start one hour ahead so an exact hour match rolls to the next day.
    DateTime next = new DateTime( now != null ? now : new Date() ).plusHours( 1 );
    while ( next.getHourOfDay() != hourOfDay )
    {
        next = next.plusHours( 1 );
    }
    return next
        .withMinuteOfHour( 0 )
        .withSecondOfMinute( 0 )
        .withMillisOfSecond( 0 )
        .toDate();
}
/**
 * Returns Epoch date, ie. 01/01/1970 in the default time zone.
 *
 * @return Epoch date, ie. 01/01/1970.
 */
public static Date getEpoch()
{
    Calendar calendar = Calendar.getInstance();
    calendar.clear();
    calendar.set( 1970, Calendar.JANUARY, 1 );
    return calendar.getTime();
}
/**
 * Returns a date formatted in ANSI SQL format, i.e. yyyy-MM-dd with
 * zero-padded month and day.
 *
 * @param date the Date.
 * @return a date String.
 */
public static String getSqlDateString( Date date )
{
    Calendar cal = Calendar.getInstance();
    cal.setTime( date );
    int month = cal.get( Calendar.MONTH ) + 1;
    int day = cal.get( Calendar.DAY_OF_MONTH );
    StringBuilder sql = new StringBuilder();
    sql.append( cal.get( Calendar.YEAR ) ).append( '-' );
    if ( month < 10 )
    {
        sql.append( '0' );
    }
    sql.append( month ).append( '-' );
    if ( day < 10 )
    {
        sql.append( '0' );
    }
    sql.append( day );
    return sql.toString();
}
/**
 * This method checks whether the String inDate is a valid date following
 * the format "yyyy-MM-dd", using the period type's calendar.
 *
 * @param dateString the string to be checked.
 * @return true/false depending on whether the string is a date according to
 *         the format "yyyy-MM-dd".
 */
public static boolean dateIsValid( String dateString )
{
    return dateIsValid( PeriodType.getCalendar(), dateString );
}
/**
 * This method checks whether the String inDate is a valid date following
 * the format "yyyy-MM-dd" according to the given calendar.
 *
 * @param calendar Calendar to be used.
 * @param dateString the string to be checked.
 * @return true/false depending on whether the string is a date according to
 *         the format "yyyy-MM-dd".
 */
public static boolean dateIsValid( org.hisp.dhis.calendar.Calendar calendar, String dateString )
{
    Matcher matcher = DEFAULT_DATE_REGEX_PATTERN.matcher( dateString );
    if ( !matcher.matches() )
    {
        return false;
    }
    int year = Integer.parseInt( matcher.group( "year" ) );
    int month = Integer.parseInt( matcher.group( "month" ) );
    int day = Integer.parseInt( matcher.group( "day" ) );
    return calendar.isValid( new DateTimeUnit( year, month, day ) );
}
/**
 * This method checks whether the String dateTimeString is a valid datetime
 * according to the supported date-time formats.
 *
 * @param dateTimeString the string to be checked.
 * @return true/false depending on whether the string is a valid datetime.
 */
public static boolean dateTimeIsValid( final String dateTimeString )
{
    try
    {
        // Parsing throws IllegalArgumentException for invalid input;
        // null/empty input parses to null and is considered valid here.
        safeParseDateTime( dateTimeString, DATE_TIME_FORMAT );
        return true;
    }
    catch ( IllegalArgumentException ignored )
    {
        return false;
    }
}
/**
 * Returns the number of seconds until tomorrow at the given hour.
 *
 * @param hour the hour.
 * @return number of seconds.
 */
public static long getSecondsUntilTomorrow( int hour )
{
    long tomorrowMs = getDateForTomorrow( hour ).getTime();
    return ( tomorrowMs - System.currentTimeMillis() ) / MS_PER_S;
}
/**
 * Returns a date set to tomorrow at the given hour.
 *
 * @param hour the hour.
 * @return a date.
 */
public static Date getDateForTomorrow( int hour )
{
    Calendar calendar = PeriodType.createCalendarInstance();
    calendar.add( Calendar.DAY_OF_YEAR, 1 );
    calendar.set( Calendar.HOUR_OF_DAY, hour );
    return calendar.getTime();
}
/**
 * Adds the given number of days (may be negative) to the given date.
 *
 * @param date the date.
 * @param days the number of days to add.
 * @return the resulting date.
 */
public static Date getDateAfterAddition( Date date, int days )
{
    Calendar calendar = Calendar.getInstance();
    calendar.setTime( date );
    calendar.add( Calendar.DATE, days );
    return calendar.getTime();
}
/**
 * Adds a positive or negative amount to the given date based on a
 * chronological unit from the Calendar API (e.g. Calendar.DATE,
 * Calendar.MILLISECOND).
 *
 * @param date the date to be modified; it is the input date for the
 *        calculation.
 * @param addend a positive or negative integer to be added to the date.
 * @param chronoUnit the Calendar field to be used in the calculation.
 * @return the resultant date after the addition.
 */
public static Date calculateDateFrom( final Date date, final int addend, final int chronoUnit )
{
    Calendar calendar = Calendar.getInstance();
    // Non-lenient mode rejects out-of-range field combinations.
    calendar.setLenient( false );
    calendar.setTime( date );
    calendar.add( chronoUnit, addend );
    return calendar.getTime();
}
/**
 * Sets the name property of each non-null period based on the given
 * I18nFormat.
 *
 * @param periods the periods to name.
 * @param format the format used to produce each period name.
 * @return the given list of periods.
 */
public static List<Period> setNames( List<Period> periods, I18nFormat format )
{
    for ( Period period : periods )
    {
        if ( period == null )
        {
            continue;
        }
        period.setName( format.formatPeriod( period ) );
    }
    return periods;
}
/**
 * Returns a pretty string representing the interval between the given start
 * and end dates using a day, hour, minute, second format.
 *
 * @param start the start date.
 * @param end the end date.
 * @return a string, or null if the given start or end date is null.
 */
public static String getPrettyInterval( Date start, Date end )
{
    if ( start == null || end == null )
    {
        return null;
    }
    long durationMs = end.getTime() - start.getTime();
    return DAY_SECOND_FORMAT.print( new org.joda.time.Period( durationMs ) );
}
/**
 * Parses the given string into a Date using the supported date formats.
 * Returns null if the string is null or empty.
 *
 * @param dateString the date string.
 * @return a date, or null for a null/empty input.
 * @throws IllegalArgumentException if the given string cannot be parsed.
 */
public static Date parseDate( final String dateString )
{
    return safeParseDateTime( dateString, DATE_FORMATTER );
}
/**
 * Null safe instant to date conversion.
 *
 * @param instant the instant, may be null.
 * @return a date, or null if the given instant is null.
 */
public static Date fromInstant( final Instant instant )
{
    return instant == null ? null : Date.from( instant );
}
/**
 * Null safe date to instant conversion.
 *
 * @param date the date, may be null.
 * @return an instant, or null if the given date is null.
 */
public static Instant instantFromDate( final Date date )
{
    return date == null ? null : date.toInstant();
}
/**
 * Null safe epoch to instant conversion.
 *
 * @param epochMillis the date expressed as milliseconds from epoch, may be null.
 * @return an instant, or null if the given epoch milliseconds is null.
 */
public static Instant instantFromEpoch( final Long epochMillis )
{
    // Explicit null guard: 'new Date( epochMillis )' would throw an
    // unboxing NullPointerException before any null-safe conversion,
    // contradicting this method's null-safe contract.
    if ( epochMillis == null )
    {
        return null;
    }
    return Instant.ofEpochMilli( epochMillis );
}
/**
 * Converts a date string to an instant using the supported date formats.
 *
 * @param dateAsString the date string.
 * @return an instant, or null if the given string is null or empty.
 */
public static Instant instantFromDateAsString( String dateAsString )
{
    Date parsed = DateUtils.parseDate( dateAsString );
    return convertOrNull( parsed, Date::toInstant );
}
/**
 * Applies the given converter to the given value in a null safe manner.
 *
 * @param from the value to convert, may be null.
 * @param converter the conversion function.
 * @return the converted value, or null if the given value is null.
 */
private static <T, R> R convertOrNull( T from, Function<T, R> converter )
{
    return from == null ? null : converter.apply( from );
}
/**
 * Creates a {@link java.util.Date} from the given
 * {@link java.time.LocalDateTime} based on the UTC time zone.
 *
 * @param time the LocalDateTime.
 * @return a Date.
 */
public static Date getDate( LocalDateTime time )
{
    return Date.from( time.toInstant( ZoneOffset.UTC ) );
}
/**
 * Returns the current date minus the duration specified by the given
 * duration string (see {@link #getDuration(String)}).
 *
 * @param duration the duration string.
 * @return a Date.
 */
public static Date nowMinusDuration( String duration )
{
    Duration parsed = DateUtils.getDuration( duration );
    return DateUtils.getDate( LocalDateTime.now().minus( parsed ) );
}
/**
 * Parses the given string into a {@link java.time.Duration} object. The
 * string syntax is [amount][unit]. The supported units are:
 * <p>
 * <ul>
 * <li>"d": Days</li>
 * <li>"h": Hours</li>
 * <li>"m": Minutes</li>
 * <li>"s": Seconds</li>
 * </ul>
 *
 * @param duration the duration string, an example describing 12 days is
 *        "12d".
 * @return a Duration object, or null if the duration string is invalid.
 */
public static Duration getDuration( String duration )
{
    Matcher matcher = DURATION_PATTERN.matcher( duration );
    if ( !matcher.find() )
    {
        return null;
    }
    ChronoUnit unit = TEMPORAL_MAP.get( matcher.group( 2 ) );
    if ( unit == null )
    {
        return null;
    }
    return Duration.of( Long.parseLong( matcher.group( 1 ) ), unit );
}
/**
 * Converts the given {@link Date} to a {@link Timestamp}.
 *
 * @param date the date to convert, must not be null.
 * @return a time stamp.
 */
public static Timestamp asTimestamp( Date date )
{
    long millis = date.getTime();
    return new Timestamp( millis );
}
/**
 * Converts the given {@link Date} to a {@link java.sql.Date}.
 *
 * @param date the date to convert, must not be null.
 * @return a date.
 */
public static java.sql.Date asSqlDate( Date date )
{
    long millis = date.getTime();
    return new java.sql.Date( millis );
}
/**
 * Returns the earliest, non-null date of the given dates. If all dates are
 * null, then null is returned.
 *
 * @param dates the dates.
 * @return the earliest, non-null date.
 */
public static Date getEarliest( Date... dates )
{
    // Stream the array directly instead of copying it into a list first.
    return java.util.Arrays.stream( dates )
        .filter( Objects::nonNull )
        .min( Date::compareTo ).orElse( null );
}
/**
 * Returns the latest, non-null date of the given dates. If all dates are
 * null, then null is returned.
 *
 * @param dates the dates.
 * @return the latest, non-null date.
 */
public static Date getLatest( Date... dates )
{
    // Stream the array directly instead of copying it into a list first.
    return java.util.Arrays.stream( dates )
        .filter( Objects::nonNull )
        .max( Date::compareTo ).orElse( null );
}
/**
 * Returns only the date part of the given date, removing the time-of-day
 * component by round-tripping through the yyyy-MM-dd format.
 *
 * @param date the date to convert, may be null.
 * @return a date without time-of-day, or null if the given date is null.
 */
public static Date removeTimeStamp( Date date )
{
    if ( date == null )
    {
        return null;
    }
    return getMediumDate( getMediumDateString( date ) );
}
/**
 * Parses the given string into a Date object. In case the date parsed falls
 * in a daylight savings transition, the date is parsed via a local date and
 * converted to the first valid time after the DST gap. When the fallback is
 * used, any timezone offset in the given format would be ignored.
 *
 * @param dateString The string to parse.
 * @param formatter The formatter to use for parsing.
 * @return Parsed Date object. Null if the supplied dateString is empty.
 * @throws IllegalArgumentException if the given string cannot be parsed.
 */
private static Date safeParseDateTime( final String dateString, final DateTimeFormatter formatter )
{
    if ( StringUtils.isEmpty( dateString ) )
    {
        return null;
    }
    try
    {
        return formatter.parseDateTime( dateString ).toDate();
    }
    catch ( IllegalInstantException e )
    {
        // The instant falls inside a DST gap; fall back to local date-time
        // parsing, which ignores any zone offset in the input.
        return formatter.parseLocalDateTime( dateString ).toDate();
    }
}
}
| |
package org.sputnikdev.bluetooth.manager.impl;
/*-
* #%L
* org.sputnikdev:bluetooth-manager
* %%
* Copyright (C) 2017 Sputnik Dev
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sputnikdev.bluetooth.URL;
import org.sputnikdev.bluetooth.manager.AdapterDiscoveryListener;
import org.sputnikdev.bluetooth.manager.AdapterGovernor;
import org.sputnikdev.bluetooth.manager.AdapterListener;
import org.sputnikdev.bluetooth.manager.BluetoothGovernor;
import org.sputnikdev.bluetooth.manager.BluetoothObjectType;
import org.sputnikdev.bluetooth.manager.BluetoothObjectVisitor;
import org.sputnikdev.bluetooth.manager.CombinedGovernor;
import org.sputnikdev.bluetooth.manager.DeviceGovernor;
import org.sputnikdev.bluetooth.manager.DiscoveredAdapter;
import org.sputnikdev.bluetooth.manager.GovernorListener;
import org.sputnikdev.bluetooth.manager.NotReadyException;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * A virtual {@link AdapterGovernor} that represents all discovered physical
 * Bluetooth adapters as a single combined adapter. The aggregated state
 * (ready, powered, discovering) is tracked in {@link ConcurrentBitMap}
 * instances with one bit per delegate adapter handler, while control
 * settings (powered, discovering, signal propagation exponent) are fanned
 * out to every delegate adapter governor.
 */
class CombinedAdapterGovernorImpl implements AdapterGovernor, CombinedGovernor,
        BluetoothObjectGovernor, AdapterDiscoveryListener {

    private Logger logger = LoggerFactory.getLogger(CombinedAdapterGovernorImpl.class);

    // Delegate adapter governors keyed by their URL; populated via discovery.
    private final Map<URL, AdapterGovernorHandler> governors = new ConcurrentHashMap<>();
    private final CompletableFutureService<BluetoothObjectGovernor> futureService = new CompletableFutureService<>();

    private final BluetoothManagerImpl bluetoothManager;
    // URL of this combined (virtual) adapter; real adapter URLs differ from it.
    private final URL url;

    private final List<GovernorListener> governorListeners = new CopyOnWriteArrayList<>();
    private final List<AdapterListener> adapterListeners = new CopyOnWriteArrayList<>();

    // Most recent interaction timestamp across all delegate governors.
    private Instant lastInteracted;

    // Control state pushed to every delegate adapter governor.
    private boolean poweredControl = true;
    private boolean discoveringControl = true;
    private double signalPropagationExponent;

    // Aggregated state, one bit per delegate handler index.
    private final ConcurrentBitMap ready = new ConcurrentBitMap();
    private final ConcurrentBitMap powered = new ConcurrentBitMap();
    private final ConcurrentBitMap discovering = new ConcurrentBitMap();
    // Next free handler index; also the number of registered delegates.
    private final AtomicInteger governorsCount = new AtomicInteger();

    CombinedAdapterGovernorImpl(BluetoothManagerImpl bluetoothManager, URL url) {
        this.bluetoothManager = bluetoothManager;
        this.url = url;
    }

    @Override
    public String getName() throws NotReadyException {
        return "Combined Bluetooth Adapter";
    }

    @Override
    public String getAlias() throws NotReadyException {
        // The combined adapter has no alias of its own.
        return null;
    }

    @Override
    public void setAlias(String alias) throws NotReadyException {
        // Intentionally a no-op; the combined adapter alias cannot be set.
    }

    @Override
    public String getDisplayName() throws NotReadyException {
        return getName();
    }

    @Override
    public boolean isPowered() throws NotReadyException {
        return powered.get();
    }

    @Override
    public boolean getPoweredControl() {
        return poweredControl;
    }

    @Override
    public void setPoweredControl(boolean powered) {
        poweredControl = powered;
        // Fan out to all delegate adapter governors.
        governors.values().forEach(
                adapterGovernorHandler -> adapterGovernorHandler.adapterGovernor.setPoweredControl(powered));
    }

    @Override
    public boolean isDiscovering() throws NotReadyException {
        return discovering.get();
    }

    @Override
    public boolean getDiscoveringControl() {
        return discoveringControl;
    }

    @Override
    public void setDiscoveringControl(boolean discovering) {
        discoveringControl = discovering;
        governors.values().forEach(
                adapterGovernorHandler -> adapterGovernorHandler.adapterGovernor.setDiscoveringControl(discovering));
    }

    @Override
    public double getSignalPropagationExponent() {
        return signalPropagationExponent;
    }

    @Override
    public void setSignalPropagationExponent(double exponent) {
        signalPropagationExponent = exponent;
        governors.values().forEach(adapterGovernorHandler -> adapterGovernorHandler.adapterGovernor
                .setSignalPropagationExponent(exponent));
    }

    @Override
    public List<URL> getDevices() throws NotReadyException {
        // Not supported by the combined adapter.
        return null;
    }

    @Override
    public List<DeviceGovernor> getDeviceGovernors() throws NotReadyException {
        // Not supported by the combined adapter.
        return null;
    }

    @Override
    public void init() {
        // Register delegates for all currently known and discovered adapters,
        // and listen for adapters discovered later.
        bluetoothManager.addAdapterDiscoveryListener(this);
        bluetoothManager.getRegisteredGovernors().forEach(this::registerGovernor);
        bluetoothManager.getDiscoveredAdapters().stream().map(DiscoveredAdapter::getURL)
                .forEach(this::registerGovernor);
    }

    @Override
    public void update() {
        futureService.completeSilently(this);
    }

    @Override
    public boolean isUpdatable() {
        return true;
    }

    @Override
    public void reset() { /* do nothing */ }

    @Override
    public void dispose() {
        bluetoothManager.removeAdapterDiscoveryListener(this);
        governors.clear();
        governorListeners.clear();
        adapterListeners.clear();
        futureService.clear();
    }

    @Override
    public URL getURL() {
        return url;
    }

    @Override
    public boolean isReady() {
        return ready.get();
    }

    @Override
    public BluetoothObjectType getType() {
        return BluetoothObjectType.ADAPTER;
    }

    @Override
    public Instant getLastInteracted() {
        return lastInteracted;
    }

    @Override
    public void accept(BluetoothObjectVisitor visitor) throws Exception {
        visitor.visit(this);
    }

    @Override
    public void addAdapterListener(AdapterListener adapterListener) {
        adapterListeners.add(adapterListener);
    }

    @Override
    public void removeAdapterListener(AdapterListener adapterListener) {
        adapterListeners.remove(adapterListener);
    }

    @Override
    public void addGovernorListener(GovernorListener listener) {
        governorListeners.add(listener);
    }

    @Override
    public void removeGovernorListener(GovernorListener listener) {
        governorListeners.remove(listener);
    }

    @Override
    public void discovered(DiscoveredAdapter adapter) {
        registerGovernor(adapter.getURL());
    }

    @Override
    public void adapterLost(URL address) { /* do nothing */ }

    @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    public <G extends BluetoothGovernor, V> CompletableFuture<V> when(Predicate<G> predicate, Function<G, V> function) {
        return futureService.submit(this, (Predicate<BluetoothObjectGovernor>) predicate,
                (Function<BluetoothObjectGovernor, V>) function);
    }

    /**
     * Registers a delegate governor for the given URL if it denotes a real
     * adapter (not this combined adapter) that has not been registered yet.
     */
    private void registerGovernor(URL url) {
        if (url.isAdapter() && !url.equals(this.url)) {
            governors.computeIfAbsent(url, newUrl -> {
                // Check the limit only when actually adding a new delegate, so
                // non-adapter and already registered URLs never trigger it.
                // NOTE(review): the guard admits indices 0..63 — confirm the
                // intended ConcurrentBitMap capacity.
                if (governorsCount.get() > 63) {
                    throw new IllegalStateException(
                            "Combined adapter governor can only span up to 63 adapter governors.");
                }
                AdapterGovernor adapterGovernor = bluetoothManager.getAdapterGovernor(url);
                return new AdapterGovernorHandler(adapterGovernor, governorsCount.getAndIncrement());
            });
        }
    }

    /**
     * Advances the combined last-interacted timestamp and notifies governor
     * listeners when a delegate reports a more recent activity time.
     */
    private void updateLastInteracted(Instant lastActivity) {
        if (lastInteracted == null || lastInteracted.isBefore(lastActivity)) {
            lastInteracted = lastActivity;
            BluetoothManagerUtils.forEachSilently(governorListeners, listener -> {
                listener.lastUpdatedChanged(lastActivity);
            }, logger, "Execution error of a governor listener: last changed");
        }
    }

    /**
     * Bridges a single delegate adapter governor into the combined state:
     * listens for its events and folds them into the shared bit maps using
     * the handler's assigned bit index.
     */
    private final class AdapterGovernorHandler implements GovernorListener, AdapterListener {
        private final AdapterGovernor adapterGovernor;
        // Bit index of this delegate in the ready/powered/discovering maps.
        private final int index;

        private AdapterGovernorHandler(AdapterGovernor adapterGovernor, int index) {
            this.adapterGovernor = adapterGovernor;
            this.index = index;
            this.adapterGovernor.addAdapterListener(this);
            this.adapterGovernor.addGovernorListener(this);
            // Push the combined control state to the new delegate.
            this.adapterGovernor.setPoweredControl(poweredControl);
            this.adapterGovernor.setDiscoveringControl(discoveringControl);
            this.adapterGovernor.setSignalPropagationExponent(signalPropagationExponent);
            ready(true);
        }

        @Override
        public void powered(boolean newState) {
            powered.cumulativeSet(index, newState, () -> {
                BluetoothManagerUtils.forEachSilently(adapterListeners, AdapterListener::powered, newState,
                        logger, "Execution error of a Powered listener");
            });
        }

        @Override
        public void discovering(boolean newState) {
            discovering.cumulativeSet(index, newState, () -> {
                BluetoothManagerUtils.forEachSilently(adapterListeners, AdapterListener::discovering, newState,
                        logger, "Execution error of a Discovering listener");
            });
        }

        @Override
        public void ready(boolean newState) {
            ready.cumulativeSet(index, newState, () -> {
                BluetoothManagerUtils.forEachSilently(governorListeners, GovernorListener::ready, newState,
                        logger, "Execution error of a governor listener: ready");
            });
        }

        @Override
        public void lastUpdatedChanged(Instant lastActivity) {
            updateLastInteracted(lastActivity);
        }
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.analysis;
import java.util.ArrayList;
import java.util.List;
import org.apache.impala.catalog.ArrayType;
import org.apache.impala.catalog.Column;
import org.apache.impala.catalog.MapType;
import org.apache.impala.catalog.StructField;
import org.apache.impala.catalog.StructType;
import org.apache.impala.catalog.Table;
import org.apache.impala.catalog.Type;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
* Represents a resolved or unresolved dot-separated path that is rooted at a registered
* tuple descriptor, catalog table/view, or an existing resolved path.
*
* This class implements the resolution logic for mapping an implicit or explicit
* raw path to the corresponding physical types/positions in the schema tree.
*
* Implicit vs. Explicit Paths
* The item of an array and the key/value of maps are accessed via their implicit field
* names. However, if the type of an array item or a map value is a struct, then we allow
* omitting the explicit reference to the struct type in paths for accessing fields
* within that struct as a shorthand for user convenience. An explicit reference to the
* struct type is always legal. Paths that explicitly reference such a struct are
* "physical" because they typically map exactly to the schema representation in the
* underlying storage format (e.g. Parquet/Avro). Paths that omit the struct reference
* are called "implicit". During resolution, explicit paths are always preferred over
* implicit paths for resolving ambiguities.
*
* Example
* create table d.t (
* c array<struct<f:int,item:int,pos:int>>
* );
*
* select ... from d.t.c
* d.t.c <-- resolves to type array<struct<f:int,item:int,pos:int>>
* c alias <-- type struct<item:struct<f:int,item:int,pos:int>,pos:bigint>>
*
* select c.item.f, c.f from d.t.c
* c.item.f <-- explicit path to "f"
* c.f <-- implicit path to "f", skips "item" reference
* (same for the unqualified versions item.f and f)
*
* select c.item, c.item.item from d.t.c
* c.item <-- explicit path to "item" struct of type struct<f:int,item:string>
* c.item.item <-- explicit path to string "item"; there is no logical path to the
* string "item" due to the "item" name conflict
* c.pos <-- explicit path to "pos" of type bigint
* c.item.pos <-- explicit path to "pos" of type int; there is no logical path to the
* int "pos" due to the "pos" name conflict
* (same for unqualified versions item, item.item, pos, item.pos)
*
* Please refer to TestImplicitAndExplicitPaths() for analogous examples for maps.
*
* Illegal Implicit Paths
* The intention of implicit paths is to allow users to skip a *single* trivial level of
* indirection in common cases. In particular, it is illegal to implicitly skip multiple
* levels in a path, illustrated as follows.
*
* Example
* create table d.t (
* c array<array<struct<e:int,f:string>>>
* );
*
* select c.f from d.t.c
* select 1 from d.t.c, c.f
* c.f <-- illegal path because it would have to implicitly skip two 'item' fields
*
*
* Uses of Paths and Terminology
*
* Uncorrelated References: Star exprs, SlotRefs and TableRefs that are rooted at a
* catalog Table or a registered TupleDescriptor in the same query block.
*
* Relative References: TableRefs that are rooted at a TupleDescriptor.
*
* Correlated References: SlotRefs and TableRefs that are rooted at a TupleDescriptor
* registered in an ancestor query block are called 'correlated'. All correlated
* references are relative, but not all relative references are correlated.
*
* A Path itself is never said to be un/correlated because it is intentionally unaware
* of the query block that it is used in.
*/
public class Path extends SyntaxBlock {
// Implicit field names used when addressing the item/pos of arrays and the
// key/value of maps in paths (see class comment).
public static final String ARRAY_ITEM_FIELD_NAME = "item";
public static final String ARRAY_POS_FIELD_NAME = "pos";
public static final String MAP_KEY_FIELD_NAME = "key";
public static final String MAP_VALUE_FIELD_NAME = "value";

public static enum PathType {
SLOT_REF, // Path is used as a SlotRef.
TABLE_REF, // Path is used as a TableRef.
STAR, // Path is used in a star expansion.
ANY, // Reference to any field or table in schema.
}

// Implicit or explicit raw path to be resolved relative to rootDesc_ or rootTable_.
// Every raw-path element is mapped to zero, one or two types/positions in resolution.
private final List<String> rawPath_;

// Registered table alias that this path is rooted at, if any.
// Null if the path is rooted at a catalog table/view.
private final TupleDescriptor rootDesc_;

// Catalog table that this resolved path is rooted at, if any.
// Null if the path is rooted at a registered tuple that does not
// belong to a catalog table/view.
private final Table rootTable_;

// Root path that a relative path was created from. Null for non-relative paths.
private final Path rootPath_;

// List of matched types and field positions set during resolution. The matched
// types/positions describe the physical path through the schema tree.
private final List<Type> matchedTypes_ = Lists.newArrayList();
private final List<Integer> matchedPositions_ = Lists.newArrayList();

// Remembers the indices into rawPath_ and matchedTypes_ of the first collection
// matched during resolution. -1 if no collection has been matched.
private int firstCollectionPathIdx_ = -1;
private int firstCollectionTypeIdx_ = -1;

// Indicates whether this path has been resolved. Set in resolve().
private boolean isResolved_ = false;

// Caches the result of getAbsolutePath() to avoid re-computing it.
private List<Integer> absolutePath_ = null;
/**
* Constructs a Path rooted at the given rootDesc.
*/
public Path(TupleDescriptor rootDesc, List<String> rawPath) {
Preconditions.checkNotNull(rootDesc);
Preconditions.checkNotNull(rawPath);
rootTable_ = rootDesc.getTable();
rootDesc_ = rootDesc;
rootPath_ = null;
rawPath_ = rawPath;
}
/**
* Constructs a Path rooted at the given rootTable.
*/
public Path(Table rootTable, List<String> rawPath) {
Preconditions.checkNotNull(rootTable);
Preconditions.checkNotNull(rawPath);
rootTable_ = rootTable;
rootDesc_ = null;
rootPath_ = null;
rawPath_ = rawPath;
}
/**
* Constructs a new unresolved path relative to an existing resolved path.
*/
public Path(Path rootPath, List<String> relRawPath) {
Preconditions.checkNotNull(rootPath);
Preconditions.checkState(rootPath.isResolved());
Preconditions.checkNotNull(relRawPath);
rootTable_ = rootPath.rootTable_;
rootDesc_ = rootPath.rootDesc_;
rootPath_ = rootPath;
rawPath_ = Lists.newArrayListWithCapacity(
rootPath.getRawPath().size() + relRawPath.size());
rawPath_.addAll(rootPath.getRawPath());
rawPath_.addAll(relRawPath);
matchedTypes_.addAll(rootPath.matchedTypes_);
matchedPositions_.addAll(rootPath.matchedPositions_);
firstCollectionPathIdx_ = rootPath.firstCollectionPathIdx_;
firstCollectionTypeIdx_ = rootPath.firstCollectionTypeIdx_;
}
/**
* Resolves this path in the context of the root tuple descriptor / root table
* or continues resolving this relative path from an existing root path.
* Returns true if the path could be fully resolved, false otherwise.
* A failed resolution leaves this Path in a partially resolved state.
*/
public boolean resolve() {
if (isResolved_) return true;
Preconditions.checkState(rootDesc_ != null || rootTable_ != null);
Type currentType = null;
int rawPathIdx = 0;
if (rootPath_ != null) {
// Continue resolving this path relative to the rootPath_.
currentType = rootPath_.destType();
rawPathIdx = rootPath_.getRawPath().size();
} else if (rootDesc_ != null) {
currentType = rootDesc_.getType();
} else {
// Directly startPosition from the item type because only implicit paths are allowed.
currentType = rootTable_.getType().getItemType();
}
// Map all remaining raw-path elements to field types and positions.
while (rawPathIdx < rawPath_.size()) {
if (!currentType.isComplexType()) return false;
StructType structType = getTypeAsStruct(currentType);
// Resolve explicit path.
StructField field = structType.getField(rawPath_.get(rawPathIdx));
if (field == null) {
// Resolve implicit path.
if (structType instanceof CollectionStructType) {
field = ((CollectionStructType) structType).getOptionalField();
// Collections must be matched explicitly.
if (field.getType().isCollectionType()) return false;
} else {
// Failed to resolve implicit or explicit path.
return false;
}
// Update the physical types/positions.
matchedTypes_.add(field.getType());
matchedPositions_.add(field.getPosition());
currentType = field.getType();
// Do not consume a raw-path element.
continue;
}
matchedTypes_.add(field.getType());
matchedPositions_.add(field.getPosition());
if (field.getType().isCollectionType() && firstCollectionPathIdx_ == -1) {
Preconditions.checkState(firstCollectionTypeIdx_ == -1);
firstCollectionPathIdx_ = rawPathIdx;
firstCollectionTypeIdx_ = matchedTypes_.size() - 1;
}
currentType = field.getType();
++rawPathIdx;
}
Preconditions.checkState(matchedTypes_.size() == matchedPositions_.size());
Preconditions.checkState(matchedTypes_.size() >= rawPath_.size());
isResolved_ = true;
return true;
}
/**
* If the given type is a collection, returns a collection struct type representing
* named fields of its explicit path. Returns the given type itself if it is already
* a struct. Requires that the given type is a complex type.
*/
public static StructType getTypeAsStruct(Type t) {
Preconditions.checkState(t.isComplexType());
if (t.isStructType()) return (StructType) t;
if (t.isArrayType()) {
return CollectionStructType.createArrayStructType((ArrayType) t);
} else {
Preconditions.checkState(t.isMapType());
return CollectionStructType.createMapStructType((MapType) t);
}
}
public Table getRootTable() { return rootTable_; }
public TupleDescriptor getRootDesc() { return rootDesc_; }
public boolean isRootedAtTable() { return rootTable_ != null; }
public boolean isRootedAtTuple() { return rootDesc_ != null; }
public List<String> getRawPath() { return rawPath_; }
public boolean isResolved() { return isResolved_; }
public List<Type> getMatchedTypes() {
Preconditions.checkState(isResolved_);
return matchedTypes_;
}
public List<Integer> getMatchedPositions() {
Preconditions.checkState(isResolved_);
return matchedPositions_;
}
public boolean hasNonDestCollection() {
Preconditions.checkState(isResolved_);
return firstCollectionPathIdx_ != -1 &&
firstCollectionPathIdx_ != rawPath_.size() - 1;
}
public String getFirstCollectionName() {
Preconditions.checkState(isResolved_);
if (firstCollectionPathIdx_ == -1) return null;
return rawPath_.get(firstCollectionPathIdx_);
}
public Type getFirstCollectionType() {
Preconditions.checkState(isResolved_);
if (firstCollectionTypeIdx_ == -1) return null;
return matchedTypes_.get(firstCollectionTypeIdx_);
}
public int getFirstCollectionIndex() {
Preconditions.checkState(isResolved_);
return firstCollectionTypeIdx_;
}
public Type destType() {
Preconditions.checkState(isResolved_);
if (!matchedTypes_.isEmpty()) return matchedTypes_.get(matchedTypes_.size() - 1);
if (rootDesc_ != null) return rootDesc_.getType();
if (rootTable_ != null) return rootTable_.getType();
return null;
}
public Table destTable() {
Preconditions.checkState(isResolved_);
if (rootTable_ != null && rootDesc_ == null && matchedTypes_.isEmpty()) {
return rootTable_;
}
return null;
}
/**
* Returns the destination Column of this path, or null if the destination of this
* path is not a Column. This path must be rooted at a table or a tuple descriptor
* corresponding to a table for the destination to be a Column.
*/
public Column destColumn() {
Preconditions.checkState(isResolved_);
if (rootTable_ == null || rawPath_.size() != 1) return null;
return rootTable_.getColumn(rawPath_.get(rawPath_.size() - 1));
}
/**
* Returns the destination tuple descriptor of this path, or null
* if the destination of this path is not a registered alias.
*/
public TupleDescriptor destTupleDesc() {
Preconditions.checkState(isResolved_);
if (rootDesc_ != null && matchedTypes_.isEmpty()) return rootDesc_;
return null;
}
public List<String> getFullyQualifiedRawPath() {
Preconditions.checkState(rootTable_ != null || rootDesc_ != null);
List<String> result = Lists.newArrayListWithCapacity(rawPath_.size() + 2);
if (rootDesc_ != null) {
result.addAll(Lists.newArrayList(rootDesc_.getAlias().split("\\.")));
} else {
result.add(rootTable_.getDb().getName());
result.add(rootTable_.getName());
}
result.addAll(rawPath_);
return result;
}
/**
* Returns the absolute explicit path starting from the fully-qualified table name.
* The goal is produce a canonical non-ambiguous path that can be used as an
* identifier for table and slot references.
*
* Example:
* create table mydb.test (a array<struct<f1:int,f2:string>>);
* use mydb;
* select f1 from test t, t.a;
*
* This function should return the following for the path of the 'f1' SlotRef:
* mydb.test.a.item.f1
*/
public List<String> getCanonicalPath() {
List<String> result = Lists.newArrayList();
getCanonicalPath(result);
return result;
}
/**
* Recursive helper for getCanonicalPath().
*/
private void getCanonicalPath(List<String> result) {
Type currentType = null;
if (isRootedAtTuple()) {
rootDesc_.getPath().getCanonicalPath(result);
currentType = rootDesc_.getType();
} else {
Preconditions.checkNotNull(isRootedAtTable());
result.add(rootTable_.getTableName().getDb());
result.add(rootTable_.getTableName().getTbl());
currentType = rootTable_.getType().getItemType();
}
// Compute the explicit path from the matched positions. Note that rawPath_ is
// not sufficient because it could contain implicit matches.
for (int i = 0; i < matchedPositions_.size(); ++i) {
StructType structType = getTypeAsStruct(currentType);
int matchPos = matchedPositions_.get(i);
Preconditions.checkState(matchPos < structType.getFields().size());
StructField match = structType.getFields().get(matchPos);
result.add(match.getName());
currentType = match.getType();
}
}
/**
* Returns the absolute physical path in positions starting from the schema root to the
* destination of this path.
*/
public List<Integer> getAbsolutePath() {
if (absolutePath_ != null) return absolutePath_;
Preconditions.checkState(isResolved_);
absolutePath_ = Lists.newArrayList();
if (rootDesc_ != null) absolutePath_.addAll(rootDesc_.getPath().getAbsolutePath());
absolutePath_.addAll(matchedPositions_);
return absolutePath_;
}
@Override
public String toString() {
Preconditions.checkState(rootTable_ != null || rootDesc_ != null);
String pathRoot = null;
if (rootDesc_ != null) {
pathRoot = rootDesc_.getAlias();
} else {
pathRoot = rootTable_.getFullName();
}
if (rawPath_.isEmpty()) return pathRoot;
return pathRoot + "." + Joiner.on(".").join(rawPath_);
}
/**
* Returns a raw path from a known root alias and field name.
*/
public static ArrayList<String> createRawPath(String rootAlias, String fieldName) {
ArrayList<String> result = Lists.newArrayList(rootAlias.split("\\."));
result.add(fieldName);
return result;
}
public static Path createRelPath(Path rootPath, String... fieldNames) {
Preconditions.checkState(rootPath.isResolved());
Path result = new Path(rootPath, Lists.newArrayList(fieldNames));
return result;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.server;
import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
import org.apache.hadoop.ozone.container.common.impl.HddsDispatcher;
import org.apache.hadoop.ozone.container.common.interfaces.Handler;
import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
import org.apache.hadoop.ozone.container.replication.GrpcReplicationService;
import org.apache.hadoop.ozone.container.replication.OnDemandContainerReplicationSource;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos
.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.scm.TestUtils;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.RatisTestHelper;
import org.apache.hadoop.ozone.container.ContainerTestHelper;
import org.apache.hadoop.ozone.container.common.interfaces.ContainerDispatcher;
import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerGrpc;
import org.apache.hadoop.ozone.container.common.transport.server.XceiverServerSpi;
import org.apache.hadoop.ozone.container.common.transport.server.ratis.XceiverServerRatis;
import org.apache.hadoop.ozone.web.utils.OzoneUtils;
import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
import org.apache.hadoop.hdds.scm.XceiverClientRatis;
import org.apache.hadoop.hdds.scm.XceiverClientSpi;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.ratis.rpc.RpcType;
import org.apache.ratis.util.CheckedBiConsumer;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.apache.ratis.rpc.SupportedRpcType.GRPC;
import static org.apache.ratis.rpc.SupportedRpcType.NETTY;
import static org.mockito.Mockito.mock;
/**
* Test Containers.
*/
@Ignore("Takes too long to run this test. Ignoring for time being.")
public class TestContainerServer {
  // Scratch directory root; each Ratis datanode gets a UUID-named subdirectory.
  static final String TEST_DIR
      = GenericTestUtils.getTestDir("dfs").getAbsolutePath() + File.separator;

  /**
   * Builds a GRPC replication service that serves containers from the given
   * container set on demand.
   */
  private GrpcReplicationService createReplicationService(
      ContainerSet containerSet) {
    return new GrpcReplicationService(
        new OnDemandContainerReplicationSource(containerSet));
  }

  /** Round-trips a create-container request through a standalone GRPC server. */
  @Test
  public void testClientServer() throws Exception {
    DatanodeDetails datanodeDetails = TestUtils.randomDatanodeDetails();
    ContainerSet containerSet = new ContainerSet();
    runTestClientServer(1, (pipeline, conf) -> conf
            .setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
                pipeline.getFirstNode()
                    .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue()),
        XceiverClientGrpc::new,
        (dn, conf) -> new XceiverServerGrpc(datanodeDetails, conf,
            new TestContainerDispatcher(),
            createReplicationService(containerSet)), (dn, p) -> {
        });
  }

  // Like CheckedBiConsumer but returning a value; used to abstract over
  // client/server construction in runTestClientServer().
  @FunctionalInterface
  interface CheckedBiFunction<LEFT, RIGHT, OUT, THROWABLE extends Throwable> {
    OUT apply(LEFT left, RIGHT right) throws THROWABLE;
  }

  /** Exercises 1-node and 3-node Ratis pipelines over Netty RPC. */
  @Test
  public void testClientServerRatisNetty() throws Exception {
    runTestClientServerRatis(NETTY, 1);
    runTestClientServerRatis(NETTY, 3);
  }

  /** Exercises 1-node and 3-node Ratis pipelines over GRPC RPC. */
  @Test
  public void testClientServerRatisGrpc() throws Exception {
    runTestClientServerRatis(GRPC, 1);
    runTestClientServerRatis(GRPC, 3);
  }

  /**
   * Creates a Ratis Xceiver server for the given datanode, pointing its
   * storage directory at a per-UUID subdirectory of TEST_DIR.
   */
  static XceiverServerRatis newXceiverServerRatis(
      DatanodeDetails dn, OzoneConfiguration conf) throws IOException {
    conf.setInt(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_PORT,
        dn.getPort(DatanodeDetails.Port.Name.RATIS).getValue());
    final String dir = TEST_DIR + dn.getUuid();
    conf.set(OzoneConfigKeys.DFS_CONTAINER_RATIS_DATANODE_STORAGE_DIR, dir);
    final ContainerDispatcher dispatcher = new TestContainerDispatcher();
    return XceiverServerRatis
        .newXceiverServerRatis(dn, conf, dispatcher, null);
  }

  /** Runs the common client/server round-trip using Ratis transports. */
  static void runTestClientServerRatis(RpcType rpc, int numNodes)
      throws Exception {
    runTestClientServer(numNodes,
        (pipeline, conf) -> RatisTestHelper.initRatisConf(rpc, conf),
        XceiverClientRatis::newXceiverClientRatis,
        TestContainerServer::newXceiverServerRatis,
        (dn, p) -> RatisTestHelper.initXceiverServerRatis(rpc, dn, p));
  }

  /**
   * Common driver: starts one server per datanode of a fresh pipeline, sends a
   * create-container request, and verifies that the trace ID round-trips.
   * The client and all started servers are torn down in the finally block.
   */
  static void runTestClientServer(
      int numDatanodes,
      CheckedBiConsumer<Pipeline, OzoneConfiguration, IOException> initConf,
      CheckedBiFunction<Pipeline, OzoneConfiguration, XceiverClientSpi,
          IOException> createClient,
      CheckedBiFunction<DatanodeDetails, OzoneConfiguration, XceiverServerSpi,
          IOException> createServer,
      CheckedBiConsumer<DatanodeDetails, Pipeline, IOException> initServer)
      throws Exception {
    final List<XceiverServerSpi> servers = new ArrayList<>();
    XceiverClientSpi client = null;
    // Note: the original declared an unused local (containerName); removed.
    try {
      final Pipeline pipeline = ContainerTestHelper.createPipeline(numDatanodes);
      final OzoneConfiguration conf = new OzoneConfiguration();
      initConf.accept(pipeline, conf);
      for (DatanodeDetails dn : pipeline.getNodes()) {
        final XceiverServerSpi s = createServer.apply(dn, conf);
        servers.add(s);
        s.start();
        initServer.accept(dn, pipeline);
      }
      client = createClient.apply(pipeline, conf);
      client.connect();
      final ContainerCommandRequestProto request =
          ContainerTestHelper
              .getCreateContainerRequest(
                  ContainerTestHelper.getTestContainerID(), pipeline);
      Assert.assertNotNull(request.getTraceID());
      ContainerCommandResponseProto response = client.sendCommand(request);
      Assert.assertEquals(request.getTraceID(), response.getTraceID());
    } finally {
      if (client != null) {
        client.close();
      }
      // Stop every server that was successfully created and started.
      servers.forEach(XceiverServerSpi::stop);
    }
  }

  /** Round-trips a create-container request through the real HddsDispatcher. */
  @Test
  public void testClientServerWithContainerDispatcher() throws Exception {
    XceiverServerGrpc server = null;
    XceiverClientGrpc client = null;
    try {
      Pipeline pipeline = ContainerTestHelper.createSingleNodePipeline();
      OzoneConfiguration conf = new OzoneConfiguration();
      conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
          pipeline.getFirstNode()
              .getPort(DatanodeDetails.Port.Name.STANDALONE).getValue());
      ContainerSet containerSet = new ContainerSet();
      HddsDispatcher dispatcher = new HddsDispatcher(
          conf, mock(ContainerSet.class), mock(VolumeSet.class), null);
      dispatcher.init();
      DatanodeDetails datanodeDetails = TestUtils.randomDatanodeDetails();
      server = new XceiverServerGrpc(datanodeDetails, conf, dispatcher,
          createReplicationService(containerSet));
      client = new XceiverClientGrpc(pipeline, conf);
      server.start();
      client.connect();
      ContainerCommandRequestProto request =
          ContainerTestHelper.getCreateContainerRequest(
              ContainerTestHelper.getTestContainerID(), pipeline);
      ContainerCommandResponseProto response = client.sendCommand(request);
      // assertEquals gives a useful failure message, unlike assertTrue(equals);
      // also matches the style used in runTestClientServer().
      Assert.assertEquals(request.getTraceID(), response.getTraceID());
      Assert.assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
    } finally {
      if (client != null) {
        client.close();
      }
      if (server != null) {
        server.stop();
      }
    }
  }

  /** Minimal dispatcher stub that answers every request with a create response. */
  private static class TestContainerDispatcher implements ContainerDispatcher {
    /**
     * Dispatches commands to container layer.
     *
     * @param msg - Command Request
     * @return Command Response
     */
    @Override
    public ContainerCommandResponseProto
        dispatch(ContainerCommandRequestProto msg) {
      return ContainerTestHelper.getCreateContainerResponse(msg);
    }

    @Override
    public void init() {
    }

    @Override
    public void shutdown() {
    }

    @Override
    public Handler getHandler(ContainerProtos.ContainerType containerType) {
      return null;
    }

    @Override
    public void setScmId(String scmId) {
    }
  }
}
| |
package org.metaborg.spt.core.run;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.metaborg.core.language.ILanguageImpl;
import org.metaborg.core.messages.IMessage;
import org.metaborg.core.source.ISourceRegion;
import org.metaborg.core.source.SourceRegion;
import org.metaborg.core.syntax.ParseException;
import org.metaborg.mbt.core.model.IFragment;
import org.metaborg.mbt.core.model.IFragment.FragmentPiece;
import org.metaborg.mbt.core.model.expectations.MessageUtil;
import org.metaborg.mbt.core.run.IFragmentParserConfig;
import org.metaborg.spoofax.core.syntax.ISpoofaxSyntaxService;
import org.metaborg.spoofax.core.syntax.JSGLRParserConfiguration;
import org.metaborg.spoofax.core.unit.ISpoofaxInputUnit;
import org.metaborg.spoofax.core.unit.ISpoofaxInputUnitService;
import org.metaborg.spoofax.core.unit.ISpoofaxParseUnit;
import org.metaborg.spoofax.core.unit.ISpoofaxUnitService;
import org.metaborg.spoofax.core.unit.ParseContrib;
import org.metaborg.util.log.ILogger;
import org.metaborg.util.log.LoggerUtils;
import org.spoofax.interpreter.terms.IStrategoList;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.jsglr.client.imploder.IToken;
import org.spoofax.jsglr.client.imploder.ITokens;
import org.spoofax.jsglr.client.imploder.ImploderAttachment;
import org.spoofax.jsglr.client.imploder.ListImploderAttachment;
import org.spoofax.jsglr.client.imploder.Token;
import org.spoofax.jsglr.client.imploder.Tokenizer;
import org.spoofax.terms.util.TermUtils;
import org.spoofax.terms.visitor.AStrategoTermVisitor;
import org.spoofax.terms.visitor.StrategoTermVisitee;
import com.google.inject.Inject;
/**
* Parser for fragments languages.
*
* Ensures the correct offsets of the parse result by post processing the parse result and updating origins. Updating
* origins happens by updating the offsets of the tokens of the AST's tokenizer. This requires quite heavy knowledge of
* the Spoofax internals, so we could use an API for changing origin locations and not just querying them.
*/
public class SpoofaxOriginFragmentParser implements ISpoofaxFragmentParser {

    private static final ILogger logger = LoggerUtils.logger(SpoofaxOriginFragmentParser.class);

    // Spoofax services: build parser input units, re-wrap parse results, and parse.
    private final ISpoofaxInputUnitService inputService;
    private final ISpoofaxUnitService unitService;
    private final ISpoofaxSyntaxService parseService;

    @Inject public SpoofaxOriginFragmentParser(ISpoofaxInputUnitService inputService, ISpoofaxUnitService unitService,
        ISpoofaxSyntaxService parseService) {
        this.inputService = inputService;
        this.unitService = unitService;
        this.parseService = parseService;
    }

    /**
     * Parses the concatenated text of the fragment's pieces and then rewrites all
     * origin offsets (tokens and parse-message regions) from fragment-local offsets
     * to each piece's startOffset in the surrounding file, so that error markers and
     * origins point into the original specification rather than the extracted text.
     */
    @Override public ISpoofaxParseUnit parse(IFragment fragment, ILanguageImpl language,
        @Nullable ILanguageImpl dialect, @Nullable ISpoofaxFragmentParserConfig config) throws ParseException {
        // record the text of the fragment
        final Iterable<FragmentPiece> fragmentPieces = fragment.getText();
        StringBuilder textBuilder = new StringBuilder();
        for(FragmentPiece piece : fragmentPieces) {
            textBuilder.append(piece.text);
        }
        final String textStr = textBuilder.toString();
        // now we can parse the fragment
        final ISpoofaxInputUnit input;
        JSGLRParserConfiguration pConfig = null;
        if(config != null) {
            pConfig = config.getParserConfigForLanguage(language);
        }
        if(pConfig == null) {
            input = inputService.inputUnit(fragment.getResource(), textStr, language, dialect);
        } else {
            input = inputService.inputUnit(fragment.getResource(), textStr, language, dialect, pConfig);
        }
        ISpoofaxParseUnit p = parseService.parse(input);
        // short circuit if there was no result
        if(!p.valid()) {
            return p;
        }
        IStrategoTerm ast = p.ast();
        if(ast == null) {
            return p;
        }
        // start changing the offsets by changing the offsets of the tokens
        ITokens originalTokens = ImploderAttachment.getTokenizer(ast);
        if(originalTokens == null) {
            logger.warn("Found a fragment with no tokenizer! Can't update the offsets. \"{}\"", textStr);
            return p;
        }
        // adjust the tokens for each piece of the fragment
        // this makes NO assumptions about the order of the startOffsets of the token stream
        // it DOES assume that the pieces of text of the fragment are ordered based on the correct order of text
        Map<IToken, Integer> startOffsets = new HashMap<>(originalTokens.getTokenCount());
        Map<IToken, Integer> endOffsets = new HashMap<>(originalTokens.getTokenCount());
        IToken eof = null;
        // Running start/end offsets of the current piece within the concatenated text.
        int currStartOffsetOfPiece = 0;
        int currEndOffsetOfPiece = 0;
        for(FragmentPiece piece : fragmentPieces) {
            int pieceLength = piece.text.length();
            // Inclusive end offset of this piece in the concatenated fragment text.
            currEndOffsetOfPiece = currStartOffsetOfPiece + pieceLength - 1;
            // Shift that maps a fragment-local offset to the piece's file offset.
            int adjustment = piece.startOffset - currStartOffsetOfPiece;
            for(IToken token : originalTokens.allTokens()) {
                int startOffset = token.getStartOffset();
                // Only tokens that *start* inside this piece are remapped by it.
                if(startOffset >= currStartOffsetOfPiece && startOffset <= currEndOffsetOfPiece) {
                    startOffsets.put(token, startOffset + adjustment);
                    endOffsets.put(token, token.getEndOffset() + adjustment);
                }
                if(token.getKind() == IToken.Kind.TK_EOF) {
                    eof = token;
                }
            }
            currStartOffsetOfPiece += pieceLength;
        }
        // Only post process tokens when there are tokens, and when there is an end-of-file token.
        if(!startOffsets.isEmpty() && eof != null) {
            MappingTokenizer newTokenizer = new MappingTokenizer(originalTokens);
            for(IToken token : originalTokens.allTokens()) {
                if(token.getKind() == IToken.Kind.TK_EOF) {
                    // Pin EOF as a zero-width token directly after the last added token.
                    int lastOffset = newTokenizer.tokens.get(newTokenizer.tokens.size() - 1).getEndOffset();
                    newTokenizer.addToken(lastOffset + 1, lastOffset, eof);
                } else {
                    // Tokens not covered by any piece keep their original offsets.
                    newTokenizer.addToken(
                        startOffsets.containsKey(token) ? startOffsets.get(token) : token.getStartOffset(),
                        endOffsets.containsKey(token) ? endOffsets.get(token) : token.getEndOffset(), token);
                }
            }
            newTokenizer.overwriteAttachments(ast);
        }
        // now the offsets of the tokens are updated
        // changing the state like this should update the offsets of the ast nodes automatically
        // but next, we need to update the offsets of the parse messages
        // NOTE(review): only messages whose region actually changed are collected into
        // changedMessages and handed to the new parse unit; messages with a null or
        // unchanged region are dropped from the result — confirm this is intended.
        List<IMessage> changedMessages = new LinkedList<>();
        for(IMessage m : p.messages()) {
            ISourceRegion region = m.region();
            if(region == null) {
                continue;
            }
            int newStart = region.startOffset();
            int newEnd = region.endOffset();
            // 'offset' walks the start of each piece within the concatenated text.
            int offset = 0;
            for(FragmentPiece piece : fragmentPieces) {
                int startOffset = region.startOffset();
                int pieceEndExclusive = offset + piece.text.length();
                if(startOffset >= offset && startOffset < pieceEndExclusive) {
                    newStart = piece.startOffset + (startOffset - offset);
                }
                int endOffset = region.endOffset();
                if(endOffset >= offset && endOffset < pieceEndExclusive) {
                    newEnd = piece.startOffset + (endOffset - offset);
                }
                offset += piece.text.length();
            }
            if(newStart != region.startOffset() || newEnd != region.endOffset()) {
                ISourceRegion newRegion = new SourceRegion(newStart, newEnd);
                changedMessages.add(MessageUtil.setRegion(m, newRegion));
            }
        }
        return unitService.parseUnit(input,
            new ParseContrib(p.valid(), p.success(), p.isAmbiguous(), p.ast(), changedMessages, p.duration()));
    }

    /**
     * Adapter overload for the generic IFragmentParserConfig interface; delegates
     * to the Spoofax-typed overload, passing null for non-Spoofax configs.
     */
    @Override public ISpoofaxParseUnit parse(IFragment fragment, ILanguageImpl language, ILanguageImpl dialect,
        IFragmentParserConfig config) throws ParseException {
        if(!(config instanceof ISpoofaxFragmentParserConfig)) {
            return parse(fragment, language, dialect, (ISpoofaxFragmentParserConfig) null);
        } else {
            return parse(fragment, language, dialect, (ISpoofaxFragmentParserConfig) config);
        }
    }

    /**
     * Replacement tokenizer that holds MappedToken copies of an original token
     * stream with adjusted offsets, and can overwrite the ImploderAttachments of
     * an AST to point at the new tokens.
     */
    private static class MappingTokenizer implements ITokens {
        // New tokens in insertion order; addToken() is expected to be called in
        // original token-stream order.
        private final List<IToken> tokens = new ArrayList<>();
        // Bidirectional mapping between original tokens and their adjusted copies.
        private final Map<IToken, IToken> oldToNewTokens = new HashMap<>();
        private final Map<IToken, IToken> newToOldTokens = new HashMap<>();
        private final String input;
        private final String filename;

        private MappingTokenizer(ITokens originalTokens) {
            this.input = originalTokens.getInput();
            this.filename = originalTokens.getFilename();
        }

        // Registers an adjusted copy of originalToken spanning [startOffset, endOffset].
        private void addToken(int startOffset, int endOffset, IToken originalToken) {
            Token newToken = new MappedToken(this, startOffset, endOffset, originalToken);
            newToken.setAstNode(originalToken.getAstNode());
            tokens.add(newToken);
            oldToNewTokens.put(originalToken, newToken);
            newToOldTokens.put(newToken, originalToken);
        }

        // Rewrites the ImploderAttachment of every term (and list tail) in the AST
        // so its left/right tokens refer to the adjusted tokens of this tokenizer.
        private void overwriteAttachments(IStrategoTerm ast) {
            StrategoTermVisitee.topdown(new AStrategoTermVisitor() {
                @Override public boolean visit(IStrategoTerm term) {
                    updateImploderAttachment(term);
                    if(TermUtils.isList(term)) {
                        // List tails carry their own attachments; update each one.
                        IStrategoList sublist = TermUtils.toList(term);
                        while(!sublist.isEmpty()) {
                            sublist = sublist.tail();
                            updateImploderAttachment(sublist);
                        }
                    }
                    return true;
                }
            }, ast);
        }

        private void updateImploderAttachment(IStrategoTerm term) {
            ImploderAttachment originalAttachment = ImploderAttachment.get(term);
            // For incremental parsing, the reused AST nodes already have updated ImploderAttachments with new
            // MappedTokens. In this case, we should get the original token to index the oldToNewTokens Map,
            // because the offsets might be updated since the previous version.
            IToken leftToken = oldToNewTokens.get(originalAttachment.getLeftToken() instanceof MappedToken
                ? ((MappedToken) originalAttachment.getLeftToken()).originalToken : originalAttachment.getLeftToken());
            IToken rightToken = oldToNewTokens.get(originalAttachment.getRightToken() instanceof MappedToken
                ? ((MappedToken) originalAttachment.getRightToken()).originalToken
                : originalAttachment.getRightToken());
            // Re-create the attachment with identical metadata but remapped tokens.
            ImploderAttachment.putImploderAttachment(term, term instanceof ListImploderAttachment,
                originalAttachment.getSort(), leftToken, rightToken, originalAttachment.isBracket(),
                originalAttachment.isCompletion(), originalAttachment.isNestedCompletion(),
                originalAttachment.isSinglePlaceholderCompletion());
            ImploderAttachment newAttachment = ImploderAttachment.get(term);
            // Carry over injection information onto the freshly created attachment.
            originalAttachment.getInjections().forEach(newAttachment::pushInjection);
        }

        @Override public String getInput() {
            return input;
        }

        @Override public int getTokenCount() {
            return tokens.size();
        }

        // Linear scan; only matches a token that *starts* exactly at the offset.
        @Override public IToken getTokenAtOffset(int offset) {
            for(IToken token : tokens) {
                if(token.getStartOffset() == offset)
                    return token;
            }
            return null;
        }

        @Override public String getFilename() {
            return filename;
        }

        // Renders the source text between two mapped tokens using the *original*
        // (fragment-local) offsets, since 'input' is the fragment text.
        @Override public String toString(IToken left, IToken right) {
            return toString(newToOldTokens.get(left).getStartOffset(), newToOldTokens.get(right).getEndOffset());
        }

        /**
         * @param endOffset
         *            The end offset is inclusive.
         */
        @Override public String toString(int startOffset, int endOffset) {
            return input.substring(startOffset, endOffset + 1);
        }

        @Override public Iterator<IToken> iterator() {
            return new Tokenizer.FilteredTokenIterator(allTokens());
        }

        @Override public Iterable<IToken> allTokens() {
            return Collections.unmodifiableList(tokens);
        }
    }

    /**
     * A token copy with adjusted start/end offsets that remembers the token it
     * was derived from. Line/column/index are set to -1 (not recomputed).
     */
    private static class MappedToken extends Token {
        private final IToken originalToken;

        public MappedToken(ITokens tokens, int startOffset, int endOffset, IToken originalToken) {
            super(tokens, tokens.getFilename(), -1, -1, -1, startOffset, endOffset, originalToken.getKind());
            this.originalToken = originalToken;
        }
    }
}
| |
/*
* Copyright 2013 Chris Pheby
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jadira.scanner;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.jadira.scanner.classpath.filter.NameFilter;
import org.jadira.scanner.core.api.Filter;
import org.jadira.scanner.core.api.Locator;
import org.jadira.scanner.core.exception.ClasspathAccessException;
import org.jadira.scanner.core.utils.reflection.ClassLoaderUtils;
/**
 * Mutable builder for a classpath-scanning {@link Configuration}.
 * <p>
 * A new builder is pre-populated with the default class loaders; URLs,
 * locators, filters and class loaders can then be supplied individually or
 * via the type-dispatching {@link #build(Object...)} factory.
 */
public class ConfigurationBuilder implements Configuration {

    private List<URL> urls = new ArrayList<URL>();
    private List<Locator<URL>> locators = new ArrayList<Locator<URL>>();
    private List<ClassLoader> classLoaders = new ArrayList<ClassLoader>();
    private List<Filter<?>> filters = new ArrayList<Filter<?>>();

    /**
     * Creates a builder whose class-loader list is pre-populated with the
     * loaders discovered by {@link ClassLoaderUtils#getClassLoaders()}.
     */
    public ConfigurationBuilder() {
        for (ClassLoader next : ClassLoaderUtils.getClassLoaders()) {
            classLoaders.add(next);
        }
    }

    /**
     * Builds a configuration from a heterogeneous parameter list. Nested
     * arrays and iterables are flattened first; each resulting parameter is
     * then dispatched on its runtime type: {@link ClassLoader}, {@link URL},
     * {@link Locator}, {@link Filter}, or {@link String} (interpreted as a
     * {@link NameFilter} pattern).
     *
     * @param params configuration parameters; may be empty or {@code null}
     * @return a configured builder
     * @throws ClasspathAccessException if a parameter of an unsupported type is supplied
     */
    public static ConfigurationBuilder build(final Object... params) {

        ConfigurationBuilder builder = new ConfigurationBuilder();

        List<Object> paramsList = flattenParams(params);

        List<ClassLoader> cLoaders = new ArrayList<ClassLoader>();
        for (Object param : paramsList) {
            if (param instanceof ClassLoader) {
                cLoaders.add((ClassLoader) param);
            } else if (param instanceof URL) {
                builder.addUrls((URL) param);
            } else if (param instanceof Locator) {
                @SuppressWarnings("unchecked") final Locator<URL>[] myParams = new Locator[] {(Locator<URL>) param};
                builder.addLocators(myParams);
            } else if (param instanceof Filter) {
                builder.addFilters((Filter<?>) param);
            } else if (param instanceof String) {
                builder.addFilters(new NameFilter((String) param));
            } else {
                throw new ClasspathAccessException("Could not handle builder parameter " + param.toString());
            }
        }
        // Only override the defaults installed by the constructor when the
        // caller actually supplied class loaders; previously an empty
        // parameter list silently replaced the default loaders with an
        // empty list.
        if (!cLoaders.isEmpty()) {
            builder.setClassLoaders(cLoaders);
        }
        return builder;
    }

    /**
     * Flattens nested arrays and iterables into a single list, dropping
     * {@code null} entries. Safe to call with a {@code null} varargs array.
     */
    private static List<Object> flattenParams(final Object... params) {
        // Guard BEFORE touching params.length: a caller can pass
        // (Object[]) null through varargs, which previously threw an NPE
        // on the ArrayList pre-sizing below.
        if (params == null) {
            return new ArrayList<Object>(0);
        }
        List<Object> paramsList = new ArrayList<Object>(params.length);
        for (Object param : params) {
            if (param == null) {
                continue;
            }
            if (param.getClass().isArray()) {
                for (Object nextEntry : (Object[]) param) {
                    if (nextEntry != null) {
                        paramsList.add(nextEntry);
                    }
                }
            } else if (param instanceof Iterable) {
                for (Object nextEntry : (Iterable<?>) param) {
                    if (nextEntry != null) {
                        paramsList.add(nextEntry);
                    }
                }
            } else {
                paramsList.add(param);
            }
        }
        return paramsList;
    }

    /** Creates a {@link Scanner} backed by this configuration. */
    public Scanner build() {
        return new Scanner(this);
    }

    @Override
    public List<URL> getUrls() {
        return urls;
    }

    ConfigurationBuilder setUrls(final Collection<URL> urls) {
        this.urls = new ArrayList<URL>(urls);
        return this;
    }

    ConfigurationBuilder setUrls(final URL... urls) {
        this.urls = new ArrayList<URL>(urls.length);
        for (URL next : urls) {
            this.urls.add(next);
        }
        return this;
    }

    ConfigurationBuilder addUrls(final Collection<URL> urls) {
        this.urls.addAll(urls);
        return this;
    }

    ConfigurationBuilder addUrls(final URL... urls) {
        for (URL next : urls) {
            this.urls.add(next);
        }
        return this;
    }

    @Override
    public List<Locator<URL>> getLocators() {
        return locators;
    }

    ConfigurationBuilder setLocators(final Collection<Locator<URL>> locators) {
        this.locators = new ArrayList<Locator<URL>>(locators);
        return this;
    }

    ConfigurationBuilder setLocators(final Locator<URL>... locators) {
        this.locators = new ArrayList<Locator<URL>>(locators.length);
        for (Locator<URL> next : locators) {
            this.locators.add(next);
        }
        return this;
    }

    ConfigurationBuilder addLocators(final Collection<Locator<URL>> locators) {
        this.locators.addAll(locators);
        return this;
    }

    ConfigurationBuilder addLocators(final Locator<URL>... locators) {
        for (Locator<URL> next : locators) {
            this.locators.add(next);
        }
        return this;
    }

    @Override
    public List<ClassLoader> getClassLoaders() {
        return classLoaders;
    }

    ConfigurationBuilder setClassLoaders(final Collection<ClassLoader> classLoaders) {
        this.classLoaders = new ArrayList<ClassLoader>(classLoaders);
        return this;
    }

    ConfigurationBuilder setClassLoaders(final ClassLoader... classLoaders) {
        this.classLoaders = new ArrayList<ClassLoader>(classLoaders.length);
        for (ClassLoader next : classLoaders) {
            this.classLoaders.add(next);
        }
        return this;
    }

    ConfigurationBuilder addClassLoaders(final Collection<ClassLoader> classLoaders) {
        this.classLoaders.addAll(classLoaders);
        return this;
    }

    ConfigurationBuilder addClassLoaders(final ClassLoader... classLoaders) {
        for (ClassLoader next : classLoaders) {
            this.classLoaders.add(next);
        }
        return this;
    }

    @Override
    public List<Filter<?>> getFilters() {
        return filters;
    }

    ConfigurationBuilder setFilters(final List<Filter<?>> filters) {
        this.filters = new ArrayList<Filter<?>>(filters);
        return this;
    }

    ConfigurationBuilder setFilters(final Filter<?>... filters) {
        this.filters = new ArrayList<Filter<?>>(filters.length);
        for (Filter<?> next : filters) {
            this.filters.add(next);
        }
        return this;
    }

    ConfigurationBuilder addFilters(final List<Filter<?>> filters) {
        this.filters.addAll(filters);
        return this;
    }

    ConfigurationBuilder addFilters(final Filter<?>... filters) {
        for (Filter<?> next : filters) {
            this.filters.add(next);
        }
        return this;
    }
}
| |
/*
* Copyright (c) 2013 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.enzo.qlocktwo;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by
* User: hansolo
* Date: 28.02.13
* Time: 07:53
*/
/**
 * Spanish word-clock: maps a time to the set of words to highlight on the
 * 10x11 letter matrix, e.g. 1:30 -> "ES LA UNA Y MEDIA".
 */
public class QlockSpanish implements Qlock {
    private static final QlockTwo.Language LANGUAGE = QlockTwo.Language.SPANISH;
    private static final String[][] MATRIX = {
        {"E", "S", "O", "N", "E", "L", "A", "S", "U", "N", "A"},
        {"D", "O", "S", "I", "T", "R", "E", "S", "O", "R", "E"},
        {"C", "U", "A", "T", "R", "O", "C", "I", "N", "C", "O"},
        {"S", "E", "I", "S", "A", "S", "I", "E", "T", "E", "N"},
        {"O", "C", "H", "O", "N", "U", "E", "V", "E", "Y", "O"},
        {"L", "A", "D", "I", "E", "Z", "S", "O", "N", "C", "E"},
        {"D", "O", "C", "E", "L", "Y", "M", "E", "N", "O", "S"},
        {"O", "V", "E", "I", "N", "T", "E", "D", "I", "E", "Z"},
        {"V", "E", "I", "N", "T", "I", "C", "I", "N", "C", "O"},
        {"M", "E", "D", "I", "A", "C", "U", "A", "R", "T", "O"}
    };
    // Maps hour 1..12 to the name of the QlockLanguage hour word.
    private final ConcurrentHashMap<Integer, String> LOOKUP;
    private List<QlockWord> timeList;

    public QlockSpanish() {
        LOOKUP = new ConcurrentHashMap<>();
        LOOKUP.putAll(QlockTwo.Language.SPANISH.getLookup());
        timeList = new ArrayList<>(10);
    }

    @Override public String[][] getMatrix() {
        return MATRIX;
    }

    /**
     * Returns the words to highlight for the given time. Minutes are
     * truncated to the previous multiple of five; out-of-range hour and
     * minute values are normalized first.
     */
    @Override public List<QlockWord> getTime(int minute, int hour) {
        // Normalize the minute FIRST so the hour carry/borrow is included
        // in the 1..12 wrap below (previously getTime(70, 12) produced
        // hour 13 and a failed lookup).
        if (minute >= 60) {
            minute -= 60;
            hour++;
        }
        if (minute < 0) {
            minute += 60;
            hour--;
        }
        if (hour > 12) {
            hour -= 12;
        }
        if (hour <= 0) {
            hour += 12;
        }
        minute -= minute % 5;
        timeList.clear();
        if (minute <= 30) {
            // "Es la una ..." is singular; every other hour is plural.
            addArticle(hour == 1);
            timeList.add(hourWord(hour));
            if (minute != 0) {
                timeList.add(QlockLanguage.Y);
                timeList.add(minuteWord(minute));
            }
        } else {
            // Past the half hour Spanish counts down to the NEXT hour
            // ("es la una menos diez" at 12:50 — see the DIEZ row), so the
            // article must agree with that hour, not the current one.
            // Previously cases 35/40 tested hour == 1 and cases 45/50/55
            // hardcoded SON/LAS.
            int nextHour = hour == 12 ? 1 : hour + 1;
            addArticle(nextHour == 1);
            timeList.add(QlockLanguage.MENOS);
            timeList.add(minuteWord(60 - minute));
            timeList.add(hourWord(nextHour));
        }
        return timeList;
    }

    @Override public QlockTwo.Language getLanguage() {
        return LANGUAGE;
    }

    /** Adds "ES LA" (singular) or "SON LAS" (plural). */
    private void addArticle(final boolean singular) {
        timeList.add(singular ? QlockLanguage.ES : QlockLanguage.SON);
        timeList.add(singular ? QlockLanguage.LA : QlockLanguage.LAS);
    }

    /** Resolves the hour word (1..12) via the language lookup table. */
    private QlockWord hourWord(final int hour) {
        return QlockLanguage.valueOf(LOOKUP.get(hour));
    }

    /** Maps a five-minute step (5..30) to its word on the minute rows. */
    private QlockWord minuteWord(final int minute) {
        switch (minute) {
            case 5:  return QlockLanguage.CINCO1;
            case 10: return QlockLanguage.DIEZ1;
            case 15: return QlockLanguage.CUARTO;
            case 20: return QlockLanguage.VEINTE;
            case 25: return QlockLanguage.VEINTICINCO;
            case 30: return QlockLanguage.MEDIA;
            default: throw new IllegalStateException("Unexpected minute: " + minute);
        }
    }

    /** Word positions: row, start column, stop column (inclusive). */
    private enum QlockLanguage implements QlockWord {
        UNA(0, 8, 10),
        DOS(1, 0, 2),
        TRES(1, 4, 7),
        CUATRO(2, 0, 5),
        CINCO(2, 6, 10),
        CINCO1(8, 6, 10),
        SEIS(3, 0, 3),
        SIETE(3, 5, 9),
        OCHO(4, 0, 3),
        NUEVE(4, 4, 8),
        DIEZ(5, 2, 5),
        DIEZ1(7, 7, 10),
        // ONCE lives on row 5 ("LADIEZSONCE"), columns 7-10; the previous
        // coordinates (7, 5, 10) pointed into the minute row and highlighted
        // "TEDIEZ" instead.
        ONCE(5, 7, 10),
        DOCE(6, 0, 3),
        SON(0, 1, 3),
        ES(0, 0, 1),
        LA(0, 5, 6),
        LAS(0, 5, 7),
        Y(6, 5, 5),
        MENOS(6, 6, 10),
        CUARTO(9, 5, 10),
        VEINTE(7, 1, 6),
        VEINTICINCO(8, 0, 10),
        MEDIA(9, 0, 4);

        private final int ROW;
        private final int START;
        private final int STOP;

        private QlockLanguage(final int ROW, final int START, final int STOP) {
            this.ROW = ROW;
            this.START = START;
            this.STOP = STOP;
        }

        @Override public int getRow() {
            return ROW;
        }

        @Override public int getStart() {
            return START;
        }

        @Override public int getStop() {
            return STOP;
        }
    }
}
| |
package com.mgatelabs.swftools.exploit.gui;
import com.mgatelabs.swftools.exploit.control.AppControl;
import com.mgatelabs.swftools.exploit.j2d.DataCategory;
import com.mgatelabs.swftools.exploit.j2d.J2DRenderSelectionPanel;
import com.mgatelabs.swftools.support.base.Base16;
import com.mgatelabs.swftools.support.filters.SimpleFileFilter;
import com.mgatelabs.swftools.support.loaders.BasicClassLoader;
import com.mgatelabs.swftools.support.plugins.Plugin;
import com.mgatelabs.swftools.support.plugins.PluginInfo;
import com.mgatelabs.swftools.support.tools.BrowserControl;
import com.mgatelabs.swftools.support.tools.FolderHelper;
import com.mgatelabs.swftools.support.tools.JPanelMaker;
import com.mgatelabs.swftools.support.xml.XMLHash;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.FileFilter;
import java.security.MessageDigest;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
// The almighty plugin-selection screen shown at startup
/**
 * Startup dialog that lets the user pick which conversion / block-editor
 * plugins to load before the main program starts. Selections are persisted
 * to last.xml and restored on the next launch.
 */
public class GuiPluginDialog extends GuiInternal {

    // Variables
    private GuiPluginDialog mySelf;                 // captured for use as dialog parent inside listeners
    private PluginStorage myPlugins;                // plugins successfully loaded on "Start Program"
    private JButton bypass, buttonWebsite;
    private J2DRenderSelectionPanel mySelectionPanel;
    private boolean ready;                          // true once plugins loaded and dialog dismissed
    private static final int[] categories = {1, 3}; // 1 = Conversion, 3 = Block Editor
    private boolean isDebug;

    /**
     * Builds the dialog, populates the plugin lists and shows the frame.
     *
     * @param master owning frame (retained for the existing call sites)
     */
    public GuiPluginDialog(JFrame master) {
        super("Startup Configuration", false, false, false, false);
        this.setFrameIcon(new ImageIcon(this.getClass().getResource("/com/mgatelabs/swftools/exploit/gui/images/flash.png")));
        ready = false;
        mySelf = this;
        this.isDebug = AppControl.getInfo().getDebug();
        if (isDebug) {
            System.out.println("Plugin Dialog: Active");
        }
        myPlugins = new PluginStorage();
        buildControls();
        setupPluginLists();
        this.getContentPane().add(buildGui());
        this.pack();
        // setVisible(true) replaces the deprecated Component.show().
        this.setVisible(true);
    }

    /** Returns the plugins selected and loaded by the user. */
    public PluginStorage getPlugins() {
        return myPlugins;
    }

    /** True once plugin loading succeeded and the dialog was dismissed. */
    public boolean ready() {
        return ready;
    }

    /**
     * Populates the selection panel with external plugins found in the
     * plugins/ directory plus the plugins packaged inside the jar, then
     * restores the previous selection.
     */
    public void setupPluginLists() {
        mySelectionPanel = new J2DRenderSelectionPanel();
        mySelectionPanel.addCategory("Conversion", 1, 1);
        mySelectionPanel.addCategory("Block Editor", 3, -1);
        File directory = new File("plugins/");
        FileFilter ff = new SimpleFileFilter(".xml");
        // listFiles() returns null when the directory is missing or
        // unreadable; previously that caused an NPE at startup.
        File[] files = directory.listFiles(ff);
        if (files != null) {
            for (int x = 0; x < files.length; x++) {
                PluginInfo pi = new PluginInfo(files[x], false);
                mySelectionPanel.addToCategory(pi);
            }
        }
        // Plugins bundled inside the jar.
        String[] internal = new String[]{
            "AdvancedConverter.xml",
            "BAddHidden.xml",
            "BasicConverter.xml",
            "BEditID.xml",
            "BEditMetaData.xml",
            "BEditPlaceObject.xml",
            "BInsertEnd.xml",
            "BInsertShow.xml",
            "Block_EditTest.xml",
            "BSetBackGroundColor.xml"
        };
        String path = "com.mgatelabs.swftools.support.plugins.";
        String location = "/com/mgatelabs/swftools/support/plugins/";
        for (String xmlName : internal) {
            PluginInfo pi = new PluginInfo(path, this.getClass().getResourceAsStream(location + xmlName), true);
            mySelectionPanel.addToCategory(pi);
        }
        load();
    }

    // Controls ///////////////////////////////////////////////////////////////
    private void buildControls() {
        bypass = new JButton("Start Program", new ImageIcon(this.getClass().getResource("/com/mgatelabs/swftools/exploit/gui/images/antimoney.png")));
        buttonWebsite = new JButton("M-Gate Labs");
        buttonWebsite.setToolTipText("Click here to visit M-Gate Labs.");
        ExploitActionListener myExploitActionListener = new ExploitActionListener();
        bypass.addActionListener(myExploitActionListener);
        buttonWebsite.addActionListener(myExploitActionListener);
    }

    // Build the Gui //////////////////////////////////////////////////////////
    public JPanel buildGui() {
        JPanel master = JPanelMaker.XPanel(640, 480);
        master.setBackground(Color.WHITE);
        master.add(buildLeftPanel());
        master.add(buildRightPanel());
        return master;
    }

    /** Left column: logo image on top, buttons below. */
    public JPanel buildLeftPanel() {
        JPanel master = new JPanel();
        JPanelMaker.setPanelWidth(master, 250, 480);
        master.setBackground(Color.WHITE);
        master.add(buildImagePanel());
        master.add(buildLoginPanel());
        return master;
    }

    private JPanel buildImagePanel() {
        JPanel master = JPanelMaker.YPanel(240, 100);
        master.setBackground(Color.WHITE);
        master.add(new JLabel(new ImageIcon(this.getClass().getResource("/com/mgatelabs/swftools/images/startup.png"))));
        return master;
    }

    private JPanel buildLoginPanel() {
        JPanel master = JPanelMaker.YPanel(240, 175);
        master.setBackground(Color.WHITE);
        // Button Panel 1: Start Program
        JPanel buttonPanel = JPanelMaker.XPanel(240, 25);
        buttonPanel.setBackground(Color.WHITE);
        buttonPanel.add(Box.createHorizontalStrut(10));
        buttonPanel.add(Box.createHorizontalStrut(4));
        buttonPanel.add(bypass);
        buttonPanel.add(Box.createHorizontalGlue());
        buttonPanel.add(Box.createHorizontalStrut(10));
        master.add(Box.createVerticalStrut(10));
        master.add(buttonPanel);
        // Button Panel 2: website link
        buttonPanel = JPanelMaker.XPanel(240, 25);
        buttonPanel.setBackground(Color.WHITE);
        buttonPanel.add(Box.createHorizontalStrut(10));
        buttonPanel.add(buttonWebsite);
        buttonPanel.add(Box.createHorizontalGlue());
        buttonPanel.add(Box.createHorizontalStrut(10));
        master.add(Box.createVerticalStrut(10));
        master.add(buttonPanel);
        return master;
    }

    /** Right column: the plugin selection panel. */
    public JPanel buildRightPanel() {
        JPanel master = JPanelMaker.YPanel(640 - 250, 480);
        master.add(mySelectionPanel);
        return master;
    }

    public JPanel buildLayoutPanel(Component a, Component b, int fullwidth, int widthpart, int height) {
        return buildLayoutPanel(a, b, fullwidth, widthpart, height, null);
    }

    /**
     * Lays two components side by side: {@code a} in a panel of
     * {@code widthpart} pixels, {@code b} in the remaining width.
     */
    public JPanel buildLayoutPanel(Component a, Component b, int fullwidth, int widthpart, int height, Color back) {
        JPanel master = JPanelMaker.XPanel(fullwidth, height); // 630
        JPanel p1 = JPanelMaker.XPanel(widthpart, height);
        JPanel p2 = JPanelMaker.XPanel(fullwidth - widthpart, height);
        if (back != null) {
            master.setBackground(back);
            p1.setBackground(back);
            p2.setBackground(back);
        }
        p1.add(a);
        p2.add(b);
        p2.add(Box.createHorizontalStrut(5));
        master.add(p1);
        master.add(p2);
        master.add(Box.createHorizontalStrut(5));
        return master;
    }

    /**
     * Validates the selection and, when plugin loading succeeds, saves the
     * choices and closes the dialog.
     *
     * @param full unused legacy flag retained for the existing call sites
     */
    private void loginProcess(boolean full) {
        if (isDebug) {
            System.out.println("Login: Starting");
        }
        if (!mySelectionPanel.ready()) {
            JOptionPane.showMessageDialog(mySelf, "A required plugin has not been selected.", "Error", JOptionPane.ERROR_MESSAGE);
            return;
        }
        if (isDebug) {
            System.out.println("Login: Skipping");
        }
        // The hash argument is a leftover from a removed login step and is
        // never read by loadPlugins.
        if (loadPlugins(null)) {
            ready = true;
            save();
            mySelf.dispose();
        }
    }

    // Plugin Loader //////////////////////////////////////////////////////////
    /**
     * Instantiates the selected plugin for each category and registers it in
     * {@link #myPlugins}.
     *
     * @param hash unused (see loginProcess)
     * @return true when every selected plugin loaded; false on any failure
     */
    private boolean loadPlugins(Hashtable hash) {
        try {
            for (int x = 0; x < categories.length; x++) {
                DataCategory dc = mySelectionPanel.getCategory(categories[x]); // Conversion
                if (dc.getData().size() == 0) {
                    continue;
                }
                for (int y = 0; y < dc.getData().size(); y++) {
                    PluginInfo pi = (PluginInfo) dc.getData().get(y);
                    Plugin pt = null;
                    if (dc.isSelection(pi.id)) {
                        File target = new File("plugins/" + pi.fileName);
                        if (isDebug) {
                            System.out.println("Loading Plugin: " + target.toString());
                        }
                        if (!pi.encrypted) {
                            try {
                                // Old Class Loader Code
                                BasicClassLoader bcl = new BasicClassLoader();
                                if (pi.isPackaged()) {
                                    pt = (Plugin) bcl.findPackagedClass(pi.className, pi.className).newInstance();
                                } else {
                                    pt = (Plugin) bcl.findClass("plugins." + pi.className, target).newInstance();
                                }
                            } catch (Exception ert) {
                                JOptionPane.showMessageDialog(mySelf, "Error: Plugin Loader ->" + ert.getMessage() + ": " + ert.getCause(), "Plugin Error", JOptionPane.ERROR_MESSAGE);
                                return false;
                            }
                        }
                    }
                    if (pt != null) {
                        myPlugins.setPlugin(pt, categories[x]);
                    }
                }
            }
            return true;
        } catch (Exception e) {
            System.out.println("\"" + e + "\"");
            return false;
        }
    }

    // Form Data I/O
    /** Persists the current selections (keyed "!<pluginId>" -> category) to last.xml. */
    private void save() {
        Hashtable myTable = new Hashtable();
        Vector myCategories = mySelectionPanel.getCategories();
        for (int x = 0; x < myCategories.size(); x++) {
            DataCategory dc = (DataCategory) myCategories.get(x);
            for (int y = 0; y < dc.getData().size(); y++) {
                PluginInfo pi = (PluginInfo) dc.getData().get(y);
                if (dc.isSelection(pi.id)) {
                    myTable.put("!" + pi.id, "" + dc.getType());
                }
            }
        }
        // Marker entry so load() can tell a saved file from a fresh install.
        myTable.put("ok", "yeah");
        XMLHash myHash = new XMLHash();
        myHash.write(FolderHelper.getAppPath("last.xml"), myTable);
    }

    // Data Loading
    /** Restores previously saved selections from last.xml, if present. */
    private void load() {
        XMLHash myHash = new XMLHash();
        Hashtable myTable = myHash.read(FolderHelper.getAppPath("last.xml"));
        DataCategory dc = null;
        Enumeration eKeys = myTable.keys();
        while (eKeys.hasMoreElements()) {
            String key = (String) eKeys.nextElement();
            if (key.startsWith("!")) // Check for Plugins
            {
                key = key.substring(1);
                // Get Plugin Category
                String cat = (String) myTable.get("!" + key);
                try {
                    int iKey = Integer.parseInt(key);
                    int iCat = Integer.parseInt(cat);
                    // Try to get the Key
                    dc = mySelectionPanel.getCategory(iCat);
                    if (dc != null) {
                        dc.addSelection(iKey);
                    }
                } catch (Exception ex) {
                    System.out.println("load: " + ex);
                }
            }
        }
        if (dc != null) {
            if (myTable.get("1") != null) {
                dc.setupFor(Integer.parseInt((String) myTable.get("1")));
            }
        }
        if (myTable.get("ok") == null) {
            JOptionPane.showMessageDialog(mySelf, "Welcome to Flash Exploit. To start using\nthe program you must select a conversion plugin.", "Welcome", JOptionPane.INFORMATION_MESSAGE);
        }
    }

    // Encryption Functions
    /**
     * Hex-encoded MD5 digest of the given text.
     * NOTE(review): uses the platform default charset via getBytes(); confirm
     * before switching to UTF-8, since existing stored hashes would change.
     */
    public String getMD5Hash(String value) {
        String result = "";
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            md.update(value.getBytes());
            byte[] toChapter1Digest = md.digest();
            result = getString(toChapter1Digest);
        } catch (Exception cnse) {
            System.out.println(cnse);
        }
        return result;
    }

    /** Base-16 encodes a digest into a printable string. */
    private String getString(byte[] bytes) {
        Base16 b16 = new Base16();
        bytes = b16.encode(bytes);
        return new String(bytes);
    }

    // Listen for button clicks ///////////////////////////////////////////////
    private class ExploitActionListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            if (e.getSource() == bypass) {
                loginProcess(false);
            } else if (e.getSource() == buttonWebsite) {
                BrowserControl.displayURL("http://www.MGateLabs.com");
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.integration;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.errors.RetriableException;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.apache.kafka.connect.storage.StringConverter;
import org.apache.kafka.connect.transforms.Transformation;
import org.apache.kafka.connect.util.clusters.EmbeddedConnectCluster;
import org.apache.kafka.test.IntegrationTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.apache.kafka.connect.runtime.ConnectorConfig.CONNECTOR_CLASS_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.ERRORS_LOG_ENABLE_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.ERRORS_LOG_INCLUDE_MESSAGES_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.ERRORS_RETRY_TIMEOUT_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.ERRORS_TOLERANCE_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.TASKS_MAX_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.TRANSFORMS_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG;
import static org.apache.kafka.connect.runtime.SinkConnectorConfig.DLQ_CONTEXT_HEADERS_ENABLE_CONFIG;
import static org.apache.kafka.connect.runtime.SinkConnectorConfig.DLQ_TOPIC_NAME_CONFIG;
import static org.apache.kafka.connect.runtime.SinkConnectorConfig.DLQ_TOPIC_REPLICATION_FACTOR_CONFIG;
import static org.apache.kafka.connect.runtime.SinkConnectorConfig.TOPICS_CONFIG;
import static org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter.ERROR_HEADER_EXCEPTION;
import static org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter.ERROR_HEADER_EXCEPTION_MESSAGE;
import static org.apache.kafka.connect.runtime.errors.DeadLetterQueueReporter.ERROR_HEADER_ORIG_TOPIC;
import static org.apache.kafka.test.TestUtils.waitForCondition;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Integration test for the different error handling policies in Connect (namely, retry policies, skipping bad records,
* and dead letter queues).
*/
@Category(IntegrationTest.class)
public class ErrorHandlingIntegrationTest {

    private static final Logger log = LoggerFactory.getLogger(ErrorHandlingIntegrationTest.class);

    private static final int NUM_WORKERS = 1;
    private static final String DLQ_TOPIC = "my-connector-errors";
    private static final String CONNECTOR_NAME = "error-conn";
    private static final String TASK_ID = "error-conn-0";
    private static final int NUM_RECORDS_PRODUCED = 20;
    private static final int EXPECTED_CORRECT_RECORDS = 19;
    private static final int EXPECTED_INCORRECT_RECORDS = 1;
    private static final int NUM_TASKS = 1;
    private static final long CONNECTOR_SETUP_DURATION_MS = TimeUnit.SECONDS.toMillis(60);
    private static final long CONSUME_MAX_DURATION_MS = TimeUnit.SECONDS.toMillis(30);

    private EmbeddedConnectCluster connect;
    private ConnectorHandle connectorHandle;

    @Before
    public void setup() throws InterruptedException {
        // setup Connect cluster with defaults
        connect = new EmbeddedConnectCluster.Builder().build();
        // start Connect cluster
        connect.start();
        connect.assertions().assertAtLeastNumWorkersAreUp(NUM_WORKERS,
                "Initial group of workers did not start in time.");
        // get connector handles before starting test.
        connectorHandle = RuntimeHandles.get().connectorHandle(CONNECTOR_NAME);
    }

    @After
    public void close() {
        RuntimeHandles.get().deleteConnector(CONNECTOR_NAME);
        connect.stop();
    }

    @Test
    public void testSkipRetryAndDLQWithHeaders() throws Exception {
        // create test topic
        connect.kafka().createTopic("test-topic");

        // setup connector config
        Map<String, String> props = new HashMap<>();
        props.put(CONNECTOR_CLASS_CONFIG, MonitorableSinkConnector.class.getSimpleName());
        props.put(TASKS_MAX_CONFIG, String.valueOf(NUM_TASKS));
        props.put(TOPICS_CONFIG, "test-topic");
        props.put(KEY_CONVERTER_CLASS_CONFIG, StringConverter.class.getName());
        props.put(VALUE_CONVERTER_CLASS_CONFIG, StringConverter.class.getName());
        props.put(TRANSFORMS_CONFIG, "failing_transform");
        props.put("transforms.failing_transform.type", FaultyPassthrough.class.getName());

        // log all errors, along with message metadata
        props.put(ERRORS_LOG_ENABLE_CONFIG, "true");
        props.put(ERRORS_LOG_INCLUDE_MESSAGES_CONFIG, "true");

        // produce bad messages into dead letter queue
        props.put(DLQ_TOPIC_NAME_CONFIG, DLQ_TOPIC);
        props.put(DLQ_CONTEXT_HEADERS_ENABLE_CONFIG, "true");
        props.put(DLQ_TOPIC_REPLICATION_FACTOR_CONFIG, "1");

        // tolerate all errors
        props.put(ERRORS_TOLERANCE_CONFIG, "all");

        // retry for up to one second
        props.put(ERRORS_RETRY_TIMEOUT_CONFIG, "1000");

        // set expected records to successfully reach the task
        connectorHandle.taskHandle(TASK_ID).expectedRecords(EXPECTED_CORRECT_RECORDS);

        connect.configureConnector(CONNECTOR_NAME, props);
        connect.assertions().assertConnectorAndAtLeastNumTasksAreRunning(CONNECTOR_NAME, NUM_TASKS,
                "Connector tasks did not start in time.");

        waitForCondition(this::checkForPartitionAssignment,
                CONNECTOR_SETUP_DURATION_MS,
                "Connector task was not assigned a partition.");

        // produce some strings into test topic
        for (int i = 0; i < NUM_RECORDS_PRODUCED; i++) {
            connect.kafka().produce("test-topic", "key-" + i, "value-" + i);
        }

        // consume all records from test topic
        log.info("Consuming records from test topic");
        int i = 0;
        for (ConsumerRecord<byte[], byte[]> rec : connect.kafka().consume(NUM_RECORDS_PRODUCED, CONSUME_MAX_DURATION_MS, "test-topic")) {
            String k = new String(rec.key());
            String v = new String(rec.value());
            log.debug("Consumed record (key='{}', value='{}') from topic {}", k, v, rec.topic());
            // assertEquals takes (message, expected, actual); the expected
            // literal goes second so failure messages read correctly.
            assertEquals("Unexpected key", "key-" + i, k);
            assertEquals("Unexpected value", "value-" + i, v);
            i++;
        }

        // wait for records to reach the task
        connectorHandle.taskHandle(TASK_ID).awaitRecords(CONSUME_MAX_DURATION_MS);

        // consume failed records from dead letter queue topic
        log.info("Consuming records from dead letter queue topic");
        ConsumerRecords<byte[], byte[]> messages = connect.kafka().consume(EXPECTED_INCORRECT_RECORDS, CONSUME_MAX_DURATION_MS, DLQ_TOPIC);
        for (ConsumerRecord<byte[], byte[]> recs : messages) {
            log.debug("Consumed record (key={}, value={}) from dead letter queue topic {}",
                    new String(recs.key()), new String(recs.value()), DLQ_TOPIC);
            // DLQ records must carry the error-context headers enabled above.
            assertTrue(recs.headers().toArray().length > 0);
            assertValue("test-topic", recs.headers(), ERROR_HEADER_ORIG_TOPIC);
            assertValue(RetriableException.class.getName(), recs.headers(), ERROR_HEADER_EXCEPTION);
            assertValue("Error when value='value-7'", recs.headers(), ERROR_HEADER_EXCEPTION_MESSAGE);
        }

        connect.deleteConnector(CONNECTOR_NAME);
        connect.assertions().assertConnectorAndTasksAreStopped(CONNECTOR_NAME,
                "Connector tasks did not stop in time.");
    }

    /**
     * Check if a partition was assigned to each task. This method swallows exceptions since it is invoked from a
     * {@link org.apache.kafka.test.TestUtils#waitForCondition} that will throw an error if this method continued
     * to return false after the specified duration has elapsed.
     *
     * @return true if each task was assigned a partition each, false if this was not true or an error occurred when
     * executing this operation.
     */
    private boolean checkForPartitionAssignment() {
        try {
            ConnectorStateInfo info = connect.connectorStatus(CONNECTOR_NAME);
            return info != null && info.tasks().size() == NUM_TASKS
                    && connectorHandle.taskHandle(TASK_ID).partitionsAssigned() == 1;
        } catch (Exception e) {
            // Log the exception and return that the partitions were not assigned
            log.error("Could not check connector state info.", e);
            return false;
        }
    }

    /** Asserts that the last header with the given key holds the expected string (both may be null). */
    private void assertValue(String expected, Headers headers, String headerKey) {
        byte[] actual = headers.lastHeader(headerKey).value();
        if (expected == null && actual == null) {
            return;
        }
        if (expected == null || actual == null) {
            fail();
        }
        assertEquals(expected, new String(actual));
    }

    public static class FaultyPassthrough<R extends ConnectRecord<R>> implements Transformation<R> {

        static final ConfigDef CONFIG_DEF = new ConfigDef();

        /**
         * An arbitrary id which causes this transformation to fail with a {@link RetriableException}, but succeeds
         * on subsequent attempt.
         */
        static final int BAD_RECORD_VAL_RETRIABLE = 4;

        /**
         * An arbitrary id which causes this transformation to fail with a {@link RetriableException} on every
         * attempt, so the record is eventually routed to the dead letter queue.
         */
        static final int BAD_RECORD_VAL = 7;

        private boolean shouldFail = true;

        @Override
        public R apply(R record) {
            String badValRetriable = "value-" + BAD_RECORD_VAL_RETRIABLE;
            if (badValRetriable.equals(record.value()) && shouldFail) {
                shouldFail = false;
                throw new RetriableException("Error when value='" + badValRetriable
                        + "'. A reattempt with this record will succeed.");
            }
            String badVal = "value-" + BAD_RECORD_VAL;
            if (badVal.equals(record.value())) {
                throw new RetriableException("Error when value='" + badVal + "'");
            }
            return record;
        }

        @Override
        public ConfigDef config() {
            return CONFIG_DEF;
        }

        @Override
        public void close() {
        }

        @Override
        public void configure(Map<String, ?> configs) {
        }
    }
}
| |
/**
*
* Copyright (c) 2017 ytk-learn https://github.com/yuantiku
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.fenbi.ytklearn.optimizer;
import com.fenbi.ytklearn.dataflow.FFMModelDataFlow;
import com.fenbi.ytklearn.dataflow.ContinuousDataFlow;
/**
* @author xialong
*/
/**
 * Hoag optimizer for Field-aware Factorization Machine (FFM) models.
 *
 * <p>Sparse rows are packed as int triples per active feature:
 * (feature index, feature value as raw float bits, field index) — the value is
 * recovered with {@code Float.intBitsToFloat} and the field index is scaled by
 * {@code sok} to address that feature's latent block. The {@code assist} buffer
 * caches the latent-factor segments of one sample's active features so the
 * pairwise interaction loops read contiguous memory instead of scattered
 * slices of {@code w}.
 *
 * <p>NOTE: this class is stateful per optimizer thread ({@code assist} is a
 * shared scratch buffer) and is not safe to call concurrently.
 *
 * @author xialong
 */
public class FFMHoagOptimizer extends HoagOptimizer {
    // Start of the first-order weight segment inside the packed weight vector.
    private final int firstOrderIndexStart;
    // Start of the second-order (latent factor) segment; also the end of the first-order segment.
    private final int secondOrderIndexStart;
    // Latent-dimension configuration from the data flow; only K[1] is used here.
    private final int[]K;
    // Number of latent factors per (feature, field) slot (= K[1]).
    private final int sok;
    // Whether first-order weights participate in the gradient.
    private final boolean needFirstOrder;
    // Whether second-order (latent) weights participate in the gradient.
    private final boolean needSecondOrder;
    // If false, the bias feature's latent block is excluded from the gradient.
    private final boolean biasNeedLatentFactor;
    // Number of distinct fields; one latent block of sok floats exists per field.
    private final int fieldSize;
    // Scratch cache: slot c holds w[secondOrderIndexStart + feature*stride .. +stride)
    // for the c-th active feature of the current sample (stride = sok * fieldSize).
    private final float[] assist;

    /**
     * Builds the optimizer and sizes the per-sample latent-cache buffer.
     *
     * @param modelName model identifier passed through to {@link HoagOptimizer}
     * @param dataFlow  must actually be an {@link FFMModelDataFlow}; it supplies
     *                  segment offsets, latent sizes and feature counts
     * @param threadIdx index of the worker thread this optimizer serves
     * @throws Exception propagated from the superclass constructor
     */
    public FFMHoagOptimizer(String modelName,
                            ContinuousDataFlow dataFlow,
                            int threadIdx) throws Exception {
        super(modelName, dataFlow, threadIdx);
        FFMModelDataFlow ffmModelDataFlow = (FFMModelDataFlow)dataFlow;
        firstOrderIndexStart = ffmModelDataFlow.firstOrderIndexStart();
        secondOrderIndexStart = ffmModelDataFlow.secondOrderIndexStart();
        K = ffmModelDataFlow.getK();
        sok = K[1];
        needFirstOrder = ffmModelDataFlow.isNeedFirstOrder();
        needSecondOrder = ffmModelDataFlow.isNeedSecondOrder();
        biasNeedLatentFactor = ffmModelDataFlow.isBiasNeedLatentFactor();
        fieldSize = ffmModelDataFlow.getFieldSize();
        // if (needSecondOrder) {
        //     l2[1] /= (sok * fieldSize);
        //     l1[1] /= (sok * fieldSize);
        // }
        // One latent block per possible active feature plus one spare slot.
        int maxFeatureNum = ffmModelDataFlow.getMaxFeatureNum();
        assist = new float[sok * fieldSize * (maxFeatureNum + 1)];
    }

    /**
     * Start offsets of the two regularized segments:
     * index 0 = first-order weights, index 1 = latent factors.
     *
     * @return a fresh 2-element array of segment start offsets
     */
    @Override
    public int[] getRegularStart() {
        int []start = new int[2];
        start[0] = firstOrderIndexStart;
        start[1] = secondOrderIndexStart;
        return start;
    }

    /**
     * Exclusive end offsets matching {@link #getRegularStart()}:
     * the first-order segment ends where the latent segment begins,
     * and the latent segment runs to {@code dim}.
     *
     * @return a fresh 2-element array of segment end offsets
     */
    @Override
    public int[] getRegularEnd() {
        int []end = new int[2];
        end[0] = secondOrderIndexStart;
        end[1] = dim;
        return end;
    }

    /**
     * Computes the unregularized training loss and its gradient.
     *
     * <p>For each sample: the linear term {@code wx} is accumulated while the
     * active features' latent blocks are copied into {@code assist}; then every
     * feature pair (p, q) contributes
     * {@code <w_p[field(q)], w_q[field(p)]> * val(p) * val(q)} to the score.
     * The gradient mirrors the same pairing in two passes (p&lt;q then q&gt;p)
     * so each feature's latent block receives both sides of the interaction.
     *
     * @param w    current weight vector (read only)
     * @param g    gradient output, zeroed here before accumulation
     * @param iter iteration number (unused in this implementation)
     * @return weighted sum of per-sample losses over the training shard
     */
    @Override
    public double calcPureLossAndGrad(float[] w, float[] g, int iter) {
        double loss = 0.0;
        for (int i = 0; i < dim; i++) {
            g[i] = 0.0f;
        }
        // Size of one feature's full latent block (all fields).
        int stride = sok * fieldSize;
        for (int k = 0; k < threadTrainCoreData.cursor2d; k++) {
            int lsNumInt = realNum[k];
            for (int i = 0; i < lsNumInt; i++) {
                double wei = weight[k][i];
                double wx = 0.0;
                int cidx = 0;
                // Pass 1: linear term + cache each active feature's latent block in assist.
                for (int j = xidx[k][i]; j < xidx[k][i + 1]; j+=3) {
                    // NOTE(review): -1 appears to be a "missing feature" sentinel. It is
                    // skipped here but NOT in the interaction/gradient loops below, so the
                    // cidx->assist mapping could shift for rows containing -1 — confirm
                    // that -1 never occurs in training rows reaching those loops.
                    if (x[k][j] == -1) {
                        continue;
                    }
                    double fval = Float.intBitsToFloat(x[k][j+1]);
                    wx += w[x[k][j]] * fval;
                    int idx = secondOrderIndexStart + x[k][j] * stride;
                    System.arraycopy(w, idx, assist, cidx * stride, stride);
                    cidx ++;
                }
                // Pass 2: pairwise field-aware interaction score fx.
                double fx = 0.0;
                int pidx = 0;
                for (int p = xidx[k][i]; p < xidx[k][i + 1]; p += 3) {
                    double pval = Float.intBitsToFloat(x[k][p + 1]);
                    int pfieldstart = x[k][p + 2] * sok;
                    int qidx = pidx + 1;
                    int pstartIdx = pidx * stride;
                    for (int q = p + 3; q < xidx[k][i + 1]; q += 3) {
                        double qval = Float.intBitsToFloat(x[k][q + 1]);
                        int qfieldstart = x[k][q + 2] * sok;
                        int qstartIdx = qidx * stride;
                        // Dot product of p's latent vector for q's field with
                        // q's latent vector for p's field.
                        double wTw = 0.0;
                        for (int f = 0; f < sok; f++) {
                            wTw += assist[pstartIdx + qfieldstart + f] * assist[qstartIdx + pfieldstart + f];
                        }
                        wTw *= pval * qval;
                        fx += wTw;
                        qidx ++;
                    }
                    pidx ++;
                }
                fx += wx;
                loss += wei * lossFunction.loss(fx, y[k][i]);
                predict[k][i] = (float) lossFunction.predict(fx);
                // grad
                double gradscalar = wei * lossFunction.firstDerivative(fx, y[k][i]);
                // First-order gradient.
                for (int j = xidx[k][i]; j < xidx[k][i + 1]; j+=3) {
                    double fval = Float.intBitsToFloat(x[k][j+1]);
                    g[x[k][j]] += gradscalar * fval;
                }
                // Latent gradient, forward half: for each p, accumulate contributions
                // from all later features q into p's block.
                pidx = 0;
                for (int p = xidx[k][i]; p < xidx[k][i + 1]; p += 3) {
                    double pval = Float.intBitsToFloat(x[k][p + 1]);
                    int pfieldstart = x[k][p + 2] * sok;
                    int qidx = pidx + 1;
                    //int pstartIdx = pidx * stride;
                    int gidx = secondOrderIndexStart + x[k][p] * stride;
                    for (int q = p + 3; q < xidx[k][i + 1]; q += 3) {
                        double qval = Float.intBitsToFloat(x[k][q + 1]);
                        int qfieldstart = x[k][q + 2] * sok;
                        int qstartIdx = qidx * stride;
                        for (int f = 0; f < sok; f++) {
                            g[gidx + qfieldstart + f] += gradscalar * assist[qstartIdx + pfieldstart + f] * pval * qval;
                        }
                        qidx ++;
                    }
                    pidx ++;
                }
                // Latent gradient, backward half: for each q, accumulate contributions
                // from all earlier features p into q's block.
                int qidx = 1;
                for (int q = xidx[k][i] + 3; q < xidx[k][i + 1]; q += 3) {
                    double qval = Float.intBitsToFloat(x[k][q + 1]);
                    int qfieldstart = x[k][q + 2] * sok;
                    pidx = qidx - 1;
                    //int pstartIdx = pidx * stride;
                    int gidx = secondOrderIndexStart + x[k][q] * stride;
                    for (int p = q - 3; p >= xidx[k][i]; p -= 3) {
                        double pval = Float.intBitsToFloat(x[k][p + 1]);
                        int pfieldstart = x[k][p + 2] * sok;
                        int pstartIdx = pidx * stride;
                        for (int f = 0; f < sok; f++) {
                            g[gidx + pfieldstart + f] += gradscalar * assist[pstartIdx + qfieldstart + f] * pval * qval;
                        }
                        pidx --;
                    }
                    qidx ++;
                }
            }
        }
        // Mask out gradient segments that are configured off.
        if (!needFirstOrder) {
            for (int i = firstOrderIndexStart; i < secondOrderIndexStart; i++) {
                g[i] = 0.0f;
            }
        }
        if (!needSecondOrder) {
            for (int i = secondOrderIndexStart; i < g.length; i++) {
                g[i] = 0.0f;
            }
        }
        // The bias feature owns the first latent block; optionally freeze it.
        if (!biasNeedLatentFactor && needSecondOrder && modelParams.need_bias) {
            for (int i = secondOrderIndexStart; i < secondOrderIndexStart + stride; i++) {
                g[i] = 0.0f;
            }
        }
        return loss;
    }

    /**
     * Same computation as {@link #calcPureLossAndGrad} over the test shard.
     * Gradient work is skipped entirely unless {@code needCalcGrad} is set.
     *
     * <p>NOTE(review): unlike the training path, the copy loop here has no
     * {@code -1} sentinel check — confirm test rows can never contain -1.
     *
     * @param wtest        weight vector to evaluate
     * @param gtest        gradient output (only touched when {@code needCalcGrad})
     * @param iter         iteration number (unused)
     * @param needCalcGrad whether to also accumulate the gradient
     * @return weighted test loss, or -1.0 when there is no test data
     */
    @Override
    public double calTestPureLossAndGrad(float []wtest, float []gtest, int iter, boolean needCalcGrad) {
        if (!hasTestData) {
            return -1.0;
        }
        double loss = 0.0;
        if (needCalcGrad) {
            for (int i = 0; i < dim; i++) {
                gtest[i] = 0.0f;
            }
        }
        int stride = sok * fieldSize;
        for (int k = 0; k < threadTestCoreData.cursor2d; k++) {
            int lsNumInt = (int)realNumtest[k];
            for (int i = 0; i < lsNumInt; i++) {
                double wei = weighttest[k][i];
                double wx = 0.0;
                int cidx = 0;
                // Linear term + latent-block caching (see training path).
                for (int j = xidxtest[k][i]; j < xidxtest[k][i + 1]; j+=3) {
                    double fval = Float.intBitsToFloat(xtest[k][j+1]);
                    wx += wtest[xtest[k][j]] * fval;
                    int idx = secondOrderIndexStart + xtest[k][j] * stride;
                    System.arraycopy(wtest, idx, assist, cidx * stride, stride);
                    cidx ++;
                }
                // Pairwise field-aware interaction score.
                double fx = 0.0;
                int pidx = 0;
                for (int p = xidxtest[k][i]; p < xidxtest[k][i + 1]; p += 3) {
                    double pval = Float.intBitsToFloat(xtest[k][p + 1]);
                    int pfieldstart = xtest[k][p + 2] * sok;
                    int qidx = pidx + 1;
                    int pstartIdx = pidx * stride;
                    for (int q = p + 3; q < xidxtest[k][i + 1]; q += 3) {
                        double qval = Float.intBitsToFloat(xtest[k][q + 1]);
                        int qfieldstart = xtest[k][q + 2] * sok;
                        int qstartIdx = qidx * stride;
                        double wTw = 0.0;
                        for (int f = 0; f < sok; f++) {
                            wTw += assist[pstartIdx + qfieldstart + f] * assist[qstartIdx + pfieldstart + f];
                        }
                        wTw *= pval * qval;
                        fx += wTw;
                        qidx ++;
                    }
                    pidx ++;
                }
                fx += wx;
                loss += wei * lossFunction.loss(fx, ytest[k][i]);
                predicttest[k][i] = (float) lossFunction.predict(fx);
                if (needCalcGrad) {
                    // grad
                    double gradscalar = wei * lossFunction.firstDerivative(fx, ytest[k][i]);
                    for (int j = xidxtest[k][i]; j < xidxtest[k][i + 1]; j+=3) {
                        double fval = Float.intBitsToFloat(xtest[k][j+1]);
                        gtest[xtest[k][j]] += gradscalar * fval;
                    }
                    // Forward half of the latent gradient (p < q).
                    pidx = 0;
                    for (int p = xidxtest[k][i]; p < xidxtest[k][i + 1]; p += 3) {
                        double pval = Float.intBitsToFloat(xtest[k][p + 1]);
                        int pfieldstart = xtest[k][p + 2] * sok;
                        int qidx = pidx + 1;
                        //int pstartIdx = pidx * stride;
                        int gidx = secondOrderIndexStart + xtest[k][p] * stride;
                        for (int q = p + 3; q < xidxtest[k][i + 1]; q += 3) {
                            double qval = Float.intBitsToFloat(xtest[k][q + 1]);
                            int qfieldstart = xtest[k][q + 2] * sok;
                            int qstartIdx = qidx * stride;
                            for (int f = 0; f < sok; f++) {
                                gtest[gidx + qfieldstart + f] += gradscalar * assist[qstartIdx + pfieldstart + f] * pval * qval;
                            }
                            qidx ++;
                        }
                        pidx ++;
                    }
                    // Backward half of the latent gradient (q > p).
                    int qidx = 1;
                    for (int q = xidxtest[k][i] + 3; q < xidxtest[k][i + 1]; q += 3) {
                        double qval = Float.intBitsToFloat(xtest[k][q + 1]);
                        int qfieldstart = xtest[k][q + 2] * sok;
                        pidx = qidx - 1;
                        //int pstartIdx = pidx * stride;
                        int gidx = secondOrderIndexStart + xtest[k][q] * stride;
                        for (int p = q - 3; p >= xidxtest[k][i]; p -= 3) {
                            double pval = Float.intBitsToFloat(xtest[k][p + 1]);
                            int pfieldstart = xtest[k][p + 2] * sok;
                            int pstartIdx = pidx * stride;
                            for (int f = 0; f < sok; f++) {
                                gtest[gidx + pfieldstart + f] += gradscalar * assist[pstartIdx + qfieldstart + f] * pval * qval;
                            }
                            pidx --;
                        }
                        qidx ++;
                    }
                }
            }
        }
        if (needCalcGrad) {
            // Mask out disabled gradient segments, mirroring the training path.
            if (!needFirstOrder) {
                for (int i = firstOrderIndexStart; i < secondOrderIndexStart; i++) {
                    gtest[i] = 0.0f;
                }
            }
            if (!needSecondOrder) {
                for (int i = secondOrderIndexStart; i < gtest.length; i++) {
                    gtest[i] = 0.0f;
                }
            }
            if (!biasNeedLatentFactor && needSecondOrder && modelParams.need_bias) {
                for (int i = secondOrderIndexStart; i < secondOrderIndexStart + stride; i++) {
                    gtest[i] = 0.0f;
                }
            }
        }
        return loss;
    }
}
| |
package com.ctrip.xpipe.redis.console.service.migration.impl;
import com.ctrip.xpipe.api.codec.Codec;
import com.ctrip.xpipe.api.command.Command;
import com.ctrip.xpipe.api.migration.OuterClientService;
import com.ctrip.xpipe.command.AbstractCommand;
import com.ctrip.xpipe.command.CommandRetryWrapper;
import com.ctrip.xpipe.redis.console.config.ConsoleConfig;
import com.ctrip.xpipe.redis.checker.controller.result.RetMessage;
import com.ctrip.xpipe.redis.console.job.retry.RetryCondition;
import com.ctrip.xpipe.redis.console.model.ClusterTbl;
import com.ctrip.xpipe.redis.console.model.DcTbl;
import com.ctrip.xpipe.redis.console.service.ClusterService;
import com.ctrip.xpipe.redis.console.service.DcService;
import com.ctrip.xpipe.redis.console.service.migration.CheckMigrationCommandBuilder;
import com.ctrip.xpipe.redis.core.entity.KeeperMeta;
import com.ctrip.xpipe.redis.core.exception.NoResourceException;
import com.ctrip.xpipe.redis.core.metaserver.META_SERVER_SERVICE;
import com.ctrip.xpipe.redis.core.service.AbstractService;
import com.ctrip.xpipe.tuple.Pair;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
/**
 * Builds the health-check {@link Command}s used to verify the migration system:
 * database reachability, the outer client service, and the meta-servers of the
 * configured probe cluster/shard.
 */
public class DefaultCheckMigrationCommandBuilder extends AbstractService implements CheckMigrationCommandBuilder {
    // Shared scheduler used by the retry wrapper.
    private ScheduledExecutorService scheduled;
    private DcService dcService;
    private ClusterService clusterService;
    private OuterClientService outerClientService;
    private ConsoleConfig consoleConfig;

    /**
     * @param scheduled          executor for retry scheduling
     * @param dcService          lookup of DCs related to a cluster
     * @param clusterService     database-backed cluster lookup
     * @param outerClientService external client-routing service
     * @param consoleConfig      supplies the probe cluster/shard and meta-server map
     */
    public DefaultCheckMigrationCommandBuilder(ScheduledExecutorService scheduled, DcService dcService, ClusterService clusterService,
                                               OuterClientService outerClientService, ConsoleConfig consoleConfig) {
        this.scheduled = scheduled;
        this.dcService = dcService;
        this.clusterService = clusterService;
        this.outerClientService = outerClientService;
        this.consoleConfig = consoleConfig;
    }

    /**
     * Creates the check command for the requested step, targeting the
     * cluster/shard configured for migration-system self checks.
     *
     * @param step which subsystem to check
     * @return the check command, or {@code null} for an unrecognized step
     */
    @Override
    public Command<RetMessage> checkCommand(CHECK_MIGRATION_SYSTEM_STEP step) {
        Pair<String, String> clusterShard = consoleConfig.getClusterShardForMigrationSysCheck();
        String clusterName = clusterShard.getKey(), shardName = clusterShard.getValue();
        switch (step) {
            case CHECK_DATA_BASE:
                return checkDatabaseRetryCommand(clusterName);
            case CHECK_OUTER_CLIENT:
                return new CheckOuterClientCommand(clusterName);
            case CHECK_METASERVER:
                Map<String, String> metaServers = consoleConfig.getMetaservers();
                return new CheckMetaServerCommand(metaServers, clusterName, shardName);
        }
        // NOTE(review): unknown steps fall through to null — callers must tolerate this.
        return null;
    }

    /**
     * Wraps the database check with up to 3 retries; retries stop once the
     * returned message reports success.
     */
    private Command<RetMessage> checkDatabaseRetryCommand(String clusterName) {
        RetryCondition<RetMessage> retryCondition = new RetryCondition.AbstractRetryCondition<RetMessage>() {
            @Override
            public boolean isSatisfied(RetMessage retMessage) {
                // NOTE(review): `==` is only correct if getState() is a primitive (int)
                // or the same interned constant; if RetMessage state is a String built
                // at runtime this should be equals() — confirm against RetMessage.
                return retMessage.getState() == RetMessage.SUCCESS_STATE;
            }
            @Override
            public boolean isExceptionExpected(Throwable th) {
                return false;
            }
        };
        return CommandRetryWrapper.buildCountRetry(3, retryCondition, new CheckDatabaseCommand(clusterName), scheduled);
    }

    /**
     * Template for the individual checks: fetch a response, fail the future on
     * exception or a null response, otherwise report the validation result.
     */
    private abstract class AbstractCheckMigrationSystemCommand<T> extends AbstractCommand<RetMessage> {
        @Override
        protected void doExecute() throws Exception {
            T response = null;
            try {
                response = getResponse();
            } catch (Exception e) {
                future().setFailure(e);
                return;
            }
            if(response == null) {
                future().setFailure(new NoResourceException("no response from source"));
                return;
            }
            try {
                future().setSuccess(validate(response));
            } catch (Exception e) {
                future().setFailure(e);
            }
        }
        @Override
        protected void doReset() {
        }
        @Override
        public String getName() {
            return getClass().getSimpleName();
        }
        /** Fetches the raw response for this check; may return null. */
        protected abstract T getResponse() throws Exception;
        /** Converts a non-null response into a success/fail/warning message. */
        protected abstract RetMessage validate(T response) throws Exception;
    }

    /** Verifies the cluster can be read back from the database by name. */
    public class CheckDatabaseCommand extends AbstractCheckMigrationSystemCommand<ClusterTbl> {
        private String clusterName;
        public CheckDatabaseCommand(String clusterName) {
            this.clusterName = clusterName;
        }
        @Override
        protected ClusterTbl getResponse() {
            return clusterService.find(clusterName);
        }
        @Override
        protected RetMessage validate(ClusterTbl response) throws Exception {
            if (response.getClusterName().equals(clusterName)) {
                return RetMessage.createSuccessMessage();
            }
            return RetMessage.createFailMessage(String.format("cluster name not matched from database as: %s",
                    response.getClusterName()));
        }
    }

    /** Verifies the outer client service knows the cluster by name. */
    public class CheckOuterClientCommand extends AbstractCheckMigrationSystemCommand<OuterClientService.ClusterInfo> {
        private String clusterName;
        public CheckOuterClientCommand(String clusterName) {
            this.clusterName = clusterName;
        }
        @Override
        protected OuterClientService.ClusterInfo getResponse() throws Exception {
            return outerClientService.getClusterInfo(clusterName);
        }
        @Override
        protected RetMessage validate(OuterClientService.ClusterInfo response) {
            if(clusterName.equals(response.getName())) {
                return RetMessage.createSuccessMessage();
            }
            return RetMessage.createFailMessage(String.format("cluster name not matched from outer client as: %s",
                    response.getName()));
        }
    }

    /**
     * Probes every meta-server of the cluster's DCs for the active keeper and
     * reports which ones responded.
     */
    public class CheckMetaServerCommand extends AbstractCheckMigrationSystemCommand<List<String>> {
        private Map<String, String> metaServerAddresses;
        // Meta-servers selected for this run; filled in getResponse().
        // NOTE(review): never cleared — a reset-and-re-execute would append
        // duplicates and skew validate()'s size comparison; confirm commands
        // are single-shot.
        private List<String> targetMetaServers = Lists.newArrayList();
        private String clusterName, shardName;
        public CheckMetaServerCommand(Map<String, String> metaservers, String clusterName, String shardName) {
            this.metaServerAddresses = metaservers;
            this.clusterName = clusterName;
            this.shardName = shardName;
        }
        @Override
        protected List<String> getResponse() throws Exception {
            List<DcTbl> dcTbls = dcService.findClusterRelatedDc(clusterName);
            for(DcTbl dcTbl : dcTbls) {
                if(metaServerAddresses.containsKey(dcTbl.getDcName())) {
                    targetMetaServers.add(metaServerAddresses.get(dcTbl.getDcName()));
                }
            }
            logger.info("[CheckMetaServerCommand][target meta-servers]{}", targetMetaServers);
            Set<String> result = Sets.newHashSet();
            for(String metaServerAddress : targetMetaServers) {
                String activeKeeperPath = META_SERVER_SERVICE.GET_ACTIVE_KEEPER.getRealPath(metaServerAddress);
                try {
                    KeeperMeta keeperMeta = restTemplate.getForObject(activeKeeperPath, KeeperMeta.class, clusterName, shardName);
                    if (keeperMeta != null) {
                        result.add(metaServerAddress);
                    }
                } catch (Exception e) {
                    // Best-effort probe: a dead meta-server is reported via validate(),
                    // not by failing the whole command.
                    logger.error("[CheckMetaServerCommand][{}][{}]", clusterName, metaServerAddress, e);
                    // throw new NoResponseException(String.format("MetaServer: %s", metaServerAddress), e);
                }
            }
            return Lists.newArrayList(result);
        }
        @Override
        protected RetMessage validate(List<String> response) {
            if(response.size() == targetMetaServers.size()) {
                return RetMessage.createSuccessMessage();
            } else if(response.isEmpty()) {
                return RetMessage.createFailMessage("All MetaServers Down");
            }
            List<String> problemMetaServers = Lists.newArrayList(targetMetaServers);
            problemMetaServers.removeAll(response);
            // NOTE(review): the joined list keeps a trailing comma.
            StringBuilder sb = new StringBuilder("Non-Responsed Metaservers: ");
            for(String addr : problemMetaServers) {
                sb.append(addr).append(",");
            }
            return RetMessage.createWarningMessage(sb.toString());
        }
    }
}
| |
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.mods.gemfire.support;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.vertx.java.core.file.FileSystem;
import org.vertx.java.core.json.JsonArray;
import org.vertx.java.core.json.JsonObject;
import com.gemstone.gemfire.cache.client.ClientCache;
import com.gemstone.gemfire.cache.client.ClientCacheFactory;
import com.gemstone.gemfire.pdx.PdxSerializer;
/**
 * Builds a GemFire {@link ClientCache} from a Vert.x {@link JsonObject}
 * configuration: gemfire properties (from file and inline JSON), pool tuning,
 * locators/servers, PDX settings, and finally an optional cache XML overlay.
 *
 * <p>Fixes over the previous revision: removed a dead second null check in
 * {@link #configureProperties}, dropped the redundant {@code new File(...)}
 * wrappers, and merged duplicate catch arms with identical handling.
 */
public class ClientCacheConfigurer {

    /**
     * Creates and configures a {@link ClientCache}.
     *
     * @param fs     Vert.x file system (currently unused; kept for API compatibility)
     * @param config JSON configuration; see the private configure* methods for keys
     * @return the created cache, with {@code cache-xml-file} loaded on top when readable
     */
    public static ClientCache configure(FileSystem fs, JsonObject config) {
        ClientCacheFactory factory = new ClientCacheFactory();
        configurePropertiesFile(factory, config);
        configureProperties(factory, config);
        configurePoolProperties(factory, config);
        configureLocators(factory, config);
        configurePool(factory, config);
        configurePDX(factory, config);
        ClientCache cache = factory.create();
        // FIXME this all seems a bit risky
        String cacheXmlFile = config.getString("cache-xml-file", "client-cache.xml");
        System.out.printf("cacheXmlFile: %s%n", cacheXmlFile);
        try (InputStream is = new FileInputStream(cacheXmlFile)) {
            cache.loadCacheXml(is);
        } catch (IOException e) {
            // Best-effort overlay: a missing or unreadable XML leaves the
            // programmatically configured cache in place.
            // NOTE(review): replace printStackTrace with proper logging.
            e.printStackTrace();
        }
        return cache;
    }

    /**
     * Loads gemfire properties from the file named by {@code properties-file}
     * (default {@code gemfire.properties}); {@code .xml} files use
     * {@link Properties#loadFromXML}, anything else plain {@link Properties#load}.
     * A missing/unreadable file is tolerated.
     */
    private static void configurePropertiesFile(ClientCacheFactory factory, JsonObject config) {
        String propertiesFile = config.getString("properties-file", "gemfire.properties");
        System.out.printf("propertiesFile: %s%n", propertiesFile);
        if (propertiesFile == null) {
            return;
        }
        Properties properties = new Properties();
        try (InputStream is = new FileInputStream(propertiesFile)) {
            if (propertiesFile.endsWith(".xml")) {
                properties.loadFromXML(is);
            }
            else {
                properties.load(is);
            }
            for (String name : properties.stringPropertyNames()) {
                factory.set(name, properties.getProperty(name));
            }
        } catch (IOException e) {
            // Best-effort: a missing properties file simply means defaults apply.
            // NOTE(review): replace printStackTrace with proper logging.
            e.printStackTrace();
        }
    }

    /**
     * Applies inline gemfire properties from the {@code properties} JSON object,
     * one {@code factory.set(name, value)} per field.
     */
    private static void configureProperties(ClientCacheFactory factory, JsonObject config) {
        JsonObject propertiesConf = config.getObject("properties");
        if (propertiesConf == null || propertiesConf.getFieldNames().size() == 0) {
            return;
        }
        // (A second `propertiesConf != null` check used to wrap this loop; it was
        // dead code after the guard above and has been removed.)
        for (String name : propertiesConf.getFieldNames()) {
            factory.set(name, propertiesConf.getString(name));
        }
    }

    /**
     * Applies the full set of pool tuning knobs from {@code pool-properties}.
     *
     * <p>NOTE(review): every key is mandatory once the object is present — a
     * missing key yields a null Integer/Long/Boolean and an unboxing NPE.
     * Confirm callers always supply the complete set.
     */
    private static void configurePoolProperties(ClientCacheFactory factory, JsonObject config) {
        JsonObject properties = config.getObject("pool-properties");
        if (properties == null || properties.getFieldNames().size() == 0) {
            return;
        }
        int connectionTimeout = properties.getInteger("connection-timeout");
        factory.setPoolFreeConnectionTimeout(connectionTimeout);
        long idleTimeout = properties.getLong("idle-timeout");
        factory.setPoolIdleTimeout(idleTimeout);
        int loadConditioningInterval = properties.getInteger("load-conditioning-interval");
        factory.setPoolLoadConditioningInterval(loadConditioningInterval);
        int maxConnections = properties.getInteger("max-connections");
        factory.setPoolMaxConnections(maxConnections);
        int minConnections = properties.getInteger("min-connections");
        factory.setPoolMinConnections(minConnections);
        boolean multiuserAuthenticationEnabled = properties.getBoolean("multiuser-authentication");
        factory.setPoolMultiuserAuthentication(multiuserAuthenticationEnabled);
        long pingInterval = properties.getLong("ping-interval");
        factory.setPoolPingInterval(pingInterval);
        boolean pRSingleHopEnabled = properties.getBoolean("pr-single-hop");
        factory.setPoolPRSingleHopEnabled(pRSingleHopEnabled);
        int poolReadTimeout = properties.getInteger("pool-read-timeout");
        factory.setPoolReadTimeout(poolReadTimeout);
        int retryAttempts = properties.getInteger("retry-attempts");
        factory.setPoolRetryAttempts(retryAttempts);
        String group = properties.getString("group");
        factory.setPoolServerGroup(group);
        int bufferSize = properties.getInteger("buffer-size");
        factory.setPoolSocketBufferSize(bufferSize);
        int statisticInterval = properties.getInteger("statistic-interval");
        factory.setPoolStatisticInterval(statisticInterval);
        int ackInterval = properties.getInteger("ack-interval");
        factory.setPoolSubscriptionAckInterval(ackInterval);
        boolean subscriptionEnabled = properties.getBoolean("subscription-enabled");
        factory.setPoolSubscriptionEnabled(subscriptionEnabled);
        int messageTrackingTimeout = properties.getInteger("message-tracking-timeout");
        factory.setPoolSubscriptionMessageTrackingTimeout(messageTrackingTimeout);
        int redundancy = properties.getInteger("redundancy");
        factory.setPoolSubscriptionRedundancy(redundancy);
        boolean threadLocalConnections = properties.getBoolean("thread-local-connections");
        factory.setPoolThreadLocalConnections(threadLocalConnections);
    }

    /** Registers each {@code pool-locators} entry ({@code host}/{@code port}) with the factory. */
    private static void configureLocators(ClientCacheFactory factory, JsonObject config) {
        JsonArray poolLocators = config.getArray("pool-locators");
        if (poolLocators == null) {
            return;
        }
        for (Object o : poolLocators) {
            JsonObject locator = (JsonObject) o;
            factory.addPoolLocator(locator.getString("host"), locator.getInteger("port"));
        }
    }

    /** Registers each {@code pool-servers} entry ({@code host}/{@code port}) with the factory. */
    private static void configurePool(ClientCacheFactory factory, JsonObject config) {
        JsonArray poolServers = config.getArray("pool-servers");
        if (poolServers == null) {
            return;
        }
        for (Object o : poolServers) {
            JsonObject server = (JsonObject) o;
            factory.addPoolServer(server.getString("host"), server.getInteger("port"));
        }
    }

    /**
     * Applies PDX serialization settings from the {@code pdx} JSON object;
     * boolean flags default to {@code true}, and the serializer is instantiated
     * reflectively from {@code pdx-serializer-class}.
     */
    private static void configurePDX(ClientCacheFactory factory, JsonObject config) {
        JsonObject pdxConfig = config.getObject("pdx");
        if (pdxConfig == null) {
            return;
        }
        String diskStoreName = pdxConfig.getString("disk-store-name");
        factory.setPdxDiskStore(diskStoreName);
        boolean ignore = pdxConfig.getBoolean("ignore-unread-fields", true);
        factory.setPdxIgnoreUnreadFields(ignore);
        boolean isPersistent = pdxConfig.getBoolean("persistent", true);
        factory.setPdxPersistent(isPersistent);
        boolean readSerialized = pdxConfig.getBoolean("read-serialized", true);
        factory.setPdxReadSerialized(readSerialized);
        String serializerClassName = pdxConfig.getString("pdx-serializer-class");
        PdxSerializer serializer = InstantiationUtils.instantiate(PdxSerializer.class, serializerClassName);
        factory.setPdxSerializer(serializer);
    }
}
| |
/*
* The MIT License
*
* Copyright 2018 Karus Labs.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.karuslabs.commons.util;
import com.karuslabs.annotations.ValueBased;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.function.*;
import java.util.stream.Stream;
import static com.karuslabs.commons.util.WeakValue.EMPTY;
import static java.util.stream.Stream.ofNullable;
/**
 * An {@code Optional}-like container whose value is held weakly and may be
 * reclaimed by the garbage collector at any time; "absent" therefore means
 * either never-present or already reclaimed.
 *
 * @param <T> the type of the held value
 */
public @ValueBased interface Weak<T> {
    /** Returns a {@code Weak} holding {@code value} via a weak reference. */
    public static <T> Weak<T> of(T value) {
        return new WeakValue<>(value);
    }
    /** Returns the shared always-empty instance. */
    public static <T> Weak<T> empty() {
        return (Weak<T>) EMPTY;
    }
    /** Returns this if a value is present and matches {@code predicate}, else an empty {@code Weak}. */
    public Weak<T> filter(Predicate<? super T> predicate);
    /** Applies {@code mapper} (which itself returns a {@code Weak}) to a present value, else empty. */
    public <U> Weak<U> flatMap(Function<? super T, ? extends Weak<? extends U>> mapper);
    /** Applies {@code mapper} to a present value, wrapping the result weakly, else empty. */
    public <U> Weak<U> map(Function<? super T, ? extends U> mapper);
    /** Returns this if a value is present, else the {@code Weak} produced by {@code other}. */
    public Weak<T> orElse(Supplier<? extends Weak<? extends T>> other);
    /** Returns the value if present, else {@code other}. */
    public T or(T other);
    /** Returns the value if present, else the result of {@code other}. */
    public T or(Supplier<T> other);
    /** Returns the value, or throws {@link NoSuchElementException} if absent/reclaimed. */
    public T orThrow();
    /** Returns the value, or throws the exception supplied by {@code exception}. */
    public <E extends Throwable> T orThrow(Supplier<? extends E> exception) throws E;
    /** Runs {@code action} with the value when present; otherwise does nothing. */
    public void ifPresent(Consumer<? super T> action);
    /** Runs {@code action} with the value when present, else runs {@code otherwise}. */
    public void ifPresent(Consumer<? super T> action, Runnable otherwise);
    /** @return true if the value is still reachable */
    public boolean isPresent();
    /** @return a stream of zero or one elements */
    public Stream<T> stream();
}
/**
 * The sole {@link Weak} implementation: stores the referent in the inherited
 * {@link WeakReference} slot, so {@code get()} turning null means the value was
 * either absent from the start or reclaimed by the garbage collector.
 */
@ValueBased final class WeakValue<T> extends WeakReference<T> implements Weak<T> {

    /** Shared empty instance; its referent is permanently null. */
    static final Weak<?> EMPTY = new WeakValue<>(null);

    WeakValue(T referent) {
        super(referent);
    }

    @Override
    public Weak<T> filter(Predicate<? super T> predicate) {
        T referent = get();
        if (referent == null || !predicate.test(referent)) {
            return (Weak<T>) EMPTY;
        }
        return this;
    }

    @Override
    public <U> Weak<U> flatMap(Function<? super T, ? extends Weak<? extends U>> mapper) {
        T referent = get();
        if (referent == null) {
            return (Weak<U>) EMPTY;
        }
        return (Weak<U>) mapper.apply(referent);
    }

    @Override
    public <U> Weak<U> map(Function<? super T, ? extends U> mapper) {
        T referent = get();
        if (referent == null) {
            return (Weak<U>) EMPTY;
        }
        return new WeakValue<>(mapper.apply(referent));
    }

    @Override
    public Weak<T> orElse(Supplier<? extends Weak<? extends T>> other) {
        if (get() == null) {
            return (Weak<T>) other.get();
        }
        return this;
    }

    @Override
    public T or(T other) {
        T referent = get();
        if (referent == null) {
            return other;
        }
        return referent;
    }

    @Override
    public T or(Supplier<T> other) {
        T referent = get();
        if (referent == null) {
            return other.get();
        }
        return referent;
    }

    @Override
    public T orThrow() {
        T referent = get();
        if (referent == null) {
            throw new NoSuchElementException("Value was reclaimed");
        }
        return referent;
    }

    @Override
    public <E extends Throwable> T orThrow(Supplier<? extends E> exception) throws E {
        T referent = get();
        if (referent == null) {
            throw exception.get();
        }
        return referent;
    }

    @Override
    public void ifPresent(Consumer<? super T> action) {
        T referent = get();
        if (referent != null) {
            action.accept(referent);
        }
    }

    @Override
    public void ifPresent(Consumer<? super T> action, Runnable otherwise) {
        T referent = get();
        if (referent == null) {
            otherwise.run();
        } else {
            action.accept(referent);
        }
    }

    @Override
    public boolean isPresent() {
        return get() != null;
    }

    @Override
    public Stream<T> stream() {
        return ofNullable(get());
    }

    /** Two Weaks are equal when their current referents are equal (both-reclaimed counts as equal). */
    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (other instanceof Weak) {
            return Objects.equals(get(), ((Weak<?>) other).or(null));
        }
        return false;
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(get());
    }

    @Override
    public String toString() {
        T referent = get();
        if (referent == null) {
            return "Weak.empty";
        }
        return String.format("Weak[%s]", referent);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec.net;
import java.io.UnsupportedEncodingException;
import java.util.BitSet;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.StringDecoder;
import org.apache.commons.codec.StringEncoder;
/**
* <p>
* Similar to the Quoted-Printable content-transfer-encoding defined in <a
* href="http://www.ietf.org/rfc/rfc1521.txt">RFC 1521</a> and designed to
* allow text containing mostly ASCII characters to be decipherable on an ASCII
* terminal without decoding.
* </p>
*
* <p>
* <a href="http://www.ietf.org/rfc/rfc1522.txt">RFC 1522</a> describes
* techniques to allow the encoding of non-ASCII text in various portions of a
* RFC 822 [2] message header, in a manner which is unlikely to confuse existing
* message handling software.
* </p>
*
* @see <a href="http://www.ietf.org/rfc/rfc1522.txt">MIME (Multipurpose
* Internet Mail Extensions) Part Two: Message Header Extensions for
* Non-ASCII Text</a>
*
* @author Apache Software Foundation
* @since 1.3
* @version $Id: QCodec.java 582446 2007-10-06 04:11:49 +0000 (Sa, 06 Okt 2007)
* bayard $
*/
public class QCodec extends RFC1522Codec implements StringEncoder,
StringDecoder {
    /**
     * The default charset used for string decoding and encoding.
     */
    private String charset = CharacterEncodingNames.UTF8;
    /**
     * BitSet of printable characters as defined in RFC 1522.
     * Characters in this set pass through Q-encoding unchanged; anything
     * outside it is escaped as =XX by the quoted-printable encoder.
     */
    static final private BitSet PRINTABLE_CHARS = new BitSet(256);
    // Static initializer for printable chars collection.
    // Note: '=', '?' and '_' are deliberately NOT set — they are special in
    // RFC 1522 encoded words and must always be escaped.
    static {
        // alpha characters
        PRINTABLE_CHARS.set(' ');
        PRINTABLE_CHARS.set('!');
        PRINTABLE_CHARS.set('"');
        PRINTABLE_CHARS.set('#');
        PRINTABLE_CHARS.set('$');
        PRINTABLE_CHARS.set('%');
        PRINTABLE_CHARS.set('&');
        PRINTABLE_CHARS.set('\'');
        PRINTABLE_CHARS.set('(');
        PRINTABLE_CHARS.set(')');
        PRINTABLE_CHARS.set('*');
        PRINTABLE_CHARS.set('+');
        PRINTABLE_CHARS.set(',');
        PRINTABLE_CHARS.set('-');
        PRINTABLE_CHARS.set('.');
        PRINTABLE_CHARS.set('/');
        for (int i = '0'; i <= '9'; i++) {
            PRINTABLE_CHARS.set(i);
        }
        PRINTABLE_CHARS.set(':');
        PRINTABLE_CHARS.set(';');
        PRINTABLE_CHARS.set('<');
        PRINTABLE_CHARS.set('>');
        PRINTABLE_CHARS.set('@');
        for (int i = 'A'; i <= 'Z'; i++) {
            PRINTABLE_CHARS.set(i);
        }
        PRINTABLE_CHARS.set('[');
        PRINTABLE_CHARS.set('\\');
        PRINTABLE_CHARS.set(']');
        PRINTABLE_CHARS.set('^');
        PRINTABLE_CHARS.set('`');
        for (int i = 'a'; i <= 'z'; i++) {
            PRINTABLE_CHARS.set(i);
        }
        PRINTABLE_CHARS.set('{');
        PRINTABLE_CHARS.set('|');
        PRINTABLE_CHARS.set('}');
        PRINTABLE_CHARS.set('~');
    }
    // ASCII space — optionally replaced by underscore in encoded output.
    static final private byte BLANK = 32;
    // ASCII underscore — stands in for space per RFC 1522 Q-encoding.
    static final private byte UNDERSCORE = 95;
    // When true, encoded spaces are rewritten as underscores.
    private boolean encodeBlanks = false;
    /**
     * Default constructor; uses the UTF-8 default charset set at field
     * initialization.
     */
    public QCodec() {
        super();
    }
    /**
     * Constructor which allows for the selection of a default charset.
     *
     * @param charset
     *            the default string charset to use.
     *
     * @see <a
     *      href="http://java.sun.com/j2se/1.3/docs/api/java/lang/package-summary.html#charenc">JRE
     *      character encoding names</a>
     */
    public QCodec(final String charset) {
        super();
        this.charset = charset;
    }
    /**
     * Returns the RFC 1522 encoding identifier used in encoded words.
     *
     * @return the constant token {@code "Q"}
     */
    protected String getEncoding() {
        return "Q";
    }
    /**
     * Returns the default charset used for string decoding and encoding.
     *
     * @return the default string charset name.
     */
    public String getDefaultCharset() {
        return this.charset;
    }
    /**
     * Tests if optional transformation of SPACE characters is to be used.
     *
     * @return <code>true</code> if SPACE characters are to be transformed,
     *         <code>false</code> otherwise
     */
    public boolean isEncodeBlanks() {
        return this.encodeBlanks;
    }
    /**
     * Defines whether optional transformation of SPACE characters is to be used.
     *
     * @param b
     *            <code>true</code> if SPACE characters are to be transformed,
     *            <code>false</code> otherwise
     */
    public void setEncodeBlanks(boolean b) {
        this.encodeBlanks = b;
    }
public byte[] doEncoding(byte[] bytes) {
if (bytes == null) {
return null;
}
byte[] data = QuotedPrintableCodec.encodeQuotedPrintable(
PRINTABLE_CHARS, bytes);
if (this.encodeBlanks) {
for (int i = 0; i < data.length; i++) {
if (data[i] == BLANK) {
data[i] = UNDERSCORE;
}
}
}
return data;
}
public byte[] doDecoding(byte[] bytes) throws DecoderException {
if (bytes == null) {
return null;
}
boolean hasUnderscores = false;
for (int i = 0; i < bytes.length; i++) {
if (bytes[i] == UNDERSCORE) {
hasUnderscores = true;
break;
}
}
if (hasUnderscores) {
byte[] tmp = new byte[bytes.length];
for (int i = 0; i < bytes.length; i++) {
byte b = bytes[i];
if (b != UNDERSCORE) {
tmp[i] = b;
} else {
tmp[i] = BLANK;
}
}
return QuotedPrintableCodec.decodeQuotedPrintable(tmp);
}
return QuotedPrintableCodec.decodeQuotedPrintable(bytes);
}
    /**
     * Encodes a string into its quoted-printable form using the specified
     * charset. Unsafe characters are escaped.
     *
     * @param pString
     *            string to convert to quoted-printable form
     * @param charset
     *            the charset for pString
     * @return quoted-printable string, or null when pString is null
     *
     * @throws EncoderException
     *             thrown if a failure condition is encountered during the
     *             encoding process.
     */
    public String encode(final String pString, final String charset)
            throws EncoderException {
        if (pString == null) {
            return null;
        }
        try {
            return encodeText(pString, charset);
        } catch (UnsupportedEncodingException e) {
            // NOTE(review): the cause is dropped; if this EncoderException version
            // accepts a Throwable, chain `e` instead of only its message — confirm.
            throw new EncoderException(e.getMessage());
        }
    }
/**
 * Encodes a string into its quoted-printable form using the codec's
 * default charset; unsafe characters are escaped.
 *
 * @param pString string to convert to quoted-printable form
 * @return quoted-printable string, or null if pString is null
 * @throws EncoderException if a failure condition is encountered during
 *         the encoding process
 */
public String encode(String pString) throws EncoderException {
    // Delegate to the charset-aware overload with the default charset.
    return (pString == null) ? null : encode(pString, getDefaultCharset());
}
/**
 * Decodes a quoted-printable string into its original form; escaped
 * characters are converted back to their original representation.
 *
 * @param pString quoted-printable string to convert, or null
 * @return the original string, or null if pString is null
 * @throws DecoderException if a failure condition is encountered during
 *         the decode process
 */
public String decode(String pString) throws DecoderException {
    if (pString == null) {
        return null;
    }
    try {
        return decodeText(pString);
    } catch (UnsupportedEncodingException e) {
        // Wrap as the codec's checked decode exception (message only; see
        // note on encode(String, String) about cause-chaining).
        throw new DecoderException(e.getMessage());
    }
}
/**
 * Encodes an object into its quoted-printable form using the default
 * charset. Only {@link String} instances are supported.
 *
 * @param pObject object to convert, or null
 * @return the encoded object, or null if pObject is null
 * @throws EncoderException if pObject is not a String, or if encoding
 *         fails
 */
public Object encode(Object pObject) throws EncoderException {
    if (pObject == null) {
        return null;
    }
    if (pObject instanceof String) {
        return encode((String) pObject);
    }
    throw new EncoderException("Objects of type "
            + pObject.getClass().getName()
            + " cannot be encoded using Q codec");
}
/**
 * Decodes a quoted-printable object into its original form. Only
 * {@link String} instances are supported.
 *
 * @param pObject quoted-printable object to convert, or null
 * @return the original object, or null if pObject is null
 * @throws DecoderException if pObject is not a String, or if decoding
 *         fails
 */
public Object decode(Object pObject) throws DecoderException {
    if (pObject == null) {
        return null;
    }
    if (pObject instanceof String) {
        return decode((String) pObject);
    }
    throw new DecoderException("Objects of type "
            + pObject.getClass().getName()
            + " cannot be decoded using Q codec");
}
}
| |
/*
* Copyright 2015-2021 OpenEstate.org.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openestate.io.examples;
import com.thedeanda.lorem.Lorem;
import com.thedeanda.lorem.LoremIpsum;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Calendar;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.openestate.io.kyero.KyeroDocument;
import org.openestate.io.kyero.KyeroUtils;
import org.openestate.io.kyero.KyeroVersion;
import org.openestate.io.kyero.xml.CurrencyType;
import org.openestate.io.kyero.xml.EnergyRatingMarkType;
import org.openestate.io.kyero.xml.ImagesType.Image;
import org.openestate.io.kyero.xml.KyeroType;
import org.openestate.io.kyero.xml.ObjectFactory;
import org.openestate.io.kyero.xml.PriceFreqType;
import org.openestate.io.kyero.xml.PropertyType;
import org.openestate.io.kyero.xml.Root;
import org.openestate.io.kyero.xml.Root.Agent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Example for writing Kyero XML feeds.
* <p>
* This example illustrates the programmatic creation of Kyero documents, how they are written into XML and how they are
* downgraded to earlier versions.
*
* @author Andreas Rudolph
* @since 1.0
*/
public class KyeroWritingExample {
    @SuppressWarnings("unused")
    private final static Logger LOGGER = LoggerFactory.getLogger(KyeroWritingExample.class);
    // Shared JAXB object factory for all created Kyero XML elements.
    private final static ObjectFactory FACTORY = KyeroUtils.getFactory();
    // Lorem-ipsum generator used to fill every field with random example data.
    private final static Lorem RANDOMIZER = new LoremIpsum();
    // Whether XML output is indented for readability.
    private final static boolean PRETTY_PRINT = true;
    /**
     * Start the example application.
     *
     * @param args command line arguments
     */
    @SuppressWarnings("Duplicates")
    public static void main(String[] args) {
        // create a Root object with some example data
        // this object corresponds to the <root> element in XML
        Root root = FACTORY.createRoot();
        root.setKyero(createKyero());
        root.setAgent(createAgent());
        // attach between 3 and 9 randomly generated properties
        int propertyCount = RandomUtils.nextInt(3, 10);
        for (int i = 0; i < propertyCount; i++) {
            root.getProperty().add(createProperty());
        }
        // convert the Root object into an XML document
        KyeroDocument doc = null;
        try {
            doc = KyeroDocument.newDocument(root);
        } catch (Exception ex) {
            LOGGER.error("Can't create XML document!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
        // write XML document into a java.io.File
        try {
            write(doc, File.createTempFile("output-", ".xml"));
        } catch (IOException ex) {
            LOGGER.error("Can't create temporary file!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
        // write XML document into a java.io.OutputStream
        write(doc, NullOutputStream.NULL_OUTPUT_STREAM);
        // write XML document into a java.io.Writer
        write(doc, new NullWriter());
        // write XML document into a string and send it to the console
        writeToConsole(doc);
        // downgrade XML document to an earlier version
        // and write it to the console
        doc.downgrade(KyeroVersion.V2_1);
        writeToConsole(doc);
    }
    /**
     * Create an {@link Agent} object with some example data.
     *
     * @return created example object
     */
    private static Agent createAgent() {
        Agent agent = FACTORY.createRootAgent();
        agent.setAddr1(RANDOMIZER.getWords(1, 5));
        agent.setAddr2(RANDOMIZER.getWords(1, 5));
        agent.setCountry(RANDOMIZER.getCountry());
        agent.setEmail(RANDOMIZER.getEmail());
        agent.setFax(RANDOMIZER.getPhone());
        agent.setId(BigInteger.valueOf(RandomUtils.nextLong(1, 10000)));
        agent.setMob(RANDOMIZER.getPhone());
        agent.setName(RANDOMIZER.getName());
        agent.setPostcode(RANDOMIZER.getZipCode());
        agent.setRegion(RANDOMIZER.getStateFull());
        agent.setTel(RANDOMIZER.getPhone());
        agent.setTown(RANDOMIZER.getCity());
        return agent;
    }
    /**
     * Create a {@link KyeroType} object with some example data.
     *
     * @return created example object
     */
    private static KyeroType createKyero() {
        KyeroType kyero = FACTORY.createKyeroType();
        // stamp the feed with the current time and the library's feed version
        kyero.setFeedGenerated(Calendar.getInstance());
        kyero.setFeedVersion(KyeroUtils.VERSION.toXmlVersion());
        return kyero;
    }
    /**
     * Create a {@link PropertyType} object with some example data.
     *
     * @return created example object
     */
    private static PropertyType createProperty() {
        // create an example real estate
        PropertyType obj = FACTORY.createPropertyType();
        obj.setBaths(BigInteger.valueOf(RandomUtils.nextLong(0, 5)));
        obj.setBeds(BigInteger.valueOf(RandomUtils.nextLong(0, 5)));
        obj.setCountry(RANDOMIZER.getCountry());
        obj.setCurrency(randomValue(CurrencyType.values()));
        obj.setDate(Calendar.getInstance());
        obj.setId(RandomStringUtils.randomAlphanumeric(5));
        obj.setLeasehold(RandomUtils.nextBoolean());
        obj.setLocationDetail(RANDOMIZER.getWords(2, 10));
        obj.setNewBuild(RandomUtils.nextBoolean());
        obj.setNotes(RANDOMIZER.getWords(10, 50));
        obj.setPartOwnership(RandomUtils.nextBoolean());
        obj.setPool(RandomUtils.nextBoolean());
        obj.setPrice(RandomUtils.nextLong(10000, 9999999));
        obj.setPriceFreq(randomValue(PriceFreqType.values()));
        obj.setProvince(RANDOMIZER.getStateFull());
        obj.setRef(RandomStringUtils.randomAlphanumeric(5));
        obj.setTown(RANDOMIZER.getCity());
        obj.setType(RANDOMIZER.getWords(1));
        // localized descriptions, one per language supported by the format
        obj.setDesc(FACTORY.createLangType());
        obj.getDesc().setCa(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setDa(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setDe(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setEn(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setEs(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setFi(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setFr(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setIt(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setNl(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setNo(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setPt(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setRu(RANDOMIZER.getWords(10, 50));
        obj.getDesc().setSv(RANDOMIZER.getWords(10, 50));
        obj.setEnergyRating(FACTORY.createEnergyRatingType());
        obj.getEnergyRating().setConsumption(randomValue(EnergyRatingMarkType.values()));
        obj.getEnergyRating().setEmissions(randomValue(EnergyRatingMarkType.values()));
        obj.setFeatures(FACTORY.createFeaturesType());
        int featureCount = RandomUtils.nextInt(1, 10);
        for (int i = 0; i < featureCount; i++) {
            obj.getFeatures().getFeature().add(RANDOMIZER.getWords(1, 5));
        }
        obj.setImages(FACTORY.createImagesType());
        int imageCount = RandomUtils.nextInt(1, 10);
        for (int i = 0; i < imageCount; i++) {
            obj.getImages().getImage().add(createPropertyImage(i));
        }
        obj.setLocation(FACTORY.createGpsLocationType());
        obj.getLocation().setLatitude(BigDecimal.valueOf(RandomUtils.nextDouble(0, 90)));
        obj.getLocation().setLongitude(BigDecimal.valueOf(RandomUtils.nextDouble(0, 90)));
        obj.setSurfaceArea(FACTORY.createSurfaceType());
        obj.getSurfaceArea().setBuilt(BigInteger.valueOf(RandomUtils.nextLong(50, 250)));
        obj.getSurfaceArea().setPlot(BigInteger.valueOf(RandomUtils.nextLong(100, 1500)));
        // localized detail-page URLs; the URIs are hard-coded and always
        // valid, so the URISyntaxException can never actually occur here
        obj.setUrl(FACTORY.createUrlType());
        //noinspection CatchMayIgnoreException
        try {
            obj.getUrl().setCa(new URI("https://www.example.com/catalan/" + obj.getId() + ".html"));
            obj.getUrl().setDa(new URI("https://www.example.com/danish/" + obj.getId() + ".html"));
            obj.getUrl().setDe(new URI("https://www.example.com/german/" + obj.getId() + ".html"));
            obj.getUrl().setEn(new URI("https://www.example.com/english/" + obj.getId() + ".html"));
            obj.getUrl().setEs(new URI("https://www.example.com/spanish/" + obj.getId() + ".html"));
            obj.getUrl().setFi(new URI("https://www.example.com/finnish/" + obj.getId() + ".html"));
            obj.getUrl().setFr(new URI("https://www.example.com/french/" + obj.getId() + ".html"));
            obj.getUrl().setIt(new URI("https://www.example.com/italian/" + obj.getId() + ".html"));
            obj.getUrl().setNl(new URI("https://www.example.com/dutch/" + obj.getId() + ".html"));
            obj.getUrl().setNo(new URI("https://www.example.com/norwegian/" + obj.getId() + ".html"));
            obj.getUrl().setPt(new URI("https://www.example.com/portuguese/" + obj.getId() + ".html"));
            obj.getUrl().setRu(new URI("https://www.example.com/russian/" + obj.getId() + ".html"));
            obj.getUrl().setSv(new URI("https://www.example.com/swedish/" + obj.getId() + ".html"));
        } catch (URISyntaxException ex) {
        }
        return obj;
    }
    /**
     * Create an {@link Image} object with some example data.
     *
     * @param pos index position within the property images
     * @return created example object
     */
    private static Image createPropertyImage(int pos) {
        // create an example image
        Image img = FACTORY.createImagesTypeImage();
        img.setId(pos);
        // hard-coded URI is always valid, so the exception cannot occur
        //noinspection CatchMayIgnoreException
        try {
            img.setUrl(new URI("https://www.example.com/image-" + pos + ".jpg"));
        } catch (URISyntaxException ex) {
        }
        return img;
    }
    /**
     * Get a random value from an array.
     *
     * @param values array containing values to select from
     * @param <T> type of contained values
     * @return randomly selected value, or null for a null/empty array
     */
    private static <T> T randomValue(T[] values) {
        return (values != null && values.length > 0) ?
                values[RandomUtils.nextInt(0, values.length)] :
                null;
    }
    /**
     * Write a {@link KyeroDocument} into a {@link File}.
     *
     * @param doc the document to write
     * @param file the file, where the document is written to
     */
    @SuppressWarnings("Duplicates")
    private static void write(KyeroDocument doc, File file) {
        LOGGER.info("writing document with version " + doc.getDocumentVersion());
        try {
            doc.toXml(file, PRETTY_PRINT);
            LOGGER.info("> written to: " + file.getAbsolutePath());
        } catch (Exception ex) {
            LOGGER.error("Can't write document into a file!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }
    /**
     * Write a {@link KyeroDocument} into an {@link OutputStream}.
     *
     * @param doc the document to write
     * @param output the stream, where the document is written to
     */
    @SuppressWarnings({"Duplicates", "SameParameterValue"})
    private static void write(KyeroDocument doc, OutputStream output) {
        LOGGER.info("writing document with version " + doc.getDocumentVersion());
        try {
            doc.toXml(output, PRETTY_PRINT);
            LOGGER.info("> written to a java.io.OutputStream");
        } catch (Exception ex) {
            LOGGER.error("Can't write document into an OutputStream!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }
    /**
     * Write a {@link KyeroDocument} into a {@link Writer}.
     *
     * @param doc the document to write
     * @param output the writer, where the document is written to
     */
    @SuppressWarnings("Duplicates")
    private static void write(KyeroDocument doc, Writer output) {
        LOGGER.info("writing document with version " + doc.getDocumentVersion());
        try {
            doc.toXml(output, PRETTY_PRINT);
            LOGGER.info("> written to a java.io.Writer");
        } catch (Exception ex) {
            LOGGER.error("Can't write document into a Writer!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }
    /**
     * Write a {@link KyeroDocument} into a {@link String} and print the
     * results to the console.
     *
     * @param doc the document to write
     */
    @SuppressWarnings("Duplicates")
    private static void writeToConsole(KyeroDocument doc) {
        LOGGER.info("writing document with version " + doc.getDocumentVersion());
        try {
            String xml = doc.toXmlString(PRETTY_PRINT);
            // separator line makes the XML dump easier to spot in the log
            LOGGER.info(StringUtils.repeat("-", 50)
                    + System.lineSeparator() + xml);
        } catch (Exception ex) {
            LOGGER.error("Can't write document into a string!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }
}
| |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.schedulers;
import static io.reactivex.Flowable.*;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.*;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import io.reactivex.*;
import io.reactivex.Scheduler.Worker;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.Function;
import io.reactivex.internal.schedulers.SchedulerWhen.*;
import io.reactivex.observers.DisposableCompletableObserver;
import io.reactivex.processors.PublishProcessor;
import io.reactivex.schedulers.*;
import io.reactivex.subscribers.TestSubscriber;
public class SchedulerWhenTest {
    // Verifies that a SchedulerWhen limiting concurrency to 2 workers lets
    // two timers fire per virtual second (5 async tasks of 1s each).
    @Test
    public void testAsyncMaxConcurrent() {
        TestScheduler tSched = new TestScheduler();
        SchedulerWhen sched = maxConcurrentScheduler(tSched);
        TestSubscriber<Long> tSub = TestSubscriber.create();
        asyncWork(sched).subscribe(tSub);
        tSub.assertValueCount(0);
        tSched.advanceTimeBy(0, SECONDS);
        tSub.assertValueCount(0);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(2);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(4);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(5);
        tSub.assertComplete();
        sched.dispose();
    }
    // Verifies that the throttling scheduler (each worker's subscription
    // delayed by 1s) produces one value every other virtual second.
    @Test
    public void testAsyncDelaySubscription() {
        final TestScheduler tSched = new TestScheduler();
        SchedulerWhen sched = throttleScheduler(tSched);
        TestSubscriber<Long> tSub = TestSubscriber.create();
        asyncWork(sched).subscribe(tSub);
        tSub.assertValueCount(0);
        tSched.advanceTimeBy(0, SECONDS);
        tSub.assertValueCount(0);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(1);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(1);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(2);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(2);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(3);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(3);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(4);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(4);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(5);
        tSub.assertComplete();
        sched.dispose();
    }
    @Test
    public void testSyncMaxConcurrent() {
        TestScheduler tSched = new TestScheduler();
        SchedulerWhen sched = maxConcurrentScheduler(tSched);
        TestSubscriber<Long> tSub = TestSubscriber.create();
        syncWork(sched).subscribe(tSub);
        tSub.assertValueCount(0);
        tSched.advanceTimeBy(0, SECONDS);
        // since all the work is synchronous nothing is blocked and its all done
        tSub.assertValueCount(5);
        tSub.assertComplete();
        sched.dispose();
    }
    // Synchronous work still pays the 1s-per-worker subscription delay, so
    // one value arrives per advanced second.
    @Test
    public void testSyncDelaySubscription() {
        final TestScheduler tSched = new TestScheduler();
        SchedulerWhen sched = throttleScheduler(tSched);
        TestSubscriber<Long> tSub = TestSubscriber.create();
        syncWork(sched).subscribe(tSub);
        tSub.assertValueCount(0);
        tSched.advanceTimeBy(0, SECONDS);
        tSub.assertValueCount(1);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(2);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(3);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(4);
        tSched.advanceTimeBy(1, SECONDS);
        tSub.assertValueCount(5);
        tSub.assertComplete();
        sched.dispose();
    }
    // 5 tasks, each a 1-second timer scheduled on the given scheduler.
    private Flowable<Long> asyncWork(final Scheduler sched) {
        return Flowable.range(1, 5).flatMap(new Function<Integer, Flowable<Long>>() {
            @Override
            public Flowable<Long> apply(Integer t) {
                return Flowable.timer(1, SECONDS, sched);
            }
        });
    }
    // 5 tasks that each complete synchronously once subscribed on the scheduler.
    private Flowable<Long> syncWork(final Scheduler sched) {
        return Flowable.range(1, 5).flatMap(new Function<Integer, Flowable<Long>>() {
            @Override
            public Flowable<Long> apply(Integer t) {
                return Flowable.defer(new Callable<Flowable<Long>>() {
                    @Override
                    public Flowable<Long> call() {
                        // NOTE(review): lowercase 'l' literal — 0L would be
                        // clearer, but the value is identical.
                        return Flowable.just(0l);
                    }
                }).subscribeOn(sched);
            }
        });
    }
    // Builds a SchedulerWhen that serializes each worker's actions and runs
    // at most 2 workers concurrently.
    private SchedulerWhen maxConcurrentScheduler(TestScheduler tSched) {
        SchedulerWhen sched = new SchedulerWhen(new Function<Flowable<Flowable<Completable>>, Completable>() {
            @Override
            public Completable apply(Flowable<Flowable<Completable>> workerActions) {
                Flowable<Completable> workers = workerActions.map(new Function<Flowable<Completable>, Completable>() {
                    @Override
                    public Completable apply(Flowable<Completable> actions) {
                        return Completable.concat(actions);
                    }
                });
                return Completable.merge(workers, 2);
            }
        }, tSched);
        return sched;
    }
    // Builds a SchedulerWhen that runs workers one at a time, delaying each
    // worker by 1 second on the test scheduler.
    private SchedulerWhen throttleScheduler(final TestScheduler tSched) {
        SchedulerWhen sched = new SchedulerWhen(new Function<Flowable<Flowable<Completable>>, Completable>() {
            @Override
            public Completable apply(Flowable<Flowable<Completable>> workerActions) {
                Flowable<Completable> workers = workerActions.map(new Function<Flowable<Completable>, Completable>() {
                    @Override
                    public Completable apply(Flowable<Completable> actions) {
                        return Completable.concat(actions);
                    }
                });
                return Completable.concat(workers.map(new Function<Completable, Completable>() {
                    @Override
                    public Completable apply(Completable worker) {
                        return worker.delay(1, SECONDS, tSched);
                    }
                }));
            }
        }, tSched);
        return sched;
    }
    // Stress test: 1000 subscriptions through a concurrency-limited
    // scheduler must terminate within the timeout.
    @Test(timeout = 1000)
    public void testRaceConditions() {
        Scheduler comp = Schedulers.computation();
        Scheduler limited = comp.when(new Function<Flowable<Flowable<Completable>>, Completable>() {
            @Override
            public Completable apply(Flowable<Flowable<Completable>> t) {
                return Completable.merge(Flowable.merge(t, 10));
            }
        });
        merge(just(just(1).subscribeOn(limited).observeOn(comp)).repeat(1000)).blockingSubscribe();
    }
    // The SUBSCRIBED sentinel ignores dispose() and never reports disposed.
    @Test
    public void subscribedDisposable() {
        SchedulerWhen.SUBSCRIBED.dispose();
        assertFalse(SchedulerWhen.SUBSCRIBED.isDisposed());
    }
    @Test(expected = TestException.class)
    public void combineCrashInConstructor() {
        new SchedulerWhen(new Function<Flowable<Flowable<Completable>>, Completable>() {
            @Override
            public Completable apply(Flowable<Flowable<Completable>> v)
                    throws Exception {
                throw new TestException();
            }
        }, Schedulers.single());
    }
    @Test
    public void disposed() {
        SchedulerWhen sw = new SchedulerWhen(new Function<Flowable<Flowable<Completable>>, Completable>() {
            @Override
            public Completable apply(Flowable<Flowable<Completable>> v)
                    throws Exception {
                return Completable.never();
            }
        }, Schedulers.single());
        assertFalse(sw.isDisposed());
        sw.dispose();
        assertTrue(sw.isDisposed());
    }
    // Races two concurrent dispose() calls against each other; the action
    // must always end up disposed regardless of interleaving.
    @Test
    public void scheduledActiondisposedSetRace() {
        for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
            final ScheduledAction sa = new ScheduledAction() {
                private static final long serialVersionUID = -672980251643733156L;
                @Override
                protected Disposable callActual(Worker actualWorker,
                        CompletableObserver actionCompletable) {
                    return Disposables.empty();
                }
            };
            assertFalse(sa.isDisposed());
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    sa.dispose();
                }
            };
            TestHelper.race(r1, r1);
            assertTrue(sa.isDisposed());
        }
    }
    // Walks a ScheduledAction through its state machine: disposed actions
    // and already-scheduled actions must not run; dispose inside call is ok.
    @Test
    public void scheduledActionStates() {
        final AtomicInteger count = new AtomicInteger();
        ScheduledAction sa = new ScheduledAction() {
            private static final long serialVersionUID = -672980251643733156L;
            @Override
            protected Disposable callActual(Worker actualWorker,
                    CompletableObserver actionCompletable) {
                count.incrementAndGet();
                return Disposables.empty();
            }
        };
        assertFalse(sa.isDisposed());
        sa.dispose();
        assertTrue(sa.isDisposed());
        sa.dispose();
        assertTrue(sa.isDisposed());
        // should not run when disposed
        sa.call(Schedulers.single().createWorker(), null);
        assertEquals(0, count.get());
        // should not run when already scheduled
        sa.set(Disposables.empty());
        sa.call(Schedulers.single().createWorker(), null);
        assertEquals(0, count.get());
        // disposed while in call
        sa = new ScheduledAction() {
            private static final long serialVersionUID = -672980251643733156L;
            @Override
            protected Disposable callActual(Worker actualWorker,
                    CompletableObserver actionCompletable) {
                count.incrementAndGet();
                dispose();
                return Disposables.empty();
            }
        };
        sa.call(Schedulers.single().createWorker(), null);
        assertEquals(1, count.get());
    }
    // Even if the wrapped Runnable throws, onComplete must already have been
    // delivered to the downstream observer before the exception escapes.
    @Test
    public void onCompleteActionRunCrash() {
        final AtomicInteger count = new AtomicInteger();
        OnCompletedAction a = new OnCompletedAction(new Runnable() {
            @Override
            public void run() {
                throw new TestException();
            }
        }, new DisposableCompletableObserver() {
            @Override
            public void onComplete() {
                count.incrementAndGet();
            }
            @Override
            public void onError(Throwable e) {
                count.decrementAndGet();
                e.printStackTrace();
            }
        });
        try {
            a.run();
            fail("Should have thrown");
        } catch (TestException expected) {
        }
        assertEquals(1, count.get());
    }
    @Test
    public void queueWorkerDispose() {
        QueueWorker qw = new QueueWorker(PublishProcessor.<ScheduledAction>create(), Schedulers.single().createWorker());
        assertFalse(qw.isDisposed());
        qw.dispose();
        assertTrue(qw.isDisposed());
        // second dispose must be idempotent
        qw.dispose();
        assertTrue(qw.isDisposed());
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.sns.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Input for SetEndpointAttributes action.
* </p>
*/
public class SetEndpointAttributesRequest extends AmazonWebServiceRequest
implements Serializable, Cloneable {
/**
 * <p>
 * EndpointArn used for SetEndpointAttributes action.
 * </p>
 */
private String endpointArn;
/**
 * <p>
 * A map of the endpoint attributes. Attributes in this map include the
 * following:
 * </p>
 * <ul>
 * <li><code>CustomUserData</code> -- arbitrary user data to associate with
 * the endpoint. Amazon SNS does not use this data. The data must be in
 * UTF-8 format and less than 2KB.</li>
 * <li><code>Enabled</code> -- flag that enables/disables delivery to the
 * endpoint. Amazon SNS will set this to false when a notification service
 * indicates to Amazon SNS that the endpoint is invalid. Users can set it
 * back to true, typically after updating Token.</li>
 * <li><code>Token</code> -- device token, also referred to as a
 * registration id, for an app and mobile device. This is returned from the
 * notification service when an app and mobile device are registered with
 * the notification service.</li>
 * </ul>
 */
// Lazily initialized by getAttributes()/addAttributesEntry(); may be null.
private com.amazonaws.internal.SdkInternalMap<String, String> attributes;
/**
 * Sets the EndpointArn used for the SetEndpointAttributes action.
 *
 * @param endpointArn EndpointArn used for SetEndpointAttributes action.
 */
public void setEndpointArn(String endpointArn) {
    this.endpointArn = endpointArn;
}
/**
 * Returns the EndpointArn used for the SetEndpointAttributes action.
 *
 * @return EndpointArn used for SetEndpointAttributes action.
 */
public String getEndpointArn() {
    return endpointArn;
}
/**
 * Fluent setter for the EndpointArn used for the SetEndpointAttributes
 * action.
 *
 * @param endpointArn EndpointArn used for SetEndpointAttributes action.
 * @return this request, so that method calls can be chained together
 */
public SetEndpointAttributesRequest withEndpointArn(String endpointArn) {
    // Delegate to the plain setter so both paths share one assignment.
    setEndpointArn(endpointArn);
    return this;
}
/**
 * Returns the map of endpoint attributes. Recognized attribute names are
 * <code>CustomUserData</code> (arbitrary user data, UTF-8, under 2KB),
 * <code>Enabled</code> (enables/disables delivery to the endpoint) and
 * <code>Token</code> (device token / registration id from the
 * notification service).
 *
 * @return the attributes map, never null
 */
public java.util.Map<String, String> getAttributes() {
    // Lazily create the backing map so callers never observe null.
    if (attributes == null) {
        attributes = new com.amazonaws.internal.SdkInternalMap<String, String>();
    }
    return attributes;
}
/**
 * Replaces the map of endpoint attributes. Recognized attribute names are
 * <code>CustomUserData</code>, <code>Enabled</code> and
 * <code>Token</code>; see {@link #getAttributes()} for their meaning.
 *
 * @param attributes the new attributes map, or null to clear
 */
public void setAttributes(java.util.Map<String, String> attributes) {
    if (attributes == null) {
        this.attributes = null;
    } else {
        // Defensive copy into the SDK's internal map type.
        this.attributes = new com.amazonaws.internal.SdkInternalMap<String, String>(
                attributes);
    }
}
/**
 * Fluent setter for the map of endpoint attributes; see
 * {@link #getAttributes()} for the recognized attribute names.
 *
 * @param attributes the new attributes map, or null to clear
 * @return this request, so that method calls can be chained together
 */
public SetEndpointAttributesRequest withAttributes(
        java.util.Map<String, String> attributes) {
    // Reuse the plain setter's defensive-copy behavior.
    setAttributes(attributes);
    return this;
}
/**
 * Adds a single entry to the Attributes map, creating the map lazily on
 * first use.
 *
 * @param key attribute name; must not already be present in the map
 * @param value attribute value to associate with {@code key}
 * @return this request, so that method calls can be chained together
 * @throws IllegalArgumentException if {@code key} was already added
 */
public SetEndpointAttributesRequest addAttributesEntry(String key,
        String value) {
    if (null == this.attributes) {
        this.attributes = new com.amazonaws.internal.SdkInternalMap<String, String>();
    }
    if (this.attributes.containsKey(key)) {
        // key is already a String; the original redundantly called key.toString()
        throw new IllegalArgumentException("Duplicated keys (" + key
                + ") are provided.");
    }
    this.attributes.put(key, value);
    return this;
}
/**
 * Removes all the entries added into Attributes. <p> Returns a reference
 * to this object so that method calls can be chained together.
 */
public SetEndpointAttributesRequest clearAttributesEntries() {
    // Dropping the reference (rather than clearing the map) lets the old
    // map be garbage collected and matches the lazy-init in addAttributesEntry.
    attributes = null;
    return this;
}
/**
 * Returns a string representation of this object; useful for testing and
 * debugging.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("{");
    // Track the separator explicitly: the original emitted a trailing comma
    // ("{EndpointArn: x,}") whenever Attributes was null.
    String separator = "";
    if (getEndpointArn() != null) {
        sb.append("EndpointArn: ").append(getEndpointArn());
        separator = ",";
    }
    if (getAttributes() != null) {
        sb.append(separator).append("Attributes: ").append(getAttributes());
    }
    sb.append("}");
    return sb.toString();
}
@Override
public boolean equals(Object obj) {
    // Identity fast-path, then type check (instanceof is null-safe).
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof SetEndpointAttributesRequest)) {
        return false;
    }
    SetEndpointAttributesRequest that = (SetEndpointAttributesRequest) obj;
    // Null-safe field comparisons, equivalent to the xor/equals pattern.
    Object myArn = getEndpointArn();
    Object theirArn = that.getEndpointArn();
    if (myArn == null ? theirArn != null : !myArn.equals(theirArn)) {
        return false;
    }
    Object myAttrs = getAttributes();
    Object theirAttrs = that.getAttributes();
    return myAttrs == null ? theirAttrs == null : myAttrs.equals(theirAttrs);
}
@Override
public int hashCode() {
    // Standard 31-based accumulation over both fields; null hashes as 0.
    // Must stay consistent with equals(): same fields, same order.
    final int prime = 31;
    int result = 1;
    Object arn = getEndpointArn();
    Object attrs = getAttributes();
    result = prime * result + (arn == null ? 0 : arn.hashCode());
    result = prime * result + (attrs == null ? 0 : attrs.hashCode());
    return result;
}
// Covariant override narrowing the return type for caller convenience.
// NOTE(review): copy depth depends entirely on the superclass clone()
// implementation, which is not visible here — confirm if a deep copy of
// the attributes map is required.
@Override
public SetEndpointAttributesRequest clone() {
    return (SetEndpointAttributesRequest) super.clone();
}
}
| |
package org.radargun.stages.cache.stresstest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.radargun.DistStageAck;
import org.radargun.config.Init;
import org.radargun.config.Property;
import org.radargun.config.SizeConverter;
import org.radargun.config.Stage;
import org.radargun.config.TimeConverter;
import org.radargun.reporting.Report;
import org.radargun.stages.AbstractDistStage;
import org.radargun.stages.DefaultDistStageAck;
import org.radargun.stages.cache.generators.ByteArrayValueGenerator;
import org.radargun.stages.cache.generators.KeyGenerator;
import org.radargun.stages.cache.generators.StringKeyGenerator;
import org.radargun.stages.cache.generators.ValueGenerator;
import org.radargun.stages.cache.generators.WrappedArrayValueGenerator;
import org.radargun.stages.helpers.BucketPolicy;
import org.radargun.stats.*;
import org.radargun.stats.representation.Histogram;
import org.radargun.traits.BasicOperations;
import org.radargun.traits.BulkOperations;
import org.radargun.traits.ConditionalOperations;
import org.radargun.traits.InjectTrait;
import org.radargun.traits.Transactional;
import org.radargun.utils.Fuzzy;
import org.radargun.utils.Utils;
/**
* Simulates the work with a distributed web sessions.
*
* @author Mircea Markus <Mircea.Markus@jboss.com>
* @author Radim Vansa <rvansa@redhat.com>
*/
@Stage(doc = "Benchmark where several client threads access cache limited by time or number of requests.",
deprecatedName = "WebSessionBenchmark")
public class StressTestStage extends AbstractDistStage {

    @Property(doc = "Name of the test as used for reporting. Default is StressTest.")
    protected String testName = "StressTest";

    @Property(doc = "Number of operations after which a log entry should be written. Default is 10000.")
    protected int logPeriod = 10000;

    @Property(doc = "Total number of request to be made against this session: reads + writes. If duration " +
            "is specified this value is ignored. Default is 50000.")
    protected long numRequests = 50000;

    @Property(doc = "Number of key-value entries per each client thread which should be used. Default is 100.")
    protected int numEntries = 100;

    @Property(doc = "Applicable only with fixedKeys=false, makes sense for entrySize with multiple values. " +
            "Replaces numEntries; requested number of bytes in values set by the stressor. By default not set.", converter = SizeConverter.class)
    protected long numBytes = 0;

    @Property(doc = "Size of the value in bytes. Default is 1000.", converter = Fuzzy.IntegerConverter.class)
    protected Fuzzy<Integer> entrySize = Fuzzy.always(1000);

    @Property(doc = "Ratio of writes = PUT requests (percentage). Default is 20%")
    protected int writePercentage = 20;

    @Property(doc = "The frequency of removes (percentage). Default is 0%")
    protected int removePercentage = 0;

    @Property(doc = "In case we test replace performance, the frequency of replaces that should fail (percentage). Default is 40%")
    protected int replaceInvalidPercentage = 40;

    @Property(doc = "Used only when useConditionalOperations=true: The frequency of conditional removes that should fail (percentage). Default is 10%")
    protected int removeInvalidPercentage = 10;

    @Property(doc = "The number of threads that will work on this slave. Default is 10.")
    protected int numThreads = 10;

    @Property(doc = "Full class name of the key generator. Default is org.radargun.stressors.StringKeyGenerator.")
    protected String keyGeneratorClass = StringKeyGenerator.class.getName();

    @Property(doc = "Used to initialize the key generator. Null by default.")
    protected String keyGeneratorParam = null;

    @Property(doc = "Full class name of the value generator. Default is org.radargun.stressors.ByteArrayValueGenerator if useConditionalOperations=false and org.radargun.stressors.WrappedArrayValueGenerator otherwise.")
    protected String valueGeneratorClass = null;

    @Property(doc = "Used to initialize the value generator. Null by default.")
    protected String valueGeneratorParam = null;

    @Property(doc = "Specifies if the requests should be explicitely wrapped in transactions. By default" +
            "the cachewrapper is queried whether it does support the transactions, if it does," +
            "transactions are used, otherwise these are not.")
    protected Boolean useTransactions = null;

    @Property(doc = "Specifies whether the transactions should be committed (true) or rolled back (false). " +
            "Default is true")
    protected boolean commitTransactions = true;

    @Property(doc = "Number of requests in one transaction. Default is 1.")
    protected int transactionSize = 1;

    @Property(doc = "Number of keys inserted/retrieved within one operation. Applicable only when the cache wrapper" +
            "supports bulk operations. Default is 1 (no bulk operations).")
    protected int bulkSize = 1;

    @Property(doc = "When executing bulk operations, prefer version with multiple async operations over native implementation. Default is false.")
    protected boolean preferAsyncOperations = false;

    @Property(converter = TimeConverter.class, doc = "Benchmark duration. This takes precedence over numRequests. By default switched off.")
    protected long duration = 0;

    @Property(doc = "By default each client thread operates on his private set of keys. Setting this to true " +
            "introduces contention between the threads, the numThreads property says total amount of entries that are " +
            "used by all threads. Default is false.")
    protected boolean sharedKeys = false;

    @Property(doc = "Which buckets will the stressors use. Available is 'none' (no buckets = null)," +
            "'thread' (each thread will use bucked_/threadId/) or " +
            "'all:/bucketName/' (all threads will use bucketName). Default is 'none'.",
            converter = BucketPolicy.Converter.class)
    protected BucketPolicy bucketPolicy = new BucketPolicy(BucketPolicy.Type.NONE, null);

    @Property(doc = "This option is valid only for sharedKeys=true. It forces local loading of all keys (not only numEntries/numNodes). Default is false.")
    protected boolean loadAllKeys = false;

    @Property(doc = "The keys can be fixed for the whole test run period or we the set can change over time. Default is true = fixed.")
    protected boolean fixedKeys = true;

    @Property(doc = "Due to configuration (eviction, expiration), some keys may spuriously disappear. Do not issue a warning for this situation. Default is false.")
    protected boolean expectLostKeys = false;

    @Property(doc = "If true, putIfAbsent and replace operations are used. Default is false.")
    protected boolean useConditionalOperations = false;

    @Property(doc = "Keep all keys in a pool - do not generate the keys for each request anew. Default is true.")
    protected boolean poolKeys = true;

    @Property(doc = "Generate a range for histogram with operations statistics (for use in next stress tests). Default is false.")
    protected boolean generateHistogramRange = false;

    @Property(doc = "The test will produce operation statistics in histogram. Default is false.")
    protected boolean useHistogramStatistics = false;

    @Property(doc = "The test will produce operation statistics as average values. Default is true.")
    protected boolean useSimpleStatistics = true;

    @Property(doc = "Period of single statistics result. By default periodic statistics are not used.", converter = TimeConverter.class)
    protected long statisticsPeriod = 0;

    @Property(doc = "With fixedKeys=false, maximum lifespan of an entry. Default is 1 hour.", converter = TimeConverter.class)
    protected long entryLifespan = 3600000;

    @InjectTrait
    protected BasicOperations basicOperations;
    @InjectTrait
    protected ConditionalOperations conditionalOperations;
    @InjectTrait
    protected BulkOperations bulkOperations;
    @InjectTrait
    protected Transactional transactional;

    // Lazily created, published via volatile + double-checked locking (see getters).
    protected transient volatile KeyGenerator keyGenerator;
    protected transient volatile ValueGenerator valueGenerator;
    private transient ArrayList<Object> sharedKeysPool = new ArrayList<Object>();
    protected transient volatile long startNanos;
    private transient PhaseSynchronizer synchronizer = new PhaseSynchronizer();
    private transient volatile Completion completion;
    private transient volatile boolean finished = false;
    private transient volatile boolean terminated = false;
    // NOTE(review): initial capacity here uses the field default of numThreads (10),
    // not the injected property value; harmless since ArrayList grows as needed.
    protected transient List<Stressor> stressors = new ArrayList<Stressor>(numThreads);
    private transient Statistics statisticsPrototype = new DefaultStatistics(new DefaultOperationStats());

    /** Picks the default value generator depending on useConditionalOperations. */
    @Init
    public void init() {
        if (valueGeneratorClass == null) {
            if (useConditionalOperations) valueGeneratorClass = WrappedArrayValueGenerator.class.getName();
            else valueGeneratorClass = ByteArrayValueGenerator.class.getName();
        }
    }

    /**
     * Prepares statistics and generators, publishes them to the slave state,
     * runs the stress test and stores the gathered statistics.
     *
     * @return per-iteration lists of per-thread statistics
     */
    protected List<List<Statistics>> execute() {
        log.info("Starting " + toString());
        loadStatistics();
        slaveState.put(BucketPolicy.LAST_BUCKET, bucketPolicy.getBucketName(-1));
        slaveState.put(KeyGenerator.KEY_GENERATOR, getKeyGenerator());
        slaveState.put(ValueGenerator.VALUE_GENERATOR, getValueGenerator());
        List<List<Statistics>> results = stress();
        storeStatistics(results);
        return results;
    }

    /**
     * Builds the statistics prototype according to the configured flags:
     * full recording (for histogram-range generation), histogram-based, or
     * plain default statistics; optionally wrapped in periodic statistics.
     */
    protected void loadStatistics() {
        Statistics statistics;
        if (generateHistogramRange) {
            statistics = new DefaultStatistics(new AllRecordingOperationStats());
        } else if (useHistogramStatistics) {
            // Histogram ranges must have been produced by a previous warmup stage.
            Histogram[] histograms = (Histogram[]) slaveState.get(Histogram.OPERATIONS_HISTOGRAMS);
            if (histograms == null) {
                throw new IllegalStateException("The histogram statistics are not generated. Please run StressTestWarmup with generateHistogramRange=true");
            }
            OperationStats[] histogramProtypes = new OperationStats[histograms.length];
            for (int i = 0; i < histograms.length; ++i) {
                if (useSimpleStatistics) {
                    histogramProtypes[i] = new MultiOperationStats(new DefaultOperationStats(), new HistogramOperationStats(histograms[i]));
                } else {
                    histogramProtypes[i] = new HistogramOperationStats(histograms[i]);
                }
            }
            statistics = new HistogramStatistics(histogramProtypes, new DefaultOperationStats());
        } else {
            statistics = new DefaultStatistics(new DefaultOperationStats());
        }
        if (statisticsPeriod > 0) {
            statistics = new PeriodicStatistics(statistics, statisticsPeriod);
        }
        statisticsPrototype = statistics;
    }

    /**
     * When generating histogram ranges, folds all gathered results together
     * and stores the combined histograms in the slave state for later stages.
     * NOTE(review): the call is {@code s.merge(statistics)} — verify the
     * direction of Statistics.merge matches the intent of accumulating into
     * the local copy.
     */
    private void storeStatistics(List<List<Statistics>> results) {
        if (generateHistogramRange) {
            Statistics statistics = statisticsPrototype.copy();
            for (List<Statistics> iteration : results) {
                for (Statistics s : iteration) {
                    s.merge(statistics);
                }
            }
            slaveState.put(Histogram.OPERATIONS_HISTOGRAMS, statistics.getRepresentations(Histogram.class));
        }
    }

    /**
     * Slave-side entry point: skips slaves excluded from the stage or with no
     * running service, otherwise executes the test and ships the statistics
     * back in the ack payload.
     */
    public DistStageAck executeOnSlave() {
        DefaultDistStageAck result = new DefaultDistStageAck(slaveState.getSlaveIndex(), slaveState.getLocalAddress());
        if (slaves != null && !slaves.contains(slaveState.getSlaveIndex())) {
            log.info(String.format("The stage should not run on this slave (%d): slaves=%s", slaveState.getSlaveIndex(), slaves));
            return result;
        }
        if (!isServiceRunnning()) {
            log.info("Not running test on this slave as service is not running.");
            return result;
        }
        log.info("Executing: " + this.toString());
        startNanos = System.nanoTime();
        try {
            List<List<Statistics>> results = execute();
            result.setPayload(results);
            return result;
        } catch (Exception e) {
            log.warn("Exception while initializing the test", e);
            result.setError(true);
            result.setRemoteException(e);
            return result;
        }
    }

    /**
     * Master-side aggregation: registers each slave's iteration statistics in
     * the report and returns false if any slave acked with an error.
     */
    public boolean processAckOnMaster(List<DistStageAck> acks) {
        logDurationInfo(acks);
        boolean success = true;
        Report report = masterState.getReport();
        Report.Test test = report.createTest(testName);
        for (DistStageAck ack : acks) {
            DefaultDistStageAck wAck = (DefaultDistStageAck) ack;
            if (wAck.isError()) {
                success = false;
                log.warn("Received error ack: " + wAck);
            } else {
                if (log.isTraceEnabled())
                    log.trace("Received success ack: " + wAck);
            }
            // unchecked cast: payload shape is fixed by executeOnSlave() above
            List<List<Statistics>> iterations = (List<List<Statistics>>) wAck.getPayload();
            if (iterations != null) {
                test.addIterations(ack.getSlaveIndex(), iterations);
            } else {
                log.trace("No report received from slave: " + ack.getSlaveIndex());
            }
        }
        return success;
    }

    /**
     * Runs the stressor threads to completion (time- or request-bounded) and
     * gathers their statistics, expanding periodic statistics into iterations.
     */
    public List<List<Statistics>> stress() {
        Completion completion;
        if (duration > 0) {
            completion = new TimeStressorCompletion(duration);
        } else {
            completion = new OperationCountCompletion(numRequests, logPeriod);
        }
        setCompletion(completion);

        // typed emptyList() instead of the raw-typed Collections.EMPTY_LIST constant
        if (!startOperations()) return Collections.<List<Statistics>>emptyList();
        try {
            executeOperations();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        log.info("Finished test. Test duration is: " + Utils.getNanosDurationString(System.nanoTime() - startNanos));
        List<Statistics> results = gatherResults();
        finishOperations();
        if (statisticsPeriod > 0) {
            /* expand the periodic statistics into iterations */
            List<List<Statistics>> all = new ArrayList<List<Statistics>>();
            for (Statistics s : results) {
                int iteration = 0;
                for (Statistics s2 : ((PeriodicStatistics) s).asList()) {
                    while (iteration >= all.size()) {
                        all.add(new ArrayList<Statistics>(results.size()));
                    }
                    all.get(iteration++).add(s2);
                }
            }
            return all;
        }
        return Collections.singletonList(results);
    }

    /** Enters the first master phase; returns false if interrupted. */
    protected boolean startOperations() {
        try {
            synchronizer.masterPhaseStart();
        } catch (InterruptedException e) {
            return false;
        }
        return true;
    }

    /** Collects one Statistics object per stressor thread. */
    protected List<Statistics> gatherResults() {
        List<Statistics> stats = new ArrayList<Statistics>(stressors.size());
        for (Stressor stressor : stressors) {
            stats.add(stressor.getStats());
        }
        return stats;
    }

    /** Fresh copy of the configured statistics prototype for one stressor. */
    protected Statistics createStatistics() {
        return statisticsPrototype.copy();
    }

    /**
     * Starts any missing stressor threads and drives the phase synchronizer
     * through key initialization up to the point where all threads finished.
     */
    protected void executeOperations() throws InterruptedException {
        synchronizer.setSlaveCount(numThreads);
        for (int threadIndex = stressors.size(); threadIndex < numThreads; threadIndex++) {
            Stressor stressor = createStressor(threadIndex);
            stressors.add(stressor);
            stressor.start();
        }
        synchronizer.masterPhaseEnd();
        // wait until all slaves have initialized keys
        synchronizer.masterPhaseStart();
        // nothing to do here
        synchronizer.masterPhaseEnd();
        log.info("Started " + stressors.size() + " stressor threads.");
        // wait until all threads have finished
        synchronizer.masterPhaseStart();
    }

    /** Factory for a single stressor thread; override to customize. */
    protected Stressor createStressor(int threadIndex) {
        return new Stressor(this, getLogic(), threadIndex, slaveState.getSlaveIndex(), slaveState.getClusterSize());
    }

    /** Signals the end of the test and joins all stressor threads. */
    protected void finishOperations() {
        finished = true;
        synchronizer.masterPhaseEnd();
        for (Stressor s : stressors) {
            try {
                s.join();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        }
        stressors.clear();
    }

    protected boolean isFinished() {
        return finished;
    }

    protected boolean isTerminated() {
        return terminated;
    }

    public void setTerminated() {
        this.terminated = true;
    }

    protected void setCompletion(Completion completion) {
        this.completion = completion;
    }

    public Completion getCompletion() {
        return completion;
    }

    public PhaseSynchronizer getSynchronizer() {
        return synchronizer;
    }

    /**
     * Validates the option combination and selects the operation logic:
     * changing-set, bulk, conditional, shared or per-thread fixed-set.
     *
     * @throws IllegalArgumentException for unsupported option combinations
     */
    public OperationLogic getLogic() {
        if (fixedKeys && numBytes > 0) {
            throw new IllegalArgumentException("numBytes can be set only for fixedKeys=false");
        } else if (sharedKeys && !fixedKeys) {
            throw new IllegalArgumentException("Cannot use both shared and non-fixed keys - not implemented");
        } else if (!fixedKeys) {
            if (!poolKeys) {
                throw new IllegalArgumentException("Keys have to be pooled with changing set.");
            }
            if (bulkSize != 1 || useConditionalOperations) {
                throw new IllegalArgumentException("Replace/bulk operations on changing set not supported.");
            }
            if (removePercentage > 0) {
                throw new IllegalArgumentException("Removes cannot be configured in when using non-fixed keys");
            }
            return new ChangingSetOperationLogic(this);
        } else if (bulkSize != 1) {
            if (bulkSize > 1 && bulkSize <= numEntries) {
                if (bulkOperations != null) {
                    if (sharedKeys) {
                        return new BulkOperationLogic(this, new FixedSetSharedOperationLogic(this, sharedKeysPool), preferAsyncOperations);
                    } else {
                        return new BulkOperationLogic(this, new FixedSetPerThreadOperationLogic(this), preferAsyncOperations);
                    }
                } else {
                    throw new IllegalArgumentException("Service " + slaveState.getServiceName() + " does not support bulk operations.");
                }
            } else {
                throw new IllegalArgumentException("Invalid bulk size, must be 1 < bulkSize(" + bulkSize + ") < numEntries(" + numEntries + ")");
            }
        } else if (useConditionalOperations) {
            if (sharedKeys) {
                throw new IllegalArgumentException("Atomics on shared keys are not supported.");
            } else if (conditionalOperations != null) {
                if (!poolKeys) {
                    log.warn("Keys are not pooled, but last values must be recorded!");
                }
                return new FixedSetConditionalOperationLogic(this);
            } else {
                throw new IllegalArgumentException("Atomics can be executed only on wrapper which supports atomic operations.");
            }
        } else if (sharedKeys) {
            return new FixedSetSharedOperationLogic(this, sharedKeysPool);
        } else {
            return new FixedSetPerThreadOperationLogic(this);
        }
    }

    /**
     * Generates a value for the given key whose size is drawn from entrySize
     * but capped at maxValueSize.
     */
    protected Object generateValue(Object key, int maxValueSize) {
        Random random = ThreadLocalRandom.current();
        int size = entrySize.next(random);
        size = Math.min(size, maxValueSize);
        return getValueGenerator().generateValue(key, size, random);
    }

    /** Lazily instantiates the key generator (double-checked locking on a volatile field). */
    public KeyGenerator getKeyGenerator() {
        if (keyGenerator == null) {
            synchronized (this) {
                if (keyGenerator != null) return keyGenerator;
                log.info("Using key generator " + keyGeneratorClass + ", param " + keyGeneratorParam);
                ClassLoader classLoader = slaveState.getClassLoadHelper().getLoader();
                keyGenerator = (KeyGenerator) Utils.instantiate(keyGeneratorClass, classLoader);
                keyGenerator.init(keyGeneratorParam, classLoader);
            }
        }
        return keyGenerator;
    }

    /** Lazily instantiates the value generator (double-checked locking on a volatile field). */
    public ValueGenerator getValueGenerator() {
        if (valueGenerator == null) {
            synchronized (this) {
                if (valueGenerator != null) return valueGenerator;
                log.info("Using value generator " + valueGeneratorClass + ", param " + valueGeneratorParam);
                ClassLoader classLoader = slaveState.getClassLoadHelper().getLoader();
                valueGenerator = (ValueGenerator) Utils.instantiate(valueGeneratorClass, classLoader);
                valueGenerator.init(valueGeneratorParam, classLoader);
            }
        }
        return valueGenerator;
    }

    protected static void avoidJit(Object result) {
        //this line was added just to make sure JIT doesn't skip call to cacheWrapper.get
        if (result != null && System.identityHashCode(result) == result.hashCode()) System.out.print("");
    }
}
| |
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.network;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.CloudProvider;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.Tag;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.util.TagUtils;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* Basic implementation of firewall support methods to minimize the work in implementing support in a new cloud.
* <p>Created by George Reese: 2/1/13 8:40 AM</p>
* @author George Reese
* @since 2013.04
* @version 2013.04
* @version 2014.03 added support for authorizing with rule create options
*/
@SuppressWarnings("UnusedDeclaration")
public abstract class AbstractFirewallSupport<T extends CloudProvider> implements FirewallSupport {
// Cloud provider backing every operation of this support object.
private T provider;

/**
 * Creates firewall support bound to the given provider.
 *
 * @param provider the cloud provider this support instance operates against
 */
public AbstractFirewallSupport(@Nonnull T provider) {
    this.provider = provider;
}
/**
 * Deprecated legacy form: authorizes an ingress ALLOW rule from a CIDR
 * source to the whole firewall, precedence 0.
 */
@Override
@Deprecated
public final @Nonnull String authorize(@Nonnull String firewallId, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
    return authorize(firewallId, Direction.INGRESS, Permission.ALLOW, RuleTarget.getCIDR(source), protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort, 0);
}

/**
 * Deprecated legacy form with explicit direction, ALLOW permission.
 * For INGRESS the CIDR is the source endpoint; for EGRESS the CIDR becomes
 * the destination and the firewall itself is the source.
 */
@Override
@Deprecated
public final @Nonnull String authorize(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
    if( direction.equals(Direction.INGRESS) ) {
        return authorize(firewallId, direction, Permission.ALLOW, RuleTarget.getCIDR(source), protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort, 0);
    }
    else {
        return authorize(firewallId, direction, Permission.ALLOW, RuleTarget.getGlobal(firewallId), protocol, RuleTarget.getCIDR(source), beginPort, endPort, 0);
    }
}

/**
 * Deprecated legacy form with explicit direction and permission; endpoint
 * placement follows the same INGRESS/EGRESS swap as above.
 */
@Override
@Deprecated
public final @Nonnull String authorize(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
    if( direction.equals(Direction.INGRESS) ) {
        return authorize(firewallId, direction, permission, RuleTarget.getCIDR(source), protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort, 0);
    }
    else {
        return authorize(firewallId, direction, permission, RuleTarget.getGlobal(firewallId), protocol, RuleTarget.getCIDR(source), beginPort, endPort, 0);
    }
}

/**
 * Deprecated legacy form with an explicit non-CIDR target; the CIDR source
 * and the given target swap roles for EGRESS rules.
 */
@Override
@Deprecated
public final @Nonnull String authorize(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull String source, @Nonnull Protocol protocol, @Nonnull RuleTarget target, int beginPort, int endPort) throws CloudException, InternalException {
    if( direction.equals(Direction.INGRESS) ) {
        return authorize(firewallId, direction, permission, RuleTarget.getCIDR(source), protocol, target, beginPort, endPort, 0);
    }
    else {
        return authorize(firewallId, direction, permission, target, protocol, RuleTarget.getCIDR(source), beginPort, endPort, 0);
    }
}
/**
 * Canonical authorization entry point that all deprecated overloads funnel
 * into. The base implementation always rejects; clouds supporting rule
 * authorization must override this method.
 */
@Override
public @Nonnull String authorize(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull RuleTarget sourceEndpoint, @Nonnull Protocol protocol, @Nonnull RuleTarget destinationEndpoint, int beginPort, int endPort, @Nonnegative int precedence) throws CloudException, InternalException {
    throw new OperationNotSupportedException("Authorization is not currently implemented for " + getProvider().getCloudName());
}
/**
 * Authorizes a rule described by the given options, defaulting any missing
 * endpoint to the firewall's own global target before delegating to the
 * canonical authorize method.
 */
@Override
public @Nonnull String authorize(@Nonnull String firewallId, @Nonnull FirewallRuleCreateOptions options) throws CloudException, InternalException {
    RuleTarget sourceEndpoint = options.getSourceEndpoint();
    RuleTarget destinationEndpoint = options.getDestinationEndpoint();

    sourceEndpoint = (sourceEndpoint == null) ? RuleTarget.getGlobal(firewallId) : sourceEndpoint;
    destinationEndpoint = (destinationEndpoint == null) ? RuleTarget.getGlobal(firewallId) : destinationEndpoint;
    return authorize(firewallId, options.getDirection(), options.getPermission(), sourceEndpoint, options.getProtocol(), destinationEndpoint, options.getPortRangeStart(), options.getPortRangeEnd(), options.getPrecedence());
}
/** Deprecated convenience: builds create options from name/description. */
@Override
@Deprecated
public @Nonnull String create(@Nonnull String name, @Nonnull String description) throws InternalException, CloudException {
    return create(FirewallCreateOptions.getInstance(name, description));
}

/**
 * Canonical firewall creation entry point; the base implementation always
 * rejects, so clouds supporting firewall creation must override it.
 */
@Override
public @Nonnull String create(@Nonnull FirewallCreateOptions options) throws InternalException, CloudException {
    throw new OperationNotSupportedException("Firewall creation is not currently implemented for " + getProvider().getCloudName());
}

/** Deprecated convenience: VLAN-scoped creation via the options object. */
@Override
@Deprecated
public @Nonnull String createInVLAN(@Nonnull String name, @Nonnull String description, @Nonnull String providerVlanId) throws InternalException, CloudException {
    return create(FirewallCreateOptions.getInstance(providerVlanId, name, description));
}
/**
 * @return the current authentication context for any calls through this support object
 * @throws CloudException no context was set
 */
protected @Nonnull ProviderContext getContext() throws CloudException {
    final ProviderContext context = getProvider().getContext();

    if( context == null ) {
        throw new CloudException("No context was specified for this request");
    }
    return context;
}
/**
 * Collects the concrete values of every constraint the cloud actually
 * enforces on the given firewall.
 *
 * @param firewallId the firewall whose constraint values are requested
 * @return an empty map for unconstrained clouds, the populated map of
 *         enforced constraints otherwise, or {@code null} if the firewall
 *         does not exist
 */
@Override
public @Nullable Map<FirewallConstraints.Constraint, Object> getActiveConstraintsForFirewall(@Nonnull String firewallId) throws CloudException, InternalException {
    // program to the Map interface rather than the HashMap implementation
    Map<FirewallConstraints.Constraint, Object> active = new HashMap<FirewallConstraints.Constraint, Object>();
    FirewallConstraints fields = getCapabilities().getFirewallConstraintsForCloud();

    if( fields.isOpen() ) {
        return active;
    }
    // existence check only: null signals an unknown firewall to the caller
    Firewall firewall = getFirewall(firewallId);

    if( firewall == null ) {
        return null;
    }
    for( FirewallConstraints.Constraint c : fields.getConstraints() ) {
        FirewallConstraints.Level l = fields.getConstraintLevel(c);

        if( !l.equals(FirewallConstraints.Level.NOT_CONSTRAINED) ) {
            active.put(c, c.getValue(getProvider(), firewallId));
        }
    }
    return active;
}
/**
 * Looks up a firewall by its provider ID via a linear scan over all
 * firewalls; clouds with a direct lookup API should override this.
 *
 * @return the matching firewall, or {@code null} if none matches
 */
@Override
public @Nullable Firewall getFirewall(@Nonnull String firewallId) throws InternalException, CloudException {
    for( Firewall candidate : list() ) {
        if( firewallId.equals(candidate.getProviderFirewallId()) ) {
            return candidate;
        }
    }
    return null;
}
/** Deprecated pass-through to the capabilities object. */
@Override
@Deprecated
public @Nonnull FirewallConstraints getFirewallConstraintsForCloud() throws InternalException, CloudException {
    return getCapabilities().getFirewallConstraintsForCloud();
}
/**
 * @return the provider object associated with any calls through this support object
 */
protected final @Nonnull T getProvider() {
    return provider;
}
/** Base implementation exposes no rules; override in concrete clouds. */
@Override
public @Nonnull Collection<FirewallRule> getRules(@Nonnull String firewallId) throws InternalException, CloudException {
    return Collections.emptyList();
}
/** Deprecated pass-through to the capabilities object. */
@Override
@Deprecated
public @Nonnull Requirement identifyPrecedenceRequirement(boolean inVlan) throws InternalException, CloudException {
    return getCapabilities().identifyPrecedenceRequirement(inVlan);
}

/** Deprecated pass-through to the capabilities object. */
@Override
@Deprecated
public boolean isZeroPrecedenceHighest() throws InternalException, CloudException {
    return getCapabilities().isZeroPrecedenceHighest();
}
/**
 * Derives resource status from the full firewall listing; every listed
 * firewall is reported as active (true).
 */
@Override
public @Nonnull Iterable<ResourceStatus> listFirewallStatus() throws InternalException, CloudException {
    ArrayList<ResourceStatus> statuses = new ArrayList<ResourceStatus>();

    for( Firewall firewall : list() ) {
        //noinspection ConstantConditions
        statuses.add(new ResourceStatus(firewall.getProviderFirewallId(), true));
    }
    return statuses;
}
/**
 * Deprecated pass-through to the capabilities object.
 * NOTE(review): unlike the two methods below, this one and
 * listSupportedDirections lack @Override — confirm whether FirewallSupport
 * declares them and add the annotation for consistency if so.
 */
@Deprecated
public @Nonnull Iterable<RuleTargetType> listSupportedDestinationTypes(boolean inVlan) throws InternalException, CloudException {
    return getCapabilities().listSupportedDestinationTypes(inVlan);
}

/** Deprecated pass-through to the capabilities object. */
@Deprecated
public @Nonnull Iterable<Direction> listSupportedDirections(boolean inVlan) throws InternalException, CloudException {
    return getCapabilities().listSupportedDirections(inVlan);
}

/** Deprecated pass-through to the capabilities object. */
@Override
@Deprecated
public @Nonnull Iterable<Permission> listSupportedPermissions(boolean inVlan) throws InternalException, CloudException {
    return getCapabilities().listSupportedPermissions(inVlan);
}

/** Deprecated pass-through to the capabilities object. */
@Override
@Deprecated
public @Nonnull Iterable<RuleTargetType> listSupportedSourceTypes(boolean inVlan) throws InternalException, CloudException {
    return getCapabilities().listSupportedSourceTypes(inVlan);
}
/** No service-action mappings by default; override where the cloud defines them. */
@Override
public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) {
    return new String[0];
}
/**
 * Removes tags from a single firewall. Base implementation is a no-op;
 * clouds supporting firewall tagging should override it.
 * (Parameter renamed from the copy-pasted "volumeId" — this is firewall
 * support, not volume support; Java parameter names are not part of the
 * binary interface, so callers are unaffected.)
 */
@Override
public void removeTags(@Nonnull String firewallId, @Nonnull Tag... tags) throws CloudException, InternalException {
    // NO-OP
}

/**
 * Removes tags from several firewalls by delegating to the single-ID
 * overload. (Parameter renamed from the copy-pasted "vmIds".)
 */
@Override
public void removeTags(@Nonnull String[] firewallIds, @Nonnull Tag... tags) throws CloudException, InternalException {
    for( String id : firewallIds ) {
        removeTags(id, tags);
    }
}
/**
 * Canonical revocation by provider rule ID; the base implementation always
 * rejects, so clouds supporting revocation must override it.
 */
@Override
public void revoke(@Nonnull String providerFirewallRuleId) throws InternalException, CloudException {
    throw new OperationNotSupportedException("Revoke is not currently implemented in " + getProvider().getCloudName());
}
/** Revokes an ingress ALLOW rule matching the given CIDR source/ports. */
@Override
public void revoke(@Nonnull String firewallId, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
    revoke(firewallId, Direction.INGRESS, Permission.ALLOW, source, protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort);
}

/** Revokes an ALLOW rule in the given direction matching source/ports. */
@Override
public void revoke(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
    revoke(firewallId, direction, Permission.ALLOW, source, protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort);
}

/** Revokes a rule with explicit permission, defaulting the target to the whole firewall. */
@Override
public void revoke(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull String source, @Nonnull Protocol protocol, int beginPort, int endPort) throws CloudException, InternalException {
    revoke(firewallId, direction, permission, source, protocol, RuleTarget.getGlobal(firewallId), beginPort, endPort);
}
/**
 * Finds the first existing rule matching all of the given attributes and
 * revokes it by its provider rule ID. At most one rule is removed per call;
 * if nothing matches, the call silently does nothing.
 */
@Override
public void revoke(@Nonnull String firewallId, @Nonnull Direction direction, @Nonnull Permission permission, @Nonnull String source, @Nonnull Protocol protocol, @Nonnull RuleTarget target, int beginPort, int endPort) throws CloudException, InternalException {
    for( FirewallRule rule : getRules(firewallId) ) {
        // filter chain: skip any rule whose attributes differ from the request
        if( !rule.getDirection().equals(direction) ) {
            continue;
        }
        if( !rule.getPermission().equals(permission) ) {
            continue;
        }
        if( !rule.getProtocol().equals(protocol) ) {
            continue;
        }
        if( rule.getStartPort() != beginPort || rule.getEndPort() != endPort ) {
            continue;
        }
        // the CIDR string must match the rule's source (ingress) or
        // destination (egress) endpoint, which must itself be CIDR-typed
        if( direction.equals(Direction.INGRESS) ) {
            RuleTarget se = rule.getSourceEndpoint();

            if( !se.getRuleTargetType().equals(RuleTargetType.CIDR) ) {
                continue;
            }
            if( !source.equals(se.getCidr()) ) {
                continue;
            }
        }
        else {
            RuleTarget de = rule.getDestinationEndpoint();

            if( !de.getRuleTargetType().equals(RuleTargetType.CIDR) ) {
                continue;
            }
            if( !source.equals(de.getCidr()) ) {
                continue;
            }
        }
        // the opposite endpoint must match the requested target, compared
        // by its specific identifier per target type
        RuleTarget ruleEndpoint;
        RuleTargetType type;

        if( direction.equals(Direction.INGRESS) ) {
            ruleEndpoint = rule.getDestinationEndpoint();
        }
        else {
            ruleEndpoint = rule.getSourceEndpoint();
        }
        type = ruleEndpoint.getRuleTargetType();
        if( !type.equals(target.getRuleTargetType()) ) {
            continue;
        }
        switch( type ) {
            case CIDR:
                //noinspection ConstantConditions
                if( !target.getCidr().equals(ruleEndpoint.getCidr()) ) { continue; }
                break;
            case VLAN:
                //noinspection ConstantConditions
                if( !target.getProviderVlanId().equals(ruleEndpoint.getProviderVlanId()) ) { continue; }
                break;
            case VM:
                //noinspection ConstantConditions
                if( !target.getProviderVirtualMachineId().equals(ruleEndpoint.getProviderVirtualMachineId()) ) { continue; }
                break;
            case GLOBAL:
                //noinspection ConstantConditions
                if( !target.getProviderFirewallId().equals(ruleEndpoint.getProviderFirewallId()) ) { continue; }
                break;
            default:
                throw new CloudException("Unknown rule target type: " + type);
        }
        // full match: revoke this rule and stop (only the first match is removed)
        revoke(rule.getProviderRuleId());
        return;
    }
}
/**
 * Indicates whether rules with the given direction/permission are supported,
 * delegating to the firewall capabilities object.
 *
 * @param direction the rule direction being queried
 * @param permission the rule permission being queried
 * @param inVlan whether the query concerns VLAN-scoped firewalls
 * @return true if such rules are supported
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 * @deprecated use the capabilities object directly
 */
@Override
@Deprecated
public boolean supportsRules(@Nonnull Direction direction, @Nonnull Permission permission, boolean inVlan) throws CloudException, InternalException {
    final boolean supported = getCapabilities().supportsRules(direction, permission, inVlan);
    return supported;
}
/**
 * Indicates whether firewalls can be created in this cloud, delegating to the
 * firewall capabilities object.
 *
 * @param inVlan whether the query concerns VLAN-scoped firewalls
 * @return true if firewall creation is supported
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 * @deprecated use the capabilities object directly
 */
@Override
@Deprecated
public boolean supportsFirewallCreation(boolean inVlan) throws CloudException, InternalException {
    final boolean supported = getCapabilities().supportsFirewallCreation(inVlan);
    return supported;
}
/**
 * Indicates whether rules must be supplied when a firewall is created,
 * delegating to the firewall capabilities object.
 *
 * @return true if rules are required at firewall creation time
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 * @deprecated use the capabilities object directly
 */
@Override
@Deprecated
public boolean requiresRulesOnCreation() throws CloudException, InternalException {
    final boolean required = getCapabilities().requiresRulesOnCreation();
    return required;
}
/**
 * Indicates whether firewalls can be deleted in this cloud, delegating to the
 * firewall capabilities object.
 *
 * @return true if firewall deletion is supported
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 * @deprecated use the capabilities object directly
 */
@Override
@Deprecated
public boolean supportsFirewallDeletion() throws CloudException, InternalException {
    final boolean supported = getCapabilities().supportsFirewallDeletion();
    return supported;
}
/**
 * Indicates whether firewall sources are supported; this implementation always
 * answers {@code false}.
 * NOTE(review): unlike the other deprecated capability shims in this class, this
 * one hard-codes {@code false} instead of delegating to {@code getCapabilities()} —
 * confirm this is intentional and not an oversight.
 *
 * @return false — firewall sources are not supported here
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 * @deprecated use the capabilities object directly
 */
@Override
@Deprecated
public boolean supportsFirewallSources() throws CloudException, InternalException {
return false;
}
/**
 * Sets/updates meta-data tags on the specified resource. This implementation is
 * intentionally a no-op; subclasses for clouds that support tagging override it.
 * NOTE(review): the parameter is named {@code volumeId} inside what appears to be
 * firewall support — presumably a copy/paste from volume support; confirm it is
 * meant to be a firewall ID.
 *
 * @param volumeId the ID of the resource whose tags should be updated
 * @param tags the tags to set
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 */
@Override
public void updateTags(@Nonnull String volumeId, @Nonnull Tag... tags) throws CloudException, InternalException {
// NO-OP
}
/**
 * Updates the given tags on each of the listed resources by delegating to the
 * single-resource {@link #updateTags(String, Tag...)} for every ID in turn.
 *
 * @param vmIds the IDs of the resources whose tags should be updated
 * @param tags the tags to set
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 */
@Override
public void updateTags(@Nonnull String[] vmIds, @Nonnull Tag... tags) throws CloudException, InternalException {
    for( int i = 0; i < vmIds.length; i++ ) {
        updateTags(vmIds[i], tags);
    }
}
/**
 * Replaces the tag set of a single firewall by delegating to the bulk
 * {@link #setTags(String[], Tag...)} with a one-element array.
 *
 * @param firewallId the firewall whose tags should be replaced
 * @param tags the desired final tag set
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 */
@Override
public void setTags( @Nonnull String firewallId, @Nonnull Tag... tags ) throws CloudException, InternalException {
    String[] singleId = new String[] { firewallId };
    setTags(singleId, tags);
}
/**
 * Replaces the tag set of each listed firewall: for every firewall, tags present
 * on the firewall but absent from the desired set are removed (via
 * {@code TagUtils.getTagsForDelete}), then the desired tags are applied.
 * NOTE(review): assumes {@code getFirewall(id)} returns non-null for every ID —
 * confirm callers never pass unknown firewall IDs.
 *
 * @param firewallIds the firewalls whose tags should be replaced
 * @param tags the desired final tag set
 * @throws CloudException an error occurred communicating with the cloud provider
 * @throws InternalException an internal error occurred in the Dasein Cloud implementation
 */
@Override
public void setTags( @Nonnull String[] firewallIds, @Nonnull Tag... tags ) throws CloudException, InternalException {
    for( String firewallId : firewallIds ) {
        Tag[] currentTags = getFirewall(firewallId).getTags();
        Tag[] tagsToRemove = TagUtils.getTagsForDelete(currentTags, tags);
        if( tagsToRemove.length > 0 ) {
            removeTags(firewallId, tagsToRemove);
        }
        updateTags(firewallId, tags);
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.