gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.rest.controller;
import com.google.common.collect.ImmutableMap;
import org.adrianwalker.multilinestring.Multiline;
import org.apache.metron.indexing.dao.InMemoryDao;
import org.apache.metron.indexing.dao.IndexingDaoIntegrationTest;
import org.apache.metron.rest.service.SearchService;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@ActiveProfiles(TEST_PROFILE)
public class SearchControllerIntegrationTest {
@Autowired
private SearchService searchService;
@Autowired
private WebApplicationContext wac;
private MockMvc mockMvc;
private String searchUrl = "/api/v1/search";
private String user = "user";
private String password = "password";
@Before
public void setup() throws Exception {
this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).apply(springSecurity()).build();
loadTestData();
}
@After
public void cleanup() throws Exception {
InMemoryDao.clear();
}
@Test
public void testSecurity() throws Exception {
this.mockMvc.perform(post(searchUrl + "/search").with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.allQuery))
.andExpect(status().isUnauthorized());
}
@Test
public void test() throws Exception {
this.mockMvc.perform(post(searchUrl + "/search").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.allQuery))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.total").value(10))
.andExpect(jsonPath("$.results[0].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[0].source.timestamp").value(10))
.andExpect(jsonPath("$.results[1].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[1].source.timestamp").value(9))
.andExpect(jsonPath("$.results[2].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[2].source.timestamp").value(8))
.andExpect(jsonPath("$.results[3].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[3].source.timestamp").value(7))
.andExpect(jsonPath("$.results[4].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[4].source.timestamp").value(6))
.andExpect(jsonPath("$.results[5].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[5].source.timestamp").value(5))
.andExpect(jsonPath("$.results[6].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[6].source.timestamp").value(4))
.andExpect(jsonPath("$.results[7].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[7].source.timestamp").value(3))
.andExpect(jsonPath("$.results[8].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[8].source.timestamp").value(2))
.andExpect(jsonPath("$.results[9].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[9].source.timestamp").value(1));
this.mockMvc.perform(post(searchUrl + "/search").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.filterQuery))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.total").value(3))
.andExpect(jsonPath("$.results[0].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[0].source.timestamp").value(9))
.andExpect(jsonPath("$.results[1].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[1].source.timestamp").value(7))
.andExpect(jsonPath("$.results[2].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[2].source.timestamp").value(1));
this.mockMvc.perform(post(searchUrl + "/search").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.sortQuery))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.total").value(10))
.andExpect(jsonPath("$.results[0].source.ip_src_port").value(8001))
.andExpect(jsonPath("$.results[1].source.ip_src_port").value(8002))
.andExpect(jsonPath("$.results[2].source.ip_src_port").value(8003))
.andExpect(jsonPath("$.results[3].source.ip_src_port").value(8004))
.andExpect(jsonPath("$.results[4].source.ip_src_port").value(8005))
.andExpect(jsonPath("$.results[5].source.ip_src_port").value(8006))
.andExpect(jsonPath("$.results[6].source.ip_src_port").value(8007))
.andExpect(jsonPath("$.results[7].source.ip_src_port").value(8008))
.andExpect(jsonPath("$.results[8].source.ip_src_port").value(8009))
.andExpect(jsonPath("$.results[9].source.ip_src_port").value(8010));
this.mockMvc.perform(post(searchUrl + "/search").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.paginationQuery))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.total").value(10))
.andExpect(jsonPath("$.results[0].source.source:type").value("snort"))
.andExpect(jsonPath("$.results[0].source.timestamp").value(6))
.andExpect(jsonPath("$.results[1].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[1].source.timestamp").value(5))
.andExpect(jsonPath("$.results[2].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[2].source.timestamp").value(4));
this.mockMvc.perform(post(searchUrl + "/search").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.indexQuery))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.total").value(5))
.andExpect(jsonPath("$.results[0].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[0].source.timestamp").value(5))
.andExpect(jsonPath("$.results[1].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[1].source.timestamp").value(4))
.andExpect(jsonPath("$.results[2].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[2].source.timestamp").value(3))
.andExpect(jsonPath("$.results[3].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[3].source.timestamp").value(2))
.andExpect(jsonPath("$.results[4].source.source:type").value("bro"))
.andExpect(jsonPath("$.results[4].source.timestamp").value(1));
this.mockMvc.perform(post(searchUrl + "/search").with(httpBasic(user, password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content(IndexingDaoIntegrationTest.exceededMaxResultsQuery))
.andExpect(status().isInternalServerError())
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
.andExpect(jsonPath("$.responseCode").value(500))
.andExpect(jsonPath("$.message").value("Search result size must be less than 100"));
}
private void loadTestData() throws ParseException {
Map<String, List<String>> backingStore = new HashMap<>();
for(Map.Entry<String, String> indices :
ImmutableMap.of(
"bro_index_2017.01.01.01", IndexingDaoIntegrationTest.broData,
"snort_index_2017.01.01.01", IndexingDaoIntegrationTest.snortData
).entrySet()
)
{
List<String> results = new ArrayList<>();
backingStore.put(indices.getKey(), results);
JSONArray broArray = (JSONArray) new JSONParser().parse(indices.getValue());
for(Object o: broArray) {
JSONObject jsonObject = (JSONObject) o;
results.add(jsonObject.toJSONString());
}
}
InMemoryDao.load(backingStore);
}
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.protocols.channels;
import com.google.bitcoin.core.*;
import com.google.bitcoin.utils.Threading;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.HashMultimap;
import com.google.protobuf.ByteString;
import net.jcip.annotations.GuardedBy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.math.BigInteger;
import java.util.Date;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.locks.ReentrantLock;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
 * This class maintains a set of {@link StoredClientChannel}s, automatically (re)broadcasting the contract transaction
 * and broadcasting the refund transaction over the given {@link TransactionBroadcaster}.
 *
 * <p>All access to {@link #mapChannels} is guarded by {@link #lock}; individual channels are additionally
 * synchronized on themselves when their mutable fields are read or written.</p>
 */
public class StoredPaymentChannelClientStates implements WalletExtension {
    private static final Logger log = LoggerFactory.getLogger(StoredPaymentChannelClientStates.class);
    static final String EXTENSION_ID = StoredPaymentChannelClientStates.class.getName();

    @GuardedBy("lock") @VisibleForTesting final HashMultimap<Sha256Hash, StoredClientChannel> mapChannels = HashMultimap.create();
    // Daemon timer so pending expiry broadcasts don't keep the JVM alive.
    @VisibleForTesting final Timer channelTimeoutHandler = new Timer(true);

    // Not final: deserializeWalletExtension() may (re)assign it.
    private Wallet containingWallet;
    private final TransactionBroadcaster announcePeerGroup;

    protected final ReentrantLock lock = Threading.lock("StoredPaymentChannelClientStates");

    /**
     * Creates a new StoredPaymentChannelClientStates and associates it with the given {@link Wallet} and
     * {@link TransactionBroadcaster} which are used to complete and announce contract and refund
     * transactions.
     */
    public StoredPaymentChannelClientStates(Wallet containingWallet, TransactionBroadcaster announcePeerGroup) {
        this.announcePeerGroup = checkNotNull(announcePeerGroup);
        this.containingWallet = checkNotNull(containingWallet);
    }

    /** Returns this extension from the given wallet, or null if no such extension was added. */
    @Nullable
    public static StoredPaymentChannelClientStates getFromWallet(Wallet wallet) {
        return (StoredPaymentChannelClientStates) wallet.getExtensions().get(EXTENSION_ID);
    }

    /** Returns the outstanding amount of money sent back to us for all channels to this server added together. */
    public BigInteger getBalanceForServer(Sha256Hash id) {
        BigInteger balance = BigInteger.ZERO;
        lock.lock();
        try {
            Set<StoredClientChannel> setChannels = mapChannels.get(id);
            for (StoredClientChannel channel : setChannels) {
                synchronized (channel) {
                    // Closed channels no longer contribute to the balance.
                    if (channel.close != null) continue;
                    balance = balance.add(channel.valueToMe);
                }
            }
            return balance;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Returns the number of seconds from now until this servers next channel will expire, or zero if no unexpired
     * channels found.
     */
    public long getSecondsUntilExpiry(Sha256Hash id) {
        lock.lock();
        try {
            final Set<StoredClientChannel> setChannels = mapChannels.get(id);
            final long nowSeconds = Utils.currentTimeMillis() / 1000;
            // Use a long accumulator: casting the expiry time to int would silently
            // truncate lock times past 2038 (the previous implementation did so).
            long earliestTime = Long.MAX_VALUE;
            for (StoredClientChannel channel : setChannels) {
                synchronized (channel) {
                    if (channel.expiryTimeSeconds() > nowSeconds)
                        earliestTime = Math.min(earliestTime, channel.expiryTimeSeconds());
                }
            }
            return earliestTime == Long.MAX_VALUE ? 0 : earliestTime - nowSeconds;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Finds an inactive channel with the given id and returns it, or returns null.
     */
    @Nullable
    StoredClientChannel getUsableChannelForServerID(Sha256Hash id) {
        lock.lock();
        try {
            Set<StoredClientChannel> setChannels = mapChannels.get(id);
            for (StoredClientChannel channel : setChannels) {
                synchronized (channel) {
                    // Check if the channel is usable (has money, inactive) and if so, activate it.
                    log.info("Considering channel {} contract {}", channel.hashCode(), channel.contract.getHash());
                    if (channel.close != null || channel.valueToMe.equals(BigInteger.ZERO)) {
                        log.info("  ... but is closed or empty");
                        continue;
                    }
                    if (!channel.active) {
                        log.info("  ... activating");
                        channel.active = true;
                        return channel;
                    }
                    log.info("  ... but is already active");
                }
            }
        } finally {
            lock.unlock();
        }
        return null;
    }

    /**
     * Finds a channel with the given id and contract hash and returns it, or returns null.
     */
    @Nullable
    StoredClientChannel getChannel(Sha256Hash id, Sha256Hash contractHash) {
        lock.lock();
        try {
            Set<StoredClientChannel> setChannels = mapChannels.get(id);
            for (StoredClientChannel channel : setChannels) {
                if (channel.contract.getHash().equals(contractHash))
                    return channel;
            }
            return null;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Adds the given channel to this set of stored states, broadcasting the contract and refund transactions when the
     * channel expires and notifies the wallet of an update to this wallet extension
     */
    void putChannel(final StoredClientChannel channel) {
        putChannel(channel, true);
    }

    // Adds this channel and optionally notifies the wallet of an update to this extension (used during deserialize)
    private void putChannel(final StoredClientChannel channel, boolean updateWallet) {
        lock.lock();
        try {
            mapChannels.put(channel.id, channel);
            // Schedule the expiry broadcast of the contract and refund transactions.
            channelTimeoutHandler.schedule(new TimerTask() {
                @Override
                public void run() {
                    removeChannel(channel);
                    announcePeerGroup.broadcastTransaction(channel.contract);
                    announcePeerGroup.broadcastTransaction(channel.refund);
                }
                // Add the difference between real time and Utils.now() so that test-cases can use a mock clock.
            }, new Date(channel.expiryTimeSeconds() * 1000 + (System.currentTimeMillis() - Utils.currentTimeMillis())));
        } finally {
            lock.unlock();
        }
        if (updateWallet)
            containingWallet.addOrUpdateExtension(this);
    }

    /**
     * <p>Removes the channel with the given id from this set of stored states and notifies the wallet of an update to
     * this wallet extension.</p>
     *
     * <p>Note that the channel will still have its contract and refund transactions broadcast via the connected
     * {@link TransactionBroadcaster} as long as this {@link StoredPaymentChannelClientStates} continues to
     * exist in memory.</p>
     */
    void removeChannel(StoredClientChannel channel) {
        lock.lock();
        try {
            mapChannels.remove(channel.id, channel);
        } finally {
            lock.unlock();
        }
        containingWallet.addOrUpdateExtension(this);
    }

    @Override
    public String getWalletExtensionID() {
        return EXTENSION_ID;
    }

    @Override
    public boolean isWalletExtensionMandatory() {
        return false;
    }

    @Override
    public byte[] serializeWalletExtension() {
        lock.lock();
        try {
            ClientState.StoredClientPaymentChannels.Builder builder = ClientState.StoredClientPaymentChannels.newBuilder();
            for (StoredClientChannel channel : mapChannels.values()) {
                // First a few asserts to make sure things won't break
                checkState(channel.valueToMe.compareTo(BigInteger.ZERO) >= 0 && channel.valueToMe.compareTo(NetworkParameters.MAX_MONEY) < 0);
                checkState(channel.refundFees.compareTo(BigInteger.ZERO) >= 0 && channel.refundFees.compareTo(NetworkParameters.MAX_MONEY) < 0);
                checkNotNull(channel.myKey.getPrivKeyBytes());
                checkState(channel.refund.getConfidence().getSource() == TransactionConfidence.Source.SELF);
                final ClientState.StoredClientPaymentChannel.Builder value = ClientState.StoredClientPaymentChannel.newBuilder()
                        .setId(ByteString.copyFrom(channel.id.getBytes()))
                        .setContractTransaction(ByteString.copyFrom(channel.contract.bitcoinSerialize()))
                        .setRefundTransaction(ByteString.copyFrom(channel.refund.bitcoinSerialize()))
                        .setMyKey(ByteString.copyFrom(channel.myKey.getPrivKeyBytes()))
                        .setValueToMe(channel.valueToMe.longValue())
                        .setRefundFees(channel.refundFees.longValue());
                if (channel.close != null)
                    value.setCloseTransactionHash(ByteString.copyFrom(channel.close.getHash().getBytes()));
                builder.addChannels(value);
            }
            return builder.build().toByteArray();
        } finally {
            lock.unlock();
        }
    }

    @Override
    public void deserializeWalletExtension(Wallet containingWallet, byte[] data) throws Exception {
        lock.lock();
        try {
            checkState(this.containingWallet == null || this.containingWallet == containingWallet);
            this.containingWallet = containingWallet;
            NetworkParameters params = containingWallet.getParams();
            ClientState.StoredClientPaymentChannels states = ClientState.StoredClientPaymentChannels.parseFrom(data);
            for (ClientState.StoredClientPaymentChannel storedState : states.getChannelsList()) {
                Transaction refundTransaction = new Transaction(params, storedState.getRefundTransaction().toByteArray());
                refundTransaction.getConfidence().setSource(TransactionConfidence.Source.SELF);
                StoredClientChannel channel = new StoredClientChannel(new Sha256Hash(storedState.getId().toByteArray()),
                        new Transaction(params, storedState.getContractTransaction().toByteArray()),
                        refundTransaction,
                        new ECKey(new BigInteger(1, storedState.getMyKey().toByteArray()), null, true),
                        BigInteger.valueOf(storedState.getValueToMe()),
                        BigInteger.valueOf(storedState.getRefundFees()), false);
                if (storedState.hasCloseTransactionHash())
                    // BUG FIX: this previously built the hash from storedState.toByteArray() — the bytes of the
                    // entire serialized protobuf message — instead of the stored close-transaction hash, so the
                    // close transaction could never be looked up in the wallet after a round-trip.
                    channel.close = containingWallet.getTransaction(new Sha256Hash(storedState.getCloseTransactionHash().toByteArray()));
                putChannel(channel, false);
            }
        } finally {
            lock.unlock();
        }
    }

    @Override
    public String toString() {
        lock.lock();
        try {
            StringBuilder buf = new StringBuilder("Client payment channel states:\n");
            for (StoredClientChannel channel : mapChannels.values())
                buf.append("  ").append(channel).append("\n");
            return buf.toString();
        } finally {
            lock.unlock();
        }
    }
}
/**
 * Snapshot of an opened payment channel. Persisting this state lets an interrupted channel
 * (e.g. after a connection failure) be resumed later, and keeps track of refund transactions
 * that must be broadcast once they expire.
 */
class StoredClientChannel {
    Sha256Hash id;
    Transaction contract, refund;
    // The transaction that closed the channel (generated by the server)
    Transaction close;
    ECKey myKey;
    BigInteger valueToMe, refundFees;
    // In-memory flag to indicate intent to resume this channel (or that the channel is already in use)
    boolean active = false;

    StoredClientChannel(Sha256Hash id, Transaction contract, Transaction refund, ECKey myKey, BigInteger valueToMe,
                        BigInteger refundFees, boolean active) {
        this.active = active;
        this.refundFees = refundFees;
        this.valueToMe = valueToMe;
        this.myKey = myKey;
        this.refund = refund;
        this.contract = contract;
        this.id = id;
    }

    /** Seconds-since-epoch at which the refund becomes broadcastable, plus a five-minute safety margin. */
    long expiryTimeSeconds() {
        final int safetyMarginSeconds = 60 * 5;
        return refund.getLockTime() + safetyMarginSeconds;
    }

    @Override
    public String toString() {
        final String nl = String.format("%n");
        final String closeText = close == null ? "still open" : indentLines(close.toString(), nl);
        return String.format("Stored client channel for server ID %s (%s)%n" +
                " Key: %s%n" +
                " Value left: %d%n" +
                " Refund fees: %d%n" +
                " Contract: %s" +
                "Refund: %s" +
                "Close: %s",
                id, active ? "active" : "inactive", myKey, valueToMe, refundFees,
                indentLines(contract.toString(), nl),
                indentLines(refund.toString(), nl),
                closeText);
    }

    // Re-indents a multi-line transaction dump so it nests under this channel's entry.
    private static String indentLines(String text, String nl) {
        return text.replaceAll(nl, nl + " ");
    }
}
| |
/*
* Copyright (C) 2015 Information Management Services, Inc.
*/
package com.imsweb.naaccrxml;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import com.imsweb.naaccrxml.entity.Patient;
/**
* This class can be called on the command line to process an entire folder of files.
* <br/>
* Usage: java -cp naaccr-xml-x.x-all.jar BatchProcessor options.properties
*/
public final class BatchProcessor {
// *** Keys of the options read from the properties file given to main(). ***

// full path to the folder containing the input files (required)
private static final String _OPTION_INPUT_FOLDER = "input.folder";
// file name inclusion regex applied to each input file name (optional)
private static final String _OPTION_INPUT_REGEX_INCLUDE = "input.regex-include";
// file name exclusion regex applied to each input file name (optional; exclusion wins over inclusion)
private static final String _OPTION_INPUT_REGEX_EXCLUDE = "input.regex-exclude";
// processing mode: flat-to-xml or xml-to-flat (required)
private static final String _OPTION_PROCESSING_MODE = "processing.mode";
// the list of error codes (comma separated) to take into account (optional, if not provided, all codes are used)
private static final String _OPTION_PROCESSING_ERROR_CODES = "processing.error-codes";
// the number of threads to use (optional, defaults to Min(num-processors + 1, 5))
private static final String _OPTION_PROCESSING_NUM_THREADS = "processing.num-threads";
// compression for the created data files; gz, xz, none, as-input (optional, defaults to as-input)
private static final String _OPTION_PROCESSING_COMPRESSION = "processing.compression";
// full path to the folder where the files should be created (required, must already exist)
private static final String _OPTION_OUTPUT_FOLDER = "output.folder";
// whether the created files should be auto-deleted on JVM exit: true or false (optional, defaults to false)
private static final String _OPTION_OUTPUT_CLEAN_CREATED_FILES = "output.clean-created-files";
// whether a report file should be created in the output folder: true or false (optional, defaults to false)
private static final String _OPTION_OUTPUT_CREATE_REPORT = "output.create-report";
// the report name (optional, defaults to report.txt)
private static final String _OPTION_OUTPUT_REPORT_NAME = "output.report-name";
// whether input file names should be de-identified in the report (optional, defaults to false)
private static final String _OPTION_OUTPUT_DEIDENTIFY_FILES = "output.de-identify-files";
/**
 * Main method, entry point.
 * <p>
 * Expects a single argument: the path to a properties file providing the "_OPTION_*" options.
 * Validates the options, gathers the input files, translates each one on a small thread pool
 * and optionally writes a summary report in the output folder.
 *
 * @param args command-line arguments; args[0] is the options file path
 * @throws IOException if the report file cannot be written
 * @throws InterruptedException if interrupted while waiting for the worker threads
 * @throws RuntimeException if any option is missing or invalid
 */
public static void main(String[] args) throws IOException, InterruptedException {

    // read the options
    Properties opt = readOptions(args);
    if (opt == null)
        throw new RuntimeException("Unable to find options file path, it must be provided as an argument to the call.");

    // validate the options
    if (opt.getProperty(_OPTION_INPUT_FOLDER) == null || opt.getProperty(_OPTION_INPUT_FOLDER).isEmpty())
        throw new RuntimeException("Option " + _OPTION_INPUT_FOLDER + " is required.");
    File inputDir = new File(opt.getProperty(_OPTION_INPUT_FOLDER));
    if (!inputDir.exists())
        throw new RuntimeException("Invalid input folder.");
    Pattern incRegex = opt.getProperty(_OPTION_INPUT_REGEX_INCLUDE) == null ? null : Pattern.compile(opt.getProperty(_OPTION_INPUT_REGEX_INCLUDE));
    Pattern excRegex = opt.getProperty(_OPTION_INPUT_REGEX_EXCLUDE) == null ? null : Pattern.compile(opt.getProperty(_OPTION_INPUT_REGEX_EXCLUDE));
    String mode = opt.getProperty(_OPTION_PROCESSING_MODE);
    if (mode == null)
        throw new RuntimeException("Option " + _OPTION_PROCESSING_MODE + " is required.");
    if (!"flat-to-xml".equals(mode) && !"xml-to-flat".equals(mode))
        throw new RuntimeException("Invalid mode (must be flat-to-xml or xml-to-flat).");
    String rawErrorCodes = opt.getProperty(_OPTION_PROCESSING_ERROR_CODES);
    List<String> errorCodes = null;
    if (rawErrorCodes != null && !rawErrorCodes.isEmpty()) {
        errorCodes = new ArrayList<>();
        for (String s : StringUtils.split(rawErrorCodes, ','))
            errorCodes.add(s.trim());
    }
    int numThreads = Math.min(Runtime.getRuntime().availableProcessors() + 1, 5);
    if (opt.getProperty(_OPTION_PROCESSING_NUM_THREADS) != null && !opt.getProperty(_OPTION_PROCESSING_NUM_THREADS).isEmpty())
        numThreads = Integer.parseInt(opt.getProperty(_OPTION_PROCESSING_NUM_THREADS));
    if (opt.getProperty(_OPTION_OUTPUT_FOLDER) == null || opt.getProperty(_OPTION_OUTPUT_FOLDER).isEmpty())
        throw new RuntimeException("Option " + _OPTION_OUTPUT_FOLDER + " is required.");
    String compression = opt.getProperty(_OPTION_PROCESSING_COMPRESSION);
    if (compression != null && !compression.equals("gz") && !compression.equals("xz") && !compression.equals("none") && !compression.equals("as-input"))
        throw new RuntimeException("Invalid compression (must be gz, xz, none, or as-input).");
    File outputDir = new File(opt.getProperty(_OPTION_OUTPUT_FOLDER));
    if (!outputDir.exists())
        throw new RuntimeException("Invalid output folder.");  // fixed typo in message ("outupt")
    // Boolean.parseBoolean handles a null property and returns false, matching the old ternaries.
    boolean cleanCreatedFiles = Boolean.parseBoolean(opt.getProperty(_OPTION_OUTPUT_CLEAN_CREATED_FILES));
    boolean createReport = Boolean.parseBoolean(opt.getProperty(_OPTION_OUTPUT_CREATE_REPORT));
    String reportName = opt.getProperty(_OPTION_OUTPUT_REPORT_NAME) == null ? "report.txt" : opt.getProperty(_OPTION_OUTPUT_REPORT_NAME);
    boolean deidentify = Boolean.parseBoolean(opt.getProperty(_OPTION_OUTPUT_DEIDENTIFY_FILES));

    // gather the files to process
    List<File> toProcess = new ArrayList<>();
    File[] files = inputDir.listFiles();
    if (files != null) {
        for (File file : files) {
            if (file.isDirectory())
                continue;
            boolean add = true;
            if (incRegex != null || excRegex != null) {
                if (incRegex != null && !incRegex.matcher(file.getName()).matches())
                    add = false;
                if (excRegex != null && excRegex.matcher(file.getName()).matches())
                    add = false;
            }
            if (add)
                toProcess.add(file);
        }
    }

    // we will report the information in this collection
    Map<String, List<String>> reportData = new TreeMap<>();
    Map<String, AtomicInteger> globalCounts = new HashMap<>();
    Map<String, Set<String>> globalDetails = new HashMap<>();
    AtomicInteger globalTumorCount = new AtomicInteger();

    // create the work
    long start = System.currentTimeMillis();
    ExecutorService executor = Executors.newFixedThreadPool(numThreads);
    for (File inputFile : toProcess) {
        String outputFilename = invertFilename(inputFile, compression);
        // invertFilename returns null for a file without an extension; fail with a clear
        // message instead of the NPE that new File(dir, null) would previously throw.
        if (outputFilename == null)
            throw new RuntimeException("Unable to compute output file name for " + inputFile.getName());
        File outputFile = new File(outputDir, outputFilename);
        if (inputFile.equals(outputFile))
            throw new RuntimeException("Was about to write output file into the input file, this can't be good!");
        if (cleanCreatedFiles)
            outputFile.deleteOnExit();
        List<String> data = new ArrayList<>();
        reportData.put(inputFile.getName(), data);
        executor.execute(new FileProcessor(inputFile, outputFile, data, cleanCreatedFiles, "flat-to-xml".equals(mode), globalCounts, globalDetails, globalTumorCount, errorCodes));
    }
    executor.shutdown();

    // wait for the work to be completed
    executor.awaitTermination(1, TimeUnit.DAYS);

    // write the report (try-with-resources so the writer is closed even if writing fails)
    if (createReport) {
        try (Writer reportWriter = new OutputStreamWriter(new FileOutputStream(new File(outputDir, reportName)), StandardCharsets.UTF_8)) {
            reportWriter.write("Report created on " + new Date() + "\n\n");
            reportWriter.write("total number of files: " + formatNumber(toProcess.size()) + "\n");
            reportWriter.write("total processing time: " + formatTime(System.currentTimeMillis() - start) + "\n");
            reportWriter.write("total number of processed tumors: " + formatNumber(globalTumorCount.get()) + "\n");
            reportWriter.write("combined warnings:\n");
            int globalCount = 0;
            for (String code : NaaccrErrorUtils.getAllValidationErrors().keySet()) {
                if (errorCodes != null && !errorCodes.contains(code))
                    continue;
                int count = globalCounts.containsKey(code) ? globalCounts.get(code).get() : 0;
                if (count > 0) {
                    reportWriter.write("   " + code + ": " + formatNumber(count) + " cases\n");
                    if (globalDetails.containsKey(code)) {
                        List<String> list = new ArrayList<>(globalDetails.get(code));
                        Collections.sort(list);
                        reportWriter.write("      involved item(s): " + list.size() + " " + list + "\n");
                    }
                }
                globalCount += count;
            }
            if (globalCount == 0)
                reportWriter.write("   no warning found\n");
            for (Entry<String, List<String>> entry : reportData.entrySet()) {
                reportWriter.write("\n\n");
                reportWriter.write(deidentify ? "<de-identified file name>" : entry.getKey());
                reportWriter.write("\n");
                for (String line : entry.getValue()) {
                    reportWriter.write(line);
                    reportWriter.write("\n");
                }
            }
        }
    }
}
/**
 * Reads the options from the properties file given as the first command-line argument.
 *
 * @param args command-line arguments; args[0] is expected to be the options file path
 * @return the loaded options, or null if no argument was given, the file does not exist,
 *         or it could not be read
 */
private static Properties readOptions(String[] args) {
    if (args.length == 0)
        return null;
    File optionsFile = new File(args[0]);
    if (!optionsFile.exists())
        return null;
    Properties properties = new Properties();
    try (Reader optionsReader = new InputStreamReader(new FileInputStream(optionsFile), StandardCharsets.UTF_8)) {
        properties.load(optionsReader);
    }
    catch (IOException e) {
        // best-effort read: the caller treats a null result as "options unavailable"
        return null;
    }
    return properties;
}
/**
 * Computes the output file name for the given input file: swaps the data extension
 * (xml becomes txt, anything else becomes xml) and applies the requested compression.
 *
 * @param file input file
 * @param compression requested compression: "gz", "xz", "none" or "as-input"/null (keep the input's)
 * @return the computed output file name, or null if the input file has no extension
 */
private static String invertFilename(File file, String compression) {
    // first invert the filename
    String[] name = StringUtils.split(file.getName(), '.');
    if (name.length < 2)
        return null;
    String extension = name[name.length - 1];
    // BUG FIX: xz inputs were not recognized as compressed (only gz was checked), so a file
    // like "data.xml.xz" produced "data.xml.xml"; track the actual compressed extension.
    String compressedExtension = null;
    if (extension.equalsIgnoreCase("gz") || extension.equalsIgnoreCase("xz")) {
        compressedExtension = extension.toLowerCase();
        extension = name[name.length - 2];
    }
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < (compressedExtension != null ? name.length - 2 : name.length - 1); i++)
        result.append(name[i]).append(".");
    result.append(extension.equalsIgnoreCase("xml") ? "txt" : "xml");
    if (compressedExtension != null)
        result.append(".").append(compressedExtension);

    // then update the extension using the requested compression
    String newName = result.toString();
    if ("gz".equals(compression)) {
        if (newName.endsWith(".xz"))
            newName = newName.replace(".xz", "");
        if (!newName.endsWith(".gz"))
            newName = newName + ".gz";
    }
    else if ("xz".equals(compression)) {
        if (newName.endsWith(".gz"))
            newName = newName.replace(".gz", "");
        if (!newName.endsWith(".xz"))
            newName = newName + ".xz";
    }
    else if ("none".equals(compression)) {
        if (newName.endsWith(".gz"))
            newName = newName.replace(".gz", "");
        else if (newName.endsWith(".xz"))
            newName = newName.replace(".xz", "");
    }
    return new File(file.getParentFile(), newName).getName();
}
/**
 * Runnable that converts a single file (flat record &lt;-&gt; XML) and appends per-file
 * statistics to the shared report data, while the observer also merges them into the
 * global accumulators.
 */
private static final class FileProcessor implements Runnable {

    // all state is supplied by the caller and never reassigned after construction
    private final File _inputFile;
    private final File _outputFile;
    private final List<String> _reportData;
    private final boolean _deleteOutputFiles;
    private final boolean _flatToXml;
    private final Map<String, AtomicInteger> _globalCounts;
    private final Map<String, Set<String>> _globalDetails;
    private final AtomicInteger _globalTumorCount;
    private final List<String> _errorCodes;

    public FileProcessor(File inputFile, File outputFile, List<String> reportData, boolean deleteOutputFiles, boolean flatToXml, Map<String, AtomicInteger> globalCounts, Map<String, Set<String>> globalDetails, AtomicInteger globalTumorCount, List<String> errorCodes) {
        _inputFile = inputFile;
        _outputFile = outputFile;
        _reportData = reportData;
        _deleteOutputFiles = deleteOutputFiles;
        _flatToXml = flatToXml;
        _globalCounts = globalCounts;
        _globalDetails = globalDetails;
        _globalTumorCount = globalTumorCount;
        _errorCodes = errorCodes;
    }

    @Override
    @SuppressWarnings("ResultOfMethodCallIgnored")
    public void run() {
        // per-file statistics; the observer merges them into the global ones as it goes
        Map<String, AtomicInteger> fileCounts = new HashMap<>();
        Map<String, Set<String>> fileDetails = new HashMap<>();
        AtomicInteger fileTumorCount = new AtomicInteger();

        NaaccrOptions options = new NaaccrOptions();
        options.setReportLevelMismatch(true);
        NaaccrObserver observer = new FileObserver(fileCounts, fileDetails, fileTumorCount, _globalCounts, _globalDetails, _globalTumorCount);

        try {
            long start = System.currentTimeMillis();
            if (_flatToXml)
                NaaccrXmlUtils.flatToXml(_inputFile, _outputFile, options, null, observer);
            else
                NaaccrXmlUtils.xmlToFlat(_inputFile, _outputFile, options, null, observer);
            _reportData.add(" original size: " + formatFileSize(_inputFile.length()));
            _reportData.add(" created size: " + formatFileSize(_outputFile.length()));
            _reportData.add(" processing time: " + formatTime(System.currentTimeMillis() - start));
            _reportData.add(" number of processed tumors: " + formatNumber(fileTumorCount.get()));
            _reportData.add(" warnings:");
            int totalWarnings = 0;
            for (String code : NaaccrErrorUtils.getAllValidationErrors().keySet()) {
                // honor the optional error-code filter
                if (_errorCodes != null && !_errorCodes.contains(code))
                    continue;
                AtomicInteger counter = fileCounts.get(code);
                int count = counter == null ? 0 : counter.get();
                if (count > 0) {
                    _reportData.add(" " + code + ": " + formatNumber(count) + " cases");
                    Set<String> details = fileDetails.get(code);
                    if (details != null) {
                        List<String> sorted = new ArrayList<>(details);
                        Collections.sort(sorted);
                        _reportData.add(" involved item(s): " + sorted.size() + " " + sorted);
                    }
                }
                totalWarnings += count;
            }
            if (totalWarnings == 0)
                _reportData.add(" no warning found");
        }
        catch (NaaccrIOException e) {
            _reportData.add(" processing error: " + e.getMessage());
        }
        if (_deleteOutputFiles)
            if (!_outputFile.delete())
                System.err.println("Unable to delete " + _outputFile.getPath());
    }
}
/**
 * Observer that tallies validation warnings and tumor counts into both the per-file
 * accumulators and the global (all-files) accumulators.
 */
private static final class FileObserver implements NaaccrObserver {

    private final Map<String, AtomicInteger> _warningCounts, _globalCounts;
    private final Map<String, Set<String>> _warningDetails, _globalDetails;
    private final AtomicInteger _tumorCount, _globalTumorCount;

    public FileObserver(Map<String, AtomicInteger> warningCounts, Map<String, Set<String>> warningDetails, AtomicInteger tumorCount, Map<String, AtomicInteger> globalCounts, Map<String, Set<String>> globalDetails, AtomicInteger globalTumorCount) {
        _warningCounts = warningCounts;
        _warningDetails = warningDetails;
        _tumorCount = tumorCount;
        _globalCounts = globalCounts;
        _globalDetails = globalDetails;
        _globalTumorCount = globalTumorCount;
    }

    @Override
    public void patientRead(Patient patient) {
        handlePatient(patient);
    }

    @Override
    public void patientWritten(Patient patient) {
        handlePatient(patient);
        _tumorCount.addAndGet(patient.getTumors().size());
        _globalTumorCount.addAndGet(patient.getTumors().size());
    }

    // Records every validation error of the patient into the per-file and global maps.
    // NOTE(review): the global maps are shared across FileProcessor threads but appear to be
    // plain HashMaps created by the caller; computeIfAbsent narrows (but does not close) the
    // previous check-then-put race - consider ConcurrentHashMap at the call site.
    private void handlePatient(Patient patient) {
        for (NaaccrValidationError error : patient.getAllValidationErrors()) {
            // create-on-first-use counters; this was an inconsistent get/null-check/put
            // pattern before, now it matches the details handling below
            _warningCounts.computeIfAbsent(error.getCode(), k -> new AtomicInteger()).incrementAndGet();
            _globalCounts.computeIfAbsent(error.getCode(), k -> new AtomicInteger()).incrementAndGet();
            if (error.getNaaccrId() != null) {
                // file properties
                _warningDetails.computeIfAbsent(error.getCode(), k -> new HashSet<>()).add(error.getNaaccrId());
                // global properties
                _globalDetails.computeIfAbsent(error.getCode(), k -> new HashSet<>()).add(error.getNaaccrId());
            }
        }
    }
}
/**
 * Formats an integer with the default locale's grouping separators (e.g. 1,234,567).
 *
 * @param num value to format
 * @return the formatted value, never including a trailing decimal separator
 */
public static String formatNumber(int num) {
    DecimalFormat fmt = new DecimalFormat();
    // whole numbers must not show a dangling decimal separator
    fmt.setDecimalSeparatorAlwaysShown(false);
    return fmt.format(num);
}
/**
 * Formats a duration in milliseconds as a human-readable string, e.g.
 * "1 hour, 2 minutes, 3 seconds". Durations under one second yield "&lt; 1 second".
 *
 * @param timeInMilli duration in milliseconds
 * @return the formatted duration
 */
@SuppressWarnings("UnusedDeclaration")
public static String formatTime(long timeInMilli) {
    long totalSeconds = timeInMilli / 1000;
    long seconds = totalSeconds % 60;
    long minutes = (totalSeconds / 60) % 60;
    long hours = totalSeconds / 3600;
    StringBuilder buf = new StringBuilder();
    appendTimeUnit(buf, hours, "hour");
    appendTimeUnit(buf, minutes, "minute");
    appendTimeUnit(buf, seconds, "second");
    return buf.length() == 0 ? "< 1 second" : buf.toString();
}

// Appends "<value> <unit>[s]" to the buffer (comma-separated) when value is positive.
private static void appendTimeUnit(StringBuilder buf, long value, String unit) {
    if (value > 0) {
        if (buf.length() > 0)
            buf.append(", ");
        buf.append(value).append(" ").append(unit);
        if (value > 1)
            buf.append("s");
    }
}
/**
 * Formats a byte count using binary units (1 KB = 1024 B). Sub-kilobyte sizes are
 * reported as exact byte counts; larger sizes keep at most one fractional digit.
 *
 * @param size size in bytes
 * @return the formatted size, e.g. "512 B", "1.5 KB", "3 MB"
 */
public static String formatFileSize(long size) {
    long kb = 1024, mb = kb * 1024, gb = mb * 1024;
    if (size < kb)
        return size + " B";
    if (size < mb)
        return new DecimalFormat("#.# KB").format((double)size / kb);
    if (size < gb)
        return new DecimalFormat("#.# MB").format((double)size / mb);
    return new DecimalFormat("#.# GB").format((double)size / gb);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.Cast;
import org.elasticsearch.painless.Definition.Type;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import java.util.Objects;
/**
 * The superclass for all E* (expression) and P* (postfix) nodes.
 */
public abstract class AExpression extends ANode {

    /**
     * The node that precedes this one in a variable chain. Kept so variable chains can be
     * analyzed and written in their natural order: the parent of a chain wants the data
     * produced by the final postfix.
     */
    AExpression prefix;

    /**
     * False when the expression's value is discarded (e.g. a basic assignment).
     * Always set by the parent as input.
     */
    boolean read = true;

    /**
     * True when the expression can stand alone as a statement; used to prevent extraneous
     * bytecode. Always set by the node itself as output.
     */
    boolean statement = false;

    /**
     * The type the parent expects this node to produce. Input only: always set by the
     * parent and never read back by it.
     */
    Type expected = null;

    /**
     * The type this node actually produces. Output only: set by the node and read from
     * outside it. <b>After {@link #cast(Locals)} it reflects the post-cast type.</b>
     */
    Type actual = null;

    /**
     * Whether a cast applied to this expression must be explicit; set by {@link EExplicit}.
     */
    boolean explicit = false;

    /**
     * Whether boxing/unboxing casts are permitted; used for method arguments where such
     * casts may be required.
     */
    boolean internal = false;

    /**
     * The constant value this node represents, or null if it is not a constant. When
     * non-null, the node is replaced by an {@link EConstant} during casting (unless it
     * already is one).
     */
    Object constant = null;

    /**
     * True only for {@link ENull}, representing a null literal.
     */
    boolean isNull = false;

    /**
     * Standard constructor with location used for error tracking.
     */
    AExpression(Location location) {
        super(location);
        prefix = null;
    }

    /**
     * Used by variable/method chains when postfixes are specified.
     */
    AExpression(Location location, AExpression prefix) {
        super(location);
        this.prefix = Objects.requireNonNull(prefix);
    }

    /**
     * Inserts {@link ECast} nodes into the tree for implicit casts, and replaces nodes
     * whose constant field is non-null with an {@link EConstant}.
     * @return The new child node for the parent node calling this method.
     */
    AExpression cast(Locals locals) {
        Cast cast = locals.getDefinition().caster.getLegalCast(location, actual, expected, explicit, internal);

        if (cast == null) {
            // No cast needed. If there is no constant (or this already is an EConstant)
            // the tree is left untouched; otherwise the node is replaced by an EConstant
            // carrying its constant value. Output data need not be copied - the EConstant
            // produces the same output after analysis.
            if (constant == null || this instanceof EConstant) {
                return this;
            }
            EConstant replacement = new EConstant(location, constant);
            replacement.analyze(locals);
            if (!expected.equals(replacement.actual)) {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
            return replacement;
        }

        if (constant == null) {
            // Cast needed, no constant: wrap this node in an ECast, copying the output
            // data the parent will read from the wrapper.
            ECast wrapper = new ECast(location, this, cast);
            wrapper.statement = statement;
            wrapper.actual = expected;
            wrapper.isNull = isNull;
            return wrapper;
        }

        if (Definition.isConstantType(expected)) {
            // Cast needed and the constant can be folded immediately: cast the constant
            // value now and replace this node with an EConstant of the expected type.
            constant = locals.getDefinition().caster.constCast(location, constant, cast);
            EConstant replacement = new EConstant(location, constant);
            replacement.analyze(locals);
            if (!expected.equals(replacement.actual)) {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
            return replacement;
        }

        if (this instanceof EConstant) {
            // Cast needed, constant cannot be folded, node already is an EConstant:
            // just wrap it in an ECast.
            ECast wrapper = new ECast(location, this, cast);
            wrapper.actual = expected;
            return wrapper;
        }

        // Cast needed, constant cannot be folded, node is not yet an EConstant:
        // replace this node with an EConstant and wrap that in an ECast.
        EConstant replacement = new EConstant(location, constant);
        replacement.analyze(locals);
        if (!actual.equals(replacement.actual)) {
            throw createError(new IllegalStateException("Illegal tree structure."));
        }
        ECast wrapper = new ECast(location, replacement, cast);
        wrapper.actual = expected;
        return wrapper;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.propertyInspector;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ex.MultiLineLabel;
import com.intellij.openapi.util.Comparing;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.uiDesigner.radComponents.*;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.componentTree.ComponentSelectionListener;
import com.intellij.uiDesigner.componentTree.ComponentTree;
import com.intellij.uiDesigner.designSurface.GuiEditor;
import com.intellij.uiDesigner.designSurface.GridCaptionPanel;
import com.intellij.uiDesigner.quickFixes.QuickFixManager;import com.intellij.util.IJSwingUtilities;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.NonNls;
import javax.swing.*;import javax.swing.event.ChangeListener;import javax.swing.event.ChangeEvent;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.*;
import java.util.List;
/**
 * Right-hand inspector panel of the UI designer. Uses a CardLayout to show one of three
 * cards: the property table for the current component selection, an "empty" placeholder
 * label, or a custom properties panel (grid row/column or button-group properties).
 *
 * @author Anton Katilin
 * @author Vladimir Kondratyev
 */
public final class PropertyInspector extends JPanel{
  private final PropertyInspectorTable myInspectorTable;
  private final ComponentTree myComponentTree;
  // Shows the "light bulb" quick-fix hint over the inspector table.
  private final QuickFixManager myQuickFixManager;
  // Currently attached editor; may be null when no form editor is active.
  private GuiEditor myEditor;
  private final PropertyInspector.MyComponentSelectionListener myComponentSelectionListener;
  // CardLayout card ids.
  @NonNls private static final String INSPECTOR_CARD = "inspector";
  @NonNls private static final String EMPTY_CARD = "empty";
  @NonNls private static final String CUSTOM_CARD = "column";
  private final JScrollPane myCustomPropertiesScrollPane = new JScrollPane();
  // Panel currently shown on CUSTOM_CARD, if any.
  private CustomPropertiesPanel myCustomPropertiesPanel;
  private final ChangeListener myCustomPropertiesChangeListener;
  // Container whose row/column properties are being edited; also serves as part of the
  // "custom panel has focus" check in showSelectedColumnProperties().
  private RadContainer myPropertiesPanelContainer;

  public PropertyInspector(Project project, @NotNull final ComponentTree componentTree) {
    super(new CardLayout());
    myInspectorTable = new PropertyInspectorTable(project, componentTree);
    myComponentTree = componentTree;
    // Card with property inspector
    final JPanel inspectorCard = new JPanel(new GridBagLayout());
    final JScrollPane inspectorScrollPane = ScrollPaneFactory.createScrollPane(myInspectorTable);
    inspectorCard.add(inspectorScrollPane,
      new GridBagConstraints(0, 0, 0, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)
    );
    final JCheckBox chkShowExpertProperties = new JCheckBox(UIDesignerBundle.message("chk.show.expert.properties"));
    inspectorCard.add(
      chkShowExpertProperties,
      new GridBagConstraints(0, 1, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0)
    );
    // Toggles visibility of "expert" properties in the table.
    chkShowExpertProperties.addActionListener(
      new ActionListener() {
        public void actionPerformed(final ActionEvent e) {
          myInspectorTable.setShowExpertProperties(chkShowExpertProperties.isSelected());
        }
      }
    );
    add(inspectorCard, INSPECTOR_CARD);
    // Empty card
    final MultiLineLabel label = new MultiLineLabel(UIDesignerBundle.message("label.select.single.component.to.edit.its.properties")){
      public void updateUI() {
        super.updateUI();
        // keep the placeholder's background in sync with the table's LaF background
        setBackground(myInspectorTable.getBackground());
      }
    };
    label.setOpaque(true);
    label.setHorizontalAlignment(SwingConstants.CENTER);
    add(label, EMPTY_CARD);
    add(myCustomPropertiesScrollPane, CUSTOM_CARD);
    myComponentSelectionListener = new MyComponentSelectionListener();
    // Show the correct initial card before any editor is attached.
    synchWithTree(false);
    // Install light bulb
    myQuickFixManager = new QuickFixManagerImpl(null, myInspectorTable, inspectorScrollPane.getViewport());
    // Propagates edits made in a custom properties panel back to the form.
    myCustomPropertiesChangeListener = new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        if (myPropertiesPanelContainer != null) {
          myPropertiesPanelContainer.revalidate();
        }
        // NOTE(review): myEditor is dereferenced without a null check; presumably the custom
        // panel can only fire changes while an editor is attached - confirm.
        if (myEditor.ensureEditable()) {
          myEditor.refreshAndSave(true);
        }
      }
    };
  }

  /**
   * Attaches (or detaches, when {@code editor} is null) this inspector to a form editor,
   * moving the selection listener from the previous editor to the new one.
   */
  public void setEditor(final GuiEditor editor) {
    if (myEditor != editor) {
      if (myEditor != null) {
        myEditor.removeComponentSelectionListener(myComponentSelectionListener);
      }
      myEditor = editor;
      myInspectorTable.setEditor(myEditor);
      myQuickFixManager.setEditor(myEditor);
      if (myEditor != null) {
        myEditor.addComponentSelectionListener(myComponentSelectionListener);
      }
      else {
        // detaching: stop listening to the custom panel that belonged to the old editor
        if (myCustomPropertiesPanel != null) {
          myCustomPropertiesPanel.removeChangeListener(myCustomPropertiesChangeListener);
        }
      }
    }
  }

  public void refreshIntentionHint() {
    myQuickFixManager.refreshIntentionHint();
  }

  /**
   * Chooses which card to show based on the current selection: the custom panel when a
   * grid caption has focus, the inspector table for selected components, the button-group
   * panel for a selected group, or the empty placeholder otherwise.
   */
  public void synchWithTree(final boolean forceSynch) {
    final CardLayout cardLayout = (CardLayout)getLayout();
    if (!showSelectedColumnProperties()) {
      final RadComponent[] selectedComponents = myComponentTree.getSelectedComponents();
      if(selectedComponents.length >= 1){
        cardLayout.show(this, INSPECTOR_CARD);
        myInspectorTable.synchWithTree(forceSynch);
      }
      else{
        List<RadButtonGroup> buttonGroups = myComponentTree.getSelectedElements(RadButtonGroup.class);
        if (buttonGroups.size() > 0) {
          showButtonGroupProperties(buttonGroups.get(0));
        }
        else {
          cardLayout.show(this, EMPTY_CARD);
        }
      }
    }
  }

  // NOTE(review): dereferences myEditor without a null check; reachable only through
  // synchWithTree() with a button group selected - confirm an editor is always attached then.
  private void showButtonGroupProperties(final RadButtonGroup group) {
    ButtonGroupPropertiesPanel props = new ButtonGroupPropertiesPanel(myEditor.getRootContainer(), group);
    myPropertiesPanelContainer = null;
    showCustomPropertiesPanel(props);
  }

  /**
   * Shows the row/column properties panel when a grid caption is focused (or the custom
   * panel itself already has focus). Returns true if the custom card is now showing.
   */
  private boolean showSelectedColumnProperties() {
    // keep showing the custom panel while the user is interacting with it
    if (myCustomPropertiesPanel != null && myPropertiesPanelContainer != null &&
        IJSwingUtilities.hasFocus(myCustomPropertiesPanel.getComponent())) {
      return true;
    }
    if (myEditor == null) return false;
    GridCaptionPanel panel = myEditor.getFocusedCaptionPanel();
    if (panel == null) return false;
    RadContainer container = panel.getSelectedContainer();
    if (container == null) return false;
    final int[] selection = panel.getSelectedCells(null);
    // NOTE(review): myPropertiesPanelContainer is assigned before the null check below, so it
    // stays set even when no properties panel is available - confirm this is intentional.
    myPropertiesPanelContainer = container;
    final CustomPropertiesPanel propertiesPanel = container.getGridLayoutManager().getRowColumnPropertiesPanel(container, panel.isRow(), selection);
    if (propertiesPanel == null) return false;
    showCustomPropertiesPanel(propertiesPanel);
    return true;
  }

  // Swaps the given panel into the CUSTOM_CARD (moving the change listener) and shows it.
  private void showCustomPropertiesPanel(final CustomPropertiesPanel propertiesPanel) {
    if (!Comparing.equal(propertiesPanel, myCustomPropertiesPanel)) {
      if (myCustomPropertiesPanel != null) {
        myCustomPropertiesPanel.removeChangeListener(myCustomPropertiesChangeListener);
      }
      myCustomPropertiesPanel = propertiesPanel;
      myCustomPropertiesPanel.addChangeListener(myCustomPropertiesChangeListener);
      myCustomPropertiesScrollPane.getViewport().setView(myCustomPropertiesPanel.getComponent());
    }
    final CardLayout cardLayout = (CardLayout)getLayout();
    cardLayout.show(this, CUSTOM_CARD);
  }

  public boolean isEditing(){
    return myInspectorTable.isEditing();
  }

  public void stopEditing(){
    myInspectorTable.editingStopped(null);
  }

  public void requestFocus() {
    myInspectorTable.requestFocus();
  }

  /**
   * Synchronizes state with component which is selected in the ComponentTree
   */
  private final class MyComponentSelectionListener implements ComponentSelectionListener{
    public void selectedComponentChanged(final GuiEditor source){
      synchWithTree(false);
    }
  }
}
| |
/*
* Copyright 2014, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.grpc;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.grpc.ClientCallImpl.ClientTransportProvider;
import io.grpc.MessageEncoding.Compressor;
import io.grpc.Metadata.Headers;
import io.grpc.internal.ClientStream;
import io.grpc.internal.ClientStreamListener;
import io.grpc.internal.ClientTransport;
import io.grpc.internal.ClientTransport.PingCallback;
import io.grpc.internal.ClientTransportFactory;
import io.grpc.internal.HttpUtil;
import io.grpc.internal.SerializingExecutor;
import io.grpc.internal.SharedResourceHolder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
/** A communication channel for making outgoing RPCs. */
@ThreadSafe
public final class ChannelImpl extends Channel {
private static final Logger log = Logger.getLogger(ChannelImpl.class.getName());
private final ClientTransportFactory transportFactory;
private final ExecutorService executor;
private final String userAgent;
private final Object lock = new Object();
/**
* Executor that runs deadline timers for requests.
*/
private ScheduledExecutorService scheduledExecutor;
// TODO(carl-mastrangelo): Allow clients to pass this in
private final BackoffPolicy.Provider backoffPolicyProvider =
new ExponentialBackoffPolicy.Provider();
/**
* We delegate to this channel, so that we can have interceptors as necessary. If there aren't
* any interceptors this will just be {@link RealChannel}.
*/
private final Channel interceptorChannel;
/**
* All transports that are not stopped. At the very least {@link #activeTransport} will be
* present, but previously used transports that still have streams or are stopping may also be
* present.
*/
@GuardedBy("lock")
private Collection<ClientTransport> transports = new ArrayList<ClientTransport>();
/**
* The transport for new outgoing requests. 'this' lock must be held when assigning to
* activeTransport.
*/
private volatile ClientTransport activeTransport;
@GuardedBy("lock")
private boolean shutdown;
@GuardedBy("lock")
private boolean terminated;
private Runnable terminationRunnable;
private long reconnectTimeMillis;
private BackoffPolicy reconnectPolicy;
private volatile Compressor defaultCompressor;
private final ClientTransportProvider transportProvider = new ClientTransportProvider() {
@Override
public ClientTransport get() {
return obtainActiveTransport();
}
};
/**
 * Creates a channel. Interceptors are applied to every call in order; deadline timers are
 * scheduled on a shared timer service that is released again in {@link #shutdown()}.
 */
ChannelImpl(ClientTransportFactory transportFactory, ExecutorService executor,
    @Nullable String userAgent, List<ClientInterceptor> interceptors) {
  this.transportFactory = transportFactory;
  this.executor = executor;
  this.userAgent = userAgent;
  // Wrap RealChannel so every newCall() goes through the configured interceptors.
  this.interceptorChannel = ClientInterceptors.intercept(new RealChannel(), interceptors);
  scheduledExecutor = SharedResourceHolder.get(TIMER_SERVICE);
}
/** Hack to allow executors to auto-shutdown. Not for general use. */
// TODO(ejona86): Replace with a real API.
void setTerminationRunnable(Runnable runnable) {
  // Invoked (while holding 'lock') once the channel becomes terminated; see shutdown()
  // and TransportListener.transportTerminated().
  this.terminationRunnable = runnable;
}
/**
 * Sets the channel-wide default compression method. New calls will use this compressor
 * unless a per-call override is supplied via {@code CallOptions}. If the remote host does
 * not support the message encoding the call will likely break; there is currently no
 * provided way to discover what message encodings the remote host supports.
 *
 * @param c the compressor to use, or {@code null} to disable compression (equivalent to
 *     using {@link MessageEncoding#NONE}); a non-null compressor must be thread-safe
 */
public void setDefaultCompressor(@Nullable Compressor c) {
  if (c == null) {
    defaultCompressor = MessageEncoding.NONE;
  } else {
    defaultCompressor = c;
  }
}
/**
 * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately
 * cancelled.
 *
 * @return this channel, for chaining
 */
public ChannelImpl shutdown() {
  ClientTransport savedActiveTransport;
  synchronized (lock) {
    // Idempotent: a second call is a no-op.
    if (shutdown) {
      return this;
    }
    shutdown = true;
    // After shutdown there are no new calls, so no new cancellation tasks are needed
    scheduledExecutor = SharedResourceHolder.release(TIMER_SERVICE, scheduledExecutor);
    savedActiveTransport = activeTransport;
    if (savedActiveTransport != null) {
      activeTransport = null;
    } else if (transports.isEmpty()) {
      // No transports at all, so the channel terminates immediately.
      terminated = true;
      lock.notifyAll();
      if (terminationRunnable != null) {
        terminationRunnable.run();
      }
    }
  }
  // The transport is shut down outside the lock - presumably to avoid invoking transport
  // code while holding it; confirm before restructuring.
  if (savedActiveTransport != null) {
    savedActiveTransport.shutdown();
  }
  return this;
}
/**
 * Initiates a forceful shutdown in which preexisting and new calls are cancelled. Although
 * forceful, the shutdown process is still not instantaneous; {@link #isTerminated()} will likely
 * return {@code false} immediately after this method returns.
 *
 * <p>NOT YET IMPLEMENTED. This method currently behaves identically to shutdown().
 *
 * @return this channel, for chaining
 */
// TODO(ejona86): cancel preexisting calls.
public ChannelImpl shutdownNow() {
  // Delegates to the orderly shutdown until forceful cancellation is implemented.
  shutdown();
  return this;
}
/**
 * Returns whether the channel is shutdown. Shutdown channels immediately cancel any new calls,
 * but may still have some calls being processed.
 *
 * @return true once {@link #shutdown()} (or {@link #shutdownNow()}) has been invoked
 * @see #shutdown()
 * @see #isTerminated()
 */
public boolean isShutdown() {
  synchronized (lock) {
    return shutdown;
  }
}
/**
 * Waits for the channel to become terminated, giving up if the timeout is reached.
 *
 * @param timeout how long to wait
 * @param unit the time unit of {@code timeout}
 * @return whether the channel is terminated, as would be done by {@link #isTerminated()}.
 * @throws InterruptedException if interrupted while waiting on the lock
 */
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
  synchronized (lock) {
    long timeoutNanos = unit.toNanos(timeout);
    long endTimeNanos = System.nanoTime() + timeoutNanos;
    // Re-check after every wakeup: the deadline is recomputed so spurious wakeups and
    // unrelated notifyAll()s don't extend the wait.
    while (!terminated && (timeoutNanos = endTimeNanos - System.nanoTime()) > 0) {
      TimeUnit.NANOSECONDS.timedWait(lock, timeoutNanos);
    }
    return terminated;
  }
}
/**
 * Returns whether the channel is terminated. Terminated channels have no running calls and
 * relevant resources released (like TCP connections).
 *
 * @return true once shutdown has completed and all transports are gone
 * @see #isShutdown()
 */
public boolean isTerminated() {
  synchronized (lock) {
    return terminated;
  }
}
/**
 * Pings the remote endpoint to verify that the transport is still active. When an
 * acknowledgement is received, the given callback is invoked using the given executor.
 *
 * <p>If the underlying transport has no mechanism by when to send a ping, this method may
 * throw an {@link UnsupportedOperationException}. The operation may
 * {@linkplain PingCallback#pingFailed(Throwable) fail} due to transient transport errors;
 * in that case, trying again may succeed.
 *
 * @see ClientTransport#ping(PingCallback, Executor)
 */
@ExperimentalApi
public void ping(final PingCallback callback, final Executor executor) {
  try {
    ClientTransport transport = obtainActiveTransport();
    transport.ping(callback, executor);
  } catch (final RuntimeException ex) {
    // Any failure to obtain a transport or issue the ping is reported asynchronously
    // through the callback rather than thrown to the caller.
    executor.execute(new Runnable() {
      @Override
      public void run() {
        callback.pingFailed(ex);
      }
    });
  }
}
/**
 * Creates a new outgoing call on the channel, applying the channel-wide default
 * compressor when the caller did not specify one in {@code callOptions}.
 */
@Override
public <ReqT, RespT> ClientCall<ReqT, RespT> newCall(MethodDescriptor<ReqT, RespT> method,
    CallOptions callOptions) {
  boolean hasCodecOverride = callOptions.getCompressor() != null;
  // Only apply the channel default when it was actually configured and is a real
  // compressor. The field starts out null (setDefaultCompressor may never be called),
  // and the previous "defaultCompressor != MessageEncoding.NONE" check passed for null,
  // installing a null compressor into every call's options.
  if (!hasCodecOverride && defaultCompressor != null && defaultCompressor != MessageEncoding.NONE) {
    callOptions = callOptions.withCompressor(defaultCompressor);
  }
  return interceptorChannel.newCall(method, callOptions);
}
/**
 * Returns the transport to use for a new stream, creating and starting one if necessary.
 * Returns null once the channel is shut down, and the current {@code InactiveTransport}
 * while reconnect backoff is still in effect.
 */
private ClientTransport obtainActiveTransport() {
  ClientTransport savedActiveTransport = activeTransport;
  // If we know there is an active transport and we are not in backoff mode, return quickly.
  if (savedActiveTransport != null && !(savedActiveTransport instanceof InactiveTransport)) {
    return savedActiveTransport;
  }
  synchronized (lock) {
    if (shutdown) {
      // Callers must tolerate a null transport after shutdown.
      return null;
    }
    // Re-read under the lock; another thread may have changed it since the fast path.
    savedActiveTransport = activeTransport;
    if (savedActiveTransport instanceof InactiveTransport) {
      if (System.nanoTime() > TimeUnit.MILLISECONDS.toNanos(reconnectTimeMillis)) {
        // The timeout expired, clear the inactive transport and update the shutdown status to
        // something that is retryable.
        activeTransport = null;
        savedActiveTransport = activeTransport;
      } else {
        // We are still in backoff mode, just return the inactive transport.
        return savedActiveTransport;
      }
    }
    if (savedActiveTransport != null) {
      return savedActiveTransport;
    }
    // There is no active transport, or we just finished backoff. Create a new transport.
    ClientTransport newActiveTransport = transportFactory.newClientTransport();
    transports.add(newActiveTransport);
    boolean failed = true;
    try {
      newActiveTransport.start(new TransportListener(newActiveTransport));
      failed = false;
    } finally {
      // If start() threw, stop tracking the dead transport before rethrowing.
      if (failed) {
        transports.remove(newActiveTransport);
      }
    }
    // It's possible that start() called transportShutdown() and transportTerminated(). If so, we
    // wouldn't want to make it the active transport.
    if (transports.contains(newActiveTransport)) {
      // start() must return before we set activeTransport, since activeTransport is accessed
      // without a lock.
      activeTransport = newActiveTransport;
    }
    return newActiveTransport;
  }
}
/**
 * The innermost channel: builds the actual {@code ClientCallImpl} once all configured
 * interceptors have run.
 */
private class RealChannel extends Channel {
  @Override
  public <ReqT, RespT> ClientCall<ReqT, RespT> newCall(MethodDescriptor<ReqT, RespT> method,
      CallOptions callOptions) {
    // Each call gets its own serializing view of the shared executor so its callbacks
    // are delivered in order.
    ClientCallImpl<ReqT, RespT> call = new ClientCallImpl<ReqT, RespT>(
        method,
        new SerializingExecutor(executor),
        callOptions,
        transportProvider,
        scheduledExecutor);
    return call.setUserAgent(userAgent);
  }
}
/**
 * Receives lifecycle callbacks from one {@link ClientTransport} and keeps the channel's
 * {@code activeTransport}, backoff, and termination bookkeeping in sync. All mutation
 * happens while holding {@code lock}.
 */
private class TransportListener implements ClientTransport.Listener {
// The transport this listener is attached to; compared by identity against activeTransport.
private final ClientTransport transport;
public TransportListener(ClientTransport transport) {
this.transport = transport;
}
@Override
public void transportReady() {
synchronized (lock) {
if (activeTransport == transport) {
// Connection established: drop the backoff policy so the next failure starts fresh.
reconnectPolicy = null;
}
}
}
@Override
public void transportShutdown(Status s) {
synchronized (lock) {
if (activeTransport == transport) {
activeTransport = null;
// This transport listener was attached to the active transport.
if (s.isOk()) {
return;
}
// Alright, something bad has happened.
if (reconnectPolicy == null) {
// This happens the first time something bad has happened.
reconnectPolicy = backoffPolicyProvider.get();
reconnectTimeMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
}
// Enter backoff: new calls fail fast with s until reconnectTimeMillis has passed
// (see obtainActiveTransport).
activeTransport = new InactiveTransport(s);
reconnectTimeMillis += reconnectPolicy.nextBackoffMillis();
}
}
}
@Override
public void transportTerminated() {
synchronized (lock) {
if (activeTransport == transport) {
log.warning("transportTerminated called without previous transportShutdown");
activeTransport = null;
}
// TODO(notcarl): replace this with something more meaningful
// NOTE(review): by this point activeTransport can no longer equal this transport
// (cleared just above, or already replaced by an earlier shutdown callback), so this
// call appears to be a no-op — confirm before relying on it; see TODO above.
transportShutdown(Status.UNKNOWN.withDescription("transport shutdown for unknown reason"));
transports.remove(transport);
if (shutdown && transports.isEmpty()) {
if (terminated) {
log.warning("transportTerminated called after already terminated");
}
terminated = true;
// Wake any threads blocked waiting on lock for channel termination.
lock.notifyAll();
if (terminationRunnable != null) {
terminationRunnable.run();
}
}
}
}
}
/**
 * Header key carrying the call deadline, marshalled by {@link TimeoutMarshaller}.
 * Intended for internal use only.
 */
// TODO(johnbcoughlin) make this package private when we can do so with the tests.
@VisibleForTesting
public static final Metadata.Key<Long> TIMEOUT_KEY =
Metadata.Key.of(HttpUtil.TIMEOUT, new TimeoutMarshaller());
/** Header key naming the message encoding; value is a plain ASCII string. */
// TODO(carl-mastrangelo): move this to internal
public static final Metadata.Key<String> MESSAGE_ENCODING_KEY =
Metadata.Key.of(HttpUtil.MESSAGE_ENCODING, Metadata.ASCII_STRING_MARSHALLER);
/**
 * Marshals a microseconds representation of the timeout to and from a string representation,
 * consisting of an ASCII decimal representation of a number with at most 8 digits, followed by a
 * unit:
 * u = microseconds
 * m = milliseconds
 * S = seconds
 * M = minutes
 * H = hours
 *
 * <p>The representation is greedy with respect to precision. That is, 2 seconds will be
 * represented as `2000000u`.</p>
 *
 * <p>See <a href="https://github.com/grpc/grpc-common/blob/master/PROTOCOL-HTTP2.md#requests">the
 * request header definition</a></p>
 */
@VisibleForTesting
static class TimeoutMarshaller implements Metadata.AsciiMarshaller<Long> {

  /** The smallest integer with 9 digits; values below it fit the 8-digit wire limit. */
  private static final int CUTOFF = 100000000;

  /**
   * Serializes a non-negative microsecond timeout, choosing the finest unit whose value
   * still fits in 8 digits so the representation loses as little precision as possible.
   *
   * @throws IllegalArgumentException if the timeout is negative or too large for any unit
   */
  @Override
  public String toAsciiString(Long timeoutMicros) {
    Preconditions.checkArgument(timeoutMicros >= 0, "Negative timeout");
    long timeout;
    String timeoutUnit;
    if (timeoutMicros < CUTOFF) {
      timeout = timeoutMicros;
      timeoutUnit = "u";
    } else if (timeoutMicros / 1000L < CUTOFF) {
      timeout = timeoutMicros / 1000L;
      timeoutUnit = "m";
    } else if (timeoutMicros / (1000L * 1000L) < CUTOFF) {
      timeout = timeoutMicros / (1000L * 1000L);
      timeoutUnit = "S";
    } else if (timeoutMicros / (60L * 1000L * 1000L) < CUTOFF) {
      timeout = timeoutMicros / (60L * 1000L * 1000L);
      timeoutUnit = "M";
    } else if (timeoutMicros / (60L * 60L * 1000L * 1000L) < CUTOFF) {
      timeout = timeoutMicros / (60L * 60L * 1000L * 1000L);
      timeoutUnit = "H";
    } else {
      throw new IllegalArgumentException("Timeout too large");
    }
    return Long.toString(timeout) + timeoutUnit;
  }

  /**
   * Parses a serialized timeout back into microseconds.
   *
   * @throws IllegalArgumentException if the input is null, too short to contain a digit
   *     plus a unit, or ends in an unknown unit character
   * @throws NumberFormatException if the value part is not a valid decimal number
   */
  @Override
  public Long parseAsciiString(String serialized) {
    // FIX: previously an empty or single-character input escaped as
    // StringIndexOutOfBoundsException / NumberFormatException; validate up front instead.
    if (serialized == null || serialized.length() < 2) {
      throw new IllegalArgumentException("Invalid timeout value: " + serialized);
    }
    String valuePart = serialized.substring(0, serialized.length() - 1);
    char unit = serialized.charAt(serialized.length() - 1);
    long factor;
    switch (unit) {
      case 'u':
        factor = 1; break;
      case 'm':
        factor = 1000L; break;
      case 'S':
        factor = 1000L * 1000L; break;
      case 'M':
        factor = 60L * 1000L * 1000L; break;
      case 'H':
        factor = 60L * 60L * 1000L * 1000L; break;
      default:
        throw new IllegalArgumentException(String.format("Invalid timeout unit: %s", unit));
    }
    return Long.parseLong(valuePart) * factor;
  }
}
/**
 * Shared single-threaded {@link ScheduledExecutorService} resource. Its thread is a
 * daemon so an undisposed timer never prevents JVM exit.
 */
static final SharedResourceHolder.Resource<ScheduledExecutorService> TIMER_SERVICE =
new SharedResourceHolder.Resource<ScheduledExecutorService>() {
@Override
public ScheduledExecutorService create() {
return Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread thread = new Thread(r);
// Daemon so the shared timer thread does not block process shutdown.
thread.setDaemon(true);
return thread;
}
});
}
@Override
public void close(ScheduledExecutorService instance) {
// Graceful shutdown; already-scheduled tasks are allowed to complete.
instance.shutdown();
}
};
/**
 * Placeholder transport installed while the channel is in reconnect backoff (see
 * {@code TransportListener.transportShutdown}). Every stream or ping attempted against it
 * fails immediately with the status that shut down the real transport; it is never started.
 */
private static final class InactiveTransport implements ClientTransport {
// Status from the shutdown that triggered backoff; reported to all would-be users.
private final Status shutdownStatus;
private InactiveTransport(Status s) {
shutdownStatus = s;
}
@Override
public ClientStream newStream(
MethodDescriptor<?, ?> method, Headers headers, ClientStreamListener listener) {
// Fail the stream right away with the saved status and hand back a no-op stream.
listener.closed(shutdownStatus, new Metadata());
return new ClientCallImpl.NoopClientStream();
}
@Override
public void start(Listener listener) {
// This marker transport must never be started.
throw new IllegalStateException();
}
@Override
public void ping(final PingCallback callback, Executor executor) {
// Report the failure asynchronously on the caller-supplied executor.
executor.execute(new Runnable() {
@Override
public void run() {
callback.pingFailed(shutdownStatus.asException());
}
});
}
@Override
public void shutdown() {
// no-op
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.cfeclipse.cfml.preferences;
import java.util.ArrayList;
import java.util.List;
import org.cfeclipse.cfml.editors.CFConfiguration;
import org.cfeclipse.cfml.editors.CFDocumentSetupParticipant;
import org.cfeclipse.cfml.editors.CFMLEditor;
import org.cfeclipse.cfml.editors.ColorManager;
import org.cfeclipse.cfml.editors.ICFDocument;
import org.cfeclipse.cfml.editors.formatters.FormattingPreferences;
import org.cfeclipse.cfml.editors.formatters.CFMLFormatter;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.source.SourceViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.editors.text.EditorsUI;
import org.eclipse.ui.texteditor.ChainedPreferenceStore;
/*
* The page to configure the code formatter options.
*/
public class CodeFormatterPreferencePage extends AbstractCFEditorPreferencePage {
private SourceViewer fPreviewViewer;
private CFPreviewerUpdater fPreviewerUpdater;
private CFMLEditor fEditor;
@SuppressWarnings("unchecked")
protected OverlayPreferenceStore createOverlayStore() {
List overlayKeys= new ArrayList();
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_WRAP_LONG));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_ALIGN));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.INT, EditorPreferenceConstants.FORMATTER_MAX_LINE_LENGTH));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.P_INSERT_SPACES_FOR_TABS));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.INT, EditorPreferenceConstants.P_TAB_WIDTH));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_CLOSE_TAGS));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_COLLAPSE_WHITESPACE));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_INDENT_ALL_ELEMENTS));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_TIDY_TAGS));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_CHANGE_TAG_CASE));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_CHANGE_TAG_CASE_UPPER));
overlayKeys.add(new OverlayPreferenceStore.OverlayKey(OverlayPreferenceStore.BOOLEAN, EditorPreferenceConstants.FORMATTER_CHANGE_TAG_CASE_LOWER));
OverlayPreferenceStore.OverlayKey[] keys= new OverlayPreferenceStore.OverlayKey[overlayKeys.size()];
overlayKeys.toArray(keys);
return new OverlayPreferenceStore(getPreferenceStore(), keys);
}
/*
* @see PreferencePage#createControl(Composite)
*/
public void createControl(Composite parent) {
super.createControl(parent);
//TODO set help
//WorkbenchHelp.setHelp(getControl(), "ANT_FORMATTER_PREFERENCE_PAGE");
}
protected Control createContents(Composite parent) {
initializeDialogUnits(parent);
getOverlayStore().load();
getOverlayStore().start();
int numColumns= 2;
Composite result= new Composite(parent, SWT.NONE);
GridLayout layout= new GridLayout();
layout.marginHeight= 0;
layout.marginWidth= 0;
result.setLayout(layout);
// Group indentationGroup= createGroup(numColumns, result, CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_0);
//
// String labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_1;
// String[] errorMessages= new String[]{CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_2, CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_3};
// addTextField(indentationGroup, labelText, EditorPreferenceConstants.P_TAB_WIDTH, 3, 0, errorMessages);
//
// labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_4;
// addCheckBox(indentationGroup, labelText, EditorPreferenceConstants.P_INSERT_SPACES_FOR_TABS, 1);
Group wrappingGroup= createGroup(numColumns, result, CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_6);
String labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_7;
String[] errorMessages= new String[]{CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_8, CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_9};
addTextField(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_MAX_LINE_LENGTH, 3, 0, errorMessages);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_10;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_WRAP_LONG, 1);
// labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_5;
// addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_ALIGN, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_11;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_CLOSE_TAGS, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_12;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_FORMAT_SQL, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_13;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_COLLAPSE_WHITESPACE, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_14;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_INDENT_ALL_ELEMENTS, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_15;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_TIDY_TAGS, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_16;
addCheckBox(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_CHANGE_TAG_CASE, 1);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_17;
addRadioButton(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_CHANGE_TAG_CASE_LOWER, 15);
labelText= CFMLPreferencesMessages.CFMLCodeFormatterPreferencePage_18;
addRadioButton(wrappingGroup, labelText, EditorPreferenceConstants.FORMATTER_CHANGE_TAG_CASE_UPPER, 15);
Label label= new Label(result, SWT.LEFT);
label.setText(CFMLPreferencesMessages.CFMLEditorPreferencePage_9);
label.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
Control previewer= createPreviewer(result);
GridData gd= new GridData(GridData.FILL_BOTH);
gd.widthHint= convertWidthInCharsToPixels(20);
gd.heightHint= convertHeightInCharsToPixels(5);
previewer.setLayoutData(gd);
initializeFields();
applyDialogFont(result);
return result;
}
/**
* Convenience method to create a group
*/
private Group createGroup(int numColumns, Composite parent, String text ) {
final Group group= new Group(parent, SWT.NONE);
GridData gd= new GridData(GridData.FILL_HORIZONTAL);
gd.horizontalSpan= numColumns;
gd.widthHint= 0;
group.setLayoutData(gd);
group.setFont(parent.getFont());
final GridLayout layout= new GridLayout(numColumns, false);
group.setLayout(layout);
group.setText(text);
return group;
}
private Control createPreviewer(Composite parent) {
fPreviewViewer = new SourceViewer(parent, null, null, false, SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL);
fEditor = new CFMLEditor();
ColorManager colorMan = new ColorManager();
CFConfiguration configuration = new CFConfiguration(colorMan,fEditor);
fPreviewViewer.configure(configuration);
fPreviewViewer.setEditable(false);
Font font= JFaceResources.getFont(JFaceResources.TEXT_FONT);
fPreviewViewer.getTextWidget().setFont(font);
IPreferenceStore store= new ChainedPreferenceStore(new IPreferenceStore[] { getOverlayStore(), EditorsUI.getPreferenceStore() });
fPreviewerUpdater= new CFPreviewerUpdater(fPreviewViewer, configuration, store);
String content= loadPreviewContentFromFile("FormatPreviewCode.txt"); //$NON-NLS-1$
content= formatContent(content, store);
//IDocument document = new Document(content);
ICFDocument document = new ICFDocument(content);
new CFDocumentSetupParticipant().setup(document);
fPreviewViewer.setDocument(document);
return fPreviewViewer.getControl();
}
private String formatContent(String content, IPreferenceStore preferenceStore) {
FormattingPreferences prefs= new FormattingPreferences();
prefs.setPreferenceStore(preferenceStore);
return CFMLFormatter.format(content, prefs);
}
/* (non-Javadoc)
* @see org.cfeclipse.cfml.preferences.AbstractAntEditorPreferencePage#handleDefaults()
*/
protected void handleDefaults() {
}
/* (non-Javadoc)
* @see org.eclipse.jface.dialogs.IDialogPage#dispose()
*/
public void dispose() {
super.dispose();
if (fPreviewerUpdater != null) {
fPreviewerUpdater.dispose();
}
}
}
| |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.ambrose.service.impl;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.Collection;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentSkipListMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.twitter.ambrose.model.DAGNode;
import com.twitter.ambrose.model.Event;
import com.twitter.ambrose.model.Job;
import com.twitter.ambrose.model.PaginatedList;
import com.twitter.ambrose.model.WorkflowSummary;
import com.twitter.ambrose.service.StatsReadService;
import com.twitter.ambrose.service.StatsWriteService;
import com.twitter.ambrose.service.WorkflowIndexReadService;
import com.twitter.ambrose.util.JSONUtil;
/**
* In-memory implementation of both StatsReadService and StatsWriteService. Used when stats
* collection and stats serving are happening within the same VM. This class is intended to run in a
* VM that only handles a single workflow. Hence it ignores workflowId.
* <p/>
* Upon job completion this class can optionally write all json data to disk. This is useful for
* debugging. The written files can also be replayed in the Ambrose UI without re-running the Job
* via the <code>bin/demo</code> script. To write all json data to disk, set the following values as
* system properties using <code>-D</code>:
* <pre>
* <ul>
* <li><code>{@value #DUMP_WORKFLOW_FILE_PARAM}</code> - file in which to write the workflow
* json.</li>
* <li><code>{@value #DUMP_EVENTS_FILE_PARAM}</code> - file in which to write the events
* json.</li>
* </ul>
* </pre>
*/
public class InMemoryStatsService<T extends Job> implements StatsReadService<T>, StatsWriteService<T>,
    WorkflowIndexReadService {
  private static final Logger LOG = LoggerFactory.getLogger(InMemoryStatsService.class);
  private static final String DUMP_WORKFLOW_FILE_PARAM = "ambrose.write.dag.file";
  private static final String DUMP_EVENTS_FILE_PARAM = "ambrose.write.events.file";

  /** Mutable summary of the single workflow this VM handles. */
  private final WorkflowSummary summary = new WorkflowSummary(null,
      System.getProperty("user.name", "unknown"), "unknown", null, 0, System.currentTimeMillis());
  /** One-entry "index" returned by {@link #getWorkflows}, wrapping {@link #summary}. */
  private final PaginatedList<WorkflowSummary> summaries =
      new PaginatedList<WorkflowSummary>(ImmutableList.of(summary));
  // Set once a JOB_FAILED event arrives; decides the terminal status at 100% progress.
  private boolean jobFailed = false;
  private Map<String, DAGNode<T>> dagNodeNameMap = Maps.newHashMap();
  // Events keyed by id; concurrent + sorted so tailMap() can serve "events since id" reads.
  private SortedMap<Integer, Event> eventMap = new ConcurrentSkipListMap<Integer, Event>();
  // Optional debug writers; null unless the corresponding system property is set.
  private Writer workflowWriter;
  private Writer eventsWriter;
  // Tracks whether the events JSON array has been opened, for correct "[ ... ]" framing.
  private boolean eventWritten = false;

  /**
   * Creates the service. When the {@value #DUMP_WORKFLOW_FILE_PARAM} or
   * {@value #DUMP_EVENTS_FILE_PARAM} system properties are set, the corresponding
   * debug writers are opened; failure to open one is logged but not fatal.
   */
  public InMemoryStatsService() {
    String dumpWorkflowFileName = System.getProperty(DUMP_WORKFLOW_FILE_PARAM);
    String dumpEventsFileName = System.getProperty(DUMP_EVENTS_FILE_PARAM);

    if (dumpWorkflowFileName != null) {
      try {
        workflowWriter = new PrintWriter(dumpWorkflowFileName);
      } catch (FileNotFoundException e) {
        LOG.error("Could not create dag PrintWriter at " + dumpWorkflowFileName, e);
      }
    }

    if (dumpEventsFileName != null) {
      try {
        eventsWriter = new PrintWriter(dumpEventsFileName);
      } catch (FileNotFoundException e) {
        LOG.error("Could not create events PrintWriter at " + dumpEventsFileName, e);
      }
    }
  }

  @Override
  public synchronized void sendDagNodeNameMap(String workflowId,
      Map<String, DAGNode<T>> dagNodeNameMap) throws IOException {
    this.summary.setId(workflowId);
    this.summary.setStatus(WorkflowSummary.Status.RUNNING);
    this.summary.setProgress(0);
    this.dagNodeNameMap = dagNodeNameMap;
    writeJsonDagNodenameMapToDisk(dagNodeNameMap);
  }

  /**
   * Records an event, updates the workflow summary's progress/status, and optionally
   * appends the event to the debug events file.
   */
  @Override
  public synchronized void pushEvent(String workflowId, Event event) throws IOException {
    eventMap.put(event.getId(), event);
    switch (event.getType()) {
      case WORKFLOW_PROGRESS:
        Event.WorkflowProgressEvent workflowProgressEvent = (Event.WorkflowProgressEvent) event;
        String progressString =
            workflowProgressEvent.getPayload().get(Event.WorkflowProgressField.workflowProgress);
        int progress = Integer.parseInt(progressString);
        summary.setProgress(progress);
        if (progress == 100) {
          summary.setStatus(jobFailed
              ? WorkflowSummary.Status.FAILED
              : WorkflowSummary.Status.SUCCEEDED);
        }
        break;
      case JOB_FAILED:
        jobFailed = true;
        // FIX: previously fell through to default without a break; the default was
        // empty so behavior is unchanged, but the intent is now explicit.
        break;
      default:
        // nothing
    }
    writeJsonEventToDisk(event);
  }

  @Override
  public synchronized Map<String, DAGNode<T>> getDagNodeNameMap(String workflowId) {
    return dagNodeNameMap;
  }

  @Override
  public synchronized Collection<Event> getEventsSinceId(String workflowId, int sinceId) {
    // sinceId is exclusive for non-negative ids, hence the +1 before tailMap().
    int minId = sinceId >= 0 ? sinceId + 1 : sinceId;
    return eventMap.tailMap(minId).values();
  }

  @Override
  public Map<String, String> getClusters() throws IOException {
    // Single-VM service: there is only ever one (default) cluster.
    return ImmutableMap.of("default", "default");
  }

  @Override
  public synchronized PaginatedList<WorkflowSummary> getWorkflows(String cluster,
      WorkflowSummary.Status status, String userId, int numResults, byte[] startKey)
      throws IOException {
    // Filters are ignored: this VM only ever tracks one workflow.
    return summaries;
  }

  /** Writes the DAG nodes as JSON to the debug workflow file, if one was opened. */
  private void writeJsonDagNodenameMapToDisk(Map<String, DAGNode<T>> dagNodeNameMap)
      throws IOException {
    if (workflowWriter != null && dagNodeNameMap != null) {
      JSONUtil.writeJson(workflowWriter, dagNodeNameMap.values());
    }
  }

  /** Appends one event to the debug events file as an element of a JSON array. */
  private void writeJsonEventToDisk(Event event) throws IOException {
    if (eventsWriter != null && event != null) {
      // Open the array on the first event, separate subsequent ones with commas.
      eventsWriter.write(!eventWritten ? "[ " : ", ");
      JSONUtil.writeJson(eventsWriter, event);
      eventsWriter.flush();
      eventWritten = true;
    }
  }

  /** Closes the debug writers, terminating the events JSON array if it was opened. */
  public void flushJsonToDisk() throws IOException {
    if (workflowWriter != null) {
      workflowWriter.close();
    }
    if (eventsWriter != null) {
      if (eventWritten) {
        eventsWriter.write(" ]\n");
      }
      eventsWriter.close();
    }
  }

  @Override
  public synchronized Collection<Event> getEventsSinceId(String workflowId, int sinceId,
      int maxEvents) throws IOException {
    // FIX(consistency): synchronized like the two-argument overload and the other
    // accessors; previously this was the only unsynchronized read of eventMap.
    int minId = sinceId >= 0 ? sinceId + 1 : sinceId;
    return Lists.newArrayList(Iterables.limit(eventMap.tailMap(minId).values(), maxEvents));
  }

  @Override
  public void initWriteService(Properties properties) throws IOException {
    // Do nothing
  }

  @Override
  public void initReadService(Properties properties) throws IOException {
    // Do nothing
  }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.coders;
import com.google.cloud.dataflow.sdk.util.BufferedElementCountingOutputStream;
import com.google.cloud.dataflow.sdk.util.VarInt;
import com.google.cloud.dataflow.sdk.util.common.ElementByteSizeObservableIterable;
import com.google.cloud.dataflow.sdk.util.common.ElementByteSizeObserver;
import com.google.common.base.Preconditions;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
/**
* An abstract base class with functionality for assembling a
* {@link Coder} for a class that implements {@code Iterable}.
*
* <p>To complete a subclass, implement the {@link #decodeToIterable} method. This superclass
* will decode the elements in the input stream into a {@link List} and then pass them to that
* method to be converted into the appropriate iterable type. Note that this means the input
* iterables must fit into memory.
*
* <p>The format of this coder is as follows:
*
* <ul>
* <li>If the input {@link Iterable} has a known and finite size, then the size is written to the
* output stream in big endian format, followed by all of the encoded elements.</li>
* <li>If the input {@link Iterable} is not known to have a finite size, then each element
* of the input is preceded by {@code true} encoded as a byte (indicating "more data")
* followed by the encoded element, and terminated by {@code false} encoded as a byte.</li>
* </ul>
*
* @param <T> the type of the elements of the {@code Iterable}s being transcoded
* @param <IterableT> the type of the Iterables being transcoded
*/
public abstract class IterableLikeCoder<T, IterableT extends Iterable<T>>
extends StandardCoder<IterableT> {
/** Returns the {@link Coder} used for individual elements of the iterable. */
public Coder<T> getElemCoder() {
return elementCoder;
}
/**
* Builds an instance of {@code IterableT}, this coder's associated {@link Iterable}-like
* subtype, from a list of decoded elements.
*/
protected abstract IterableT decodeToIterable(List<T> decodedElements);
/////////////////////////////////////////////////////////////////////////////
// Internal operations below here.
private final Coder<T> elementCoder;
// Human-readable name used in error messages (e.g. when encoding a null value).
private final String iterableName;
/**
* Returns the first element in the iterable-like {@code exampleValue} if it is non-empty,
* otherwise returns {@code null}.
*/
protected static <T, IterableT extends Iterable<T>>
List<Object> getInstanceComponentsHelper(IterableT exampleValue) {
for (T value : exampleValue) {
return Arrays.<Object>asList(value);
}
return null;
}
/**
* @param elementCoder coder for individual elements; must not be null
* @param iterableName name used in error messages; must not be null
*/
protected IterableLikeCoder(Coder<T> elementCoder, String iterableName) {
Preconditions.checkArgument(elementCoder != null,
"element Coder for IterableLikeCoder must not be null");
Preconditions.checkArgument(iterableName != null,
"iterable name for IterableLikeCoder must not be null");
this.elementCoder = elementCoder;
this.iterableName = iterableName;
}
/**
* Encodes the iterable per the class-level format: a big-endian int size followed by the
* elements when the size is known, otherwise -1 followed by counted blocks of elements.
*/
@Override
public void encode(
IterableT iterable, OutputStream outStream, Context context)
throws IOException, CoderException {
if (iterable == null) {
throw new CoderException("cannot encode a null " + iterableName);
}
Context nestedContext = context.nested();
DataOutputStream dataOutStream = new DataOutputStream(outStream);
if (iterable instanceof Collection) {
// We can know the size of the Iterable. Use an encoding with a
// leading size field, followed by that many elements.
Collection<T> collection = (Collection<T>) iterable;
dataOutStream.writeInt(collection.size());
for (T elem : collection) {
elementCoder.encode(elem, dataOutStream, nestedContext);
}
} else {
// We don't know the size without traversing it so use a fixed size buffer
// and encode as many elements as possible into it before outputting the size followed
// by the elements.
dataOutStream.writeInt(-1);
// NOTE(review): presumably the counting stream emits var-int element counts between
// blocks, since decode() below reads them back with VarInt.decodeLong — confirm the
// two stay in sync with BufferedElementCountingOutputStream.
BufferedElementCountingOutputStream countingOutputStream =
new BufferedElementCountingOutputStream(dataOutStream);
for (T elem : iterable) {
countingOutputStream.markElementStart();
elementCoder.encode(elem, countingOutputStream, nestedContext);
}
countingOutputStream.finish();
}
// Make sure all our output gets pushed to the underlying outStream.
dataOutStream.flush();
}
/**
* Decodes either format produced by {@link #encode}: a non-negative leading size means
* exactly that many elements follow; -1 means var-int-counted blocks terminated by a
* zero count. The decoded list is handed to {@link #decodeToIterable}.
*/
@Override
public IterableT decode(InputStream inStream, Context context)
throws IOException, CoderException {
Context nestedContext = context.nested();
DataInputStream dataInStream = new DataInputStream(inStream);
int size = dataInStream.readInt();
if (size >= 0) {
List<T> elements = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
elements.add(elementCoder.decode(dataInStream, nestedContext));
}
return decodeToIterable(elements);
} else {
List<T> elements = new ArrayList<>();
long count;
// We don't know the size a priori. Check if we're done with
// each block of elements.
while ((count = VarInt.decodeLong(dataInStream)) > 0) {
while (count > 0) {
elements.add(elementCoder.decode(dataInStream, nestedContext));
count -= 1;
}
}
return decodeToIterable(elements);
}
}
/** The only component coder is the element coder. */
@Override
public List<? extends Coder<?>> getCoderArguments() {
return Arrays.asList(elementCoder);
}
/**
* {@inheritDoc}
*
* @throws NonDeterministicException always.
* Encoding is not deterministic for the general {@link Iterable} case, as it depends
* upon the type of iterable. This may allow two objects to compare as equal
* while the encoding differs.
*/
@Override
public void verifyDeterministic() throws NonDeterministicException {
throw new NonDeterministicException(this,
"IterableLikeCoder can not guarantee deterministic ordering.");
}
/**
* {@inheritDoc}
*
* @return {@code true} if the iterable is of a known class that supports lazy counting
* of byte size, since that requires minimal extra computation.
*/
@Override
public boolean isRegisterByteSizeObserverCheap(
IterableT iterable, Context context) {
return iterable instanceof ElementByteSizeObservableIterable;
}
/**
* Reports the encoded byte size of {@code iterable} to {@code observer} without actually
* encoding it, mirroring the framing overhead that {@link #encode} would produce.
*/
@Override
public void registerByteSizeObserver(
IterableT iterable, ElementByteSizeObserver observer, Context context)
throws Exception {
if (iterable == null) {
throw new CoderException("cannot encode a null Iterable");
}
Context nestedContext = context.nested();
if (iterable instanceof ElementByteSizeObservableIterable) {
// Defer accounting until the iterable is actually traversed.
observer.setLazy();
ElementByteSizeObservableIterable<?, ?> observableIterable =
(ElementByteSizeObservableIterable<?, ?>) iterable;
observableIterable.addObserver(
new IteratorObserver(observer, iterable instanceof Collection));
} else {
if (iterable instanceof Collection) {
// We can know the size of the Iterable. Use an encoding with a
// leading size field, followed by that many elements.
Collection<T> collection = (Collection<T>) iterable;
// 4 bytes for the big-endian int size prefix written by encode().
observer.update(4L);
for (T elem : collection) {
elementCoder.registerByteSizeObserver(elem, observer, nestedContext);
}
} else {
// TODO: Update to use an accurate count depending on size and count, currently we
// are under estimating the size by up to 10 bytes per block of data since we are
// not encoding the count prefix which occurs at most once per 64k of data and is upto
// 10 bytes long. Since we include the total count we can upper bound the underestimate
// to be 10 / 65536 ~= 0.0153% of the actual size.
observer.update(4L);
long count = 0;
for (T elem : iterable) {
count += 1;
elementCoder.registerByteSizeObserver(elem, observer, nestedContext);
}
if (count > 0) {
// Update the length based upon the number of counted elements, this helps
// eliminate the case where all the elements are encoded in the first block and
// it is quite short (e.g. Long.MAX_VALUE nulls encoded with VoidCoder).
observer.update(VarInt.getLength(count));
}
// Update with the terminator byte.
observer.update(1L);
}
}
}
/**
* An observer that gets notified when an observable iterator
* returns a new value. This observer just notifies an outerObserver
* about this event. Additionally, the outerObserver is notified
* about additional separators that are transparently added by this
* coder.
*/
private class IteratorObserver implements Observer {
private final ElementByteSizeObserver outerObserver;
// True when the source is a Collection, i.e. the size-prefixed encoding is used.
private final boolean countable;
public IteratorObserver(ElementByteSizeObserver outerObserver,
boolean countable) {
this.outerObserver = outerObserver;
this.countable = countable;
if (countable) {
// Additional 4 bytes are due to size.
outerObserver.update(4L);
} else {
// Additional 5 bytes are due to size = -1 (4 bytes) and
// hasNext = false (1 byte).
outerObserver.update(5L);
}
}
@Override
public void update(Observable obs, Object obj) {
if (!(obj instanceof Long)) {
throw new AssertionError("unexpected parameter object");
}
if (countable) {
outerObserver.update(obs, obj);
} else {
// Additional 1 byte is due to hasNext = true flag.
outerObserver.update(obs, 1 + (long) obj);
}
}
}
}
| |
/*
* Copyright (C) 2016 Oleg Kan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.simplaapliko.trips.presentation.model;
import android.os.Parcel;
import android.os.Parcelable;

import java.util.Date;
import java.util.Objects;
public class PlaceModel extends Model {
/**
 * Immutable snapshot (memento) of a place's state. {@code PlaceModel.saveState()}
 * captures one so later edits can be detected with {@code isStateChanged()} or
 * undone with {@code restoreState(Momento)}. Parcelable so the snapshot can
 * survive Android process/configuration changes.
 */
public static class Momento implements Parcelable {
    private final long id;
    private final long tripId;
    private final long tripDateId;
    private final long labelId;
    private final String name;
    private final String description;
    private final String address;
    private final double latitude;
    private final double longitude;
    private final String notes;
    private final String placeId;
    private final Date arrivalTimePlan;
    private final Date departureTimePlan;
    private final Date arrivalTimeFact;
    private final Date departureTimeFact;
    private final Date tripDate;
    private final String colorCode;
    private final String label;

    public Momento(long id, long tripId, long tripDateId, long labelId, String name,
            String description, String address, double latitude, double longitude, String notes,
            String placeId, Date arrivalTimePlan, Date departureTimePlan, Date arrivalTimeFact,
            Date departureTimeFact, Date tripDate, String colorCode, String label) {
        this.id = id;
        this.tripId = tripId;
        this.tripDateId = tripDateId;
        this.labelId = labelId;
        this.name = name;
        this.description = description;
        this.address = address;
        this.latitude = latitude;
        this.longitude = longitude;
        this.notes = notes;
        this.placeId = placeId;
        this.arrivalTimePlan = arrivalTimePlan;
        this.departureTimePlan = departureTimePlan;
        this.arrivalTimeFact = arrivalTimeFact;
        this.departureTimeFact = departureTimeFact;
        this.tripDate = tripDate;
        this.colorCode = colorCode;
        this.label = label;
    }

    // Read-only accessors for the captured values.

    public long getId() {
        return id;
    }

    public long getTripId() {
        return tripId;
    }

    public long getTripDateId() {
        return tripDateId;
    }

    public long getLabelId() {
        return labelId;
    }

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    public String getAddress() {
        return address;
    }

    public double getLatitude() {
        return latitude;
    }

    public double getLongitude() {
        return longitude;
    }

    public String getNotes() {
        return notes;
    }

    public String getPlaceId() {
        return placeId;
    }

    public Date getArrivalTimePlan() {
        return arrivalTimePlan;
    }

    public Date getDepartureTimePlan() {
        return departureTimePlan;
    }

    public Date getArrivalTimeFact() {
        return arrivalTimeFact;
    }

    public Date getDepartureTimeFact() {
        return departureTimeFact;
    }

    public Date getTripDate() {
        return tripDate;
    }

    public String getColorCode() {
        return colorCode;
    }

    public String getLabel() {
        return label;
    }

    /** Equality over every captured field; doubles via {@link Double#compare}. */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Momento momento = (Momento) o;
        return id == momento.id
                && tripId == momento.tripId
                && tripDateId == momento.tripDateId
                && labelId == momento.labelId
                && Double.compare(momento.latitude, latitude) == 0
                && Double.compare(momento.longitude, longitude) == 0
                && Objects.equals(name, momento.name)
                && Objects.equals(description, momento.description)
                && Objects.equals(address, momento.address)
                && Objects.equals(notes, momento.notes)
                && Objects.equals(placeId, momento.placeId)
                && Objects.equals(arrivalTimePlan, momento.arrivalTimePlan)
                && Objects.equals(departureTimePlan, momento.departureTimePlan)
                && Objects.equals(arrivalTimeFact, momento.arrivalTimeFact)
                && Objects.equals(departureTimeFact, momento.departureTimeFact)
                && Objects.equals(tripDate, momento.tripDate)
                && Objects.equals(colorCode, momento.colorCode)
                && Objects.equals(label, momento.label);
    }

    /** Hash over the same fields used by {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return Objects.hash(id, tripId, tripDateId, labelId, name, description, address,
                latitude, longitude, notes, placeId, arrivalTimePlan, departureTimePlan,
                arrivalTimeFact, departureTimeFact, tripDate, colorCode, label);
    }

    @Override
    public String toString() {
        return "Momento{" +
                "id=" + id +
                ", tripId=" + tripId +
                ", tripDateId=" + tripDateId +
                ", labelId=" + labelId +
                ", name='" + name + '\'' +
                ", description='" + description + '\'' +
                ", address='" + address + '\'' +
                ", latitude=" + latitude +
                ", longitude=" + longitude +
                ", notes='" + notes + '\'' +
                ", placeId='" + placeId + '\'' +
                ", arrivalTimePlan=" + arrivalTimePlan +
                ", departureTimePlan=" + departureTimePlan +
                ", arrivalTimeFact=" + arrivalTimeFact +
                ", departureTimeFact=" + departureTimeFact +
                // Fixed: tripDate previously opened a quote it never closed;
                // Date fields are rendered unquoted like the other dates.
                ", tripDate=" + tripDate +
                ", colorCode='" + colorCode + '\'' +
                ", label='" + label + '\'' +
                '}';
    }

    // Parcelable support. Dates travel as epoch millis with -1 as the null
    // sentinel; read order must match write order exactly.

    /** Reads a Date written by {@link #writeDate}; -1 decodes to null. */
    private static Date readDate(Parcel in) {
        long millis = in.readLong();
        return millis == -1 ? null : new Date(millis);
    }

    /** Writes a nullable Date as epoch millis, using -1 for null. */
    private static void writeDate(Parcel dest, Date date) {
        dest.writeLong(date == null ? -1 : date.getTime());
    }

    protected Momento(Parcel in) {
        id = in.readLong();
        tripId = in.readLong();
        tripDateId = in.readLong();
        labelId = in.readLong();
        name = in.readString();
        description = in.readString();
        address = in.readString();
        latitude = in.readDouble();
        longitude = in.readDouble();
        notes = in.readString();
        placeId = in.readString();
        arrivalTimePlan = readDate(in);
        departureTimePlan = readDate(in);
        arrivalTimeFact = readDate(in);
        departureTimeFact = readDate(in);
        tripDate = readDate(in);
        colorCode = in.readString();
        label = in.readString();
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeLong(id);
        dest.writeLong(tripId);
        dest.writeLong(tripDateId);
        dest.writeLong(labelId);
        dest.writeString(name);
        dest.writeString(description);
        dest.writeString(address);
        dest.writeDouble(latitude);
        dest.writeDouble(longitude);
        dest.writeString(notes);
        dest.writeString(placeId);
        writeDate(dest, arrivalTimePlan);
        writeDate(dest, departureTimePlan);
        writeDate(dest, arrivalTimeFact);
        writeDate(dest, departureTimeFact);
        writeDate(dest, tripDate);
        dest.writeString(colorCode);
        dest.writeString(label);
    }

    @Override
    public int describeContents() {
        return 0;
    }

    public static final Parcelable.Creator<Momento> CREATOR = new Parcelable.Creator<Momento>() {
        @Override
        public Momento createFromParcel(Parcel in) {
            return new Momento(in);
        }

        @Override
        public Momento[] newArray(int size) {
            return new Momento[size];
        }
    };
}
/**
 * Fluent builder for {@code PlaceModel}: each setter records a value and
 * returns this builder, and {@link #build()} produces a model populated
 * with everything that was set (unset fields keep Java defaults).
 */
public static class Builder {
    private long id;
    private long tripId;
    private long tripDateId;
    private long labelId;
    private String name;
    private String description;
    private String address;
    private double latitude;
    private double longitude;
    private String notes;
    private String placeId;
    private Date arrivalTimePlan;
    private Date departureTimePlan;
    private Date arrivalTimeFact;
    private Date departureTimeFact;
    private Date tripDate;
    private String colorCode;
    private String label;

    public Builder setId(long id) { this.id = id; return this; }

    public Builder setTripId(long tripId) { this.tripId = tripId; return this; }

    public Builder setTripDateId(long tripDateId) { this.tripDateId = tripDateId; return this; }

    public Builder setLabelId(long labelId) { this.labelId = labelId; return this; }

    public Builder setName(String name) { this.name = name; return this; }

    public Builder setDescription(String description) { this.description = description; return this; }

    public Builder setAddress(String address) { this.address = address; return this; }

    public Builder setLatitude(double latitude) { this.latitude = latitude; return this; }

    public Builder setLongitude(double longitude) { this.longitude = longitude; return this; }

    public Builder setNotes(String notes) { this.notes = notes; return this; }

    public Builder setPlaceId(String placeId) { this.placeId = placeId; return this; }

    public Builder setArrivalTimePlan(Date arrivalTimePlan) { this.arrivalTimePlan = arrivalTimePlan; return this; }

    public Builder setDepartureTimePlan(Date departureTimePlan) { this.departureTimePlan = departureTimePlan; return this; }

    public Builder setArrivalTimeFact(Date arrivalTimeFact) { this.arrivalTimeFact = arrivalTimeFact; return this; }

    public Builder setDepartureTimeFact(Date departureTimeFact) { this.departureTimeFact = departureTimeFact; return this; }

    public Builder setTripDate(Date tripDate) { this.tripDate = tripDate; return this; }

    public Builder setColorCode(String colorCode) { this.colorCode = colorCode; return this; }

    public Builder setLabel(String label) { this.label = label; return this; }

    /** Creates a new {@code PlaceModel} carrying every value recorded so far. */
    public PlaceModel build() {
        PlaceModel placeModel = new PlaceModel();
        placeModel.setId(id);
        placeModel.setTripId(tripId);
        placeModel.setTripDateId(tripDateId);
        placeModel.setLabelId(labelId);
        placeModel.setName(name);
        placeModel.setDescription(description);
        placeModel.setAddress(address);
        placeModel.setLatitude(latitude);
        placeModel.setLongitude(longitude);
        placeModel.setNotes(notes);
        placeModel.setPlaceId(placeId);
        placeModel.setArrivalTimePlan(arrivalTimePlan);
        placeModel.setDepartureTimePlan(departureTimePlan);
        placeModel.setArrivalTimeFact(arrivalTimeFact);
        placeModel.setDepartureTimeFact(departureTimeFact);
        placeModel.setTripDate(tripDate);
        placeModel.setColorCode(colorCode);
        placeModel.setLabel(label);
        return placeModel;
    }
}
// Identity of the trip / trip-day / label rows this place is linked to.
// (The place's own id is inherited from Model.)
private long tripId;
private long tripDateId;
private long labelId;
// Descriptive attributes of the place.
private String name;
private String description;
private String address;
private double latitude;
private double longitude;
private String notes;
// External place identifier — presumably a Google Places id; TODO confirm.
private String placeId;
// Planned and actual arrival/departure times; any may be null.
private Date arrivalTimePlan;
private Date departureTimePlan;
private Date arrivalTimeFact;
private Date departureTimeFact;
private Date tripDate;
// Label presentation data.
private String colorCode;
private String label;
// Snapshot captured by saveState(); used by isStateChanged()/restoreState().
// Deliberately excluded from equals()/hashCode().
private Momento savedState;
public PlaceModel() {}
// --- Plain bean-style accessors: no validation, no side effects. ---
public long getTripId() {
return tripId;
}
public void setTripId(long tripId) {
this.tripId = tripId;
}
public long getTripDateId() {
return tripDateId;
}
public void setTripDateId(long tripDateId) {
this.tripDateId = tripDateId;
}
public long getLabelId() {
return labelId;
}
public void setLabelId(long labelId) {
this.labelId = labelId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public double getLatitude() {
return latitude;
}
public void setLatitude(double latitude) {
this.latitude = latitude;
}
public double getLongitude() {
return longitude;
}
public void setLongitude(double longitude) {
this.longitude = longitude;
}
public String getNotes() {
return notes;
}
public void setNotes(String notes) {
this.notes = notes;
}
public String getPlaceId() {
return placeId;
}
public void setPlaceId(String placeId) {
this.placeId = placeId;
}
// Date accessors return/accept the stored reference directly (no defensive
// copies), so callers share mutable java.util.Date instances.
public Date getArrivalTimePlan() {
return arrivalTimePlan;
}
public void setArrivalTimePlan(Date arrivalTimePlan) {
this.arrivalTimePlan = arrivalTimePlan;
}
public Date getDepartureTimePlan() {
return departureTimePlan;
}
public void setDepartureTimePlan(Date departureTimePlan) {
this.departureTimePlan = departureTimePlan;
}
public Date getArrivalTimeFact() {
return arrivalTimeFact;
}
public void setArrivalTimeFact(Date arrivalTimeFact) {
this.arrivalTimeFact = arrivalTimeFact;
}
public Date getDepartureTimeFact() {
return departureTimeFact;
}
public void setDepartureTimeFact(Date departureTimeFact) {
this.departureTimeFact = departureTimeFact;
}
public Date getTripDate() {
return tripDate;
}
public void setTripDate(Date tripDate) {
this.tripDate = tripDate;
}
public String getColorCode() {
return colorCode;
}
public void setColorCode(String colorCode) {
this.colorCode = colorCode;
}
public String getLabel() {
return label;
}
public void setLabel(String label) {
this.label = label;
}
/**
 * Equality over all place attributes plus whatever {@code super.equals}
 * compares (the inherited id). The transient {@code savedState} snapshot is
 * deliberately excluded so saving state does not make two models unequal.
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    if (!super.equals(o)) return false;
    PlaceModel that = (PlaceModel) o;
    // Double.compare treats NaN and -0.0 consistently, unlike ==.
    return tripId == that.tripId
            && tripDateId == that.tripDateId
            && labelId == that.labelId
            && Double.compare(that.latitude, latitude) == 0
            && Double.compare(that.longitude, longitude) == 0
            && Objects.equals(name, that.name)
            && Objects.equals(description, that.description)
            && Objects.equals(address, that.address)
            && Objects.equals(notes, that.notes)
            && Objects.equals(placeId, that.placeId)
            && Objects.equals(arrivalTimePlan, that.arrivalTimePlan)
            && Objects.equals(departureTimePlan, that.departureTimePlan)
            && Objects.equals(arrivalTimeFact, that.arrivalTimeFact)
            && Objects.equals(departureTimeFact, that.departureTimeFact)
            && Objects.equals(tripDate, that.tripDate)
            && Objects.equals(colorCode, that.colorCode)
            && Objects.equals(label, that.label);
}
/**
 * Hash over the same fields used by {@link #equals(Object)}, combined with
 * {@code super.hashCode()}; {@code savedState} is excluded. The numeric
 * values differ from the previous hand-rolled version, which is permitted
 * by the equals/hashCode contract.
 */
@Override
public int hashCode() {
    return 31 * super.hashCode() + Objects.hash(tripId, tripDateId, labelId, name,
            description, address, latitude, longitude, notes, placeId,
            arrivalTimePlan, departureTimePlan, arrivalTimeFact, departureTimeFact,
            tripDate, colorCode, label);
}
/** Debug representation of all fields, including the saved-state snapshot. */
@Override
public String toString() {
    return "PlaceModel{" +
            "id=" + id +
            ", tripId=" + tripId +
            ", tripDateId=" + tripDateId +
            ", labelId=" + labelId +
            ", name='" + name + '\'' +
            ", description='" + description + '\'' +
            ", address='" + address + '\'' +
            ", latitude=" + latitude +
            ", longitude=" + longitude +
            ", notes='" + notes + '\'' +
            ", placeId='" + placeId + '\'' +
            ", arrivalTimePlan=" + arrivalTimePlan +
            ", departureTimePlan=" + departureTimePlan +
            ", arrivalTimeFact=" + arrivalTimeFact +
            ", departureTimeFact=" + departureTimeFact +
            // Fixed: tripDate previously opened a quote it never closed;
            // Date fields are rendered unquoted like the other dates.
            ", tripDate=" + tripDate +
            ", colorCode='" + colorCode + '\'' +
            ", label='" + label + '\'' +
            ", savedState=" + savedState +
            '}';
}
/** Captures the current field values so later edits can be detected or undone. */
public void saveState() {
setSavedState(newMomento());
}
/**
 * Restores every place attribute from the given snapshot.
 *
 * @param momento snapshot previously produced by {@link #saveState()}
 */
public void restoreState(Momento momento) {
    id = momento.getId();
    tripId = momento.getTripId();
    tripDateId = momento.getTripDateId();
    labelId = momento.getLabelId();
    name = momento.getName();
    description = momento.getDescription();
    // Bug fix: address was previously skipped, so restoring state left a
    // modified address in place.
    address = momento.getAddress();
    latitude = momento.getLatitude();
    longitude = momento.getLongitude();
    notes = momento.getNotes();
    placeId = momento.getPlaceId();
    arrivalTimePlan = momento.getArrivalTimePlan();
    departureTimePlan = momento.getDepartureTimePlan();
    arrivalTimeFact = momento.getArrivalTimeFact();
    departureTimeFact = momento.getDepartureTimeFact();
    tripDate = momento.getTripDate();
    colorCode = momento.getColorCode();
    label = momento.getLabel();
}
/**
 * Returns true when the current fields differ from the snapshot taken by
 * {@link #saveState()}. Always false when no snapshot exists.
 */
public boolean isStateChanged() {
Momento newState = newMomento();
return savedState != null && !newState.equals(savedState);
}
public Momento getSavedState() {
return savedState;
}
private void setSavedState(Momento savedState) {
this.savedState = savedState;
}
// Builds a fresh snapshot of the current field values (savedState excluded).
private Momento newMomento() {
return new Momento(id, tripId, tripDateId, labelId, name, description, address, latitude, longitude,
notes, placeId, arrivalTimePlan, departureTimePlan, arrivalTimeFact, departureTimeFact,
tripDate, colorCode, label);
}
// Parcelable
// Reads fields in the exact order writeToParcel() writes them; any change
// here must be mirrored there. Dates are stored as epoch millis with -1 as
// the null sentinel.
protected PlaceModel(Parcel in) {
id = in.readLong();
tripId = in.readLong();
tripDateId = in.readLong();
labelId = in.readLong();
name = in.readString();
description = in.readString();
address = in.readString();
latitude = in.readDouble();
longitude = in.readDouble();
notes = in.readString();
placeId = in.readString();
// -1 marks a null Date.
long millis = in.readLong();
arrivalTimePlan = millis == -1 ? null : new Date(millis);
millis = in.readLong();
departureTimePlan = millis == -1 ? null : new Date(millis);
millis = in.readLong();
arrivalTimeFact = millis == -1 ? null : new Date(millis);
millis = in.readLong();
departureTimeFact = millis == -1 ? null : new Date(millis);
millis = in.readLong();
tripDate = millis == -1 ? null : new Date(millis);
colorCode = in.readString();
label = in.readString();
// The saved snapshot travels with the model across process boundaries.
savedState = in.readParcelable(Momento.class.getClassLoader());
}
// Writes fields in the exact order the Parcel constructor reads them; any
// change here must be mirrored there. Null Dates are encoded as -1.
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeLong(id);
dest.writeLong(tripId);
dest.writeLong(tripDateId);
dest.writeLong(labelId);
dest.writeString(name);
dest.writeString(description);
dest.writeString(address);
dest.writeDouble(latitude);
dest.writeDouble(longitude);
dest.writeString(notes);
dest.writeString(placeId);
dest.writeLong(arrivalTimePlan == null ? -1 : arrivalTimePlan.getTime());
dest.writeLong(departureTimePlan == null ? -1 : departureTimePlan.getTime());
dest.writeLong(arrivalTimeFact == null ? -1 : arrivalTimeFact.getTime());
dest.writeLong(departureTimeFact == null ? -1 : departureTimeFact.getTime());
dest.writeLong(tripDate == null ? -1 : tripDate.getTime());
dest.writeString(colorCode);
dest.writeString(label);
dest.writeParcelable(savedState, flags);
}
// No special content (e.g. file descriptors) in the parcel.
@Override
public int describeContents() {
return 0;
}
// Required by the Parcelable contract: recreates instances from a Parcel.
public static final Parcelable.Creator<PlaceModel> CREATOR = new Parcelable.Creator<PlaceModel>() {
@Override
public PlaceModel createFromParcel(Parcel in) {
return new PlaceModel(in);
}
@Override
public PlaceModel[] newArray(int size) {
return new PlaceModel[size];
}
};
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.ops;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.drill.common.AutoCloseables;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.config.LogicalPlanPersistence;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.expr.fn.registry.RemoteFunctionRegistry;
import org.apache.drill.exec.expr.holders.ValueHolder;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.planner.sql.DrillOperatorTable;
import org.apache.drill.exec.proto.BitControl.QueryContextInformation;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.rpc.user.UserSession;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.server.QueryProfileStoreContext;
import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.options.OptionValue.OptionScope;
import org.apache.drill.exec.server.options.QueryOptionManager;
import org.apache.drill.exec.store.PartitionExplorer;
import org.apache.drill.exec.store.PartitionExplorerImpl;
import org.apache.drill.exec.store.SchemaConfig;
import org.apache.drill.exec.store.SchemaConfig.SchemaConfigInfoProvider;
import org.apache.drill.exec.store.SchemaTreeProvider;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.testing.ExecutionControls;
import org.apache.drill.exec.util.Utilities;
import org.apache.drill.metastore.MetastoreRegistry;
import org.apache.drill.shaded.guava.com.google.common.base.Function;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
import io.netty.buffer.DrillBuf;
// TODO - consider re-name to PlanningContext, as the query execution context actually appears
// in fragment contexts
public class QueryContext implements AutoCloseable, OptimizerRulesContext, SchemaConfigInfoProvider {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueryContext.class);
// High-level classification of the SQL statement being planned.
public enum SqlStatementType {OTHER, ANALYZE, CTAS, EXPLAIN, DESCRIBE_TABLE, DESCRIBE_SCHEMA, REFRESH, SELECT, SETOPTION};
// Drillbit-wide services and the user session this query runs under.
private final DrillbitContext drillbitContext;
private final UserSession session;
private final QueryId queryId;
// Query-scoped option manager layered over the session options.
private final QueryOptionManager queryOptions;
private final PlannerSettings plannerSettings;
private final ExecutionControls executionControls;
// Child allocator and buffer manager scoped to planning for this query;
// both are released in close().
private final BufferAllocator allocator;
private final BufferManager bufferManager;
private final ContextInformation contextInformation;
private final QueryContextInformation queryContextInfo;
private final ViewExpansionContext viewExpansionContext;
private final SchemaTreeProvider schemaTreeProvider;
// When true, profile writing is skipped for this query.
private boolean skipProfileWrite;
/** Stores constants and their holders by type */
private final Map<String, Map<MinorType, ValueHolder>> constantValueHolderCache;
// Set at most once per query via setSQLStatementType(); null until then.
private SqlStatementType stmtType;
/*
* Flag to indicate if close has been called, after calling close the first
* time this is set to true and the close method becomes a no-op.
*/
private boolean closed = false;
// Operator table; may be rebuilt by reloadDrillOperatorTable().
private DrillOperatorTable table;
/**
 * Builds the planning-time context for a single query: query-scoped options,
 * planner settings, operator table, auto row limit, and a child allocator
 * sized by the planner's memory settings. Initialization order matters —
 * e.g. plannerSettings is needed before the allocator is created.
 */
public QueryContext(final UserSession session, final DrillbitContext drillbitContext, QueryId queryId) {
this.drillbitContext = drillbitContext;
this.session = session;
this.queryId = queryId;
this.skipProfileWrite = false;
queryOptions = new QueryOptionManager(session.getOptions());
executionControls = new ExecutionControls(queryOptions, drillbitContext.getEndpoint());
plannerSettings = new PlannerSettings(queryOptions, getFunctionRegistry());
plannerSettings.setNumEndPoints(drillbitContext.getBits().size());
// If we do not need to support dynamic UDFs for this query, just use static operator table
// built at the startup. Else, build new operator table from latest version of function registry.
if (queryOptions.getOption(ExecConstants.USE_DYNAMIC_UDFS)) {
this.table = new DrillOperatorTable(drillbitContext.getFunctionImplementationRegistry(), drillbitContext.getOptionManager());
} else {
this.table = drillbitContext.getOperatorTable();
}
// Checking for limit on ResultSet rowcount and if user attempting to override the system value
// NOTE(review): assumes QUERY_MAX_ROWS always has a numeric value (num_val
// non-null) — verify against option defaults.
int sessionMaxRowCount = queryOptions.getOption(ExecConstants.QUERY_MAX_ROWS).num_val.intValue();
int defaultMaxRowCount = queryOptions.getOptionManager(OptionScope.SYSTEM).getOption(ExecConstants.QUERY_MAX_ROWS).num_val.intValue();
int autoLimitRowCount = 0;
// When both limits are positive the stricter (smaller) one wins; otherwise
// whichever is set (max picks the positive/zero one).
if (sessionMaxRowCount > 0 && defaultMaxRowCount > 0) {
autoLimitRowCount = Math.min(sessionMaxRowCount, defaultMaxRowCount);
} else {
autoLimitRowCount = Math.max(sessionMaxRowCount, defaultMaxRowCount);
}
if (autoLimitRowCount == defaultMaxRowCount && defaultMaxRowCount != sessionMaxRowCount) {
// Required to indicate via OptionScope=QueryLevel that session limit is overridden by system limit
queryOptions.setLocalOption(ExecConstants.QUERY_MAX_ROWS, autoLimitRowCount);
}
if (autoLimitRowCount > 0) {
logger.debug("ResultSet size is auto-limited to {} rows [Session: {} / Default: {}]", autoLimitRowCount, sessionMaxRowCount, defaultMaxRowCount);
}
queryContextInfo = Utilities.createQueryContextInfo(session.getDefaultSchemaPath(), session.getSessionId());
contextInformation = new ContextInformation(session.getCredentials(), queryContextInfo);
// Planning memory is bounded by the planner settings, not the query's
// execution memory.
allocator = drillbitContext.getAllocator().newChildAllocator(
"query:" + QueryIdHelper.getQueryId(queryId),
PlannerSettings.getInitialPlanningMemorySize(),
plannerSettings.getPlanningMemoryLimit());
bufferManager = new BufferManagerImpl(this.allocator);
viewExpansionContext = new ViewExpansionContext(this);
schemaTreeProvider = new SchemaTreeProvider(drillbitContext);
constantValueHolderCache = Maps.newHashMap();
stmtType = null;
}
// Simple accessors for query-scoped state.
@Override
public PlannerSettings getPlannerSettings() {
return plannerSettings;
}
public UserSession getSession() { return session; }
@Override
public BufferAllocator getAllocator() { return allocator; }
public QueryId getQueryId( ) { return queryId; }
/**
 * Return reference to default schema instance in a schema tree. Each {@link org.apache.calcite.schema.SchemaPlus}
 * instance can refer to its parent and its children. From the returned reference to default schema instance,
 * clients can traverse the entire schema tree and know the default schema where to look up the tables first.
 * Falls back to the root schema when the session defines no default schema.
 *
 * @return Reference to default schema instance in a schema tree.
 */
public SchemaPlus getNewDefaultSchema() {
  final SchemaPlus rootSchema = getRootSchema();
  final SchemaPlus defaultSchema = session.getDefaultSchema(rootSchema);
  return defaultSchema == null ? rootSchema : defaultSchema;
}
/**
 * Get root schema with schema owner as the user who issued the query that is managed by this QueryContext.
 * @return Root of the schema tree.
 */
public SchemaPlus getRootSchema() {
return getRootSchema(getQueryUserName());
}
/**
 * Return root schema with schema owner as the given user.
 *
 * @param userName User who owns the schema tree.
 * @return Root of the schema tree.
 */
@Override
public SchemaPlus getRootSchema(final String userName) {
return schemaTreeProvider.createRootSchema(userName, this);
}
/**
 * Create and return a {@link org.apache.calcite.schema.SchemaPlus} with given <i>schemaConfig</i> but some schemas (from storage plugins)
 * could be initialized later.
 * @param schemaConfig schema configuration (user identity and options) for the tree.
 * @return A {@link org.apache.calcite.schema.SchemaPlus} with given <i>schemaConfig</i>.
 */
public SchemaPlus getRootSchema(SchemaConfig schemaConfig) {
return schemaTreeProvider.createRootSchema(schemaConfig);
}
/**
 * Create and return a fully initialized SchemaTree with given <i>schemaConfig</i>.
 * @param schemaConfig schema configuration (user identity and options) for the tree.
 * @return A fully initialized SchemaTree with given <i>schemaConfig</i>.
 */
public SchemaPlus getFullRootSchema(SchemaConfig schemaConfig) {
return schemaTreeProvider.createFullRootSchema(schemaConfig);
}
/**
 * Get the user name of the user who issued the query that is managed by this QueryContext.
 * @return The user name of the user who issued the query that is managed by this QueryContext.
 */
@Override
public String getQueryUserName() {
return session.getCredentials().getUserName();
}
// Accessors delegating to query-scoped state or the drillbit context.
public QueryOptionManager getOptions() {
return queryOptions;
}
public ExecutionControls getExecutionControls() {
return executionControls;
}
public DrillbitEndpoint getCurrentEndpoint() {
return drillbitContext.getEndpoint();
}
public StoragePluginRegistry getStorage() {
return drillbitContext.getStorage();
}
public LogicalPlanPersistence getLpPersistence() {
return drillbitContext.getLpPersistence();
}
// NOTE(review): getActiveEndpoints and getOnlineEndpoints both return
// drillbitContext.getBits(); one may be a legacy alias — confirm before
// removing either.
public Collection<DrillbitEndpoint> getActiveEndpoints() {
return drillbitContext.getBits();
}
public Collection<DrillbitEndpoint> getOnlineEndpoints() {
return drillbitContext.getBits();
}
public DrillConfig getConfig() {
return drillbitContext.getConfig();
}
public QueryProfileStoreContext getProfileStoreContext() {
return drillbitContext.getProfileStoreContext();
}
@Override
public FunctionImplementationRegistry getFunctionRegistry() {
return drillbitContext.getFunctionImplementationRegistry();
}
@Override
public ViewExpansionContext getViewExpansionContext() {
return viewExpansionContext;
}
// Looks the option up through the query-scoped option manager.
@Override
public OptionValue getOption(String optionKey) {
return getOptions().getOption(optionKey);
}
public boolean isImpersonationEnabled() {
return getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED);
}
public boolean isUserAuthenticationEnabled() {
return getConfig().getBoolean(ExecConstants.USER_AUTHENTICATION_ENABLED);
}
// NOTE(review): unboxes bool_val directly — assumes the runtime-filter
// option always carries a boolean value; confirm it cannot be null.
public boolean isRuntimeFilterEnabled() {
return this.getOption(ExecConstants.HASHJOIN_ENABLE_RUNTIME_FILTER_KEY).bool_val;
}
public DrillOperatorTable getDrillOperatorTable() {
return table;
}
/**
 * Re-creates drill operator table to refresh functions list from local function registry.
 * Called on query re-planning, so the recorded statement type is reset too.
 */
public void reloadDrillOperatorTable() {
// This is re-trying the query plan on failure so qualifies to reset the SQL statement.
clearSQLStatementType();
table = new DrillOperatorTable(
drillbitContext.getFunctionImplementationRegistry(),
drillbitContext.getOptionManager());
}
public QueryContextInformation getQueryContextInfo() {
return queryContextInfo;
}
public RemoteFunctionRegistry getRemoteFunctionRegistry() {
return drillbitContext.getRemoteFunctionRegistry();
}
@Override
public ContextInformation getContextInformation() {
return contextInformation;
}
// Buffer is owned by this context's bufferManager and released in close().
@Override
public DrillBuf getManagedBuffer() {
return bufferManager.getManagedBuffer();
}
// Builds a fresh explorer over a newly created root schema for this user.
@Override
public PartitionExplorer getPartitionExplorer() {
return new PartitionExplorerImpl(getRootSchema());
}
/**
 * Returns a cached {@link ValueHolder} for the given constant text and minor type,
 * creating and caching one via {@code holderInitializer} on first use.
 *
 * <p>The cache is two-level: constant text -> (minor type -> holder). Using
 * {@link Map#computeIfAbsent} at both levels replaces the original
 * containsKey/put and get/null-check sequences, avoiding a second lookup per level
 * while preserving the lazy-initialization behavior.</p>
 *
 * @param value             the constant's textual value used as the outer cache key
 * @param type              the minor type the holder is for
 * @param holderInitializer factory invoked with a managed buffer when no holder is cached yet
 * @return the cached (or newly created) holder for {@code value}/{@code type}
 */
@Override
public ValueHolder getConstantValueHolder(String value, MinorType type, Function<DrillBuf, ValueHolder> holderInitializer) {
  Map<MinorType, ValueHolder> holdersByType =
      constantValueHolderCache.computeIfAbsent(value, v -> Maps.<MinorType, ValueHolder>newHashMap());
  return holdersByType.computeIfAbsent(type, t -> holderInitializer.apply(getManagedBuffer()));
}
/**
 * Releases the resources held by this context: the buffer manager, the allocator and the
 * schema tree provider, in that order, via {@code AutoCloseables.close}. Idempotent: the
 * {@code closed} flag is checked first and is set in the {@code finally} block even when
 * closing fails, so a failed close is not retried.
 *
 * @throws Exception if any of the underlying resources fails to close
 */
@Override
public void close() throws Exception {
  try {
    if (!closed) {
      List<AutoCloseable> toClose = Lists.newArrayList();
      // TODO(DRILL-1942) the new allocator has this capability built-in, so we can remove bufferManager and
      // allocator from the toClose list.
      toClose.add(bufferManager);
      toClose.add(allocator);
      toClose.add(schemaTreeProvider);
      AutoCloseables.close(toClose);
    }
  } finally {
    closed = true;
  }
}
/**
 * Records the type {@link SqlStatementType} of the statement, e.g. CTAS or ANALYZE.
 * The type may be assigned only once per query; call {@code clearSQLStatementType()}
 * first if it needs to be replaced.
 *
 * @param stmtType the statement type to record
 * @throws IllegalStateException if a statement type has already been set
 */
public void setSQLStatementType(SqlStatementType stmtType) {
  // Guard clause: reject an overwrite up front, then fall through to the assignment.
  if (this.stmtType != null) {
    throw new IllegalStateException(String.format("SQL Statement type is already set to %s", this.stmtType));
  }
  this.stmtType = stmtType;
}
/**
 * Clears the type {@link SqlStatementType} of the statement, allowing
 * {@code setSQLStatementType} (which rejects overwrites) to accept a new value.
 */
public void clearSQLStatementType() {
  this.stmtType = null;
}
/**
 * @return Get the type {@link SqlStatementType} of the statement e.g. CTAS, ANALYZE;
 *         {@code null} if no type has been recorded (or it was cleared)
 */
public SqlStatementType getSQLStatementType() {
  return stmtType;
}
/**
 * Sets whether writing of the query profile should be skipped.
 *
 * @param skipWriting true to skip writing the profile for this query
 */
public void skipWritingProfile(boolean skipWriting) {
  this.skipProfileWrite = skipWriting;
}
/**
 * @return true if writing the query profile should be skipped
 */
public boolean isSkipProfileWrite() {
  return skipProfileWrite;
}
/** @return the metastore registry of the owning Drillbit. */
public MetastoreRegistry getMetastoreRegistry() {
  return drillbitContext.getMetastoreRegistry();
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53resolver.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.protocol.ProtocolMarshaller;
import com.amazonaws.protocol.StructuredPojo;
/**
* <p>
* A complex type that contains information about a configuration for DNSSEC validation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53resolver-2018-04-01/ResolverDnssecConfig"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResolverDnssecConfig implements Serializable, Cloneable, StructuredPojo {

    /** The ID for a configuration for DNSSEC validation. */
    private String id;

    /** The owner account ID of the virtual private cloud (VPC) for a configuration for DNSSEC validation. */
    private String ownerId;

    /** The ID of the virtual private cloud (VPC) that you're configuring the DNSSEC validation status for. */
    private String resourceId;

    /**
     * The validation status for a DNSSEC configuration. One of:
     * ENABLING (being enabled, not complete), ENABLED, DISABLING (being disabled, not
     * complete) or DISABLED.
     *
     * @see ResolverDNSSECValidationStatus
     */
    private String validationStatus;

    /**
     * Sets the ID for a configuration for DNSSEC validation.
     *
     * @param id
     *        The ID for a configuration for DNSSEC validation.
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * @return The ID for a configuration for DNSSEC validation.
     */
    public String getId() {
        return this.id;
    }

    /**
     * Fluent variant of {@link #setId(String)}.
     *
     * @param id
     *        The ID for a configuration for DNSSEC validation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResolverDnssecConfig withId(String id) {
        setId(id);
        return this;
    }

    /**
     * Sets the owner account ID of the virtual private cloud (VPC) for a configuration for DNSSEC validation.
     *
     * @param ownerId
     *        The owner account ID of the VPC for a configuration for DNSSEC validation.
     */
    public void setOwnerId(String ownerId) {
        this.ownerId = ownerId;
    }

    /**
     * @return The owner account ID of the VPC for a configuration for DNSSEC validation.
     */
    public String getOwnerId() {
        return this.ownerId;
    }

    /**
     * Fluent variant of {@link #setOwnerId(String)}.
     *
     * @param ownerId
     *        The owner account ID of the VPC for a configuration for DNSSEC validation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResolverDnssecConfig withOwnerId(String ownerId) {
        setOwnerId(ownerId);
        return this;
    }

    /**
     * Sets the ID of the virtual private cloud (VPC) that you're configuring the DNSSEC validation status for.
     *
     * @param resourceId
     *        The ID of the VPC that you're configuring the DNSSEC validation status for.
     */
    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    /**
     * @return The ID of the VPC that you're configuring the DNSSEC validation status for.
     */
    public String getResourceId() {
        return this.resourceId;
    }

    /**
     * Fluent variant of {@link #setResourceId(String)}.
     *
     * @param resourceId
     *        The ID of the VPC that you're configuring the DNSSEC validation status for.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResolverDnssecConfig withResourceId(String resourceId) {
        setResourceId(resourceId);
        return this;
    }

    /**
     * Sets the validation status for a DNSSEC configuration. One of: ENABLING (being
     * enabled, not complete), ENABLED, DISABLING (being disabled, not complete) or DISABLED.
     *
     * @param validationStatus
     *        The validation status for a DNSSEC configuration.
     * @see ResolverDNSSECValidationStatus
     */
    public void setValidationStatus(String validationStatus) {
        this.validationStatus = validationStatus;
    }

    /**
     * @return The validation status for a DNSSEC configuration: ENABLING, ENABLED,
     *         DISABLING or DISABLED.
     * @see ResolverDNSSECValidationStatus
     */
    public String getValidationStatus() {
        return this.validationStatus;
    }

    /**
     * Fluent variant of {@link #setValidationStatus(String)}.
     *
     * @param validationStatus
     *        The validation status for a DNSSEC configuration.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResolverDNSSECValidationStatus
     */
    public ResolverDnssecConfig withValidationStatus(String validationStatus) {
        setValidationStatus(validationStatus);
        return this;
    }

    /**
     * Fluent setter taking the typed enum; the value is stored in its string form.
     *
     * @param validationStatus
     *        The validation status for a DNSSEC configuration.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResolverDNSSECValidationStatus
     */
    public ResolverDnssecConfig withValidationStatus(ResolverDNSSECValidationStatus validationStatus) {
        this.validationStatus = validationStatus.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getId() != null)
            sb.append("Id: ").append(getId()).append(",");
        if (getOwnerId() != null)
            sb.append("OwnerId: ").append(getOwnerId()).append(",");
        if (getResourceId() != null)
            sb.append("ResourceId: ").append(getResourceId()).append(",");
        if (getValidationStatus() != null)
            sb.append("ValidationStatus: ").append(getValidationStatus());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so this single check replaces the original's
        // separate null test plus "instanceof == false" comparison.
        if (!(obj instanceof ResolverDnssecConfig))
            return false;
        ResolverDnssecConfig other = (ResolverDnssecConfig) obj;
        // Objects.equals collapses each original XOR-null-check + equals pair.
        return Objects.equals(other.getId(), this.getId())
                && Objects.equals(other.getOwnerId(), this.getOwnerId())
                && Objects.equals(other.getResourceId(), this.getResourceId())
                && Objects.equals(other.getValidationStatus(), this.getValidationStatus());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (initial value 1, null -> 0)
        // as the previous hand-rolled version, so hash values are unchanged.
        return Objects.hash(getId(), getOwnerId(), getResourceId(), getValidationStatus());
    }

    @Override
    public ResolverDnssecConfig clone() {
        try {
            return (ResolverDnssecConfig) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.route53resolver.model.transform.ResolverDnssecConfigMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2004 Clinton Begin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibatis.sqlmap.engine.type;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.*;
import java.util.Calendar;
import java.util.Map;
/**
* A way to make a CallableStatement look like a ResultSet
*/
public class CallableStatementResultSet implements ResultSet {
private CallableStatement cs;
/**
 * Constructor to stretch a ResultSet interface over a CallableStatement.
 *
 * @param cs - the CallableStatement whose OUT-parameter accessors will back the
 *           ResultSet getter methods of this adapter
 */
public CallableStatementResultSet(CallableStatement cs) {
  this.cs = cs;
}
// --- Cursor navigation / lifecycle -----------------------------------------
// A CallableStatement has no movable cursor, so every method in this section is
// unconditionally unsupported. NOTE(review): these throw the unchecked
// UnsupportedOperationException rather than SQLException despite the declared
// throws clause — callers expecting a checked SQLException will not catch it.
public boolean absolute(int row) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void afterLast() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void beforeFirst() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void cancelRowUpdates() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void clearWarnings() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void close() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void deleteRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public int findColumn(String columnName) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean first() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
// --- Column getters (part 1) -----------------------------------------------
// Getters with a direct CallableStatement counterpart delegate to the wrapped
// statement's OUT-parameter accessor of the same name; the "column" index/name
// arguments are interpreted as parameter index/name by the underlying statement.
// Stream-oriented and deprecated (scale-taking) getters have no counterpart on
// CallableStatement and are unsupported.
public Array getArray(String colName) throws SQLException {
  return cs.getArray(colName);
}
public Array getArray(int i) throws SQLException {
  return cs.getArray(i);
}
public InputStream getAsciiStream(int columnIndex) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public InputStream getAsciiStream(String columnName) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
  return cs.getBigDecimal(columnIndex);
}
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public BigDecimal getBigDecimal(String columnName) throws SQLException {
  return cs.getBigDecimal(columnName);
}
public BigDecimal getBigDecimal(String columnName, int scale) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public InputStream getBinaryStream(int columnIndex) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public InputStream getBinaryStream(String columnName) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public Blob getBlob(String colName) throws SQLException {
  return cs.getBlob(colName);
}
public Blob getBlob(int i) throws SQLException {
  return cs.getBlob(i);
}
public boolean getBoolean(int columnIndex) throws SQLException {
  return cs.getBoolean(columnIndex);
}
public boolean getBoolean(String columnName) throws SQLException {
  return cs.getBoolean(columnName);
}
public byte getByte(int columnIndex) throws SQLException {
  return cs.getByte(columnIndex);
}
public byte getByte(String columnName) throws SQLException {
  return cs.getByte(columnName);
}
public byte[] getBytes(int columnIndex) throws SQLException {
  return cs.getBytes(columnIndex);
}
public byte[] getBytes(String columnName) throws SQLException {
  return cs.getBytes(columnName);
}
public Reader getCharacterStream(int columnIndex) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public Reader getCharacterStream(String columnName) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public Clob getClob(String colName) throws SQLException {
  return cs.getClob(colName);
}
public Clob getClob(int i) throws SQLException {
  return cs.getClob(i);
}
// Cursor metadata — meaningless for a parameter-backed pseudo result set.
public int getConcurrency() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public String getCursorName() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public Date getDate(int columnIndex) throws SQLException {
  return cs.getDate(columnIndex);
}
public Date getDate(int columnIndex, Calendar cal) throws SQLException {
  return cs.getDate(columnIndex, cal);
}
public Date getDate(String columnName) throws SQLException {
  return cs.getDate(columnName);
}
public Date getDate(String columnName, Calendar cal) throws SQLException {
  return cs.getDate(columnName, cal);
}
public double getDouble(int columnIndex) throws SQLException {
  return cs.getDouble(columnIndex);
}
public double getDouble(String columnName) throws SQLException {
  return cs.getDouble(columnName);
}
public int getFetchDirection() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public int getFetchSize() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public float getFloat(int columnIndex) throws SQLException {
  return cs.getFloat(columnIndex);
}
public float getFloat(String columnName) throws SQLException {
  return cs.getFloat(columnName);
}
public int getInt(int columnIndex) throws SQLException {
  return cs.getInt(columnIndex);
}
public int getInt(String columnName) throws SQLException {
  return cs.getInt(columnName);
}
public long getLong(int columnIndex) throws SQLException {
  return cs.getLong(columnIndex);
}
public long getLong(String columnName) throws SQLException {
  return cs.getLong(columnName);
}
// --- Column getters (part 2) -----------------------------------------------
// Same pattern as above: delegate where CallableStatement offers the accessor,
// throw UnsupportedOperationException where it does not (metadata, row position,
// owning statement, deprecated Unicode streams).
public ResultSetMetaData getMetaData() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public Object getObject(String colName, Map map) throws SQLException {
  return cs.getObject(colName, map);
}
public Object getObject(int columnIndex) throws SQLException {
  return cs.getObject(columnIndex);
}
public Object getObject(String columnName) throws SQLException {
  return cs.getObject(columnName);
}
public Object getObject(int i, Map map) throws SQLException {
  return cs.getObject(i, map);
}
public Ref getRef(String colName) throws SQLException {
  return cs.getRef(colName);
}
public Ref getRef(int i) throws SQLException {
  return cs.getRef(i);
}
public int getRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public short getShort(int columnIndex) throws SQLException {
  return cs.getShort(columnIndex);
}
public short getShort(String columnName) throws SQLException {
  return cs.getShort(columnName);
}
public Statement getStatement() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public String getString(int columnIndex) throws SQLException {
  return cs.getString(columnIndex);
}
public String getString(String columnName) throws SQLException {
  return cs.getString(columnName);
}
public Time getTime(int columnIndex) throws SQLException {
  return cs.getTime(columnIndex);
}
public Time getTime(int columnIndex, Calendar cal) throws SQLException {
  return cs.getTime(columnIndex, cal);
}
public Time getTime(String columnName) throws SQLException {
  return cs.getTime(columnName);
}
public Time getTime(String columnName, Calendar cal) throws SQLException {
  return cs.getTime(columnName, cal);
}
public Timestamp getTimestamp(int columnIndex) throws SQLException {
  return cs.getTimestamp(columnIndex);
}
public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
  return cs.getTimestamp(columnIndex, cal);
}
public Timestamp getTimestamp(String columnName) throws SQLException {
  return cs.getTimestamp(columnName);
}
public Timestamp getTimestamp(String columnName, Calendar cal) throws SQLException {
  return cs.getTimestamp(columnName, cal);
}
public int getType() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public InputStream getUnicodeStream(int columnIndex) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public InputStream getUnicodeStream(String columnName) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public URL getURL(int columnIndex) throws SQLException {
  return cs.getURL(columnIndex);
}
public URL getURL(String columnName) throws SQLException {
  return cs.getURL(columnName);
}
// --- Warnings, row state and cursor movement -------------------------------
// None of these have a meaningful counterpart on a CallableStatement (there is
// no row cursor to inspect or move), so all are unconditionally unsupported.
public SQLWarning getWarnings() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void insertRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean isAfterLast() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean isBeforeFirst() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean isFirst() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean isLast() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean last() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void moveToCurrentRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void moveToInsertRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean next() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean previous() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void refreshRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean relative(int rows) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean rowDeleted() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean rowInserted() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean rowUpdated() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void setFetchDirection(int direction) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void setFetchSize(int rows) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
// --- Mutators ---------------------------------------------------------------
// This adapter is strictly read-only: a CallableStatement's OUT parameters
// cannot be updated through a ResultSet, so every update* method is
// unconditionally unsupported.
public void updateArray(int columnIndex, Array x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateArray(String columnName, Array x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateAsciiStream(String columnName, InputStream x, int length) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBigDecimal(String columnName, BigDecimal x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBinaryStream(String columnName, InputStream x, int length) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBlob(int columnIndex, Blob x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBlob(String columnName, Blob x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBoolean(int columnIndex, boolean x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBoolean(String columnName, boolean x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateByte(int columnIndex, byte x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateByte(String columnName, byte x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBytes(int columnIndex, byte x[]) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateBytes(String columnName, byte x[]) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateCharacterStream(String columnName, Reader reader, int length) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateClob(int columnIndex, Clob x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateClob(String columnName, Clob x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateDate(int columnIndex, Date x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateDate(String columnName, Date x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateDouble(int columnIndex, double x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateDouble(String columnName, double x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateFloat(int columnIndex, float x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateFloat(String columnName, float x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateInt(int columnIndex, int x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateInt(String columnName, int x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateLong(int columnIndex, long x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateLong(String columnName, long x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateNull(int columnIndex) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateNull(String columnName) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateObject(int columnIndex, Object x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateObject(int columnIndex, Object x, int scale) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateObject(String columnName, Object x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateObject(String columnName, Object x, int scale) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateRef(int columnIndex, Ref x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateRef(String columnName, Ref x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateRow() throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateShort(int columnIndex, short x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateShort(String columnName, short x) throws SQLException {
  throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateString(int columnIndex, String x) throws SQLException {
throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateString(String columnName, String x) throws SQLException {
throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateTime(int columnIndex, Time x) throws SQLException {
throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateTime(String columnName, Time x) throws SQLException {
throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public void updateTimestamp(String columnName, Timestamp x) throws SQLException {
throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
public boolean wasNull() throws SQLException {
return cs.wasNull();
}
/**
 * Returns this object when it implements the requested interface, per the
 * {@link java.sql.Wrapper} contract. The previous auto-generated stub
 * always returned {@code null}, which violates that contract.
 *
 * @param iface the interface to unwrap to
 * @return this instance cast to {@code iface}
 * @throws SQLException if this object does not implement {@code iface}
 */
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
    if (iface.isInstance(this)) {
        return iface.cast(this);
    }
    throw new SQLException("Cannot unwrap to " + iface.getName());
}
/**
 * Reports whether {@link #unwrap(Class)} would succeed for the given
 * interface. The previous stub always returned {@code false}, which made
 * {@code unwrap} unreachable through the standard Wrapper protocol.
 *
 * @param iface the candidate interface
 * @return true if this object implements {@code iface}
 */
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
    return iface.isInstance(this);
}
// -------------------------------------------------------------------------
// JDBC 4.x methods. These were auto-generated stubs that silently returned
// null / ignored updates; they now follow the same fail-fast convention as
// the rest of this class, so callers get a clear error instead of silent
// data loss or a downstream NullPointerException.
// isClosed() and getHoldability() keep their original lenient behaviour
// because connection-pool and cleanup code commonly probes them.
// -------------------------------------------------------------------------
@Override
public RowId getRowId(int columnIndex) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public RowId getRowId(String columnLabel) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateRowId(int columnIndex, RowId x) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateRowId(String columnLabel, RowId x) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public int getHoldability() throws SQLException {
    // NOTE(review): 0 is not a valid ResultSet holdability constant
    // (HOLD_CURSORS_OVER_COMMIT=1, CLOSE_CURSORS_AT_COMMIT=2); kept as-is
    // because existing callers may rely on the lenient stub — confirm.
    return 0;
}
@Override
public boolean isClosed() throws SQLException {
    // NOTE(review): always reports "open"; the real closed state is not
    // tracked here — confirm against the wrapping statement's lifecycle.
    return false;
}
@Override
public void updateNString(int columnIndex, String nString) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNString(String columnLabel, String nString)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public NClob getNClob(int columnIndex) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public NClob getNClob(String columnLabel) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public SQLXML getSQLXML(int columnIndex) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public SQLXML getSQLXML(String columnLabel) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public String getNString(int columnIndex) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public String getNString(String columnLabel) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public Reader getNCharacterStream(int columnIndex) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public Reader getNCharacterStream(String columnLabel) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader,
        long length) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateClob(int columnIndex, Reader reader, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateClob(String columnLabel, Reader reader, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNClob(int columnIndex, Reader reader, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNClob(String columnLabel, Reader reader, long length)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNCharacterStream(int columnIndex, Reader x)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNCharacterStream(String columnLabel, Reader reader)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateCharacterStream(int columnIndex, Reader x)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBlob(int columnIndex, InputStream inputStream)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateBlob(String columnLabel, InputStream inputStream)
        throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateClob(int columnIndex, Reader reader) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateClob(String columnLabel, Reader reader) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNClob(int columnIndex, Reader reader) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public void updateNClob(String columnLabel, Reader reader) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
@Override
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
    throw new UnsupportedOperationException("CallableStatement does not support this method.");
}
}
| |
package de.ktran.anno1404warenrechner.data;
import android.content.SharedPreferences;
import android.support.annotation.NonNull;
import com.google.gson.Gson;
import org.greenrobot.eventbus.EventBus;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Singleton;
import de.ktran.anno1404warenrechner.App;
import de.ktran.anno1404warenrechner.R;
import de.ktran.anno1404warenrechner.event.ChainsDetailResultEvent;
import de.ktran.anno1404warenrechner.event.ChainsResultEvent;
import de.ktran.anno1404warenrechner.event.DataResponseEvent;
import de.ktran.anno1404warenrechner.event.GameListResultEvent;
import de.ktran.anno1404warenrechner.event.GameNameChangedEvent;
import de.ktran.anno1404warenrechner.event.MaterialResultsEvent;
import de.ktran.anno1404warenrechner.event.PopulationResultEvent;
import de.ktran.anno1404warenrechner.helpers.JavaCompat;
@Singleton
public class DataManager {
private final Gson gson;
private final App app;
private final SharedPreferences prefs;
private final EventBus bus;
private final List<Game> games = new ArrayList<>();
private final List<ProductionBuilding> registeredGoods = new ArrayList<>();
private static int last_id = 0;
public DataManager(App app, SharedPreferences prefs, Gson gson, EventBus bus) {
this.prefs = prefs;
this.app = app;
this.gson = gson;
this.bus = bus;
readFromStorage();
}
private void readFromStorage() {
final Set<String> data = prefs.getStringSet(app.getString(R.string.prefkey_data), new HashSet<>());
for (String json : data) {
final Game game = gson.fromJson(json, Game.class);
this.games.add(game);
last_id = Math.max(last_id, game.getId());
}
}
private void commitChanges() {
final Set<String> saveString = new HashSet<>();
JavaCompat.forEach(games, item -> saveString.add(gson.toJson(item)));
prefs.edit().putStringSet(app.getString(R.string.prefkey_data), saveString).apply();
}
public Game getGameById(int id) {
for (Game g : games) {
if (g.getId() == id) return g;
}
throw new IllegalStateException("Game with given id not found.");
}
private void onPopulationChange(Game game) {
postResult(new PopulationResultEvent(game));
fetchNeedsChainsResults(game);
onGameMetaDataChanged();
}
private void onGameMetaDataChanged() {
postResult(new GameListResultEvent(this.getSortedList()));
}
private void onProductionChainChanged(Game game, Goods goods) {
if (goods.getType() == Goods.Type.NEEDS || goods.getType() == Goods.Type.INTERMEDIARY)
fetchNeedsChainsResults(game);
}
/**
* Sets population count. Negative amount implies that the given amount concerns built houses
* @param game Game
* @param p PopulationType type
* @param amount Amount (signed)
*/
public void setPopulation(Game game, PopulationType p, int amount) {
if (amount < 0) {
game.population().setHouseCount(p, Math.abs(amount));
} else {
game.population().setPopulationCount(p, amount);
}
commitChanges();
onPopulationChange(game);
}
public boolean setBonus(Game game, ProductionBuilding building, int bonus) {
if (!game.setBonus(building, bonus)) return false;
commitChanges();
onProductionChainChanged(game, building.getProduces());
if (Goods.isMaterial(building.getProduces())) postResult(new MaterialResultsEvent(game));
JavaCompat.forEach(registeredGoods, b -> DataManager.this.fetchChainsDetailResults(
b.getProduces(), game)); //@workaround
return true;
}
public void setMaterialProduction(Game game, ProductionBuilding building, int value) {
if (game.getMaterialProductionCount(building) == value) return;
game.setOtherGoods(building, value);
commitChanges();
fetchChainsDetailResults(building.getProduces(), game);
postResult(new MaterialResultsEvent(game));
}
public void setBeggarPrince(Game game, int rank) {
if (rank == game.getBeggarPrince()) return;
game.setBeggarPrince(rank);
onPopulationChange(game);
}
public void setEnvoysFavour(Game game, int rank) {
if (rank == game.getEnvoysFavour()) return;
game.setEnvoysFavour(rank);
onPopulationChange(game);
}
public void createGame() {
last_id = last_id + 1;
games.add(
Game.newGame(last_id, app.getString(R.string.generic_game_title, last_id))
);
commitChanges();
onGameMetaDataChanged();
}
/**
* Remove game from list synchronously.
* @param id game_id
*/
public void removeGame(int id) {
final Iterator<Game> it = games.iterator();
while (it.hasNext()) {
if (it.next().getId() == id) {
it.remove();
break;
}
}
commitChanges();
onGameMetaDataChanged();
}
public void gameOpened(Game game) {
game.gameTouched();
commitChanges();
onGameMetaDataChanged();
}
public void setGameTitle(Game game, String gameTitle) {
game.setName(gameTitle);
onGameMetaDataChanged();
bus.post(new GameNameChangedEvent(game));
}
/**
* Get sorted list. Preferred async.
* @return Date-sorted list of games
*/
private List<Game> getSortedList() {
final ArrayList<Game> res = new ArrayList<>(games);
Collections.sort(res, (o1, o2) -> - (o1.getLastOpened().compareTo(o2.getLastOpened())));
return res;
}
public void fetchNeedsChainsResults(@NonNull final Game game) {
Task.doAsync(() -> {
final Logic logic = new Logic(game);
postResult(
new ChainsResultEvent(game, logic.calculateAllNeedsChains())
);
});
}
public void fetchChainsDetailResults(@NonNull final Goods goods, @NonNull Game game) {
Task.doAsync(() -> {
final Logic logic = new Logic(game);
postResult(
new ChainsDetailResultEvent(logic.calculateChainWithDependencies(goods), goods)
);
});
}
public void fetchGameList() {
Task.doAsync(() -> postResult(new GameListResultEvent(getSortedList())));
}
public void changeTotalCountOccidental(final Game game, final int totalCount, final int maxPop) {
Task.doAsync(() -> {
Logic logic = new Logic(game);
Map<PopulationType, Integer> res = logic.calculateAscensionRightsOccidental(Math.abs(totalCount), maxPop);
for (Map.Entry<PopulationType, Integer> e : res.entrySet()) {
game.population().setHouseCount(e.getKey(), e.getValue());
}
commitChanges();
onPopulationChange(game);
});
}
public void changeTotalCountOriental(final Game game, final int totalCount, final int maxPop) {
Task.doAsync(() -> {
Logic logic = new Logic(game);
Map<PopulationType, Integer> res = logic.calculateAscensionRightsOriental(Math.abs(totalCount), maxPop);
for (Map.Entry<PopulationType, Integer> e : res.entrySet()) {
game.population().setHouseCount(e.getKey(), e.getValue());
}
commitChanges();
onPopulationChange(game);
});
}
private void postResult(DataResponseEvent event) {
bus.postSticky(event);
}
public void registerUpdate(ProductionBuilding chain) {
registeredGoods.add(chain);
}
public void unregisterUpdate(ProductionBuilding chain) {
registeredGoods.remove(chain);
}
}
| |
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.table;
import java.sql.ResultSetMetaData;
import org.h2.api.ErrorCode;
import org.h2.command.Parser;
import org.h2.engine.Constants;
import org.h2.engine.Mode;
import org.h2.engine.Session;
import org.h2.expression.ConditionAndOr;
import org.h2.expression.Expression;
import org.h2.expression.ExpressionVisitor;
import org.h2.expression.SequenceValue;
import org.h2.expression.ValueExpression;
import org.h2.message.DbException;
import org.h2.result.Row;
import org.h2.schema.Schema;
import org.h2.schema.Sequence;
import org.h2.util.MathUtils;
import org.h2.util.StringUtils;
import org.h2.value.DataType;
import org.h2.value.Value;
import org.h2.value.ValueDate;
import org.h2.value.ValueInt;
import org.h2.value.ValueLong;
import org.h2.value.ValueNull;
import org.h2.value.ValueString;
import org.h2.value.ValueTime;
import org.h2.value.ValueTimestamp;
import org.h2.value.ValueUuid;
/**
* This class represents a column in a table.
*/
public class Column {
/**
* The name of the rowid pseudo column.
*/
public static final String ROWID = "_ROWID_";
/**
* This column is not nullable.
*/
public static final int NOT_NULLABLE =
ResultSetMetaData.columnNoNulls;
/**
* This column is nullable.
*/
public static final int NULLABLE =
ResultSetMetaData.columnNullable;
/**
* It is not known whether this column is nullable.
*/
public static final int NULLABLE_UNKNOWN =
ResultSetMetaData.columnNullableUnknown;
private final int type; // value type code (a Value.* constant)
private long precision; // max precision; enforced in validateConvertUpdateSequence when > 0
private int scale; // decimal scale; values are convertScale()'d to it
private int displaySize;
private Table table; // owning table, assigned via setTable()
private String name;
private int columnId; // index of this column within the owning table
private boolean nullable = true;
private Expression defaultExpression; // also holds the computed ("AS") expression when isComputed
private Expression checkConstraint; // column CHECK constraint(s), combined with AND
private String checkConstraintSQL; // SQL text of checkConstraint, emitted by getCreateSQL()
private String originalSQL; // type SQL as originally written (e.g. "IDENTITY"), if any
private boolean autoIncrement;
private long start; // sequence start value for auto-increment columns
private long increment; // sequence increment for auto-increment columns
private boolean convertNullToDefault; // substitute the default when NULL is inserted
private Sequence sequence; // sequence backing an auto-increment/identity column
private boolean isComputed; // true if defaultExpression is a computed expression
private TableFilter computeTableFilter; // filter used to evaluate computed expressions
private int selectivity; // 0 means "use Constants.SELECTIVITY_DEFAULT"
private SingleColumnResolver resolver; // resolves this column when evaluating checkConstraint
private String comment;
private boolean primaryKey;
/**
* Create a column using the data type's default precision, scale and
* display size (delegates to the five-argument constructor with -1 markers).
*/
public Column(String name, int type) {
this(name, type, -1, -1, -1);
}
/**
* Create a column. Passing -1 for precision, scale AND display size
* selects the defaults of the given data type.
*/
public Column(String name, int type, long precision, int scale,
int displaySize) {
this.name = name;
this.type = type;
// -1/-1/-1 is the "unspecified" marker used by the two-argument
// constructor: fall back to the data type's defaults.
if (precision == -1 && scale == -1 && displaySize == -1) {
DataType dt = DataType.getDataType(type);
precision = dt.defaultPrecision;
scale = dt.defaultScale;
displaySize = dt.defaultDisplaySize;
}
this.precision = precision;
this.scale = scale;
this.displaySize = displaySize;
}
/**
* Two columns are equal only when both are attached to the same table
* instance and have the same name; detached or unnamed columns never
* compare equal (except by identity).
*/
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
} else if (!(o instanceof Column)) {
return false;
}
Column other = (Column) o;
// columns not (yet) attached to a table cannot be compared by content
if (table == null || other.table == null ||
name == null || other.name == null) {
return false;
}
if (table != other.table) {
return false;
}
return name.equals(other.name);
}
/**
* Consistent with equals(): detached or unnamed columns hash to 0.
*/
@Override
public int hashCode() {
if (table == null || name == null) {
return 0;
}
return table.getId() ^ name.hashCode();
}
/**
* Create a copy of this column; the copy is not attached to any table.
*/
public Column getClone() {
Column newColumn = new Column(name, type, precision, scale, displaySize);
newColumn.copy(this);
return newColumn;
}
/**
* Convert a value to this column's type.
*
* @param v the value
* @return the value
*/
public Value convert(Value v) {
try {
return v.convertTo(type);
} catch (DbException e) {
// enrich plain conversion errors with table/column context so the
// user can tell WHICH column rejected the value
if (e.getErrorCode() == ErrorCode.DATA_CONVERSION_ERROR_1) {
String target = (table == null ? "" : table.getName() + ": ") +
getCreateSQL();
throw DbException.get(
ErrorCode.DATA_CONVERSION_ERROR_1,
v.getSQL() + " (" + target + ")");
}
throw e;
}
}
/** Whether this is a computed ("AS expression") column. */
boolean getComputed() {
return isComputed;
}
/**
* Compute the value of this computed column.
*
* @param session the session
* @param row the row
* @return the value
*/
synchronized Value computeValue(Session session, Row row) {
// synchronized: the shared computeTableFilter carries the current
// session and row, so concurrent evaluations must be serialized
computeTableFilter.setSession(session);
computeTableFilter.set(row);
return defaultExpression.getValue(session);
}
/**
* Set the default value in the form of a computed expression of other
* columns.
*
* @param expression the computed expression
*/
public void setComputedExpression(Expression expression) {
this.isComputed = true;
this.defaultExpression = expression;
}
/**
* Set the table and column id.
*
* @param table the table
* @param columnId the column index
*/
public void setTable(Table table, int columnId) {
this.table = table;
this.columnId = columnId;
}
/** @return the owning table, or null if not attached */
public Table getTable() {
return table;
}
/**
* Set the default expression.
*
* @param session the session
* @param defaultExpression the default expression
*/
public void setDefaultExpression(Session session,
Expression defaultExpression) {
// also to test that no column names are used
if (defaultExpression != null) {
defaultExpression = defaultExpression.optimize(session);
// fold constant defaults once, so they are not re-evaluated per row
if (defaultExpression.isConstant()) {
defaultExpression = ValueExpression.get(
defaultExpression.getValue(session));
}
}
this.defaultExpression = defaultExpression;
}
/** @return the index of this column within the owning table */
public int getColumnId() {
return columnId;
}
/** @return the quoted column name, suitable for use in SQL */
public String getSQL() {
return Parser.quoteIdentifier(name);
}
/** @return the (unquoted) column name */
public String getName() {
return name;
}
/** @return the value type code (a Value.* constant) */
public int getType() {
return type;
}
/** @return the precision of this column */
public long getPrecision() {
return precision;
}
/** Set the precision of this column. */
public void setPrecision(long p) {
precision = p;
}
/** @return the display size of this column */
public int getDisplaySize() {
return displaySize;
}
/** @return the decimal scale of this column */
public int getScale() {
return scale;
}
/** Set whether this column accepts NULL values. */
public void setNullable(boolean b) {
nullable = b;
}
/**
* Validate the value, convert it if required, and update the sequence value
* if required. If the value is null, the default value (NULL if no default
* is set) is returned. Check constraints are validated as well.
*
* @param session the session
* @param value the value or null
* @return the new or converted value
*/
public Value validateConvertUpdateSequence(Session session, Value value) {
// null (not SQL NULL) means "no value supplied": substitute the default
if (value == null) {
if (defaultExpression == null) {
value = ValueNull.INSTANCE;
} else {
// synchronized: serialize evaluation of the shared default
// expression (see computeValue)
synchronized (this) {
value = defaultExpression.getValue(session).convertTo(type);
}
// identity columns: remember the generated value for the session
if (primaryKey) {
session.setLastIdentity(value);
}
}
}
Mode mode = session.getDatabase().getMode();
if (value == ValueNull.INSTANCE) {
// NULL_TO_DEFAULT columns replace an explicit NULL with the default
if (convertNullToDefault) {
synchronized (this) {
value = defaultExpression.getValue(session).convertTo(type);
}
}
if (value == ValueNull.INSTANCE && !nullable) {
// compatibility mode: substitute a type-appropriate "zero"
// instead of rejecting the NULL
if (mode.convertInsertNullToZero) {
DataType dt = DataType.getDataType(type);
if (dt.decimal) {
value = ValueInt.get(0).convertTo(type);
} else if (dt.type == Value.TIMESTAMP) {
value = ValueTimestamp.fromMillis(session.getTransactionStart());
} else if (dt.type == Value.TIME) {
value = ValueTime.fromNanos(0);
} else if (dt.type == Value.DATE) {
value = ValueDate.fromMillis(session.getTransactionStart());
} else {
value = ValueString.get("").convertTo(type);
}
} else {
throw DbException.get(ErrorCode.NULL_NOT_ALLOWED, name);
}
}
}
if (checkConstraint != null) {
// evaluate the CHECK constraint against the candidate value
resolver.setValue(value);
Value v;
synchronized (this) {
v = checkConstraint.getValue(session);
}
// Both TRUE and NULL are ok
if (Boolean.FALSE.equals(v.getBoolean())) {
throw DbException.get(
ErrorCode.CHECK_CONSTRAINT_VIOLATED_1,
checkConstraint.getSQL());
}
}
value = value.convertScale(mode.convertOnlyToSmallerScale, scale);
if (precision > 0) {
if (!value.checkPrecision(precision)) {
// truncate the value text for the error message
// NOTE(review): checks > 127 but keeps 128 chars — slightly
// inconsistent, though substring is safe; confirm intent.
String s = value.getTraceSQL();
if (s.length() > 127) {
s = s.substring(0, 128) + "...";
}
throw DbException.get(ErrorCode.VALUE_TOO_LONG_2,
getCreateSQL(), s + " (" + value.getPrecision() + ")");
}
}
updateSequenceIfRequired(session, value);
return value;
}
/**
* Advance the backing sequence when an explicitly inserted value is ahead
* of it, so the next generated value does not collide with inserted ones.
*/
private void updateSequenceIfRequired(Session session, Value value) {
if (sequence != null) {
long current = sequence.getCurrentValue();
long inc = sequence.getIncrement();
long now = value.getLong();
boolean update = false;
// only move the sequence forward relative to its own direction
if (inc > 0 && now > current) {
update = true;
} else if (inc < 0 && now < current) {
update = true;
}
if (update) {
sequence.modify(now + inc, null, null, null);
session.setLastIdentity(ValueLong.get(now));
sequence.flush(session);
}
}
}
/**
* Convert the auto-increment flag to a sequence that is linked with this
* table.
*
* @param session the session
* @param schema the schema where the sequence should be generated
* @param id the object id
* @param temporary true if the sequence is temporary and does not need to
* be stored
*/
public void convertAutoIncrementToSequence(Session session, Schema schema,
int id, boolean temporary) {
if (!autoIncrement) {
DbException.throwInternalError();
}
// rewrite the user-visible type to its base type; the identity aspect
// is now represented by the sequence instead
if ("IDENTITY".equals(originalSQL)) {
originalSQL = "BIGINT";
} else if ("SERIAL".equals(originalSQL)) {
originalSQL = "INT";
}
// generate a schema-unique sequence name from a random UUID
String sequenceName;
while (true) {
ValueUuid uuid = ValueUuid.getNewRandom();
String s = uuid.getString();
s = s.replace('-', '_').toUpperCase();
sequenceName = "SYSTEM_SEQUENCE_" + s;
if (schema.findSequence(sequenceName) == null) {
break;
}
}
Sequence seq = new Sequence(schema, id, sequenceName, start, increment);
if (temporary) {
// temporary sequences are not persisted in the database meta data
seq.setTemporary(true);
} else {
session.getDatabase().addSchemaObject(session, seq);
}
// the column is no longer "auto-increment"; it now has the sequence's
// next value as its default expression
setAutoIncrement(false, 0, 0);
SequenceValue seqValue = new SequenceValue(seq);
setDefaultExpression(session, seqValue);
setSequence(seq);
}
/**
* Prepare all expressions of this column.
*
* @param session the session
*/
public void prepareExpression(Session session) {
if (defaultExpression != null) {
// map column references in the default/computed expression to this
// table, then optimize the expression once
computeTableFilter = new TableFilter(session, table, null, false, null);
defaultExpression.mapColumns(computeTableFilter, 0);
defaultExpression = defaultExpression.optimize(session);
}
}
/**
 * Get the SQL fragment that defines this column, as used inside a CREATE
 * TABLE statement: quoted name, type (with precision/scale where
 * applicable), default or computed expression, and column options.
 *
 * @return the CREATE SQL fragment for this column
 */
public String getCreateSQL() {
    StringBuilder buff = new StringBuilder();
    if (name != null) {
        buff.append(Parser.quoteIdentifier(name)).append(' ');
    }
    if (originalSQL != null) {
        // keep the type exactly as the user originally wrote it
        buff.append(originalSQL);
    } else {
        buff.append(DataType.getDataType(type).name);
        switch (type) {
        case Value.DECIMAL:
            buff.append('(').append(precision).append(", ").append(scale).append(')');
            break;
        case Value.BYTES:
        case Value.STRING:
        case Value.STRING_IGNORECASE:
        case Value.STRING_FIXED:
            // only emit a length when it is actually limited
            if (precision < Integer.MAX_VALUE) {
                buff.append('(').append(precision).append(')');
            }
            break;
        default:
            // other types carry no precision/scale suffix
        }
    }
    if (defaultExpression != null) {
        String sql = defaultExpression.getSQL();
        if (sql != null) {
            // computed columns use AS; plain defaults use DEFAULT
            // (was "else if (defaultExpression != null)", which is always
            // true inside this outer null check - simplified to "else")
            if (isComputed) {
                buff.append(" AS ").append(sql);
            } else {
                buff.append(" DEFAULT ").append(sql);
            }
        }
    }
    if (!nullable) {
        buff.append(" NOT NULL");
    }
    if (convertNullToDefault) {
        buff.append(" NULL_TO_DEFAULT");
    }
    if (sequence != null) {
        buff.append(" SEQUENCE ").append(sequence.getSQL());
    }
    if (selectivity != 0) {
        buff.append(" SELECTIVITY ").append(selectivity);
    }
    if (comment != null) {
        buff.append(" COMMENT ").append(StringUtils.quoteStringSQL(comment));
    }
    if (checkConstraint != null) {
        buff.append(" CHECK ").append(checkConstraintSQL);
    }
    return buff.toString();
}
/** @return whether this column allows NULL values */
public boolean isNullable() {
    return nullable;
}
/** Remember the data type SQL exactly as the user wrote it. */
public void setOriginalSQL(String original) {
    originalSQL = original;
}
/** @return the user-written data type SQL, or null if not recorded */
public String getOriginalSQL() {
    return originalSQL;
}
/** @return the default (or computed) expression, or null if none */
public Expression getDefaultExpression() {
    return defaultExpression;
}
/** @return whether this column is an auto-increment (identity) column */
public boolean isAutoIncrement() {
    return autoIncrement;
}
/**
 * Set the autoincrement flag and related properties of this column.
 *
 * @param autoInc the new autoincrement flag
 * @param start the sequence start value
 * @param increment the sequence increment
 */
public void setAutoIncrement(boolean autoInc, long start, long increment) {
    this.autoIncrement = autoInc;
    this.start = start;
    this.increment = increment;
    // NOTE(review): nullable is forced to false even when autoInc is false —
    // confirm callers rely on this side effect.
    this.nullable = false;
    if (autoInc) {
        // Identity columns substitute the next sequence value for NULL.
        convertNullToDefault = true;
    }
}
/** Set whether a NULL value is replaced by the default expression on insert. */
public void setConvertNullToDefault(boolean convert) {
    this.convertNullToDefault = convert;
}
/**
 * Rename the column. This method will only set the column name to the new
 * value.
 *
 * @param newName the new column name
 */
public void rename(String newName) {
    this.name = newName;
}
/** Attach the sequence that backs this (auto-increment) column. */
public void setSequence(Sequence sequence) {
    this.sequence = sequence;
}
/** @return the backing sequence, or null if this column has none */
public Sequence getSequence() {
    return sequence;
}
/**
 * Get the selectivity of the column. Selectivity 100 means values are
 * unique, 10 means every distinct value appears 10 times on average.
 *
 * @return the selectivity
 */
public int getSelectivity() {
    // 0 means "not set"; fall back to the engine default.
    if (selectivity == 0) {
        return Constants.SELECTIVITY_DEFAULT;
    }
    return selectivity;
}
/**
 * Set the new selectivity of a column.
 *
 * @param selectivity the new value
 */
public void setSelectivity(int selectivity) {
    // Clamp into the valid range [0, 100] before storing.
    this.selectivity = Math.min(100, Math.max(0, selectivity));
}
/**
 * Add a check constraint expression to this column. An existing check
 * constraint constraint is added using AND.
 *
 * @param session the session
 * @param expr the (additional) constraint
 */
public void addCheckConstraint(Session session, Expression expr) {
    if (expr == null) {
        return;
    }
    resolver = new SingleColumnResolver(this);
    // Temporarily give an unnamed column the placeholder name "VALUE" so the
    // expression can be resolved; synchronized because the name field is
    // swapped in place and restored.
    synchronized (this) {
        String oldName = name;
        if (name == null) {
            name = "VALUE";
        }
        expr.mapColumns(resolver, 0);
        name = oldName;
    }
    expr = expr.optimize(session);
    // Evaluate once with NULL to verify the expression only references this
    // column (an unmapped reference would fail here).
    resolver.setValue(ValueNull.INSTANCE);
    // check if the column is mapped
    synchronized (this) {
        expr.getValue(session);
    }
    if (checkConstraint == null) {
        checkConstraint = expr;
    } else {
        // Combine with any existing constraint using AND.
        checkConstraint = new ConditionAndOr(ConditionAndOr.AND, checkConstraint, expr);
    }
    checkConstraintSQL = getCheckConstraintSQL(session, name);
}
/**
 * Remove the check constraint if there is one.
 */
public void removeCheckConstraint() {
    checkConstraint = null;
    checkConstraintSQL = null;
}
/**
 * Get the check constraint expression for this column if set.
 *
 * @param session the session
 * @param asColumnName the column name to use
 * @return the constraint expression
 */
public Expression getCheckConstraint(Session session, String asColumnName) {
    if (checkConstraint == null) {
        return null;
    }
    Parser parser = new Parser(session);
    String sql;
    // Temporarily rename the column so the generated SQL references
    // asColumnName; synchronized because name is swapped in place.
    synchronized (this) {
        String oldName = name;
        name = asColumnName;
        sql = checkConstraint.getSQL();
        name = oldName;
    }
    // Re-parse the SQL to get an independent expression tree bound to the
    // requested column name.
    Expression expr = parser.parseExpression(sql);
    return expr;
}
/** @return the SQL of the default expression, or null if there is none */
String getDefaultSQL() {
    return defaultExpression == null ? null : defaultExpression.getSQL();
}
/** @return the precision, saturated to fit into an int */
int getPrecisionAsInt() {
    return MathUtils.convertLongToInt(precision);
}
/** @return the DataType descriptor for this column's value type */
DataType getDataType() {
    return DataType.getDataType(type);
}
/**
 * Get the check constraint SQL snippet.
 *
 * @param session the session
 * @param asColumnName the column name to use
 * @return the SQL snippet
 */
String getCheckConstraintSQL(Session session, String asColumnName) {
    Expression constraint = getCheckConstraint(session, asColumnName);
    return constraint == null ? "" : constraint.getSQL();
}
/** Set the column comment (stored in the CREATE SQL). */
public void setComment(String comment) {
    this.comment = comment;
}
/** @return the column comment, or null if none */
public String getComment() {
    return comment;
}
/** Mark (or unmark) this column as part of the primary key. */
public void setPrimaryKey(boolean primaryKey) {
    this.primaryKey = primaryKey;
}
/**
 * Visit the default expression, the check constraint, and the sequence (if
 * any).
 *
 * @param visitor the visitor
 * @return true if every visited expression returned true, or if there are
 *         no expressions
 */
boolean isEverything(ExpressionVisitor visitor) {
    // Register the backing sequence as a dependency when asked for them.
    if (visitor.getType() == ExpressionVisitor.GET_DEPENDENCIES && sequence != null) {
        visitor.getDependencies().add(sequence);
    }
    // Short-circuits: the check constraint is only visited if the default
    // expression (when present) accepted the visitor.
    boolean defaultOk = defaultExpression == null || defaultExpression.isEverything(visitor);
    return defaultOk && (checkConstraint == null || checkConstraint.isEverything(visitor));
}
/** @return whether this column is part of the primary key */
public boolean isPrimaryKey() {
    return primaryKey;
}
@Override
public String toString() {
    // The column name is the most useful identifier for debugging.
    return name;
}
/**
 * Check whether the new column is of the same type and not more restricted
 * than this column.
 *
 * @param newColumn the new (target) column
 * @return true if the new column is compatible
 */
public boolean isWideningConversion(Column newColumn) {
    // All checks below are pure field comparisons with no side effects, so
    // they can be grouped; any failing condition rejects the conversion.
    if (type != newColumn.type || precision > newColumn.precision || scale != newColumn.scale) {
        return false;
    }
    // A nullable column cannot be narrowed to NOT NULL.
    if (nullable && !newColumn.nullable) {
        return false;
    }
    if (primaryKey != newColumn.primaryKey) {
        return false;
    }
    // NULL-to-default must match on both sides, and in fact must be off on
    // both (the combined original checks only pass when both are false).
    if (convertNullToDefault != newColumn.convertNullToDefault
            || convertNullToDefault || newColumn.convertNullToDefault) {
        return false;
    }
    // Identity columns, check constraints, default expressions and computed
    // columns disqualify either side.
    if (autoIncrement || newColumn.autoIncrement) {
        return false;
    }
    if (checkConstraint != null || newColumn.checkConstraint != null) {
        return false;
    }
    if (defaultExpression != null || newColumn.defaultExpression != null) {
        return false;
    }
    return !isComputed && !newColumn.isComputed;
}
/**
 * Copy the data of the source column into the current column.
 *
 * @param source the source column
 */
public void copy(Column source) {
    // Shallow field-by-field copy; expressions and the sequence are shared
    // with the source, not cloned.
    checkConstraint = source.checkConstraint;
    checkConstraintSQL = source.checkConstraintSQL;
    displaySize = source.displaySize;
    name = source.name;
    precision = source.precision;
    scale = source.scale;
    // table is not set
    // columnId is not set
    nullable = source.nullable;
    defaultExpression = source.defaultExpression;
    originalSQL = source.originalSQL;
    // autoIncrement, start, increment is not set
    convertNullToDefault = source.convertNullToDefault;
    sequence = source.sequence;
    comment = source.comment;
    computeTableFilter = source.computeTableFilter;
    isComputed = source.isComputed;
    selectivity = source.selectivity;
    primaryKey = source.primaryKey;
}
}
| |
/**
* Copyright 2017 Equipment & Tool Institute
*/
package net.soliddesign.iumpr.controllers;
import static net.soliddesign.iumpr.controllers.Controller.Ending.ABORTED;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import javax.swing.JOptionPane;
import net.soliddesign.iumpr.modules.BannerModule;
import net.soliddesign.iumpr.modules.BannerModule.Type;
import net.soliddesign.iumpr.modules.ComparisonModule;
import net.soliddesign.iumpr.modules.DTCModule;
import net.soliddesign.iumpr.modules.DateTimeModule;
import net.soliddesign.iumpr.modules.DiagnosticReadinessModule;
import net.soliddesign.iumpr.modules.EngineSpeedModule;
import net.soliddesign.iumpr.modules.VehicleInformationModule;
/**
* The {@link Controller} that Collects the Vehicle Information and write the
* Data Plate to the report. This corresponds to Function B in the process
* document
*
* @author Matt Gumbel (matt@soliddesign.net)
*
*/
public class DataPlateController extends Controller {

    // Module used to request and clear Diagnostic Trouble Codes (DTCs).
    private final DTCModule dtcModule;

    /**
     * Constructor
     */
    public DataPlateController() {
        this(Executors.newSingleThreadScheduledExecutor(), new EngineSpeedModule(), new BannerModule(Type.DATA_PLATE),
                new DateTimeModule(), new VehicleInformationModule(), new DiagnosticReadinessModule(), new DTCModule(),
                new ComparisonModule());
    }

    /**
     * Constructor exposed for testing
     *
     * @param executor
     *            the {@link ScheduledExecutorService}
     * @param engineSpeedModule
     *            the {@link EngineSpeedModule}
     * @param bannerModule
     *            the {@link BannerModule}
     * @param dateTimeModule
     *            the {@link DateTimeModule}
     * @param vehicleInformationModule
     *            the {@link VehicleInformationModule}
     * @param diagnosticReadinessModule
     *            the {@link DiagnosticReadinessModule}
     * @param dtcModule
     *            the {@link DTCModule}
     * @param comparisonModule
     *            the {@link ComparisonModule}
     */
    public DataPlateController(ScheduledExecutorService executor, EngineSpeedModule engineSpeedModule,
            BannerModule bannerModule, DateTimeModule dateTimeModule, VehicleInformationModule vehicleInformationModule,
            DiagnosticReadinessModule diagnosticReadinessModule, DTCModule dtcModule,
            ComparisonModule comparisonModule) {
        super(executor, engineSpeedModule, bannerModule, dateTimeModule, vehicleInformationModule,
                diagnosticReadinessModule, comparisonModule);
        this.dtcModule = dtcModule;
    }

    @Override
    protected int getTotalSteps() {
        // 24 process-document steps plus the vehicle comparison performed by
        // the superclass.
        return 24 + 4; // +4 is for the compareToVehicle;
    }

    /**
     * Runs Function B of the process document: gathers vehicle information,
     * optionally clears codes on a new report file, and writes the Data Plate
     * sections to the report. Steps below follow the process-document
     * numbering. Statement order is the protocol — do not reorder.
     *
     * @throws Throwable if any step fails unexpectedly
     */
    @Override
    protected void run() throws Throwable {
        dtcModule.setJ1939(getJ1939());
        // Step 1 is handled in the super
        // Step 2 is done in the UI
        // Step 3 is handled by the adapter
        // Step 4 we are assuming SA0 is Function0
        // Steps 5 - 11 is done by the the super
        // Steps 12 & 13
        incrementProgress("Generating Header");
        getBannerModule().reportHeader(getListener());
        // Step 14A - File name and access
        addBlankLineToReport();
        incrementProgress("Gathering File Information");
        getReportFileModule().reportFileInformation(getListener());
        // Step 14B - Connection Speed
        addBlankLineToReport();
        incrementProgress("Reading Connection Speed");
        getVehicleInformationModule().reportConnectionSpeed(getListener());
        // Steps 15-17 - Address Claim
        addBlankLineToReport();
        incrementProgress("Address Claim");
        getVehicleInformationModule().reportAddressClaim(getListener());
        // Steps 18/19
        addBlankLineToReport();
        incrementProgress("Requesting VIN");
        getVehicleInformationModule().reportVin(getListener());
        // Steps 18/19
        addBlankLineToReport();
        incrementProgress("Requesting Calibration Information");
        getVehicleInformationModule().reportCalibrationInformation(getListener());
        // Steps 20/21
        addBlankLineToReport();
        incrementProgress("Requesting DM21");
        getDiagnosticReadinessModule().reportDM21(getListener(), getReportFileModule().getMinutesSinceCodeClear());
        // Steps 22 (23 is missing?) 24
        addBlankLineToReport();
        incrementProgress("Requesting HD OBD Modules");
        List<Integer> obdModules = getDiagnosticReadinessModule().getOBDModules(getListener());
        if (obdModules.isEmpty()) {
            // No OBD modules means the rest of the report is meaningless;
            // abort (the remaining steps still run so the report is complete).
            getListener().onMessage("No HD OBD Modules were detected.", "No HD OBD Modules",
                    JOptionPane.ERROR_MESSAGE);
            setEnding(ABORTED);
        }
        // Step 25
        addBlankLineToReport();
        incrementProgress("Requesting Component Identification");
        getVehicleInformationModule().reportComponentIdentification(getListener());
        // Step 26 we already did in Step 14
        // Steps 27 & 28 we are skipping as this was handled by the UI
        // Steps 29-33
        addBlankLineToReport();
        if (getReportFileModule().isNewFile()) {
            // Codes may only be cleared with Key ON, Engine OFF; block until
            // the operator complies.
            if (!getEngineSpeedModule().isEngineNotRunning()) {
                getListener().onUrgentMessage("Please turn the Engine OFF with Key ON.", "Adjust Key Switch",
                        JOptionPane.WARNING_MESSAGE);
                while (!getEngineSpeedModule().isEngineNotRunning()) {
                    updateProgress("Waiting for Key ON, Engine OFF...");
                    Thread.sleep(500);
                }
            }
            incrementProgress("Clearing Active Codes");
            boolean dm11Response = dtcModule.reportDM11(getListener(), obdModules);
            if (!dm11Response) {
                getListener().onMessage("The Diagnostic Trouble Codes were unable to be cleared.",
                        "Clearing DTCs Failed", JOptionPane.ERROR_MESSAGE);
                setEnding(ABORTED);
            }
        } else {
            // Existing report file: codes were cleared on a previous run.
            incrementProgress("Existing file; Codes Not Cleared");
            getListener().onResult("Existing file; Codes Not Cleared");
        }
        // Step 34
        addBlankLineToReport();
        incrementProgress("Requesting DM5");
        boolean dm5Response = getDiagnosticReadinessModule().reportDM5(getListener());
        if (!dm5Response) {
            // Step 38 Abort if no response
            getListener().onMessage("There were no DM5s received.", "Communications Error",
                    JOptionPane.ERROR_MESSAGE);
            setEnding(ABORTED);
        }
        // Step 35
        addBlankLineToReport();
        incrementProgress("Requesting DM26");
        boolean dm26Response = getDiagnosticReadinessModule().reportDM26(getListener());
        if (!dm26Response) {
            // Step 38 Abort if no response
            getListener().onMessage("There were no DM26s received.", "Communications Error",
                    JOptionPane.ERROR_MESSAGE);
            setEnding(ABORTED);
        }
        // Step 36
        addBlankLineToReport();
        incrementProgress("Requesting DM20");
        boolean dm20Response = getDiagnosticReadinessModule().reportDM20(getListener());
        if (!dm20Response) {
            // Step 38 Abort if no response
            getListener().onMessage("There were no DM20s received.", "Communications Error",
                    JOptionPane.ERROR_MESSAGE);
            setEnding(ABORTED);
        }
        // Step 37 is handled automatically by our design
        // Step 39 Save DM5/DM20 - by including them in the report, they are
        boolean dtcsPresent = false;
        // Step 40
        addBlankLineToReport();
        incrementProgress("Requesting DM6");
        dtcsPresent |= dtcModule.reportDM6(getListener());
        // Step 40
        addBlankLineToReport();
        incrementProgress("Requesting DM12");
        dtcsPresent |= dtcModule.reportDM12(getListener());
        // Step 40
        addBlankLineToReport();
        incrementProgress("Requesting DM23");
        dtcsPresent |= dtcModule.reportDM23(getListener());
        // Step 40
        addBlankLineToReport();
        incrementProgress("Requesting DM28");
        dtcsPresent |= dtcModule.reportDM28(getListener());
        // Step 41-42 - Notify the user but complete report
        if (dtcsPresent) {
            getListener().onMessage("There were Diagnostic Trouble Codes reported.", "DTCs Exist",
                    JOptionPane.WARNING_MESSAGE);
        }
        // Step 43
        addBlankLineToReport();
        incrementProgress("Requesting DM21");
        getDiagnosticReadinessModule().reportDM21(getListener(), getReportFileModule().getMinutesSinceCodeClear());
        // Step 44
        addBlankLineToReport();
        incrementProgress("Reading Vehicle Distance");
        getVehicleInformationModule().reportVehicleDistance(getListener());
        // Step 45
        addBlankLineToReport();
        incrementProgress("Requesting Engine Hours");
        getVehicleInformationModule().reportEngineHours(getListener());
        // Step 46
        addBlankLineToReport();
        incrementProgress("Generating Quality Information");
        getReportFileModule().reportAndResetCommunicationQuality(getListener());
        // Step 47
        getReportFileModule().setNewFile(false);
        // Step 48 is handled by the super
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test.functional;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.metadata.MetadataTable;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.minicluster.ServerType;
import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
import org.apache.accumulo.minicluster.impl.ProcessReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterators;
public class DurabilityIT extends ConfigurableMacIT {
private static final Logger log = LoggerFactory.getLogger(DurabilityIT.class);
@Override
public void configure(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSite) {
hadoopCoreSite.set("fs.file.impl", RawLocalFileSystem.class.getName());
cfg.setProperty(Property.INSTANCE_ZK_TIMEOUT, "15s");
cfg.setNumTservers(1);
}
static final long N = 100000;
private String[] init() throws Exception {
String[] tableNames = getUniqueNames(4);
Connector c = getConnector();
TableOperations tableOps = c.tableOperations();
createTable(tableNames[0]);
createTable(tableNames[1]);
createTable(tableNames[2]);
createTable(tableNames[3]);
// default is sync
tableOps.setProperty(tableNames[1], Property.TABLE_DURABILITY.getKey(), "flush");
tableOps.setProperty(tableNames[2], Property.TABLE_DURABILITY.getKey(), "log");
tableOps.setProperty(tableNames[3], Property.TABLE_DURABILITY.getKey(), "none");
return tableNames;
}
private void cleanup(String[] tableNames) throws Exception {
Connector c = getConnector();
for (String tableName : tableNames) {
c.tableOperations().delete(tableName);
}
}
private void createTable(String tableName) throws Exception {
TableOperations tableOps = getConnector().tableOperations();
tableOps.create(tableName);
}
@Test(timeout = 2 * 60 * 1000)
public void testWriteSpeed() throws Exception {
TableOperations tableOps = getConnector().tableOperations();
String tableNames[] = init();
// write some gunk, delete the table to keep that table from messing with the performance numbers of successive calls
// sync
long t0 = writeSome(tableNames[0], N);
tableOps.delete(tableNames[0]);
// flush
long t1 = writeSome(tableNames[1], N);
tableOps.delete(tableNames[1]);
// log
long t2 = writeSome(tableNames[2], N);
tableOps.delete(tableNames[2]);
// none
long t3 = writeSome(tableNames[3], N);
tableOps.delete(tableNames[3]);
System.out.println(String.format("sync %d flush %d log %d none %d", t0, t1, t2, t3));
assertTrue("flush should be faster than sync", t0 > t1);
assertTrue("log should be faster than flush", t1 > t2);
assertTrue("no durability should be faster than log", t2 > t3);
}
@Test(timeout = 4 * 60 * 1000)
public void testSync() throws Exception {
String tableNames[] = init();
// sync table should lose nothing
writeSome(tableNames[0], N);
restartTServer();
assertEquals(N, readSome(tableNames[0]));
cleanup(tableNames);
}
@Test(timeout = 4 * 60 * 1000)
public void testFlush() throws Exception {
String tableNames[] = init();
// flush table won't lose anything since we're not losing power/dfs
writeSome(tableNames[1], N);
restartTServer();
assertEquals(N, readSome(tableNames[1]));
cleanup(tableNames);
}
@Test(timeout = 4 * 60 * 1000)
public void testLog() throws Exception {
String tableNames[] = init();
// we're probably going to lose something the the log setting
writeSome(tableNames[2], N);
restartTServer();
long numResults = readSome(tableNames[2]);
assertTrue("Expected " + N + " >= " + numResults, N >= numResults);
cleanup(tableNames);
}
@Test(timeout = 4 * 60 * 1000)
public void testNone() throws Exception {
String tableNames[] = init();
// probably won't get any data back without logging
writeSome(tableNames[3], N);
restartTServer();
long numResults = readSome(tableNames[3]);
assertTrue("Expected " + N + " >= " + numResults, N >= numResults);
cleanup(tableNames);
}
@Test(timeout = 4 * 60 * 1000)
public void testIncreaseDurability() throws Exception {
Connector c = getConnector();
String tableName = getUniqueNames(1)[0];
c.tableOperations().create(tableName);
c.tableOperations().setProperty(tableName, Property.TABLE_DURABILITY.getKey(), "none");
writeSome(tableName, N);
restartTServer();
long numResults = readSome(tableName);
assertTrue("Expected " + N + " >= " + numResults, N >= numResults);
c.tableOperations().setProperty(tableName, Property.TABLE_DURABILITY.getKey(), "sync");
writeSome(tableName, N);
restartTServer();
assertTrue(N == readSome(tableName));
}
private static Map<String,String> map(Iterable<Entry<String,String>> entries) {
Map<String,String> result = new HashMap<>();
for (Entry<String,String> entry : entries) {
result.put(entry.getKey(), entry.getValue());
}
return result;
}
@Test(timeout = 4 * 60 * 1000)
public void testMetaDurability() throws Exception {
Connector c = getConnector();
String tableName = getUniqueNames(1)[0];
c.instanceOperations().setProperty(Property.TABLE_DURABILITY.getKey(), "none");
Map<String,String> props = map(c.tableOperations().getProperties(MetadataTable.NAME));
assertEquals("sync", props.get(Property.TABLE_DURABILITY.getKey()));
c.tableOperations().create(tableName);
props = map(c.tableOperations().getProperties(tableName));
assertEquals("none", props.get(Property.TABLE_DURABILITY.getKey()));
restartTServer();
assertTrue(c.tableOperations().exists(tableName));
}
private long readSome(String table) throws Exception {
return Iterators.size(getConnector().createScanner(table, Authorizations.EMPTY).iterator());
}
private void restartTServer() throws Exception {
for (ProcessReference proc : cluster.getProcesses().get(ServerType.TABLET_SERVER)) {
cluster.killProcess(ServerType.TABLET_SERVER, proc);
}
cluster.start();
}
private long writeSome(String table, long count) throws Exception {
int iterations = 5;
long[] attempts = new long[iterations];
for (int attempt = 0; attempt < iterations; attempt++) {
long now = System.currentTimeMillis();
Connector c = getConnector();
BatchWriter bw = c.createBatchWriter(table, null);
for (int i = 1; i < count + 1; i++) {
Mutation m = new Mutation("" + i);
m.put("", "", "");
bw.addMutation(m);
if (i % (Math.max(1, count / 100)) == 0) {
bw.flush();
}
}
bw.close();
attempts[attempt] = System.currentTimeMillis() - now;
}
Arrays.sort(attempts);
log.info("Attempt durations: {}", Arrays.toString(attempts));
// Return the median duration
return attempts[2];
}
}
| |
/*
* Licensed to waterwave under one or more contributor
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package waterwave.net.nio;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import shui.common.buffer.BufferPoolNIO;
import waterwave.net.nio.define.NioClientDataDealer;
import waterwave.net.nio.define.NioDataDealerFactory;
public class NioClient extends NioService {

    // Connection/read timeout in milliseconds.
    final static int TIMEOUT = 3000 * 1000;
    // Socket receive/send buffer sizes applied in setSocket().
    final static int SO_RCVBUF = 32 * 1024;
    final static int SO_SNDBUF = 32 * 1024;

    // Single selector thread that dispatches connect/read/write readiness.
    DispatcherReader d;
    protected NioDataDealerFactory nioDataDealerFactory;
    // Pool supplying the ByteBuffers used for socket I/O.
    private BufferPoolNIO bp;

    /**
     * Create the client and start its dispatcher thread.
     *
     * @param es worker pool used to run read handlers off the selector thread
     * @param bp buffer pool for socket I/O
     * @param nioDataDealerFactory factory for per-connection data dealers
     * @throws IOException if the selector cannot be opened
     */
    public NioClient(ExecutorService es, BufferPoolNIO bp, NioDataDealerFactory nioDataDealerFactory) throws IOException {
        // ExecutorService channelWorkers = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors(), Executors.defaultThreadFactory());
        this.nioDataDealerFactory = nioDataDealerFactory;
        this.bp = bp;
        d = new DispatcherReader(es);
        d.start();
    }

    /**
     * Open a non-blocking connection and wrap it in a channel. The channel is
     * NOT yet registered with the dispatcher; see {@link #connect}.
     *
     * @param dealer the data dealer, or null to get one from the factory
     */
    public NioClientChannel createConnect(InetAddress ip, int port, NioClientDataDealer dealer) throws IOException {
        SocketChannel sc = connect0(ip, port);
        if(dealer == null) {
            dealer = nioDataDealerFactory.getNioClientDataDealer();
        }
        NioClientChannel c = new NioClientChannel(sc, bp, dealer);
        return c;
    }

    /** Register the channel with the dispatcher for OP_CONNECT. */
    public NioClientChannel connect(final NioClientChannel nc) throws IOException {
        SocketChannel sc = nc.sc;
        // //log.log(1 "BioClient: connect finish", s);
        d.register(sc, SelectionKey.OP_CONNECT, nc);
        return nc;
    }

    // Open a non-blocking SocketChannel and start connecting; completion is
    // handled later via OP_CONNECT in the dispatcher.
    private SocketChannel connect0(InetAddress ip, int port) throws IOException {
        SocketChannel s = SocketChannel.open();
        s.configureBlocking(false);
        s.connect(new InetSocketAddress(ip, port));
        return s;
    }

    /** Selector loop thread: registers channels and dispatches readiness. */
    public class DispatcherReader extends Dispatcher {
        protected Selector s;
        // Number of times the select loop has run (diagnostic counter).
        long reactCount;
        private final ExecutorService es;

        public DispatcherReader(ExecutorService es) throws IOException {
            this.setName("clientDisp");
            this.es = es;
            s = Selector.open();
        }

        // Guards registration against a concurrently blocked select(); see
        // the empty synchronized block in dispatch().
        private Object gate = new Object();

        /**
         * Register a channel from another thread. wakeup() forces select() to
         * return so register() does not block against the selector lock.
         */
        public void register(SocketChannel sc, int ops, Object o) throws ClosedChannelException {
            synchronized (gate) {
                //log.log(1, "wakeup!!");
                s.wakeup();
                sc.register(s, ops, o);
            }
        }

        // Registration from the selector thread itself; no gate needed.
        private void registerNosync(SocketChannel sc, int ops, Object o) throws ClosedChannelException {
            sc.register(s, ops, o);
        }

        // Read into a pooled buffer and hand the data to a worker thread.
        // NOTE(review): when read() returns <= 0 the buffer allocated here is
        // never passed on — confirm the pool reclaims it elsewhere, otherwise
        // this leaks a pooled buffer on EOF/empty reads.
        private void read(NioClientChannel cc) {
            ByteBuffer b = bp.allocate();
            int r = cc.read(b);
            //
            if(r > 0 ){
                es.submit(new ReadHandler(cc, b, r));
            }
        }

        // NOTE(review): this writes a freshly allocated (empty?) buffer —
        // presumably NioClientChannel.write drains its own queue; verify.
        private void write(NioClientChannel cc) {
            ByteBuffer b = bp.allocate();
            cc.write(b);
        }

        // Finish the pending connect, tune the socket, then switch the key
        // interest to OP_READ.
        private void connect(NioClientChannel attr) throws IOException {
            SocketChannel sc = attr.sc;
            //
            sc.finishConnect();
            setSocket(sc);
            registerNosync(sc, SelectionKey.OP_READ, attr);
        }

        @Override
        public void run() {
            log.log(5, "Client DispatcherSingle start");
            final Selector selector = this.s;
            // Endless select loop; individual failures are logged and the
            // loop keeps running.
            for (;;) {
                // ++reactCount;
                try {
                    dispatch(selector);
                } catch (Throwable e) {
                    e.printStackTrace();
                    log.log(7, "Reader:", e);
                }
            }
        }

        // One pass of the select loop: wait for readiness, handle each
        // selected key, then sync on the gate so in-flight registrations
        // complete before the next select() blocks.
        private void dispatch(Selector selector) {
            ++reactCount;
            try {
                // selector.select(20000L);
                selector.select();
                ////log.log(1, "--->client selector:" + selector.keys());
                // register(selector);
                Set<SelectionKey> keys = selector.selectedKeys();
                //log.log(1, "--->client keys:" + keys);
                //TODO
                //showKeys(keys);
                try {
                    Iterator<SelectionKey> it = keys.iterator();
                    for (; it.hasNext();) {
                        SelectionKey key = it.next();
                        it.remove();
                        Object conn = key.attachment();
                        if (conn != null && key.isValid()) {
                            // TODO
                            //log.log(1, "--->conn:" + conn);
                            int readyOps = key.readyOps();
                            if ((readyOps & SelectionKey.OP_CONNECT) != 0) {
                                // keys.remove(key);
                                connect((NioClientChannel) conn);
                            } else if ((readyOps & SelectionKey.OP_READ) != 0) {
                                // keys.remove(key);
                                read((NioClientChannel) conn);
                            } else if ((readyOps & SelectionKey.OP_WRITE) != 0) {
                                // keys.remove(key);
                                write((NioClientChannel) conn);
                            } else {
                                key.cancel();
                            }
                        } else {
                            // Stale or detached key: drop it.
                            key.cancel();
                        }
                    }
                } finally {
                    keys.clear();
                }
                //
                // Empty block on purpose: waits for any register() holding
                // the gate, so the new key is in place before the next
                // select() blocks — confirm this pairing is still required.
                synchronized (gate) {
                }
            } catch (Throwable e) {
                e.printStackTrace();
                log.log(7, "Reader:", e);
            }
        }
    }

    // Runs on a worker thread: delivers the bytes read on the selector
    // thread to the channel's dealer.
    final static class ReadHandler extends NioHandler implements Runnable {
        private final NioClientChannel nsc;
        private final ByteBuffer b;
        // Number of bytes actually read into b.
        private final int r;

        public ReadHandler(NioClientChannel nsc, ByteBuffer b, int r) {
            super();
            this.nsc = nsc;
            this.b = b;
            this.r = r;
        }

        @Override
        public void run() {
            //
            //
            //log.log(1, "client read start");
            nsc.read(b, r);
        }
    }

    // Apply the standard socket tuning to a freshly connected channel.
    private final static void setSocket(SocketChannel channel) throws SocketException {
        Socket socket = channel.socket();
        socket.setReceiveBufferSize(SO_RCVBUF);
        socket.setSendBufferSize(SO_SNDBUF);
        socket.setTcpNoDelay(true);
        socket.setKeepAlive(true);
    }

    public static void main(String[] args) {
    }

    @Override
    public void init(Properties pp) {
        // TODO Auto-generated method stub
    }

    @Override
    public void onExit() {
        // TODO Auto-generated method stub
    }

    @Override
    public void onTime() {
        // TODO Auto-generated method stub
    }
}
| |
/*================================================================================
Copyright (c) 2013 Steve Jin. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of copyright holders nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
* @author Steve Jin (http://www.doublecloud.org)
* @version 5.1
*/
@SuppressWarnings("all")
public class VirtualMachineConfigSpec extends DynamicData {
// Flat data-transfer object for the vim25 VirtualMachineConfigSpec: fields
// are public for the SOAP (de)serialization layer; the getters below are
// generated accessors with no logic of their own.
public String changeVersion;
public String name;
public String version;
public String uuid;
public String instanceUuid;
public long[] npivNodeWorldWideName;
public long[] npivPortWorldWideName;
public String npivWorldWideNameType;
public Short npivDesiredNodeWwns;
public Short npivDesiredPortWwns;
public Boolean npivTemporaryDisabled;
public Boolean npivOnNonRdmDisks;
public String npivWorldWideNameOp;
public String locationId;
public String guestId;
public String alternateGuestName;
public String annotation;
public VirtualMachineFileInfo files;
public ToolsConfigInfo tools;
public VirtualMachineFlagInfo flags;
public VirtualMachineConsolePreferences consolePreferences;
public VirtualMachineDefaultPowerOpInfo powerOpInfo;
public Integer numCPUs;
public Integer numCoresPerSocket;
public Long memoryMB;
public Boolean memoryHotAddEnabled;
public Boolean cpuHotAddEnabled;
public Boolean cpuHotRemoveEnabled;
public Boolean virtualICH7MPresent;
public Boolean virtualSMCPresent;
public VirtualDeviceConfigSpec[] deviceChange;
public ResourceAllocationInfo cpuAllocation;
public ResourceAllocationInfo memoryAllocation;
public LatencySensitivity latencySensitivity;
public VirtualMachineAffinityInfo cpuAffinity;
public VirtualMachineAffinityInfo memoryAffinity;
public VirtualMachineNetworkShaperInfo networkShaper;
public VirtualMachineCpuIdInfoSpec[] cpuFeatureMask;
public OptionValue[] extraConfig;
public String swapPlacement;
public VirtualMachineBootOptions bootOptions;
public VmConfigSpec vAppConfig;
public FaultToleranceConfigInfo ftInfo;
public Boolean vAppConfigRemoved;
public Boolean vAssertsEnabled;
public Boolean changeTrackingEnabled;
public String firmware;
public Integer maxMksConnections;
public Boolean guestAutoLockEnabled;
public ManagedByInfo managedBy;
public Boolean memoryReservationLockedToMax;
public Boolean nestedHVEnabled;
public Boolean vPMCEnabled;
public ScheduledHardwareUpgradeInfo scheduledHardwareUpgradeInfo;
public VirtualMachineProfileSpec[] vmProfile;
// ---- generated getters; each simply returns the matching field ----
public String getChangeVersion() {
    return this.changeVersion;
}
public String getName() {
    return this.name;
}
public String getVersion() {
    return this.version;
}
public String getUuid() {
    return this.uuid;
}
public String getInstanceUuid() {
    return this.instanceUuid;
}
public long[] getNpivNodeWorldWideName() {
    return this.npivNodeWorldWideName;
}
public long[] getNpivPortWorldWideName() {
    return this.npivPortWorldWideName;
}
public String getNpivWorldWideNameType() {
    return this.npivWorldWideNameType;
}
public Short getNpivDesiredNodeWwns() {
    return this.npivDesiredNodeWwns;
}
public Short getNpivDesiredPortWwns() {
    return this.npivDesiredPortWwns;
}
public Boolean getNpivTemporaryDisabled() {
    return this.npivTemporaryDisabled;
}
public Boolean getNpivOnNonRdmDisks() {
    return this.npivOnNonRdmDisks;
}
public String getNpivWorldWideNameOp() {
    return this.npivWorldWideNameOp;
}
public String getLocationId() {
    return this.locationId;
}
public String getGuestId() {
    return this.guestId;
}
public String getAlternateGuestName() {
    return this.alternateGuestName;
}
public String getAnnotation() {
    return this.annotation;
}
public VirtualMachineFileInfo getFiles() {
    return this.files;
}
public ToolsConfigInfo getTools() {
    return this.tools;
}
public VirtualMachineFlagInfo getFlags() {
    return this.flags;
}
public VirtualMachineConsolePreferences getConsolePreferences() {
    return this.consolePreferences;
}
public VirtualMachineDefaultPowerOpInfo getPowerOpInfo() {
    return this.powerOpInfo;
}
public Integer getNumCPUs() {
    return this.numCPUs;
}
public Integer getNumCoresPerSocket() {
    return this.numCoresPerSocket;
}
public Long getMemoryMB() {
    return this.memoryMB;
}
public Boolean getMemoryHotAddEnabled() {
    return this.memoryHotAddEnabled;
}
public Boolean getCpuHotAddEnabled() {
    return this.cpuHotAddEnabled;
}
public Boolean getCpuHotRemoveEnabled() {
    return this.cpuHotRemoveEnabled;
}
public Boolean getVirtualICH7MPresent() {
    return this.virtualICH7MPresent;
}
public Boolean getVirtualSMCPresent() {
    return this.virtualSMCPresent;
}
public VirtualDeviceConfigSpec[] getDeviceChange() {
    return this.deviceChange;
}
public ResourceAllocationInfo getCpuAllocation() {
    return this.cpuAllocation;
}
public ResourceAllocationInfo getMemoryAllocation() {
    return this.memoryAllocation;
}
public LatencySensitivity getLatencySensitivity() {
    return this.latencySensitivity;
}
public VirtualMachineAffinityInfo getCpuAffinity() {
    return this.cpuAffinity;
}
public VirtualMachineAffinityInfo getMemoryAffinity() {
return this.memoryAffinity;
}
public VirtualMachineNetworkShaperInfo getNetworkShaper() {
return this.networkShaper;
}
public VirtualMachineCpuIdInfoSpec[] getCpuFeatureMask() {
return this.cpuFeatureMask;
}
public OptionValue[] getExtraConfig() {
return this.extraConfig;
}
public String getSwapPlacement() {
return this.swapPlacement;
}
public VirtualMachineBootOptions getBootOptions() {
return this.bootOptions;
}
public VmConfigSpec getVAppConfig() {
return this.vAppConfig;
}
public FaultToleranceConfigInfo getFtInfo() {
return this.ftInfo;
}
public Boolean getVAppConfigRemoved() {
return this.vAppConfigRemoved;
}
public Boolean getVAssertsEnabled() {
return this.vAssertsEnabled;
}
public Boolean getChangeTrackingEnabled() {
return this.changeTrackingEnabled;
}
public String getFirmware() {
return this.firmware;
}
public Integer getMaxMksConnections() {
return this.maxMksConnections;
}
public Boolean getGuestAutoLockEnabled() {
return this.guestAutoLockEnabled;
}
public ManagedByInfo getManagedBy() {
return this.managedBy;
}
public Boolean getMemoryReservationLockedToMax() {
return this.memoryReservationLockedToMax;
}
public Boolean getNestedHVEnabled() {
return this.nestedHVEnabled;
}
public Boolean getVPMCEnabled() {
return this.vPMCEnabled;
}
public ScheduledHardwareUpgradeInfo getScheduledHardwareUpgradeInfo() {
return this.scheduledHardwareUpgradeInfo;
}
public VirtualMachineProfileSpec[] getVmProfile() {
return this.vmProfile;
}
public void setChangeVersion(String changeVersion) {
this.changeVersion=changeVersion;
}
public void setName(String name) {
this.name=name;
}
public void setVersion(String version) {
this.version=version;
}
public void setUuid(String uuid) {
this.uuid=uuid;
}
public void setInstanceUuid(String instanceUuid) {
this.instanceUuid=instanceUuid;
}
public void setNpivNodeWorldWideName(long[] npivNodeWorldWideName) {
this.npivNodeWorldWideName=npivNodeWorldWideName;
}
public void setNpivPortWorldWideName(long[] npivPortWorldWideName) {
this.npivPortWorldWideName=npivPortWorldWideName;
}
public void setNpivWorldWideNameType(String npivWorldWideNameType) {
this.npivWorldWideNameType=npivWorldWideNameType;
}
public void setNpivDesiredNodeWwns(Short npivDesiredNodeWwns) {
this.npivDesiredNodeWwns=npivDesiredNodeWwns;
}
public void setNpivDesiredPortWwns(Short npivDesiredPortWwns) {
this.npivDesiredPortWwns=npivDesiredPortWwns;
}
public void setNpivTemporaryDisabled(Boolean npivTemporaryDisabled) {
this.npivTemporaryDisabled=npivTemporaryDisabled;
}
public void setNpivOnNonRdmDisks(Boolean npivOnNonRdmDisks) {
this.npivOnNonRdmDisks=npivOnNonRdmDisks;
}
public void setNpivWorldWideNameOp(String npivWorldWideNameOp) {
this.npivWorldWideNameOp=npivWorldWideNameOp;
}
public void setLocationId(String locationId) {
this.locationId=locationId;
}
public void setGuestId(String guestId) {
this.guestId=guestId;
}
public void setAlternateGuestName(String alternateGuestName) {
this.alternateGuestName=alternateGuestName;
}
public void setAnnotation(String annotation) {
this.annotation=annotation;
}
public void setFiles(VirtualMachineFileInfo files) {
this.files=files;
}
public void setTools(ToolsConfigInfo tools) {
this.tools=tools;
}
public void setFlags(VirtualMachineFlagInfo flags) {
this.flags=flags;
}
public void setConsolePreferences(VirtualMachineConsolePreferences consolePreferences) {
this.consolePreferences=consolePreferences;
}
public void setPowerOpInfo(VirtualMachineDefaultPowerOpInfo powerOpInfo) {
this.powerOpInfo=powerOpInfo;
}
public void setNumCPUs(Integer numCPUs) {
this.numCPUs=numCPUs;
}
public void setNumCoresPerSocket(Integer numCoresPerSocket) {
this.numCoresPerSocket=numCoresPerSocket;
}
public void setMemoryMB(Long memoryMB) {
this.memoryMB=memoryMB;
}
public void setMemoryHotAddEnabled(Boolean memoryHotAddEnabled) {
this.memoryHotAddEnabled=memoryHotAddEnabled;
}
public void setCpuHotAddEnabled(Boolean cpuHotAddEnabled) {
this.cpuHotAddEnabled=cpuHotAddEnabled;
}
public void setCpuHotRemoveEnabled(Boolean cpuHotRemoveEnabled) {
this.cpuHotRemoveEnabled=cpuHotRemoveEnabled;
}
public void setVirtualICH7MPresent(Boolean virtualICH7MPresent) {
this.virtualICH7MPresent=virtualICH7MPresent;
}
public void setVirtualSMCPresent(Boolean virtualSMCPresent) {
this.virtualSMCPresent=virtualSMCPresent;
}
public void setDeviceChange(VirtualDeviceConfigSpec[] deviceChange) {
this.deviceChange=deviceChange;
}
public void setCpuAllocation(ResourceAllocationInfo cpuAllocation) {
this.cpuAllocation=cpuAllocation;
}
public void setMemoryAllocation(ResourceAllocationInfo memoryAllocation) {
this.memoryAllocation=memoryAllocation;
}
public void setLatencySensitivity(LatencySensitivity latencySensitivity) {
this.latencySensitivity=latencySensitivity;
}
public void setCpuAffinity(VirtualMachineAffinityInfo cpuAffinity) {
this.cpuAffinity=cpuAffinity;
}
public void setMemoryAffinity(VirtualMachineAffinityInfo memoryAffinity) {
this.memoryAffinity=memoryAffinity;
}
public void setNetworkShaper(VirtualMachineNetworkShaperInfo networkShaper) {
this.networkShaper=networkShaper;
}
public void setCpuFeatureMask(VirtualMachineCpuIdInfoSpec[] cpuFeatureMask) {
this.cpuFeatureMask=cpuFeatureMask;
}
public void setExtraConfig(OptionValue[] extraConfig) {
this.extraConfig=extraConfig;
}
public void setSwapPlacement(String swapPlacement) {
this.swapPlacement=swapPlacement;
}
public void setBootOptions(VirtualMachineBootOptions bootOptions) {
this.bootOptions=bootOptions;
}
public void setVAppConfig(VmConfigSpec vAppConfig) {
this.vAppConfig=vAppConfig;
}
public void setFtInfo(FaultToleranceConfigInfo ftInfo) {
this.ftInfo=ftInfo;
}
public void setVAppConfigRemoved(Boolean vAppConfigRemoved) {
this.vAppConfigRemoved=vAppConfigRemoved;
}
public void setVAssertsEnabled(Boolean vAssertsEnabled) {
this.vAssertsEnabled=vAssertsEnabled;
}
public void setChangeTrackingEnabled(Boolean changeTrackingEnabled) {
this.changeTrackingEnabled=changeTrackingEnabled;
}
public void setFirmware(String firmware) {
this.firmware=firmware;
}
public void setMaxMksConnections(Integer maxMksConnections) {
this.maxMksConnections=maxMksConnections;
}
public void setGuestAutoLockEnabled(Boolean guestAutoLockEnabled) {
this.guestAutoLockEnabled=guestAutoLockEnabled;
}
public void setManagedBy(ManagedByInfo managedBy) {
this.managedBy=managedBy;
}
public void setMemoryReservationLockedToMax(Boolean memoryReservationLockedToMax) {
this.memoryReservationLockedToMax=memoryReservationLockedToMax;
}
public void setNestedHVEnabled(Boolean nestedHVEnabled) {
this.nestedHVEnabled=nestedHVEnabled;
}
public void setVPMCEnabled(Boolean vPMCEnabled) {
this.vPMCEnabled=vPMCEnabled;
}
public void setScheduledHardwareUpgradeInfo(ScheduledHardwareUpgradeInfo scheduledHardwareUpgradeInfo) {
this.scheduledHardwareUpgradeInfo=scheduledHardwareUpgradeInfo;
}
public void setVmProfile(VirtualMachineProfileSpec[] vmProfile) {
this.vmProfile=vmProfile;
}
}
| |
/*
* Copyright (c) 2002-2012, the original author or authors.
*
* This software is distributable under the BSD license. See the terms of the
* BSD license in the documentation provided with this software.
*
* http://www.opensource.org/licenses/bsd-license.php
*/
package scala.tools.jline.internal;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.MalformedInputException;
import java.nio.charset.UnmappableCharacterException;
/**
*
* NOTE for JLine: the default InputStreamReader that comes from the JRE
* usually read more bytes than needed from the input stream, which
* is not usable in a character per character model used in the console.
* We thus use the harmony code which only reads the minimal number of bytes,
 * with a modification to ensure we can read larger characters (a single
 * code point may occupy up to 4 bytes in UTF-8 or UTF-16, while UTF-32
 * always uses exactly 4 bytes per code point).
*/
/**
* A class for turning a byte stream into a character stream. Data read from the
* source input stream is converted into characters by either a default or a
* provided character converter. The default encoding is taken from the
* "file.encoding" system property. {@code InputStreamReader} contains a buffer
* of bytes read from the source stream and converts these into characters as
* needed. The buffer size is 8K.
*
* @see OutputStreamWriter
*/
public class InputStreamReader extends Reader {
private InputStream in;
private static final int BUFFER_SIZE = 8192;
private boolean endOfInput = false;
String encoding;
CharsetDecoder decoder;
ByteBuffer bytes = ByteBuffer.allocate(BUFFER_SIZE);
/**
* Constructs a new {@code InputStreamReader} on the {@link InputStream}
* {@code in}. This constructor sets the character converter to the encoding
* specified in the "file.encoding" property and falls back to ISO 8859_1
* (ISO-Latin-1) if the property doesn't exist.
*
* @param in
* the input stream from which to read characters.
*/
public InputStreamReader(InputStream in) {
super(in);
this.in = in;
// FIXME: This should probably use Configuration.getFileEncoding()
encoding = System.getProperty("file.encoding", "ISO8859_1"); //$NON-NLS-1$//$NON-NLS-2$
decoder = Charset.forName(encoding).newDecoder().onMalformedInput(
CodingErrorAction.REPLACE).onUnmappableCharacter(
CodingErrorAction.REPLACE);
bytes.limit(0);
}
/**
* Constructs a new InputStreamReader on the InputStream {@code in}. The
* character converter that is used to decode bytes into characters is
* identified by name by {@code enc}. If the encoding cannot be found, an
* UnsupportedEncodingException error is thrown.
*
* @param in
* the InputStream from which to read characters.
* @param enc
* identifies the character converter to use.
* @throws NullPointerException
* if {@code enc} is {@code null}.
* @throws UnsupportedEncodingException
* if the encoding specified by {@code enc} cannot be found.
*/
public InputStreamReader(InputStream in, final String enc)
throws UnsupportedEncodingException {
super(in);
if (enc == null) {
throw new NullPointerException();
}
this.in = in;
try {
decoder = Charset.forName(enc).newDecoder().onMalformedInput(
CodingErrorAction.REPLACE).onUnmappableCharacter(
CodingErrorAction.REPLACE);
} catch (IllegalArgumentException e) {
throw (UnsupportedEncodingException)
new UnsupportedEncodingException(enc).initCause(e);
}
bytes.limit(0);
}
/**
* Constructs a new InputStreamReader on the InputStream {@code in} and
* CharsetDecoder {@code dec}.
*
* @param in
* the source InputStream from which to read characters.
* @param dec
* the CharsetDecoder used by the character conversion.
*/
public InputStreamReader(InputStream in, CharsetDecoder dec) {
super(in);
dec.averageCharsPerByte();
this.in = in;
decoder = dec;
bytes.limit(0);
}
/**
* Constructs a new InputStreamReader on the InputStream {@code in} and
* Charset {@code charset}.
*
* @param in
* the source InputStream from which to read characters.
* @param charset
* the Charset that defines the character converter
*/
public InputStreamReader(InputStream in, Charset charset) {
super(in);
this.in = in;
decoder = charset.newDecoder().onMalformedInput(
CodingErrorAction.REPLACE).onUnmappableCharacter(
CodingErrorAction.REPLACE);
bytes.limit(0);
}
/**
* Closes this reader. This implementation closes the source InputStream and
* releases all local storage.
*
* @throws IOException
* if an error occurs attempting to close this reader.
*/
@Override
public void close() throws IOException {
synchronized (lock) {
decoder = null;
if (in != null) {
in.close();
in = null;
}
}
}
/**
* Returns the name of the encoding used to convert bytes into characters.
* The value {@code null} is returned if this reader has been closed.
*
* @return the name of the character converter or {@code null} if this
* reader is closed.
*/
public String getEncoding() {
if (!isOpen()) {
return null;
}
return encoding;
}
/**
* Reads a single character from this reader and returns it as an integer
* with the two higher-order bytes set to 0. Returns -1 if the end of the
* reader has been reached. The byte value is either obtained from
* converting bytes in this reader's buffer or by first filling the buffer
* from the source InputStream and then reading from the buffer.
*
* @return the character read or -1 if the end of the reader has been
* reached.
* @throws IOException
* if this reader is closed or some other I/O error occurs.
*/
@Override
public int read() throws IOException {
synchronized (lock) {
if (!isOpen()) {
throw new IOException("InputStreamReader is closed.");
}
char buf[] = new char[4];
return read(buf, 0, 4) != -1 ? Character.codePointAt(buf, 0) : -1;
}
}
/**
* Reads at most {@code length} characters from this reader and stores them
* at position {@code offset} in the character array {@code buf}. Returns
* the number of characters actually read or -1 if the end of the reader has
* been reached. The bytes are either obtained from converting bytes in this
* reader's buffer or by first filling the buffer from the source
* InputStream and then reading from the buffer.
*
* @param buf
* the array to store the characters read.
* @param offset
* the initial position in {@code buf} to store the characters
* read from this reader.
* @param length
* the maximum number of characters to read.
* @return the number of characters read or -1 if the end of the reader has
* been reached.
* @throws IndexOutOfBoundsException
* if {@code offset < 0} or {@code length < 0}, or if
* {@code offset + length} is greater than the length of
* {@code buf}.
* @throws IOException
* if this reader is closed or some other I/O error occurs.
*/
@Override
public int read(char[] buf, int offset, int length) throws IOException {
synchronized (lock) {
if (!isOpen()) {
throw new IOException("InputStreamReader is closed.");
}
if (offset < 0 || offset > buf.length - length || length < 0) {
throw new IndexOutOfBoundsException();
}
if (length == 0) {
return 0;
}
CharBuffer out = CharBuffer.wrap(buf, offset, length);
CoderResult result = CoderResult.UNDERFLOW;
// bytes.remaining() indicates number of bytes in buffer
// when 1-st time entered, it'll be equal to zero
boolean needInput = !bytes.hasRemaining();
while (out.hasRemaining()) {
// fill the buffer if needed
if (needInput) {
try {
if ((in.available() == 0)
&& (out.position() > offset)) {
// we could return the result without blocking read
break;
}
} catch (IOException e) {
// available didn't work so just try the read
}
int to_read = bytes.capacity() - bytes.limit();
int off = bytes.arrayOffset() + bytes.limit();
int was_red = in.read(bytes.array(), off, to_read);
if (was_red == -1) {
endOfInput = true;
break;
} else if (was_red == 0) {
break;
}
bytes.limit(bytes.limit() + was_red);
needInput = false;
}
// decode bytes
result = decoder.decode(bytes, out, false);
if (result.isUnderflow()) {
// compact the buffer if no space left
if (bytes.limit() == bytes.capacity()) {
bytes.compact();
bytes.limit(bytes.position());
bytes.position(0);
}
needInput = true;
} else {
break;
}
}
if (result == CoderResult.UNDERFLOW && endOfInput) {
result = decoder.decode(bytes, out, true);
decoder.flush(out);
decoder.reset();
}
if (result.isMalformed()) {
throw new MalformedInputException(result.length());
} else if (result.isUnmappable()) {
throw new UnmappableCharacterException(result.length());
}
return out.position() - offset == 0 ? -1 : out.position() - offset;
}
}
/*
* Answer a boolean indicating whether or not this InputStreamReader is
* open.
*/
private boolean isOpen() {
return in != null;
}
/**
* Indicates whether this reader is ready to be read without blocking. If
* the result is {@code true}, the next {@code read()} will not block. If
* the result is {@code false} then this reader may or may not block when
* {@code read()} is called. This implementation returns {@code true} if
* there are bytes available in the buffer or the source stream has bytes
* available.
*
* @return {@code true} if the receiver will not block when {@code read()}
* is called, {@code false} if unknown or blocking will occur.
* @throws IOException
* if this reader is closed or some other I/O error occurs.
*/
@Override
public boolean ready() throws IOException {
synchronized (lock) {
if (in == null) {
throw new IOException("InputStreamReader is closed.");
}
try {
return bytes.hasRemaining() || in.available() > 0;
} catch (IOException e) {
return false;
}
}
}
}
| |
package org.seqcode.projects.seqview.components;
import java.io.*;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.table.*;
import org.seqcode.data.motifdb.*;
import java.awt.image.BufferedImage;
import javax.imageio.ImageIO;
public class MotifDisplayPane extends JSplitPane {

    /** Pixel dimensions used when exporting motif logos as PNG images. */
    private static final int IMAGE_WIDTH = 800;
    private static final int IMAGE_HEIGHT = 200;

    private MotifSelectPanel selectPanel;   // left side: motif filtering/selection UI
    private JTable table;                   // right side: one motif logo per row
    private MotifDrawingTableModel model;

    /**
     * Stand-alone entry point: shows the pane in its own frame with
     * File (Close) and Image (Save All) menus.
     */
    public static void main(String args[]) {
        final JFrame frame = new JFrame();
        final MotifDisplayPane mdp = new MotifDisplayPane();
        frame.setContentPane(mdp);
        frame.setSize(800, 800);
        frame.setLocation(50, 50);
        JMenuBar jmb = new JMenuBar();
        JMenu filemenu = new JMenu("File");
        jmb.add(filemenu);
        JMenuItem item;
        filemenu.add(item = new JMenuItem("Close"));
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                frame.dispose();
            }
        });
        JMenu imagemenu = new JMenu("Image");
        jmb.add(imagemenu);
        imagemenu.add(item = new JMenuItem("Save All"));
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                mdp.saveAll();
            }
        });
        jmb.add(new SeqViewToolsMenu(null));
        frame.setJMenuBar(jmb);
        // NOTE(review): pack() here resizes the frame to preferred sizes,
        // discarding the explicit setSize(800,800) above — confirm intent.
        frame.pack();
        frame.setVisible(true);
    }

    /**
     * Builds the split pane: the selection panel plus a "Show Motifs" button
     * on the left, and the motif-logo table on the right. Right-clicking a
     * table row prompts for a file and saves that motif as a PNG.
     */
    public MotifDisplayPane() {
        super(JSplitPane.HORIZONTAL_SPLIT);
        setDividerLocation(.5);
        selectPanel = new MotifSelectPanel();
        selectPanel.retrieveData();
        selectPanel.updateComponents();
        selectPanel.filter();
        JPanel buttonPanel = new JPanel();
        JButton addButton = new JButton("Show Motifs");
        final MotifDisplayPane mdp = this;
        addButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                mdp.showMotifs();
            }
        });
        Dimension buttonSize = new Dimension(30, 20);
        addButton.setMaximumSize(buttonSize);
        buttonPanel.setLayout(new GridBagLayout());
        buttonPanel.add(addButton);
        JPanel leftside = new JPanel();
        leftside.setLayout(new BorderLayout());
        leftside.add(selectPanel, BorderLayout.CENTER);
        leftside.add(buttonPanel, BorderLayout.SOUTH);
        model = new MotifDrawingTableModel();
        table = new JTable(model);
        table.setRowHeight(100);
        table.setDefaultRenderer(WeightMatrix.class, new MotifDrawingRenderer());
        table.addMouseListener(new MouseAdapter() {
            public void mouseClicked(MouseEvent e) {
                // Right-click: export the clicked row's motif to a PNG file.
                if (e.getButton() == MouseEvent.BUTTON3) {
                    int row = table.rowAtPoint(e.getPoint());
                    WeightMatrix wm = model.getObject(row);
                    JFileChooser chooser = new JFileChooser(new File(System.getProperty("user.dir")));
                    int v = chooser.showSaveDialog(null);
                    if (v == JFileChooser.APPROVE_OPTION) {
                        saveMotifImage(wm, chooser.getSelectedFile());
                    }
                }
            }
        });
        JScrollPane drawingPanel = new JScrollPane(table);
        add(new JScrollPane(leftside));
        add(drawingPanel);
    }

    /** Replaces the table contents with the motifs currently selected on the left. */
    public void showMotifs() {
        model.clear();
        for (WeightMatrix m : selectPanel.getObjects()) {
            model.addObject(m);
        }
    }

    /**
     * Prompts for a directory and writes one PNG per selected motif, named
     * after the motif with non-word characters replaced by '_'.
     */
    public void saveAll() {
        JFileChooser chooser = new JFileChooser(new File(System.getProperty("user.dir")));
        chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        int v = chooser.showSaveDialog(null);
        if (v == JFileChooser.APPROVE_OPTION) {
            File directory = chooser.getSelectedFile();
            for (WeightMatrix wm : selectPanel.getObjects()) {
                String name = wm.toString().replaceAll("\\W", "_");
                // Each write handles its own IOException, so one failure no
                // longer aborts the remaining motifs (previously the whole
                // loop was wrapped in a single try/catch).
                saveMotifImage(wm, new File(directory, name + ".png"));
            }
        }
    }

    /**
     * Renders a single weight-matrix logo into an 800x200 PNG file.
     * Extracted from the previously duplicated bodies of saveAll() and the
     * table's right-click handler; also disposes the Graphics context the
     * original code leaked. I/O failures are reported to stderr, matching
     * the original behavior.
     */
    private static void saveMotifImage(WeightMatrix wm, File outfile) {
        try {
            BufferedImage im =
                    new BufferedImage(IMAGE_WIDTH, IMAGE_HEIGHT, BufferedImage.TYPE_INT_RGB);
            Graphics2D g2 = im.createGraphics();
            try {
                g2.setRenderingHints(new RenderingHints(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON));
                g2.setColor(Color.WHITE);
                g2.fillRect(0, 0, IMAGE_WIDTH, IMAGE_HEIGHT);
                new WeightMatrixPainter().paint(wm, g2, 0, 0, IMAGE_WIDTH, IMAGE_HEIGHT);
            } finally {
                // FIX: release the graphics context (previously leaked).
                g2.dispose();
            }
            ImageIO.write(im, "png", outfile);
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }
}
class MotifDrawingTableModel extends MotifTableModel {
public int getColumnCount() {
// return 2;
return 1;
}
public Class getColumnClass(int i) {
// if (i == 0) {
// return String.class;
// }
// if (i == 1) {
return WeightMatrix.class;
// }
// return null;
}
public String getColumnName(int i) {
return "";
}
public Object getValueAt(int row, int c) {
// if (c == 0) {
// return getWeightMatrix(row).toString();
// } else if (c == 1){
return getObject(row);
// } else {
// return null;
// }
}
}
/**
 * Cell renderer for WeightMatrix columns: hands back a MotifCellRenderer
 * panel that paints the motif logo.
 * NOTE(review): Swing renderers are normally shared and reused; allocating a
 * new component (and printing to stderr) on every repaint is wasteful —
 * consider caching a single instance. The System.err line looks like
 * leftover debug output; confirm before removing.
 */
class MotifDrawingRenderer implements TableCellRenderer {
    public Component getTableCellRendererComponent(JTable table,
                                                   Object value,
                                                   boolean isSelected,
                                                   boolean hasFocus,
                                                   int row,
                                                   int column) {
        System.err.println("Creating MotifCellRenderer for " + row + "," + column);
        return new MotifCellRenderer(table, value, row, column);
    }
}
/**
 * Panel that paints a motif (sequence logo) for one table cell. Values that
 * are not WeightMatrix instances are painted as an empty panel.
 */
class MotifCellRenderer extends JPanel {

    private JTable table;
    private Object value;   // expected to be a WeightMatrix
    private int row, column;
    private WeightMatrixPainter painter;

    public MotifCellRenderer(JTable table, Object value, int row, int column) {
        this.table = table;
        this.value = value;
        this.row = row;
        this.column = column;
        painter = new WeightMatrixPainter();
    }

    /** Paints the logo scaled to the cell's current size. */
    public void paintComponent(Graphics g) {
        // FIX: honor the JComponent painting contract; without the super call
        // the background may contain stale pixels. Also removed the leftover
        // System.err debug print that fired on every repaint.
        super.paintComponent(g);
        if (value instanceof WeightMatrix) {
            Rectangle d = table.getCellRect(row, column, false);
            painter.paint((WeightMatrix) value,
                    g,
                    0, 0, (int) d.getWidth(), (int) d.getHeight());
        }
    }

    /** Paints the logo into the given rectangle of {@code g} (not an override). */
    public void paintComponent(Graphics g, int x, int y, int width, int height) {
        if (value instanceof WeightMatrix) {
            painter.paint((WeightMatrix) value,
                    g,
                    x, y, x + width, y + height);
        }
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.broad.igv.track;
import htsjdk.tribble.AsciiFeatureCodec;
import htsjdk.tribble.Feature;
import htsjdk.variant.vcf.VCFHeader;
import org.apache.log4j.Logger;
import org.broad.igv.bbfile.BBFileReader;
import org.broad.igv.bigwig.BigWigDataSource;
import org.broad.igv.blast.BlastMapping;
import org.broad.igv.blast.BlastParser;
import org.broad.igv.data.*;
import org.broad.igv.data.cufflinks.*;
import org.broad.igv.data.expression.ExpressionDataset;
import org.broad.igv.data.expression.ExpressionFileParser;
import org.broad.igv.data.seg.*;
import org.broad.igv.exceptions.DataLoadException;
import org.broad.igv.feature.BasePairFileUtils;
import org.broad.igv.feature.GisticFileParser;
import org.broad.igv.feature.MutationTrackLoader;
import org.broad.igv.feature.ShapeFileUtils;
import org.broad.igv.feature.basepair.BasePairTrack;
import org.broad.igv.bedpe.BedPEParser;
import org.broad.igv.bedpe.InteractionTrack;
import org.broad.igv.feature.bionano.SMAPParser;
import org.broad.igv.feature.bionano.SMAPRenderer;
import org.broad.igv.feature.dranger.DRangerParser;
import org.broad.igv.feature.dsi.DSIRenderer;
import org.broad.igv.feature.dsi.DSITrack;
import org.broad.igv.feature.genome.GenbankParser;
import org.broad.igv.feature.genome.Genome;
import org.broad.igv.feature.genome.GenomeManager;
import org.broad.igv.feature.gff.GFFFeatureSource;
import org.broad.igv.feature.sprite.ClusterParser;
import org.broad.igv.feature.sprite.ClusterTrack;
import org.broad.igv.feature.tribble.CodecFactory;
import org.broad.igv.feature.tribble.FeatureFileHeader;
import org.broad.igv.feature.tribble.GFFCodec;
import org.broad.igv.feature.tribble.TribbleIndexNotFoundException;
import org.broad.igv.goby.GobyAlignmentQueryReader;
import org.broad.igv.goby.GobyCountArchiveDataSource;
import org.broad.igv.google.Ga4ghAPIHelper;
import org.broad.igv.google.GoogleUtils;
import org.broad.igv.gwas.*;
import org.broad.igv.lists.GeneList;
import org.broad.igv.lists.GeneListManager;
import org.broad.igv.maf.MultipleAlignmentTrack;
import org.broad.igv.methyl.MethylTrack;
import org.broad.igv.prefs.PreferencesManager;
import org.broad.igv.renderer.HeatmapRenderer;
import org.broad.igv.renderer.MutationRenderer;
import org.broad.igv.renderer.PointsRenderer;
import org.broad.igv.sam.*;
import org.broad.igv.sam.reader.IndexNotFoundException;
import org.broad.igv.tdf.TDFDataSource;
import org.broad.igv.tdf.TDFReader;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.util.ConfirmDialog;
import org.broad.igv.ui.util.ConvertFileDialog;
import org.broad.igv.ui.util.ConvertOptions;
import org.broad.igv.ui.util.MessageUtils;
import org.broad.igv.util.*;
import org.broad.igv.variant.VariantTrack;
import org.broad.igv.variant.util.PedigreeUtils;
import java.io.IOException;
import java.util.*;
import static org.broad.igv.prefs.Constants.*;
/**
* User: jrobinso
* Date: Feb 14, 2010
*/
public class TrackLoader {
private static Logger log = Logger.getLogger(TrackLoader.class);
private static Collection<? extends Class> NOLogExceptions = Arrays.asList(TribbleIndexNotFoundException.class);
/**
* Switches on various attributes of locator (mainly locator path extension and whether the locator is indexed)
* to call the appropriate loading method.
*
* @param locator
* @param genome
* @return
*/
public List<Track> load(ResourceLocator locator, Genome genome) throws DataLoadException {
    final String path = locator.getPath().trim();
    // Check if the AWS credentials are still valid. If not, re-login and renew pre-signed urls
    if (AmazonUtils.isAwsS3Path(path)) {
        AmazonUtils.checkLogin();
    }
    log.info("Loading resource, path " + path);
    try {
        String typeString = locator.getTypeString();
        // '.tbi' is an index, not data: warn and fall through (a track may
        // still be attempted by the chain below).
        if (typeString.endsWith(".tbi")) {
            MessageUtils.showMessage("<html><b>Error:</b>File type '.tbi' is not recognized. If this is a 'tabix' index <br>" +
                    " load the associated gzipped file, which should have an extension of '.gz'");
        }
        //This list will hold all new tracks created for this locator
        List<Track> newTracks = new ArrayList<Track>();
        // Dispatch on the type string (usually the file extension). The order
        // of these checks is significant: compound extensions must be tested
        // before their generic suffixes (e.g. ".vcf.list" before ".list",
        // ".ewig.tdf" before ".tdf", ".maf.dict" before ".maf"), and the
        // tribble/mutation/sample-info probes come last as fallbacks.
        if (typeString.endsWith(".gmt")) {
            loadGMT(locator);
        } else if (typeString.endsWith(".vcf.list")) {
            loadVCFListFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".trio")) {
            loadTrioData(locator);
        } else if (typeString.endsWith(".gct") || typeString.endsWith("res") || typeString.endsWith("tab")) {
            loadGctFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".gbk") || typeString.endsWith(".gb")) {
            loadGbkFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".cn") || typeString.endsWith(".xcn") || typeString.endsWith(".snp") ||
                typeString.endsWith(".igv") || typeString.endsWith(".loh")) {
            loadIGVFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".cbs") || typeString.endsWith(".seg") ||
                typeString.endsWith("glad") || typeString.endsWith("birdseye_canary_calls")
                || typeString.endsWith(".seg.zip")) {
            loadSegFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".gistic")) {
            loadGisticFile(locator, newTracks);
        } else if (typeString.contains(".tabblastn") || typeString.endsWith(".orthologs")) {
            loadBlastMapping(locator, newTracks);
        } else if (isAlignmentTrack(typeString) ||
                (path.startsWith("http") && path.contains("/query.cgi?"))) {
            loadAlignmentsTrack(locator, newTracks, genome);
        } else if (typeString.endsWith(".shape") || typeString.endsWith(".map")) {
            convertLoadShapeFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".wig") || typeString.endsWith(".bedgraph") || typeString.endsWith(".bdg") ||
                typeString.endsWith("cpg.txt") || typeString.endsWith(".expr")) {
            loadWigFile(locator, newTracks, genome);
        } else if (typeString.endsWith("fpkm_tracking") || typeString.endsWith("gene_exp.diff") ||
                typeString.endsWith("cds_exp.diff")) {
            loadCufflinksFile(locator, newTracks, genome);
        } else if (typeString.contains(".dranger")) {
            loadDRangerFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".ewig.tdf") || (typeString.endsWith(".ewig.ibf"))) {
            loadEwigIBFFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".bw") || typeString.endsWith(".bb") || typeString.endsWith(".bigwig") ||
                typeString.endsWith(".bigbed")) {
            loadBWFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".ibf") || typeString.endsWith(".tdf")) {
            loadTDFFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".counts")) {
            loadGobyCountsArchive(locator, newTracks, genome);
        } else if (WiggleParser.isWiggle(locator)) {
            // Content probe: catches wiggle files with unconventional extensions.
            loadWigFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".maf.dict")) {
            loadMultipleAlignmentTrack(locator, newTracks, genome);
        } else if (typeString.endsWith("mage-tab") || ExpressionFileParser.parsableMAGE_TAB(locator)) {
            locator.setDescription("MAGE_TAB");
            loadGctFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".db") || typeString.endsWith(".dbn")) {
            convertLoadStructureFile(locator, newTracks, genome, "dotBracket");
        } else if (typeString.endsWith(".ct")) {
            convertLoadStructureFile(locator, newTracks, genome, "connectTable");
        } else if (typeString.endsWith(".dp")) {
            convertLoadStructureFile(locator, newTracks, genome, "pairingProb");
        } else if (typeString.endsWith(".bp")) {
            loadBasePairFile(locator, newTracks, genome);
        } else if (GWASParser.isGWASFile(typeString)) {
            loadGWASFile(locator, newTracks, genome);
        } else if (GobyAlignmentQueryReader.supportsFileType(path)) {
            loadAlignmentsTrack(locator, newTracks, genome);
        } else if (typeString.endsWith(".list")) {
            // This should be deprecated
            loadListFile(locator, newTracks, genome);
        } else if (typeString.endsWith(".smap")) {
            loadSMAPFile(locator, newTracks, genome);
        } else if (typeString.endsWith("dsi")) {
            loadDSIFile(locator, newTracks, genome);
        } else if (typeString.endsWith("bedpe") || typeString.endsWith("_clusters")) {
            loadBedPEFile(locator, newTracks, genome);
        } else if (typeString.endsWith("clusters")) {
            loadClusterFile(locator, newTracks, genome);
        } else if (CodecFactory.hasCodec(locator, genome) && !forceNotTribble(typeString)) {
            loadTribbleFile(locator, newTracks, genome);
        } else if (MutationTrackLoader.isMutationAnnotationFile(locator)) {
            loadMutFile(locator, newTracks, genome); // Must be tried before ".maf" test below
        } else if (typeString.endsWith(".maf")) {
            loadMultipleAlignmentTrack(locator, newTracks, genome);
        } else if (AttributeManager.isSampleInfoFile(locator)) {
            // This might be a sample information file.
            AttributeManager.getInstance().loadSampleInfo(locator);
        } else {
            MessageUtils.showMessage("<html>Unknown file type: " + path + "<br>Check file extension");
        }
        // Track line
        // Apply locator-level settings (track line, URL, color, sample id)
        // uniformly to every track the chosen loader produced.
        if (newTracks.size() > 0) {
            TrackProperties tp = null;
            String trackLine = locator.getTrackLine();
            if (trackLine != null) {
                tp = new TrackProperties();
                ParsingUtils.parseTrackLine(trackLine, tp);
            }
            for (Track track : newTracks) {
                if (locator.getFeatureInfoURL() != null) {
                    track.setUrl(locator.getFeatureInfoURL());
                }
                if (tp != null) {
                    track.setProperties(tp);
                }
                if (locator.getColor() != null) {
                    track.setColor(locator.getColor());
                }
                if (locator.getSampleId() != null) {
                    track.setSampleId(locator.getSampleId());
                }
            }
        }
        return newTracks;
    } catch (Exception e) {
        // Exceptions in NOLogExceptions (e.g. missing tribble index) are
        // expected conditions and are not logged.
        if (!NOLogExceptions.contains(e.getClass())) {
            log.error(e.getMessage(), e);
        }
        // NOTE(review): only the message is propagated — the cause and stack
        // trace are dropped. If DataLoadException has a (String, Throwable)
        // constructor, chaining `e` here would aid debugging; verify.
        throw new DataLoadException(e.getMessage());
    }
}
/**
 * Returns true if the type string denotes an alignment resource (SAM/BAM/CRAM
 * and friends, their index extensions, alignment lists, or a GA4GH resource).
 *
 * @param typeString lower-level file type / extension string
 */
public static boolean isAlignmentTrack(String typeString) {
    if (typeString.equals("alist") || typeString.equals(Ga4ghAPIHelper.RESOURCE_TYPE)) {
        return true;
    }
    final String[] alignmentSuffixes = {
            ".sam", ".bam", ".cram", ".sam.list", ".bam.list",
            ".aligned", ".sai", ".bai", ".csi"
    };
    for (String suffix : alignmentSuffixes) {
        if (typeString.endsWith(suffix)) {
            return true;
        }
    }
    return false;
}
/**
 * Parse an SMAP structural-variant file and add it as an expanded feature track.
 */
private void loadSMAPFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    List<Feature> parsedFeatures = SMAPParser.parseFeatures(locator, genome);
    FeatureCollectionSource source = new FeatureCollectionSource(parsedFeatures, genome);
    FeatureTrack smapTrack = new FeatureTrack(locator, locator.getName(), source);
    smapTrack.setDisplayMode(Track.DisplayMode.EXPANDED);
    smapTrack.setRendererClass(SMAPRenderer.class);
    newTracks.add(smapTrack);
}
/**
 * Returns true for file types that have a Tribble codec registered but must
 * NOT be loaded through the Tribble path (cufflinks-style outputs).
 */
private boolean forceNotTribble(String typeString) {
    final String[] nonTribbleSuffixes = {"fpkm_tracking", "exp_diff", "_exp.diff"};
    for (int i = 0; i < nonTribbleSuffixes.length; i++) {
        if (typeString.endsWith(nonTribbleSuffixes[i])) {
            return true;
        }
    }
    return false;
}
/**
 * Load a GMT gene-set file. When the file holds exactly one gene list it is
 * activated immediately; otherwise the user is just told how many were loaded.
 */
private void loadGMT(ResourceLocator locator) throws IOException {
    List<GeneList> geneLists = GeneListManager.getInstance().loadGMTFile(locator.getPath());
    if (geneLists.size() != 1) {
        MessageUtils.showMessage("Loaded " + geneLists.size() + " gene lists.");
    } else {
        IGV.getInstance().setGeneList(geneLists.get(0), true);
    }
}
/**
 * Load a VCF file as a variant track.  Methylation-rate rendering is enabled
 * when the header declares both the MR and GB sample format fields.
 */
private void loadVCF(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException {
    TribbleFeatureSource src = TribbleFeatureSource.getFeatureSource(locator, genome);
    VCFHeader header = (VCFHeader) src.getHeader();
    // Test if the input VCF file contains methylation rate data:
    // This is determined by testing for the presence of two sample format fields: MR and GB, used in the
    // rendering of methylation rate.
    // MR is the methylation rate on a scale of 0 to 100% and GB is the number of bases that pass
    // filter for the position. GB is needed to avoid displaying positions for which limited coverage
    // prevents reliable estimation of methylation rate.
    boolean enableMethylationRateSupport = (header.getFormatHeaderLine("MR") != null &&
            header.getFormatHeaderLine("GB") != null);
    // Parameterized list; the original raw `new ArrayList(...)` compiled with an unchecked warning.
    List<String> allSamples = new ArrayList<String>(header.getGenotypeSamples());
    VariantTrack t = new VariantTrack(locator, src, allSamples, enableMethylationRateSupport);
    // VCF tracks handle their own margin
    t.setMargin(0);
    newTracks.add(t);
}
/**
 * Load a list of VCF files as a single merged variant track.  Methylation-rate
 * rendering is enabled when the header declares both the MR and GB format fields.
 */
private void loadVCFListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException {
    TribbleListFeatureSource src = new TribbleListFeatureSource(locator.getPath(), genome);
    VCFHeader header = (VCFHeader) src.getHeader();
    // Test if the input VCF file contains methylation rate data:
    // This is determined by testing for the presence of two sample format fields: MR and GB, used in the
    // rendering of methylation rate.
    // MR is the methylation rate on a scale of 0 to 100% and GB is the number of bases that pass
    // filter for the position. GB is needed to avoid displaying positions for which limited coverage
    // prevents reliable estimation of methylation rate.
    boolean enableMethylationRateSupport = (header.getFormatHeaderLine("MR") != null &&
            header.getFormatHeaderLine("GB") != null);
    // Parameterized list; the original raw `new ArrayList(...)` compiled with an unchecked warning.
    List<String> allSamples = new ArrayList<String>(header.getGenotypeSamples());
    VariantTrack t = new VariantTrack(locator, src, allSamples, enableMethylationRateSupport);
    // VCF tracks handle their own margin
    t.setMargin(0);
    newTracks.add(t);
}
/**
 * Load BLAST mapping output as a feature track against the current genome.
 */
private void loadBlastMapping(ResourceLocator locator, List<Track> newTracks) {
    Genome currentGenome = GenomeManager.getInstance().getCurrentGenome();
    List<BlastMapping> mappings = new BlastParser().parse(locator.getPath());
    // Copy into a Feature list (BlastMapping implements the tribble Feature interface).
    List<htsjdk.tribble.Feature> features = new ArrayList<htsjdk.tribble.Feature>(mappings);
    FeatureTrack blastTrack = new FeatureTrack(locator, new FeatureCollectionSource(features, currentGenome));
    blastTrack.setName(locator.getTrackName());
    // track.setRendererClass(AlignmentBlockRenderer.class);
    newTracks.add(blastTrack);
}
/** Load dRanger structural-variant output; the parser builds the tracks directly. */
private void loadDRangerFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
    newTracks.addAll(new DRangerParser().loadTracks(locator, genome));
}
/** Load a BEDPE paired-interval file as an interaction (arc) track. */
private void loadBedPEFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    BedPEParser.Dataset dataset = BedPEParser.parse(locator, genome);
    InteractionTrack interactionTrack = new InteractionTrack(locator, dataset, genome);
    newTracks.add(interactionTrack);
}
/** Load a cluster file as a cluster track. */
private void loadClusterFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    ClusterParser.ClusterSet clusterSet = ClusterParser.parse(locator.getPath());
    ClusterTrack clusterTrack = new ClusterTrack(locator, clusterSet, genome);
    newTracks.add(clusterTrack);
}
/**
 * Load the input file as a feature, mutation, or maf (multiple alignment) file
 * via a registered Tribble codec.
 *
 * @param locator
 * @param newTracks receives the created track(s)
 * @param genome
 */
private void loadTribbleFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException {
    String typeString = locator.getTypeString();
    // Mutation (mut, maf, vcf) files are handled special. Check here, rather than depend on order in giant case statement.
    if (MutationTrackLoader.isMutationAnnotationFile(locator)) {
        loadMutFile(locator, newTracks, genome); // Must be tried before generic "loadIndexed" below
    } else if (VariantTrack.isVCF(typeString)) {
        loadVCF(locator, newTracks, genome);
    } else {
        TribbleFeatureSource tribbleFeatureSource = TribbleFeatureSource.getFeatureSource(locator, genome);
        // GFF files get an adapter source that is version-aware.
        FeatureSource src;
        if (GFFFeatureSource.isGFF(locator.getPath())) {
            GFFCodec codec = (GFFCodec) CodecFactory.getCodec(locator, genome);
            src = new GFFFeatureSource(tribbleFeatureSource, codec.getVersion());
        } else {
            src = tribbleFeatureSource;
        }
        // Create feature source and track
        FeatureTrack t = new FeatureTrack(locator, src);
        t.setName(locator.getTrackName());
        // Set track properties from header.  `instanceof` is null-safe, so the
        // original's separate `header != null` check was redundant.
        Object header = tribbleFeatureSource.getHeader();
        if (header instanceof FeatureFileHeader) {
            FeatureFileHeader ffh = (FeatureFileHeader) header;
            if (ffh.getTrackType() != null) {
                t.setTrackType(ffh.getTrackType());
            }
            if (ffh.getTrackProperties() != null) {
                TrackProperties tp = ffh.getTrackProperties();
                t.setProperties(tp);
                t.setTrackLine(tp.getTrackLine());
            }
            if (ffh.getTrackType() == TrackType.REPMASK) {
                t.setHeight(15);
            }
        }
        // ENCODE peak formats carry significance in the score column.
        String path = locator.getPath();
        if (path.contains(".narrowPeak") ||
                path.contains(".broadPeak") ||
                path.contains(".gappedPeak") ||
                path.contains(".regionPeak")) {
            t.setUseScore(true);
        }
        newTracks.add(t);
    }
}
/**
 * Load a DSI file as a DSI track, applying any track properties supplied by
 * the file header.
 */
private void loadDSIFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException {
    TribbleFeatureSource tribbleFeatureSource = TribbleFeatureSource.getFeatureSource(locator, genome);
    // Create feature source and track
    DSITrack t = new DSITrack(locator, tribbleFeatureSource);
    t.setName(locator.getTrackName());
    // Set track properties from header.  `instanceof` is null-safe, so the
    // original's separate `header != null` check was redundant.
    Object header = tribbleFeatureSource.getHeader();
    if (header instanceof TrackProperties) {
        TrackProperties tp = (TrackProperties) header;
        t.setProperties(tp);
        t.setTrackLine(tp.getTrackLine());
    }
    t.setRendererClass(DSIRenderer.class);
    newTracks.add(t);
}
/**
 * Load GWAS PLINK result file
 *
 * @param locator
 * @param newTracks
 * @throws IOException
 */
private void loadGWASFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    GWASParser parser = new GWASParser(locator, genome);
    Map<String, List<GWASFeature>> dataByChromosome = parser.parse();
    newTracks.add(new GWASTrack(locator, locator.getPath(), locator.getFileName(),
            dataByChromosome, parser.getColumnHeaders(), genome));
}
/**
 * Load a GCT / MAGE-TAB expression matrix, producing one heatmap track per
 * data column.  Warns and returns without adding tracks when no probe could
 * be mapped to a genomic position.
 */
private void loadGctFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    // Large local ascii files prompt the user before loading.
    if (locator.isLocal() && !checkSize(locator)) {
        return;
    }
    ExpressionFileParser parser = new ExpressionFileParser(locator, null, genome);
    ExpressionDataset ds = parser.createDataset();
    if (ds.isEmpty()) {
        // Message typos fixed: "by specify" -> "by specifying", "specifing" -> "specifying".
        String message = "The probes in the file <br> " + locator.getPath() + "<br>" +
                "could not be mapped to genomic positions. This can be corrected by specifying a probe mapping<br>" +
                "file from the Preferences window (Probes tab), or by specifying the genomic positions in the<br>" +
                "expression data file. Please see the user guide for more details.";
        MessageUtils.showMessage(message);
    } else {
        ds.setName(locator.getTrackName());
        ds.setNormalized(true);
        ds.setLogValues(true);
        // One heatmap track per expression column; track ids are path-qualified.
        TrackProperties trackProperties = ds.getTrackProperties();
        String path = locator.getPath();
        for (String trackName : ds.getTrackNames()) {
            DatasetDataSource dataSource = new DatasetDataSource(trackName, ds, genome);
            String trackId = path + "_" + trackName;
            Track track = new DataSourceTrack(locator, trackId, trackName, dataSource);
            track.setRendererClass(HeatmapRenderer.class);
            track.setProperties(trackProperties);
            newTracks.add(track);
        }
    }
}
/**
 * Load features from a genbank (.gbk)file. This method ignores the fasta section. To define a genome from
 * a genbank file use GenomeManager.
 *
 * @param newTracks
 * @param genome
 * @throws IOException
 */
private void loadGbkFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    GenbankParser parser = new GenbankParser(locator.getPath());
    parser.readFeatures(false); // false: skip the embedded fasta sequence
    FeatureCollectionSource source = new FeatureCollectionSource(parser.getFeatures(), genome);
    newTracks.add(new FeatureTrack(locator, source));
}
private void loadIGVFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
if (locator.isLocal()) {
if (!checkSize(locator)) {
return;
}
}
String dsName = locator.getTrackName();
IGVDataset ds = new IGVDataset(locator, genome);
ds.setName(dsName);
TrackProperties trackProperties = ds.getTrackProperties();
String path = locator.getPath();
TrackType type = ds.getType();
for (String trackName : ds.getTrackNames()) {
DatasetDataSource dataSource = new DatasetDataSource(trackName, ds, genome);
String trackId = path + "_" + trackName;
DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource);
// track.setRendererClass(HeatmapRenderer.class);
track.setTrackType(ds.getType());
track.setProperties(trackProperties);
if (type == TrackType.ALLELE_FREQUENCY) {
track.setRendererClass(PointsRenderer.class);
track.setHeight(40);
}
newTracks.add(track);
}
}
/**
 * Load cufflinks output (fpkm_tracking or *_exp.diff) as FPKM data tracks,
 * one track per sample for multi-sample fpkm files.
 */
private void loadCufflinksFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    final String path = locator.getPath();
    final String lowerPath = path.toLowerCase();
    List<DataTrack> parsedTracks = new ArrayList<DataTrack>();
    if (lowerPath.endsWith("fpkm_tracking")) {
        // One track per sample column.
        FPKMTrackingCodec codec = new FPKMTrackingCodec(path);
        List<FPKMValue> values = CufflinksParser.parse(codec, path);
        int sampleCount = codec.getNumSamples();
        for (int sample = 0; sample < sampleCount; sample++) {
            String suffix = String.format("q%02d", sample);
            CufflinksDataSource source = new CufflinksDataSource(sample, values, genome);
            parsedTracks.add(new DataSourceTrack(locator,
                    locator.getPath() + " " + suffix,
                    locator.getTrackName() + " " + suffix, source));
        }
    } else if (lowerPath.endsWith("gene_exp.diff") || lowerPath.endsWith("cds_exp.diff")) {
        AsciiFeatureCodec<ExpDiffValue> codec = new ExpDiffCodec(path);
        List<ExpDiffValue> values = CufflinksParser.parse(codec, path);
        CufflinksDataSource source = new CufflinksDataSource(values, genome);
        parsedTracks.add(new DataSourceTrack(locator, locator.getPath(), locator.getTrackName(), source));
    } else {
        throw new RuntimeException("Unsupported file type: " + path);
    }
    for (DataTrack parsedTrack : parsedTracks) {
        parsedTrack.setTrackType(TrackType.FPKM);
        CufflinksTrack.setCufflinksScale(parsedTrack);
        newTracks.add(parsedTrack);
    }
}
/**
 * Warn the user before loading a large ascii file that would be better served
 * converted to TDF.  Returns true to proceed with the load, false to skip.
 *
 * @param locator local resource being loaded
 * @return true if loading should continue
 */
private static boolean checkSize(ResourceLocator locator) {
    if (!PreferencesManager.getPreferences().getAsBoolean(SHOW_SIZE_WARNING)) {
        return true;
    }
    final String path = locator.getPath();
    long size = FileUtils.getLength(path);
    int maxSize = 200000000; // 200 mb
    // Compressed files expand considerably; warn at a quarter of the threshold.
    if (path.endsWith(".gz") || path.endsWith(".bgz")) {
        maxSize /= 4;
    }
    if (size > maxSize) {
        // Typo fixed: "ascii fies" -> "ascii files".
        String message = "The file " + path + " is large (" + (size / 1000000) + " mb). It is recommended " +
                "that large files be converted to the binary <i>.tdf</i> format using the IGVTools " +
                "<b>toTDF</b> command. Loading unconverted ascii files of this size can lead to poor " +
                "performance or unresponsiveness (freezing). " +
                "<br><br>IGVTools can be launched from the <b>Tools</b> menu or separately as a " +
                "command line program. See the user guide for more details.<br><br>Click <b>Continue</b> " +
                "to continue loading, or <b>Cancel</b> to skip this file.";
        return ConfirmDialog.optionallyShowConfirmDialog(message, SHOW_SIZE_WARNING, true);
    }
    return true;
}
private void loadDOTFile(ResourceLocator locator, List<Track> newTracks) {
//GraphTrack gt = new GraphTrack(locator);
//gt.setHeight(80);
//newTracks.add(gt);
}
private void loadWigFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
if (locator.isLocal()) {
if (!checkSize(locator)) {
return;
}
}
WiggleDataset ds = (new WiggleParser(locator, genome)).parse();
TrackProperties props = ds.getTrackProperties();
// In case of conflict between the resource locator display name and the track properties name,
// use the resource locator
String name = props == null ? null : props.getName();
String label = locator.getName();
if (name == null) {
name = locator.getFileName();
} else if (label != null) {
props.setName(label); // erase name rom track properties
}
String path = locator.getPath();
boolean multiTrack = ds.getTrackNames().length > 1;
for (String heading : ds.getTrackNames()) {
String trackId = multiTrack ? path + "_" + heading : path;
String trackName = multiTrack ? heading : name;
DatasetDataSource dataSource = new DatasetDataSource(trackId, ds, genome);
DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource);
String displayName = (label == null || multiTrack) ? heading : label;
track.setName(displayName);
track.setProperties(props);
track.setTrackType(ds.getType());
if (ds.getType() == TrackType.EXPR) {
track.setWindowFunction(WindowFunction.none);
}
newTracks.add(track);
}
}
/**
 * Load a binary TDF data file, producing one data-source track per track name
 * recorded in the file.  Display properties may come from an embedded track line.
 *
 * @param locator   TDF resource
 * @param newTracks receives the created tracks
 * @param genome
 */
public void loadTDFFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
    log.debug("Loading TDF file " + locator.getPath());
    TDFReader reader = TDFReader.getReader(locator);
    TrackType type = reader.getTrackType();
    // An embedded track line, if present, supplies optional display properties.
    TrackProperties props = null;
    String trackLine = reader.getTrackLine();
    if (trackLine != null && trackLine.length() > 0) {
        props = new TrackProperties();
        ParsingUtils.parseTrackLine(trackLine, props);
    }
    // In case of conflict between the resource locator display name and the track properties name,
    // use the resource locator
    String name = locator.getName();
    if (name != null && props != null) {
        props.setName(name);
    }
    if (name == null) {
        name = props == null ? locator.getTrackName() : props.getName();
    }
    // trackNumber indexes into the TDF file's internal track list; it must
    // advance in step with the heading iteration below.
    int trackNumber = 0;
    String path = locator.getPath();
    boolean multiTrack = reader.getTrackNames().length > 1;
    for (String heading : reader.getTrackNames()) {
        // Multi-track files qualify ids/names with the heading; single-track files keep the plain path/name.
        String trackId = multiTrack ? path + "_" + heading : path;
        String trackName = multiTrack ? heading : name;
        // NOTE(review): the ".counts" branch appears unreachable here since
        // ".counts" paths are routed to loadGobyCountsArchive by the caller — confirm.
        final DataSource dataSource = locator.getPath().endsWith(".counts") ?
                new GobyCountArchiveDataSource(locator) :
                new TDFDataSource(reader, trackNumber, heading, genome);
        DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource);
        String displayName = (name == null || multiTrack) ? heading : name;
        track.setName(displayName);
        track.setTrackType(type);
        if (props != null) {
            track.setProperties(props);
        }
        newTracks.add(track);
        trackNumber++;
    }
}
/**
 * Load a bigWig or bigBed file.  Methylation bigBeds (detected by path
 * fragment or autoSql schema) get a dedicated methylation track.
 */
public void loadBWFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    String trackName = locator.getTrackName();
    String trackId = locator.getPath();
    String path = locator.getPath();
    BBFileReader reader = new BBFileReader(path);
    BigWigDataSource bigwigSource = new BigWigDataSource(reader, genome);
    if (reader.isBigWigFile()) {
        newTracks.add(new DataSourceTrack(locator, trackId, trackName, bigwigSource));
    } else if (reader.isBigBedFile()) {
        boolean isMethylation = locator.getPath().contains("RRBS_cpgMethylation")
                || locator.getPath().contains("BiSeq_cpgMethylation")
                || (reader.getAutoSql() != null && reader.getAutoSql().startsWith("table BisulfiteSeq"));
        if (isMethylation) {
            loadMethylTrack(locator, reader, newTracks, genome);
        } else {
            newTracks.add(new FeatureTrack(locator, trackId, trackName, bigwigSource));
        }
    } else {
        throw new RuntimeException("Unknown BIGWIG type: " + locator.getPath());
    }
}
/** Wrap an already-open bigBed reader in a methylation track. */
private void loadMethylTrack(ResourceLocator locator, BBFileReader reader, List<Track> newTracks, Genome genome) throws IOException {
    newTracks.add(new MethylTrack(locator, reader, genome));
}
/** Load a Goby ".counts" archive as a coverage data track. */
private void loadGobyCountsArchive(ResourceLocator locator, List<Track> newTracks, Genome genome) {
    if (log.isDebugEnabled()) {
        log.debug("Loading Goby counts archive: " + locator.toString());
    }
    DataSource countsSource = new GobyCountArchiveDataSource(locator);
    DataSourceTrack countsTrack = new DataSourceTrack(locator,
            locator.getSampleId() + " coverage", locator.getFileName(), countsSource);
    newTracks.add(countsTrack);
}
/**
 * Load an extended-wig TDF/IBF file as an EWig track, applying any display
 * properties from a track line embedded in the file header.
 */
private void loadEwigIBFFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
    TDFReader reader = TDFReader.getReader(locator.getPath());
    String trackLine = reader.getTrackLine();
    TrackProperties props = null;
    if (trackLine != null && trackLine.length() > 0) {
        props = new TrackProperties();
        ParsingUtils.parseTrackLine(trackLine, props);
    }
    EWigTrack ewigTrack = new EWigTrack(locator, genome);
    ewigTrack.setName(locator.getTrackName());
    if (props != null) {
        ewigTrack.setProperties(props);
    }
    newTracks.add(ewigTrack);
}
/** Load a deprecated ".list" feature-directory resource as a feature track. */
private void loadListFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
    try {
        FeatureTrack listTrack = new FeatureTrack(locator, new FeatureDirSource(locator, genome));
        listTrack.setName(locator.getTrackName());
        listTrack.setVisibilityWindow(0); // always visible regardless of zoom
        newTracks.add(listTrack);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
/** Load a GISTIC copy-number significance file as a GISTIC track. */
private void loadGisticFile(ResourceLocator locator, List<Track> newTracks) {
    GisticTrack gisticTrack = GisticFileParser.loadData(locator);
    gisticTrack.setName(locator.getTrackName());
    newTracks.add(gisticTrack);
}
/** Load a multiple-alignment (.maf / .maf.dict) resource. */
private void loadMultipleAlignmentTrack(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    MultipleAlignmentTrack maTrack = new MultipleAlignmentTrack(locator, genome);
    maTrack.setName("Multiple Alignments");
    newTracks.add(maTrack);
}
/**
 * Load a SAM/BAM/CRAM alignment resource, creating a coverage track, a splice
 * junction track, and the alignment track itself (added in that order).
 * Rejects attempts to load an index file directly, and requires an index for
 * bam/cram.  Also attempts to attach a precomputed ".tdf" coverage file.
 *
 * @param locator   alignment resource
 * @param newTracks receives the created tracks
 * @param genome
 */
private void loadAlignmentsTrack(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException {
    try {
        String dsName = locator.getTrackName();
        // If the user tried to load the index, look for the file (this is a common mistake)
        if (locator.getTypeString().endsWith(".sai") ||
                locator.getTypeString().endsWith(".bai") ||
                locator.getTypeString().endsWith(".csi")) {
            MessageUtils.showMessage("<html><b>ERROR:</b> Loading SAM/BAM index files are not supported: " + locator.getPath() +
                    "<br>Load the SAM or BAM file directly. ");
            return;
        }
        AlignmentDataManager dataManager = new AlignmentDataManager(locator, genome);
        // Check that alignments we loaded actually match some data. Many BAM files will contain some sequences
        // not represented in the genome, but if there are no matches warn the user.
        List<String> seqNames = dataManager.getSequenceNames();
        if (seqNames != null && seqNames.size() > 0) {
            if (!dataManager.hasMatchingSequences()) {
                showMismatchSequenceNameMessage(locator.getPath(), genome, seqNames);
            }
        }
        // bam/cram cannot be loaded without an index.
        if (locator.getTypeString().endsWith("bam") || locator.getTypeString().endsWith("cram")) {
            if (!dataManager.hasIndex()) {
                MessageUtils.showMessage("<html>Could not load index file for: " +
                        locator.getPath() + "<br> An index file is required for SAM & BAM files.");
                return;
            }
        }
        AlignmentTrack alignmentTrack = new AlignmentTrack(locator, dataManager, genome); // parser.loadTrack(locator, dsName)
        alignmentTrack.setName(dsName);
        alignmentTrack.setVisible(PreferencesManager.getPreferences().getAsBoolean(SAM_SHOW_ALIGNMENT_TRACK));
        // Create coverage track
        CoverageTrack covTrack = new CoverageTrack(locator, dsName + " Coverage", alignmentTrack, genome);
        covTrack.setVisible(PreferencesManager.getPreferences().getAsBoolean(SAM_SHOW_COV_TRACK));
        newTracks.add(covTrack);
        covTrack.setDataManager(dataManager);
        dataManager.setCoverageTrack(covTrack);
        alignmentTrack.setCoverageTrack(covTrack);
        // Search for precalculated coverage data
        // Skip for GA4GH & SU2C resources
        if (!(Ga4ghAPIHelper.RESOURCE_TYPE.equals(locator.getType()) ||
                locator.getPath().contains("dataformat=.bam") ||
                GoogleUtils.isGoogleCloud(locator.getPath()))) {
            String covPath = locator.getCoverage();
            if (covPath == null) {
                boolean bypassFileAutoDiscovery = PreferencesManager.getPreferences().getAsBoolean(BYPASS_FILE_AUTO_DISCOVERY);
                String path = locator.getPath();
                // Guess "<path>.tdf" unless auto-discovery is disabled or the path is a CGI query.
                if (!bypassFileAutoDiscovery && !path.contains("/query.cgi?")) {
                    covPath = path + ".tdf";
                }
            }
            if (covPath != null && !covPath.equals(".")) {
                if (FileUtils.resourceExists(covPath)) {
                    log.debug("Loading TDF for coverage: " + covPath);
                    try {
                        TDFReader reader = TDFReader.getReader(covPath);
                        TDFDataSource ds = new TDFDataSource(reader, 0, dsName + " coverage", genome);
                        covTrack.setDataSource(ds);
                    } catch (Exception e) {
                        // Best-effort: a broken coverage file should not abort the alignment load.
                        log.error("Error loading coverage TDF file", e);
                    }
                }
            }
        }
        boolean showSpliceJunctionTrack = PreferencesManager.getPreferences().getAsBoolean(SAM_SHOW_JUNCTION_TRACK);
        SpliceJunctionTrack spliceJunctionTrack = new SpliceJunctionTrack(locator,
                dsName + " Junctions", dataManager, alignmentTrack, SpliceJunctionTrack.StrandOption.BOTH);
        spliceJunctionTrack.setHeight(60);
        spliceJunctionTrack.setVisible(showSpliceJunctionTrack);
        newTracks.add(spliceJunctionTrack);
        alignmentTrack.setSpliceJunctionTrack(spliceJunctionTrack);
        newTracks.add(alignmentTrack);
        log.debug("Alignment track loaded");
    } catch (IndexNotFoundException e) {
        MessageUtils.showMessage("<html>Could not find the index file for <br><br>&nbsp;&nbsp;" + e.getSamFile() +
                "<br><br>Note:  The index file can be created using igvtools and must be in the same directory as the .sam file.");
    }
}
/**
 * Warn that none of the file's sequence names match the current genome,
 * showing the first few names from each side.
 *
 * @param filename path of the offending file
 * @param genome   current genome
 * @param seqNames sequence names found in the file
 */
private void showMismatchSequenceNameMessage(String filename, Genome genome, List<String> seqNames) {
    // StringBuilder: no shared state here, so the synchronized StringBuffer was unnecessary.
    StringBuilder message = new StringBuilder();
    message.append("<html>File: ").append(filename)
            .append("<br>does not contain any sequence names which match the current genome.");
    message.append("<br><br>File: ");
    appendNameSample(message, seqNames);
    message.append("<br>Genome: ");
    appendNameSample(message, genome.getAllChromosomeNames());
    MessageUtils.showMessage(message.toString());
}

/** Appends up to four names, adding " ..." when the list is truncated (dedupes the two loops the original carried). */
private static void appendNameSample(StringBuilder sb, List<String> names) {
    int n = 0;
    for (String name : names) {
        sb.append(name).append(", ");
        n++;
        if (n > 3) {
            sb.append(" ...");
            break;
        }
    }
}
/**
 * Load a mutation file (".mut" or ".maf").
 *
 * @param locator
 * @param newTracks
 */
private void loadMutFile(ResourceLocator locator, List<Track> newTracks, Genome genome) throws IOException, TribbleIndexNotFoundException {
    List<FeatureTrack> mutationTracks = new MutationTrackLoader().loadMutationTracks(locator, genome);
    for (FeatureTrack mutationTrack : mutationTracks) {
        mutationTrack.setTrackType(TrackType.MUTATION);
        mutationTrack.setRendererClass(MutationRenderer.class);
        newTracks.add(mutationTrack);
    }
}
private void loadSegFile(ResourceLocator locator, List<Track> newTracks, Genome genome) {
// TODO - -handle remote resource
SegmentedDataSet ds;
String path = locator.getPath().toLowerCase();
if (path.endsWith("seg.zip")) {
ds = new SegmentedBinaryDataSet(locator);
} else {
SegmentFileParser parser = new SegmentFileParser(locator);
ds = parser.loadSegments(locator, genome);
}
loadSegTrack(locator, newTracks, genome, ds);
}
/**
 * Add the provided SegmentedDataSet to the list of tracks,
 * set other relevant properties
 *
 * @param locator
 * @param newTracks
 * @param genome
 * @param ds
 */
private void loadSegTrack(ResourceLocator locator, List<Track> newTracks, Genome genome, SegmentedDataSet ds) {
    String basePath = locator.getPath();
    TrackProperties trackProps = (ds instanceof SegmentedAsciiDataSet)
            ? ((SegmentedAsciiDataSet) ds).getTrackProperties()
            : null;
    // The "freq" track. TODO - make this optional
    boolean isCopyNumber = ds.getType() == TrackType.COPY_NUMBER || ds.getType() == TrackType.CNV;
    if (isCopyNumber && ds.getSampleNames().size() > 1) {
        FreqData freqData = new FreqData(ds, genome);
        CNFreqTrack summaryTrack = new CNFreqTrack(locator, basePath, "CNV Summary", freqData);
        if (trackProps != null) {
            summaryTrack.setProperties(trackProps);
        }
        newTracks.add(summaryTrack);
    }
    // One heatmap track per sample.
    for (String sample : ds.getSampleNames()) {
        SegmentedDataSource sampleSource = new SegmentedDataSource(sample, ds);
        DataSourceTrack sampleTrack = new DataSourceTrack(locator, basePath + "_" + sample, sample, sampleSource);
        sampleTrack.setRendererClass(HeatmapRenderer.class);
        sampleTrack.setTrackType(ds.getType());
        if (trackProps != null) {
            sampleTrack.setProperties(trackProps);
        }
        newTracks.add(sampleTrack);
    }
}
// Parse a pedigree "trio" file.  No track is created here; any effects are
// internal to PedigreeUtils.
private void loadTrioData(ResourceLocator locator) throws IOException {
    PedigreeUtils.parseTrioFile(locator.getPath());
}
/**
 * Convert an RNA chemical reactivity file (.shape, .map) into a .wig file
 * and load.
 */
private void convertLoadShapeFile(ResourceLocator locator,
                                  List<Track> newTracks,
                                  Genome genome) throws IOException {
    String inPath = locator.getPath();
    String fileName = locator.getFileName();
    String message = "The chemical reactivity file <br> " + fileName + "<br> needs to be converted to IGV chromosome <br>" +
            "coordinates and .wig format before loading. <br><br>Click <b>Continue</b> " +
            "to save converted file to <br> " + fileName + ".wig" +
            "<br>and load with the selected options, or <b>Cancel</b> to skip this<br>file.";
    ConvertOptions opts = ConvertFileDialog.showConvertFileDialog(message);
    if (opts.doConvert) {
        String outPath = inPath + ".wig";
        ShapeFileUtils.shapeToWigFile(inPath, outPath, opts.chrom, opts.strand, opts.start);
        loadWigFile(new ResourceLocator(outPath), newTracks, genome);
    }
}
/**
 * Convert various RNA structure formats to a more easily parseable format
 * in genomic coordinates, then load converted file.
 *
 * @param fileType one of "connectTable", "pairingProb", "dotBracket"
 */
private void convertLoadStructureFile(ResourceLocator locator,
                                      List<Track> newTracks,
                                      Genome genome,
                                      String fileType) throws IOException {
    String inPath = locator.getPath();
    String fileName = locator.getFileName();
    String outPath = inPath + ".bp";
    String message = "The RNA structure file <br> " + fileName + "<br> needs to be converted to IGV chromosome <br>" +
            "coordinates and .bp format before loading. <br><br>Click <b>Continue</b> " +
            "to save converted file to <br> " + fileName + ".bp" +
            "<br>and load with the selected options, or <b>Cancel</b> to skip this<br>file.";
    ConvertOptions opts = ConvertFileDialog.showConvertFileDialog(message);
    if (opts.doConvert) {
        // Compare with equals(), not ==: string identity only holds for
        // interned literals and would silently fail for computed values.
        if ("connectTable".equals(fileType)) {
            BasePairFileUtils.connectTableToBasePairFile(inPath, outPath, opts.chrom, opts.strand, opts.start);
        } else if ("pairingProb".equals(fileType)) {
            BasePairFileUtils.pairingProbToBasePairFile(inPath, outPath, opts.chrom, opts.strand, opts.start);
        } else if ("dotBracket".equals(fileType)) {
            BasePairFileUtils.dotBracketToBasePairFile(inPath, outPath, opts.chrom, opts.strand, opts.start);
        }
        loadBasePairFile(new ResourceLocator(outPath), newTracks, genome);
    }
}
/** Load an RNA base-pairing (.bp) file as a base-pair arc track. */
private void loadBasePairFile(ResourceLocator locator,
                              List<Track> newTracks,
                              Genome genome) throws IOException {
    String trackName = locator.getTrackName();
    String trackId = locator.getPath() + "_" + trackName;
    newTracks.add(new BasePairTrack(locator, trackId, trackName, genome));
}
/**
 * Returns true when a Tribble index (.tbi for gz, .idx otherwise) exists for
 * the resource.
 */
public static boolean isIndexed(ResourceLocator locator, Genome genome) {
    // Checking for the index is expensive over HTTP. First see if this is an indexable format by fetching the codec
    if (!CodecFactory.hasCodec(locator, genome)) {
        return false;
    }
    String fullPath = locator.getPath();
    String indexExtension = locator.getURLPath().endsWith("gz") ? ".tbi" : ".idx";
    String indexPath = fullPath + indexExtension;
    if (HttpUtils.isRemoteURL(fullPath)) {
        // For URLs, insert the index extension ahead of any query string.
        String[] parts = fullPath.split("\\?", 2);
        if (parts.length == 2) {
            indexPath = String.format("%s%s?%s", parts[0], indexExtension, parts[1]);
        }
    }
    return FileUtils.resourceExists(indexPath);
}
/**
 * Extract track properties from a feature file header object.
 *
 * @param header header object of unknown type; may be null
 * @return the header's TrackProperties, or null when the header is null or
 *         not a FeatureFileHeader
 */
public static TrackProperties getTrackProperties(Object header) {
    // instanceof is null-safe and avoids using ClassCastException for
    // ordinary control flow, as the original catch-based version did.
    if (header instanceof FeatureFileHeader) {
        return ((FeatureFileHeader) header).getTrackProperties();
    }
    return null;
}
}
| |
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.utils;
import com.sun.mail.smtp.SMTPTransport;
import org.apache.log4j.Logger;
import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.activation.FileDataSource;
import javax.mail.*;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import java.io.File;
import java.io.InputStream;
import java.net.SocketTimeoutException;
import java.util.*;
/**
 * Builder-style wrapper around JavaMail for composing and sending a single SMTP
 * message, with optional authentication, STARTTLS and file attachments.
 *
 * <p>The static timeout and attachment-size limits are process-wide and are
 * intended to be configured once at startup.
 */
public class EmailMessage {
  private final Logger logger = Logger.getLogger(EmailMessage.class);

  private static String protocol = "smtp";
  // Process-wide socket timeouts (milliseconds) applied to every message sent.
  private static int _mailTimeout = 10000;
  private static int _connectionTimeout = 10000;
  // Cap on the combined size of all attachments added to one message: 1 GB.
  private static long _totalAttachmentMaxSizeInByte = 1024 * 1024 * 1024;

  private List<String> _toAddress = new ArrayList<String>();
  private String _mailHost;
  private String _mailUser;
  private String _mailPassword;
  private String _subject;
  private String _fromAddress;
  private String _mimeType = "text/plain";
  private String _tls;
  private long _totalAttachmentSizeSoFar;
  private boolean _usesAuth = true;
  private boolean _enableAttachementEmbedment = true;
  private StringBuffer _body = new StringBuffer();
  private ArrayList<BodyPart> _attachments = new ArrayList<BodyPart>();

  public EmailMessage() {
    this("localhost", "", "");
  }

  public EmailMessage(String host, String user, String password) {
    _mailUser = user;
    _mailHost = host;
    _mailPassword = password;
  }

  /** Sets the process-wide SMTP read timeout in milliseconds. */
  public static void setTimeout(int timeoutMillis) {
    _mailTimeout = timeoutMillis;
  }

  /** Sets the process-wide SMTP connect timeout in milliseconds. */
  public static void setConnectionTimeout(int timeoutMillis) {
    _connectionTimeout = timeoutMillis;
  }

  /**
   * Sets the process-wide cap on total attachment size per message.
   *
   * @throws IllegalArgumentException if {@code sizeInBytes} is not positive
   */
  public static void setTotalAttachmentMaxSize(long sizeInBytes) {
    if (sizeInBytes < 1) {
      throw new IllegalArgumentException(
          "attachment max size can't be 0 or negative");
    }
    _totalAttachmentMaxSizeInByte = sizeInBytes;
  }

  public EmailMessage setMailHost(String host) {
    _mailHost = host;
    return this;
  }

  public EmailMessage setMailUser(String user) {
    _mailUser = user;
    return this;
  }

  /**
   * When enabled (the default), attachments are sent in a "related" multipart so
   * an HTML body can embed them by Content-ID.
   */
  public EmailMessage enableAttachementEmbedment(boolean toEnable) {
    _enableAttachementEmbedment = toEnable;
    return this;
  }

  public EmailMessage setMailPassword(String password) {
    _mailPassword = password;
    return this;
  }

  public EmailMessage addAllToAddress(Collection<? extends String> addresses) {
    _toAddress.addAll(addresses);
    return this;
  }

  public EmailMessage addToAddress(String address) {
    _toAddress.add(address);
    return this;
  }

  public EmailMessage setSubject(String subject) {
    _subject = subject;
    return this;
  }

  public EmailMessage setFromAddress(String fromAddress) {
    _fromAddress = fromAddress;
    return this;
  }

  /** Value for the "mail.smtp.starttls.enable" property ("true"/"false"). */
  public EmailMessage setTLS(String tls) {
    _tls = tls;
    return this;
  }

  public EmailMessage setAuth(boolean auth) {
    _usesAuth = auth;
    return this;
  }

  public EmailMessage addAttachment(File file) throws MessagingException {
    return addAttachment(file.getName(), file);
  }

  /**
   * Attaches {@code file} under the given name.
   *
   * @throws MessageAttachmentExceededMaximumSizeException if adding the file
   *     would push the cumulative attachment size past the configured maximum
   */
  public EmailMessage addAttachment(String attachmentName, File file)
      throws MessagingException {
    _totalAttachmentSizeSoFar += file.length();
    if (_totalAttachmentSizeSoFar > _totalAttachmentMaxSizeInByte) {
      throw new MessageAttachmentExceededMaximumSizeException(
          "Adding attachment '" + attachmentName
              + "' will exceed the allowed maximum size of "
              + _totalAttachmentMaxSizeInByte);
    }
    BodyPart attachmentPart = new MimeBodyPart();
    DataSource fileDataSource = new FileDataSource(file);
    attachmentPart.setDataHandler(new DataHandler(fileDataSource));
    attachmentPart.setFileName(attachmentName);
    _attachments.add(attachmentPart);
    return this;
  }

  /**
   * Attaches an already-encoded MIME part read from {@code stream}. Note that
   * stream attachments are not counted against the size cap because their
   * length is unknown up front.
   */
  public EmailMessage addAttachment(String attachmentName, InputStream stream)
      throws MessagingException {
    BodyPart attachmentPart = new MimeBodyPart(stream);
    attachmentPart.setFileName(attachmentName);
    _attachments.add(attachmentPart);
    return this;
  }

  /** Validates that the minimum fields required to send have been provided. */
  private void checkSettings() {
    if (_mailHost == null) {
      throw new RuntimeException("Mail host not set.");
    }
    if (_fromAddress == null || _fromAddress.length() == 0) {
      throw new RuntimeException("From address not set.");
    }
    if (_subject == null) {
      throw new RuntimeException("Subject cannot be null");
    }
    if (_toAddress.size() == 0) {
      // Bug fix: previously threw with the truncated, meaningless message "T".
      throw new RuntimeException("To address not set.");
    }
  }

  /**
   * Sends the composed message, retrying the SMTP connection once on a socket
   * timeout. A {@link SendFailedException} after a successful connect is logged
   * and intentionally swallowed so a bad recipient does not abort the caller.
   *
   * @throws MessagingException if the connection cannot be established
   */
  public void sendEmail() throws MessagingException {
    checkSettings();
    Properties props = new Properties();
    if (_usesAuth) {
      props.put("mail." + protocol + ".auth", "true");
      props.put("mail.user", _mailUser);
      props.put("mail.password", _mailPassword);
    } else {
      props.put("mail." + protocol + ".auth", "false");
    }
    props.put("mail." + protocol + ".host", _mailHost);
    // Store timeouts as Strings: Properties values that are not Strings are
    // invisible to Properties.getProperty and may be ignored by the provider.
    props.put("mail." + protocol + ".timeout", String.valueOf(_mailTimeout));
    props.put("mail." + protocol + ".connectiontimeout",
        String.valueOf(_connectionTimeout));
    // Bug fix: _tls is null unless setTLS() was called, and Properties (a
    // Hashtable) throws NullPointerException on null values.
    if (_tls != null) {
      props.put("mail.smtp.starttls.enable", _tls);
    }
    props.put("mail.smtp.ssl.trust", _mailHost);

    Session session = Session.getInstance(props, null);
    Message message = new MimeMessage(session);
    InternetAddress from = new InternetAddress(_fromAddress, false);
    message.setFrom(from);
    for (String toAddr : _toAddress) {
      message.addRecipient(Message.RecipientType.TO, new InternetAddress(
          toAddr, false));
    }
    message.setSubject(_subject);
    message.setSentDate(new Date());

    if (_attachments.size() > 0) {
      // "related" multiparts let an HTML body reference attachments by
      // Content-ID (embedding); plain multiparts attach them separately.
      MimeMultipart multipart =
          this._enableAttachementEmbedment ? new MimeMultipart("related")
              : new MimeMultipart();
      BodyPart messageBodyPart = new MimeBodyPart();
      messageBodyPart.setContent(_body.toString(), _mimeType);
      multipart.addBodyPart(messageBodyPart);
      // Add attachments
      for (BodyPart part : _attachments) {
        multipart.addBodyPart(part);
      }
      message.setContent(multipart);
    } else {
      message.setContent(_body.toString(), _mimeType);
    }

    SMTPTransport t = (SMTPTransport) session.getTransport(protocol);
    try {
      connectToSMTPServer(t);
    } catch (MessagingException ste) {
      if (ste.getCause() instanceof SocketTimeoutException) {
        try {
          // Retry once on SocketTimeoutException.
          connectToSMTPServer(t);
          logger.info("Email retry on SocketTimeoutException succeeded");
        } catch (MessagingException me) {
          logger.error("Email retry on SocketTimeoutException failed", me);
          throw me;
        }
      } else {
        logger.error("Encountered issue while connecting to email server", ste);
        throw ste;
      }
    }
    try {
      t.sendMessage(message, message.getRecipients(Message.RecipientType.TO));
    } catch (SendFailedException e) {
      logger.error("Error sending message " + message);
      Arrays.asList(message.getRecipients(Message.RecipientType.TO))
          .forEach(address -> logger.error("Email: " + address));
      // Bug fix: log the throwable with context so the stack trace is kept;
      // the failure itself is deliberately swallowed (best-effort delivery).
      logger.error("Failed to send email message", e);
    } finally {
      // Bug fix: previously the transport was left open whenever sendMessage
      // threw; always release the connection.
      t.close();
    }
  }

  /** Connects the transport, authenticating only when auth is enabled. */
  private void connectToSMTPServer(SMTPTransport t) throws MessagingException {
    if (_usesAuth) {
      t.connect(_mailHost, _mailUser, _mailPassword);
    } else {
      t.connect();
    }
  }

  public void setBody(String body) {
    setBody(body, _mimeType);
  }

  /** Replaces the body and its MIME type in one call. */
  public void setBody(String body, String mimeType) {
    _body = new StringBuffer(body);
    _mimeType = mimeType;
  }

  public EmailMessage setMimeType(String mimeType) {
    _mimeType = mimeType;
    return this;
  }

  /** Appends {@code str} to the body; despite the name, no newline is added. */
  public EmailMessage println(Object str) {
    _body.append(str);
    return this;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.config.json;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.logging.log4j.core.config.AbstractConfiguration;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.ConfigurationSource;
import org.apache.logging.log4j.core.config.FileConfigurationMonitor;
import org.apache.logging.log4j.core.config.Node;
import org.apache.logging.log4j.core.config.Reconfigurable;
import org.apache.logging.log4j.core.config.plugins.util.PluginType;
import org.apache.logging.log4j.core.config.plugins.util.ResolverUtil;
import org.apache.logging.log4j.core.config.status.StatusConfiguration;
import org.apache.logging.log4j.core.util.Patterns;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Creates a Node hierarchy from a JSON file.
*/
public class JsonConfiguration extends AbstractConfiguration implements Reconfigurable {
private static final long serialVersionUID = 1L;
// Classes whose status output is only shown when verbose logging is requested.
private static final String[] VERBOSE_CLASSES = new String[] { ResolverUtil.class.getName() };
// Accumulates per-node errors found while building the Node tree; reported in setup().
private final List<Status> status = new ArrayList<Status>();
// Root of the parsed JSON document (unwrapped when the document has a single top-level wrapper object).
private JsonNode root;
/**
 * Reads and parses the JSON configuration source, then applies the top-level
 * attributes (status, dest, shutdownHook, verbose, packages, name,
 * monitorInterval, advertiser) to this configuration.
 *
 * @param configSource the configuration source to read; parse failures are
 *                     logged rather than thrown
 */
public JsonConfiguration(final ConfigurationSource configSource) {
super(configSource);
final File configFile = configSource.getFile();
byte[] buffer;
try {
final InputStream configStream = configSource.getInputStream();
try {
buffer = toByteArray(configStream);
} finally {
configStream.close();
}
// Buffer the raw bytes so they can be handed to the advertiser below as well.
final InputStream is = new ByteArrayInputStream(buffer);
root = getObjectMapper().readTree(is);
if (root.size() == 1) {
// Unwrap a single top-level wrapper element, e.g. {"configuration": {...}}.
for (final JsonNode node : root) {
root = node;
}
}
processAttributes(rootNode, root);
final StatusConfiguration statusConfig = new StatusConfiguration().withVerboseClasses(VERBOSE_CLASSES)
.withStatus(getDefaultStatus());
for (final Map.Entry<String, String> entry : rootNode.getAttributes().entrySet()) {
final String key = entry.getKey();
final String value = getStrSubstitutor().replace(entry.getValue());
// TODO: this duplicates a lot of the XmlConfiguration constructor
if ("status".equalsIgnoreCase(key)) {
statusConfig.withStatus(value);
} else if ("dest".equalsIgnoreCase(key)) {
statusConfig.withDestination(value);
} else if ("shutdownHook".equalsIgnoreCase(key)) {
isShutdownHookEnabled = !"disable".equalsIgnoreCase(value);
} else if ("verbose".equalsIgnoreCase(entry.getKey())) {
statusConfig.withVerbosity(value);
} else if ("packages".equalsIgnoreCase(key)) {
pluginPackages.addAll(Arrays.asList(value.split(Patterns.COMMA_SEPARATOR)));
} else if ("name".equalsIgnoreCase(key)) {
setName(value);
} else if ("monitorInterval".equalsIgnoreCase(key)) {
final int intervalSeconds = Integer.parseInt(value);
// Only watch for changes when backed by an actual file on disk.
if (intervalSeconds > 0 && configFile != null) {
monitor = new FileConfigurationMonitor(this, configFile, listeners, intervalSeconds);
}
} else if ("advertiser".equalsIgnoreCase(key)) {
createAdvertiser(value, configSource, buffer, "application/json");
}
}
statusConfig.initialize();
if (getName() == null) {
setName(configSource.getLocation());
}
} catch (final Exception ex) {
LOGGER.error("Error parsing {}", configSource.getLocation(), ex);
}
}
/**
 * Returns the mapper used to parse the document. Comments are enabled so JSON
 * configurations may be annotated. Subclasses may override to customize.
 */
protected ObjectMapper getObjectMapper() {
return new ObjectMapper().configure(JsonParser.Feature.ALLOW_COMMENTS, true);
}
/**
 * Builds the Node hierarchy from the parsed JSON tree and reports any errors
 * recorded while constructing nodes. Arrays at the root are rejected.
 */
@Override
public void setup() {
final Iterator<Map.Entry<String, JsonNode>> iter = root.fields();
final List<Node> children = rootNode.getChildren();
while (iter.hasNext()) {
final Map.Entry<String, JsonNode> entry = iter.next();
final JsonNode n = entry.getValue();
if (n.isObject()) {
LOGGER.debug("Processing node for object {}", entry.getKey());
children.add(constructNode(entry.getKey(), rootNode, n));
} else if (n.isArray()) {
LOGGER.error("Arrays are not supported at the root configuration.");
}
}
LOGGER.debug("Completed parsing configuration");
if (status.size() > 0) {
for (final Status s : status) {
LOGGER.error("Error processing element " + s.name + ": " + s.errorType);
}
}
}
/**
 * Re-reads the configuration source and returns a fresh configuration, or
 * null when the source cannot be reset or re-read.
 */
@Override
public Configuration reconfigure() {
try {
final ConfigurationSource source = getConfigurationSource().resetInputStream();
if (source == null) {
return null;
}
return new JsonConfiguration(source);
} catch (final IOException ex) {
LOGGER.error("Cannot locate file {}", getConfigurationSource(), ex);
}
return null;
}
/**
 * Recursively converts a JSON object into a configuration Node.
 *
 * @param name     element name, used to look up the plugin type
 * @param parent   parent node in the hierarchy
 * @param jsonNode JSON object backing the new node
 * @return the constructed node with attributes and children populated
 */
private Node constructNode(final String name, final Node parent, final JsonNode jsonNode) {
final PluginType<?> type = pluginManager.getPluginType(name);
final Node node = new Node(parent, name, type);
processAttributes(node, jsonNode);
final Iterator<Map.Entry<String, JsonNode>> iter = jsonNode.fields();
final List<Node> children = node.getChildren();
while (iter.hasNext()) {
final Map.Entry<String, JsonNode> entry = iter.next();
final JsonNode n = entry.getValue();
if (n.isArray() || n.isObject()) {
if (type == null) {
// Unknown plugin for a container element: record it so setup() can report it.
status.add(new Status(name, n, ErrorType.CLASS_NOT_FOUND));
}
if (n.isArray()) {
LOGGER.debug("Processing node for array {}", entry.getKey());
for (int i = 0; i < n.size(); ++i) {
// Each array element may carry an explicit "type" field; otherwise
// the array's key is used as the plugin type.
final String pluginType = getType(n.get(i), entry.getKey());
final PluginType<?> entryType = pluginManager.getPluginType(pluginType);
final Node item = new Node(node, entry.getKey(), entryType);
processAttributes(item, n.get(i));
if (pluginType.equals(entry.getKey())) {
LOGGER.debug("Processing {}[{}]", entry.getKey(), i);
} else {
LOGGER.debug("Processing {} {}[{}]", pluginType, entry.getKey(), i);
}
final Iterator<Map.Entry<String, JsonNode>> itemIter = n.get(i).fields();
final List<Node> itemChildren = item.getChildren();
while (itemIter.hasNext()) {
final Map.Entry<String, JsonNode> itemEntry = itemIter.next();
if (itemEntry.getValue().isObject()) {
LOGGER.debug("Processing node for object {}", itemEntry.getKey());
itemChildren.add(constructNode(itemEntry.getKey(), item, itemEntry.getValue()));
} else if (itemEntry.getValue().isArray()) {
final JsonNode array = itemEntry.getValue();
final String entryName = itemEntry.getKey();
LOGGER.debug("Processing array for object {}", entryName);
for (int j = 0; j < array.size(); ++j) {
itemChildren.add(constructNode(entryName, item, array.get(j)));
}
}
}
children.add(item);
}
} else {
children.add(constructNode(entry.getKey(), node, n));
}
} else {
// Scalar values were already captured as attributes by processAttributes above.
LOGGER.debug("Node {} is of type {}", entry.getKey(), n.getNodeType());
}
}
// Debug summary of the node just built: its plugin type and parent name.
String t;
if (type == null) {
t = "null";
} else {
t = type.getElementName() + ':' + type.getPluginClass();
}
final String p = node.getParent() == null ? "null" : node.getParent().getName() == null ? "root" : node
.getParent().getName();
LOGGER.debug("Returning {} with parent {} of type {}", node.getName(), p, t);
return node;
}
/**
 * Returns the value of the node's "type" field when present and scalar,
 * otherwise falls back to the supplied name.
 */
private String getType(final JsonNode node, final String name) {
final Iterator<Map.Entry<String, JsonNode>> iter = node.fields();
while (iter.hasNext()) {
final Map.Entry<String, JsonNode> entry = iter.next();
if (entry.getKey().equalsIgnoreCase("type")) {
final JsonNode n = entry.getValue();
if (n.isValueNode()) {
return n.asText();
}
}
}
return name;
}
/**
 * Copies every scalar field of the JSON object (except "type", which names
 * the plugin) onto the node's attribute map.
 */
private void processAttributes(final Node parent, final JsonNode node) {
final Map<String, String> attrs = parent.getAttributes();
final Iterator<Map.Entry<String, JsonNode>> iter = node.fields();
while (iter.hasNext()) {
final Map.Entry<String, JsonNode> entry = iter.next();
if (!entry.getKey().equalsIgnoreCase("type")) {
final JsonNode n = entry.getValue();
if (n.isValueNode()) {
attrs.put(entry.getKey(), n.asText());
}
}
}
}
@Override
public String toString() {
return getClass().getSimpleName() + "[location=" + getConfigurationSource() + "]";
}
/**
 * The error that occurred.
 */
private enum ErrorType {
CLASS_NOT_FOUND
}
/**
 * Status for recording errors.
 */
private static class Status {
private final JsonNode node;
private final String name;
private final ErrorType errorType;
public Status(final String name, final JsonNode node, final ErrorType errorType) {
this.name = name;
this.node = node;
this.errorType = errorType;
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.meta;
import com.google.common.base.Optional;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.LedgerMetadata;
import org.apache.bookkeeper.conf.AbstractConfiguration;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.LedgerMetadataListener;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.MultiCallback;
import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.Processor;
import org.apache.bookkeeper.util.BookKeeperConstants;
import org.apache.bookkeeper.util.ZkUtils;
import org.apache.bookkeeper.versioning.Version;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.AsyncCallback.DataCallback;
import org.apache.zookeeper.AsyncCallback.StatCallback;
import org.apache.zookeeper.AsyncCallback.StringCallback;
import org.apache.zookeeper.AsyncCallback.VoidCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* Abstract ledger manager based on zookeeper, which provides common methods such as query zk nodes.
*/
abstract class AbstractZkLedgerManager implements LedgerManager, Watcher {
private final static Logger LOG = LoggerFactory.getLogger(AbstractZkLedgerManager.class);
static int ZK_CONNECT_BACKOFF_MS = 200;
protected final AbstractConfiguration conf;
protected final ZooKeeper zk;
protected final String ledgerRootPath;
// ledger metadata listeners
protected final ConcurrentMap<Long, Set<LedgerMetadataListener>> listeners =
new ConcurrentHashMap<Long, Set<LedgerMetadataListener>>();
// we use this to prevent long stack chains from building up in callbacks
protected ScheduledExecutorService scheduler;
protected class ReadLedgerMetadataTask implements Runnable, GenericCallback<LedgerMetadata> {
final long ledgerId;
ReadLedgerMetadataTask(long ledgerId) {
this.ledgerId = ledgerId;
}
@Override
public void run() {
if (null != listeners.get(ledgerId)) {
LOG.debug("Re-read ledger metadata for {}.", ledgerId);
readLedgerMetadata(ledgerId, this, AbstractZkLedgerManager.this);
} else {
LOG.debug("Ledger metadata listener for ledger {} is already removed.", ledgerId);
}
}
@Override
public void operationComplete(int rc, final LedgerMetadata result) {
if (BKException.Code.OK == rc) {
final Set<LedgerMetadataListener> listenerSet = listeners.get(ledgerId);
if (null != listenerSet) {
LOG.debug("Ledger metadata is changed for {} : {}.", ledgerId, result);
scheduler.submit(new Runnable() {
@Override
public void run() {
synchronized(listenerSet) {
for (LedgerMetadataListener listener : listenerSet) {
listener.onChanged(ledgerId, result);
}
}
}
});
}
} else if (BKException.Code.NoSuchLedgerExistsException == rc) {
// the ledger is removed, do nothing
Set<LedgerMetadataListener> listenerSet = listeners.remove(ledgerId);
if (null != listenerSet) {
LOG.debug("Removed ledger metadata listener set on ledger {} as its ledger is deleted : {}",
ledgerId, listenerSet.size());
}
} else {
LOG.warn("Failed on read ledger metadata of ledger {} : {}", ledgerId, rc);
scheduler.schedule(this, ZK_CONNECT_BACKOFF_MS, TimeUnit.MILLISECONDS);
}
}
}
/**
* ZooKeeper-based Ledger Manager Constructor
*
* @param conf
* Configuration object
* @param zk
* ZooKeeper Client Handle
*/
protected AbstractZkLedgerManager(AbstractConfiguration conf, ZooKeeper zk) {
this.conf = conf;
this.zk = zk;
this.ledgerRootPath = conf.getZkLedgersRootPath();
ThreadFactoryBuilder tfb = new ThreadFactoryBuilder().setNameFormat(
"ZkLedgerManagerScheduler-%d");
this.scheduler = Executors
.newSingleThreadScheduledExecutor(tfb.build());
LOG.debug("Using AbstractZkLedgerManager with root path : {}", ledgerRootPath);
}
/**
* Get the znode path that is used to store ledger metadata
*
* @param ledgerId
* Ledger ID
* @return ledger node path
*/
protected abstract String getLedgerPath(long ledgerId);
/**
* Get ledger id from its znode ledger path
*
* @param ledgerPath
* Ledger path to store metadata
* @return ledger id
* @throws IOException when the ledger path is invalid
*/
protected abstract long getLedgerId(String ledgerPath) throws IOException;
@Override
public void process(WatchedEvent event) {
LOG.info("Received watched event {} from zookeeper based ledger manager.", event);
if (Event.EventType.None == event.getType()) {
/** TODO: BOOKKEEPER-537 to handle expire events.
if (Event.KeeperState.Expired == event.getState()) {
LOG.info("ZooKeeper client expired on ledger manager.");
Set<Long> keySet = new HashSet<Long>(listeners.keySet());
for (Long lid : keySet) {
scheduler.submit(new ReadLedgerMetadataTask(lid));
LOG.info("Re-read ledger metadata for {} after zookeeper session expired.", lid);
}
}
**/
return;
}
String path = event.getPath();
if (null == path) {
return;
}
final long ledgerId;
try {
ledgerId = getLedgerId(event.getPath());
} catch (IOException ioe) {
LOG.info("Received invalid ledger path {} : ", event.getPath(), ioe);
return;
}
switch (event.getType()) {
case NodeDeleted:
Set<LedgerMetadataListener> listenerSet = listeners.get(ledgerId);
if (null != listenerSet) {
synchronized(listenerSet){
if (LOG.isDebugEnabled()) {
LOG.debug("Removed ledger metadata listeners on ledger {} : {}",
ledgerId, listenerSet);
}
for (LedgerMetadataListener l : listenerSet) {
l.onChanged( ledgerId, null );
}
listeners.remove(ledgerId, listenerSet);
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("No ledger metadata listeners to remove from ledger {} after it's deleted.", ledgerId);
}
}
break;
case NodeDataChanged:
new ReadLedgerMetadataTask(ledgerId).run();
break;
default:
LOG.debug("Received event {} on {}.", event.getType(), event.getPath());
break;
}
}
@Override
public void createLedgerMetadata(final long ledgerId, final LedgerMetadata metadata,
final GenericCallback<Void> ledgerCb) {
String ledgerPath = getLedgerPath(ledgerId);
StringCallback scb = new StringCallback() {
@Override
public void processResult(int rc, String path, Object ctx, String name) {
if (rc == Code.OK.intValue()) {
// update version
metadata.setVersion(new ZkVersion(0));
ledgerCb.operationComplete(BKException.Code.OK, null);
} else if (rc == Code.NODEEXISTS.intValue()) {
LOG.warn("Failed to create ledger metadata for {} which already exist", ledgerId);
ledgerCb.operationComplete(BKException.Code.LedgerExistException, null);
} else {
LOG.error("Could not create node for ledger {}", ledgerId,
KeeperException.create(Code.get(rc), path));
ledgerCb.operationComplete(BKException.Code.ZKException, null);
}
}
};
ZkUtils.asyncCreateFullPathOptimistic(zk, ledgerPath, metadata.serialize(), Ids.OPEN_ACL_UNSAFE,
CreateMode.PERSISTENT, scb, null);
}
/**
* Removes ledger metadata from ZooKeeper if version matches.
*
* @param ledgerId ledger identifier
* @param version local version of metadata znode
* @param cb callback object
*/
@Override
public void removeLedgerMetadata(final long ledgerId, final Version version,
final GenericCallback<Void> cb) {
int znodeVersion = -1;
if (Version.NEW == version) {
LOG.error("Request to delete ledger {} metadata with version set to the initial one", ledgerId);
cb.operationComplete(BKException.Code.MetadataVersionException, (Void)null);
return;
} else if (Version.ANY != version) {
if (!(version instanceof ZkVersion)) {
LOG.info("Not an instance of ZKVersion: {}", ledgerId);
cb.operationComplete(BKException.Code.MetadataVersionException, (Void)null);
return;
} else {
znodeVersion = ((ZkVersion)version).getZnodeVersion();
}
}
zk.delete(getLedgerPath(ledgerId), znodeVersion, new VoidCallback() {
@Override
public void processResult(int rc, String path, Object ctx) {
int bkRc;
if (rc == KeeperException.Code.NONODE.intValue()) {
LOG.warn("Ledger node does not exist in ZooKeeper: ledgerId={}", ledgerId);
bkRc = BKException.Code.NoSuchLedgerExistsException;
} else if (rc == KeeperException.Code.OK.intValue()) {
// removed listener on ledgerId
Set<LedgerMetadataListener> listenerSet = listeners.remove(ledgerId);
if (null != listenerSet) {
LOG.debug("Remove registered ledger metadata listeners on ledger {} after ledger is deleted.",
ledgerId, listenerSet);
} else {
LOG.debug("No ledger metadata listeners to remove from ledger {} when it's being deleted.",
ledgerId);
}
bkRc = BKException.Code.OK;
} else {
bkRc = BKException.Code.ZKException;
}
cb.operationComplete(bkRc, (Void)null);
}
}, null);
}
@Override
public void registerLedgerMetadataListener(long ledgerId, LedgerMetadataListener listener) {
if (null != listener) {
LOG.info("Registered ledger metadata listener {} on ledger {}.", listener, ledgerId);
Set<LedgerMetadataListener> listenerSet = listeners.get(ledgerId);
if (listenerSet == null) {
Set<LedgerMetadataListener> newListenerSet = new HashSet<LedgerMetadataListener>();
Set<LedgerMetadataListener> oldListenerSet = listeners.putIfAbsent(ledgerId, newListenerSet);
if (null != oldListenerSet) {
listenerSet = oldListenerSet;
} else {
listenerSet = newListenerSet;
}
}
synchronized (listenerSet) {
listenerSet.add(listener);
}
new ReadLedgerMetadataTask(ledgerId).run();
}
}
@Override
public void unregisterLedgerMetadataListener(long ledgerId, LedgerMetadataListener listener) {
Set<LedgerMetadataListener> listenerSet = listeners.get(ledgerId);
if (listenerSet != null) {
synchronized (listenerSet) {
if (listenerSet.remove(listener)) {
LOG.info("Unregistered ledger metadata listener {} on ledger {}.", listener, ledgerId);
}
if (listenerSet.isEmpty()) {
listeners.remove(ledgerId, listenerSet);
}
}
}
}
@Override
public void readLedgerMetadata(final long ledgerId, final GenericCallback<LedgerMetadata> readCb) {
readLedgerMetadata(ledgerId, readCb, null);
}
protected void readLedgerMetadata(final long ledgerId, final GenericCallback<LedgerMetadata> readCb,
Watcher watcher) {
zk.getData(getLedgerPath(ledgerId), watcher, new DataCallback() {
@Override
public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
if (rc == KeeperException.Code.NONODE.intValue()) {
if (LOG.isDebugEnabled()) {
LOG.debug("No such ledger: " + ledgerId,
KeeperException.create(KeeperException.Code.get(rc), path));
}
readCb.operationComplete(BKException.Code.NoSuchLedgerExistsException, null);
return;
}
if (rc != KeeperException.Code.OK.intValue()) {
LOG.error("Could not read metadata for ledger: " + ledgerId,
KeeperException.create(KeeperException.Code.get(rc), path));
readCb.operationComplete(BKException.Code.ZKException, null);
return;
}
if (stat == null) {
LOG.error("Could not parse ledger metadata for ledger: " + ledgerId+". Stat object is null");
readCb.operationComplete(BKException.Code.ZKException, null);
return;
}
LedgerMetadata metadata;
try {
metadata = LedgerMetadata.parseConfig(data, new ZkVersion(stat.getVersion()), Optional.of(stat.getCtime()));
} catch (IOException e) {
LOG.error("Could not parse ledger metadata for ledger: " + ledgerId, e);
readCb.operationComplete(BKException.Code.ZKException, null);
return;
}
readCb.operationComplete(BKException.Code.OK, metadata);
}
}, null);
}
@Override
public void writeLedgerMetadata(final long ledgerId, final LedgerMetadata metadata,
final GenericCallback<Void> cb) {
Version v = metadata.getVersion();
if (Version.NEW == v || !(v instanceof ZkVersion)) {
cb.operationComplete(BKException.Code.MetadataVersionException, null);
return;
}
final ZkVersion zv = (ZkVersion) v;
zk.setData(getLedgerPath(ledgerId),
metadata.serialize(), zv.getZnodeVersion(),
new StatCallback() {
@Override
public void processResult(int rc, String path, Object ctx, Stat stat) {
if (KeeperException.Code.BADVERSION.intValue() == rc) {
cb.operationComplete(BKException.Code.MetadataVersionException, null);
} else if (KeeperException.Code.OK.intValue() == rc) {
// update metadata version
metadata.setVersion(zv.setZnodeVersion(stat.getVersion()));
cb.operationComplete(BKException.Code.OK, null);
} else {
LOG.warn("Conditional update ledger metadata failed: ", KeeperException.Code.get(rc));
cb.operationComplete(BKException.Code.ZKException, null);
}
}
}, null);
}
/**
* Process ledgers in a single zk node.
*
* <p>
* for each ledger found in this zk node, processor#process(ledgerId) will be triggerred
* to process a specific ledger. after all ledgers has been processed, the finalCb will
* be called with provided context object. The RC passed to finalCb is decided by :
* <ul>
* <li> All ledgers are processed successfully, successRc will be passed.
* <li> Either ledger is processed failed, failureRc will be passed.
* </ul>
* </p>
*
* @param path
* Zk node path to store ledgers
* @param processor
* Processor provided to process ledger
* @param finalCb
* Callback object when all ledgers are processed
* @param ctx
* Context object passed to finalCb
* @param successRc
* RC passed to finalCb when all ledgers are processed successfully
* @param failureRc
* RC passed to finalCb when either ledger is processed failed
*/
protected void asyncProcessLedgersInSingleNode(
final String path, final Processor<Long> processor,
final AsyncCallback.VoidCallback finalCb, final Object ctx,
final int successRc, final int failureRc) {
ZkUtils.getChildrenInSingleNode(zk, path, new GenericCallback<List<String>>() {
@Override
public void operationComplete(int rc, List<String> ledgerNodes) {
if (Code.OK.intValue() != rc) {
finalCb.processResult(failureRc, null, ctx);
return;
}
Set<Long> zkActiveLedgers = ledgerListToSet(ledgerNodes, path);
LOG.debug("Processing ledgers: {}", zkActiveLedgers);
// no ledgers found, return directly
if (zkActiveLedgers.size() == 0) {
finalCb.processResult(successRc, null, ctx);
return;
}
MultiCallback mcb = new MultiCallback(zkActiveLedgers.size(), finalCb, ctx,
successRc, failureRc);
// start loop over all ledgers
for (Long ledger : zkActiveLedgers) {
processor.process(ledger, mcb);
}
}
});
}
/**
* Whether the znode a special znode
*
* @param znode
* Znode Name
* @return true if the znode is a special znode otherwise false
*/
protected boolean isSpecialZnode(String znode) {
if (BookKeeperConstants.AVAILABLE_NODE.equals(znode)
|| BookKeeperConstants.COOKIE_NODE.equals(znode)
|| BookKeeperConstants.LAYOUT_ZNODE.equals(znode)
|| BookKeeperConstants.INSTANCEID.equals(znode)
|| BookKeeperConstants.UNDER_REPLICATION_NODE.equals(znode)) {
return true;
}
return false;
}
/**
* Convert the ZK retrieved ledger nodes to a HashSet for easier comparisons.
*
* @param ledgerNodes
* zk ledger nodes
* @param path
* the prefix path of the ledger nodes
* @return ledger id hash set
*/
protected NavigableSet<Long> ledgerListToSet(List<String> ledgerNodes, String path) {
NavigableSet<Long> zkActiveLedgers = new TreeSet<Long>();
for (String ledgerNode : ledgerNodes) {
if (isSpecialZnode(ledgerNode)) {
continue;
}
try {
// convert the node path to ledger id according to different ledger manager implementation
zkActiveLedgers.add(getLedgerId(path + "/" + ledgerNode));
} catch (IOException e) {
LOG.warn("Error extracting ledgerId from ZK ledger node: " + ledgerNode);
// This is a pretty bad error as it indicates a ledger node in ZK
// has an incorrect format. For now just continue and consider
// this as a non-existent ledger.
continue;
}
}
return zkActiveLedgers;
}
    /**
     * Closes this ledger manager by shutting down its scheduler.
     *
     * <p>Shutdown failures are logged and swallowed so that {@code close()} itself
     * never throws.
     */
    @Override
    public void close() {
        try {
            scheduler.shutdown();
        } catch (Exception e) {
            // Best-effort cleanup: log and continue rather than propagate.
            LOG.warn("Error when closing zookeeper based ledger manager: ", e);
        }
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.maven;
import hudson.model.Result;
import hudson.tasks.Maven.MavenInstallation;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.jvnet.hudson.test.Bug;
import org.jvnet.hudson.test.ExtractResourceSCM;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.SingleFileSCM;
import org.jvnet.hudson.test.Email;
import java.io.File;
import java.io.FilenameFilter;
import java.util.Arrays;
/**
 * Tests for {@code RedeployPublisher}: redeployment of build artifacts to a Maven
 * repository after a build completes. Covers unique vs. non-unique snapshot
 * versioning, multi-extension artifacts such as ".tar.gz", Maven 3 behaviour,
 * and deployment of unstable builds.
 *
 * @author Kohsuke Kawaguchi
 */
public class RedeployPublisherTest extends HudsonTestCase {

    /** Accepts the artifact files (tar.gz / jar / zip) produced by targz-artifact.pom. */
    private static final FilenameFilter DEPLOYED_ARTIFACT_FILTER = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.contains("-bin.tar.gz") || name.endsWith(".jar") || name.endsWith("-bin.zip");
        }
    };

    @Bug(2593)
    public void testBug2593() throws Exception {
        configureDefaultMaven();
        MavenModuleSet m2 = createMavenProject();
        File repo = createTmpDir();
        // a fake build
        m2.setScm(new SingleFileSCM("pom.xml", getClass().getResource("big-artifact.pom")));
        m2.getPublishersList().add(new RedeployPublisher("", repo.toURI().toString(), true, false));
        MavenModuleSetBuild b = m2.scheduleBuild2(0).get();
        assertBuildStatus(Result.SUCCESS, b);
        // TODO: confirm that the artifacts use a consistent timestamp
        // TODO: we need to somehow introduce a large delay between deploy since timestamp is only second precision
        // TODO: or maybe we could use a btrace like capability to count the # of invocations?
        System.out.println(repo);
    }

    /** Round-trips the publisher through the configuration form and verifies its fields survive. */
    public void testConfigRoundtrip() throws Exception {
        MavenModuleSet p = createMavenProject();
        RedeployPublisher rp = new RedeployPublisher("theId", "http://some.url/", true, true);
        p.getPublishersList().add(rp);
        submit(new WebClient().getPage(p, "configure").getFormByName("config"));
        assertEqualBeans(rp, p.getPublishersList().get(RedeployPublisher.class),
                "id,url,uniqueVersion,evenIfUnstable");
    }

//    /**
//     * Makes sure that the webdav wagon component we bundle is compatible.
//     */
//    public void testWebDavDeployment() throws Exception {
//        configureDefaultMaven();
//        MavenModuleSet m2 = createMavenProject();
//
//        // a fake build
//        m2.setScm(new SingleFileSCM("pom.xml",getClass().getResource("big-artifact.pom")));
//        m2.getPublishersList().add(new RedeployPublisher("","dav:http://localhost/dav/",true));
//
//        MavenModuleSetBuild b = m2.scheduleBuild2(0).get();
//        assertBuildStatus(Result.SUCCESS, b);
//    }

    /**
     * Are we having a problem in handling file names with multiple extensions, like ".tar.gz"?
     */
    @Email("http://www.nabble.com/tar.gz-becomes-.gz-after-Hudson-deployment-td25391364.html")
    @Bug(3814)
    public void testTarGz() throws Exception {
        configureDefaultMaven();
        MavenModuleSet m2 = createMavenProject();
        File repo = createTmpDir();
        // a fake build
        m2.setScm(new SingleFileSCM("pom.xml", getClass().getResource("targz-artifact.pom")));
        m2.getPublishersList().add(new RedeployPublisher("", repo.toURI().toString(), false, false));
        MavenModuleSetBuild b = m2.scheduleBuild2(0).get();
        assertBuildStatus(Result.SUCCESS, b);
        // Non-unique version: the raw -SNAPSHOT file name is deployed as-is.
        assertTrue("expected tar.gz artifact to be deployed",
                new File(repo, "test/test/0.1-SNAPSHOT/test-0.1-SNAPSHOT-bin.tar.gz").exists());
    }

    public void testTarGzUniqueVersionTrue() throws Exception {
        configureDefaultMaven();
        MavenModuleSet m2 = createMavenProject();
        File repo = createTmpDir();
        FileUtils.cleanDirectory(repo);
        // a fake build
        m2.setScm(new SingleFileSCM("pom.xml", getClass().getResource("targz-artifact.pom")));
        m2.getPublishersList().add(new RedeployPublisher("", repo.toURI().toString(), true, false));
        MavenModuleSetBuild b = m2.scheduleBuild2(0).get();
        assertBuildStatus(Result.SUCCESS, b);
        String[] files = listDeployedArtifacts(repo);
        // With uniqueVersion=true the deployed files must carry a timestamped
        // version, so the raw -SNAPSHOT name must not appear.
        assertFalse("SNAPSHOT-named tar.gz should not exist when uniqueVersion=true",
                new File(repo, "test/test/0.1-SNAPSHOT/test-0.1-SNAPSHOT-bin.tar.gz").exists());
        assertFalse("deployed file should use a timestamped version, not -SNAPSHOT: " + files[0],
                files[0].contains("SNAPSHOT"));
        assertDeployedWithBuildNumberOne(files);
    }

    public void testTarGzMaven3() throws Exception {
        MavenModuleSet m3 = createMavenProject();
        MavenInstallation mvn = configureMaven3();
        m3.setMaven(mvn.getName());
        File repo = createTmpDir();
        FileUtils.cleanDirectory(repo);
        // a fake build
        m3.setScm(new SingleFileSCM("pom.xml", getClass().getResource("targz-artifact.pom")));
        m3.getPublishersList().add(new RedeployPublisher("", repo.toURI().toString(), false, false));
        MavenModuleSetBuild b = m3.scheduleBuild2(0).get();
        assertBuildStatus(Result.SUCCESS, b);
        assertTrue(MavenUtil.maven3orLater(b.getMavenVersionUsed()));
        File artifactDir = new File(repo, "test/test/0.1-SNAPSHOT/");
        String[] files = artifactDir.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith("tar.gz");
            }
        });
        assertNotNull("artifact directory should exist: " + artifactDir, files);
        // Maven 3 always deploys snapshots with timestamped (unique) versions,
        // even when uniqueVersion was requested as false.
        assertFalse("SNAPSHOT-named tar.gz should not exist with Maven 3",
                new File(repo, "test/test/0.1-SNAPSHOT/test-0.1-SNAPSHOT-bin.tar.gz").exists());
        assertFalse("deployed file should use a timestamped version, not -SNAPSHOT: " + files[0],
                files[0].contains("SNAPSHOT"));
    }

    public void testTarGzUniqueVersionTrueMaven3() throws Exception {
        MavenModuleSet m3 = createMavenProject();
        MavenInstallation mvn = configureMaven3();
        m3.setMaven(mvn.getName());
        File repo = createTmpDir();
        FileUtils.cleanDirectory(repo);
        // a fake build
        m3.setScm(new SingleFileSCM("pom.xml", getClass().getResource("targz-artifact.pom")));
        m3.getPublishersList().add(new RedeployPublisher("", repo.toURI().toString(), true, false));
        MavenModuleSetBuild b = m3.scheduleBuild2(0).get();
        assertBuildStatus(Result.SUCCESS, b);
        assertTrue(MavenUtil.maven3orLater(b.getMavenVersionUsed()));
        String[] files = listDeployedArtifacts(repo);
        assertFalse("SNAPSHOT-named tar.gz should not exist when uniqueVersion=true",
                new File(repo, "test/test/0.1-SNAPSHOT/test-0.1-SNAPSHOT-bin.tar.gz").exists());
        assertFalse("deployed file should use a timestamped version, not -SNAPSHOT: " + files[0],
                files[0].contains("SNAPSHOT"));
        assertDeployedWithBuildNumberOne(files);
    }

    @Bug(3773)
    public void testDeployUnstable() throws Exception {
        configureDefaultMaven();
        MavenModuleSet m2 = createMavenProject();
        File repo = createTmpDir();
        FileUtils.cleanDirectory(repo);
        // a build with a failing unit tests
        m2.setScm(new ExtractResourceSCM(getClass().getResource("maven-test-failure-findbugs.zip")));
        m2.getPublishersList().add(new RedeployPublisher("", repo.toURI().toString(), false, true));
        MavenModuleSetBuild b = m2.scheduleBuild2(0).get();
        assertBuildStatus(Result.UNSTABLE, b);
        assertTrue("Artifact should have been published even when the build is unstable",
                new File(repo, "test/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar").exists());
    }

    /**
     * Lists the deployed artifact files (tar.gz/jar/zip) under test/test/0.1-SNAPSHOT
     * in the given repository, asserting that the directory exists (guards against an
     * NPE when {@link File#list} returns null).
     */
    private String[] listDeployedArtifacts(File repo) {
        File artifactDir = new File(repo, "test/test/0.1-SNAPSHOT/");
        String[] files = artifactDir.list(DEPLOYED_ARTIFACT_FILTER);
        assertNotNull("artifact directory should exist: " + artifactDir, files);
        System.out.println("deployed files " + Arrays.asList(files));
        return files;
    }

    /**
     * Asserts that every deployed artifact file name ends with build number 1,
     * i.e. the first timestamped unique version was used.
     */
    private void assertDeployedWithBuildNumberOne(String[] files) {
        for (String file : files) {
            if (file.endsWith("-bin.tar.gz")) {
                String ver = StringUtils.remove(file, "-bin.tar.gz");
                ver = ver.substring(ver.length() - 1, ver.length());
                assertEquals("-bin.tar.gz should end with build number 1, file " + file, "1", ver);
            }
            if (file.endsWith(".jar")) {
                String ver = StringUtils.remove(file, ".jar");
                ver = ver.substring(ver.length() - 1, ver.length());
                assertEquals(".jar should end with build number 1, file " + file, "1", ver);
            }
            if (file.endsWith("-bin.zip")) {
                String ver = StringUtils.remove(file, "-bin.zip");
                ver = ver.substring(ver.length() - 1, ver.length());
                assertEquals("-bin.zip should end with build number 1, file " + file, "1", ver);
            }
        }
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bditac;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * JPA entity mapped to the {@code ocorrencia} table: one captured occurrence
 * (apparently a social-media post — note the favorite/retweet columns; confirm
 * against callers) together with its source, author, location and
 * classification attributes.
 *
 * <p>Identity is based solely on the {@code ocrId} surrogate key; see
 * {@link #equals(Object)} and {@link #hashCode()}.
 *
 * @author Henrique
 */
@Entity
@Table(name = "ocorrencia")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "Ocorrencia.findAll", query = "SELECT o FROM Ocorrencia o"),
    @NamedQuery(name = "Ocorrencia.findByOcrId", query = "SELECT o FROM Ocorrencia o WHERE o.ocrId = :ocrId"),
    @NamedQuery(name = "Ocorrencia.findByCriId", query = "SELECT o FROM Ocorrencia o WHERE o.criId = :criId"),
    @NamedQuery(name = "Ocorrencia.findByCapId", query = "SELECT o FROM Ocorrencia o WHERE o.capId = :capId"),
    @NamedQuery(name = "Ocorrencia.findByApiId", query = "SELECT o FROM Ocorrencia o WHERE o.apiId = :apiId"),
    @NamedQuery(name = "Ocorrencia.findByOcrIdApi", query = "SELECT o FROM Ocorrencia o WHERE o.ocrIdApi = :ocrIdApi"),
    @NamedQuery(name = "Ocorrencia.findByOcrCriacao", query = "SELECT o FROM Ocorrencia o WHERE o.ocrCriacao = :ocrCriacao"),
    @NamedQuery(name = "Ocorrencia.findByOcrUsuId", query = "SELECT o FROM Ocorrencia o WHERE o.ocrUsuId = :ocrUsuId"),
    @NamedQuery(name = "Ocorrencia.findByOcrUsuNome", query = "SELECT o FROM Ocorrencia o WHERE o.ocrUsuNome = :ocrUsuNome"),
    @NamedQuery(name = "Ocorrencia.findByOcrUsuScreenNome", query = "SELECT o FROM Ocorrencia o WHERE o.ocrUsuScreenNome = :ocrUsuScreenNome"),
    @NamedQuery(name = "Ocorrencia.findByOcrFonte", query = "SELECT o FROM Ocorrencia o WHERE o.ocrFonte = :ocrFonte"),
    @NamedQuery(name = "Ocorrencia.findByOcrLingua", query = "SELECT o FROM Ocorrencia o WHERE o.ocrLingua = :ocrLingua"),
    @NamedQuery(name = "Ocorrencia.findByOcrPaisCodigo", query = "SELECT o FROM Ocorrencia o WHERE o.ocrPaisCodigo = :ocrPaisCodigo"),
    @NamedQuery(name = "Ocorrencia.findByOcrPais", query = "SELECT o FROM Ocorrencia o WHERE o.ocrPais = :ocrPais"),
    @NamedQuery(name = "Ocorrencia.findByOcrLocal", query = "SELECT o FROM Ocorrencia o WHERE o.ocrLocal = :ocrLocal"),
    @NamedQuery(name = "Ocorrencia.findByOcrCoordenadas", query = "SELECT o FROM Ocorrencia o WHERE o.ocrCoordenadas = :ocrCoordenadas"),
    @NamedQuery(name = "Ocorrencia.findByOcrGeo", query = "SELECT o FROM Ocorrencia o WHERE o.ocrGeo = :ocrGeo"),
    @NamedQuery(name = "Ocorrencia.findByOcrIdentificacao", query = "SELECT o FROM Ocorrencia o WHERE o.ocrIdentificacao = :ocrIdentificacao"),
    @NamedQuery(name = "Ocorrencia.findByOcrSentimento", query = "SELECT o FROM Ocorrencia o WHERE o.ocrSentimento = :ocrSentimento"),
    @NamedQuery(name = "Ocorrencia.findByOcrTempo", query = "SELECT o FROM Ocorrencia o WHERE o.ocrTempo = :ocrTempo")})
public class Ocorrencia implements Serializable {
    private static final long serialVersionUID = 1L;
    // Auto-generated surrogate primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "ocr_id")
    private Integer ocrId;
    // Mandatory foreign-key-style references (mapped as plain ints, no JPA relations).
    @Basic(optional = false)
    @Column(name = "cri_id")
    private int criId;
    @Basic(optional = false)
    @Column(name = "cap_id")
    private int capId;
    @Basic(optional = false)
    @Column(name = "api_id")
    private int apiId;
    // Identifier of the occurrence in the external API it was captured from.
    @Column(name = "ocr_id_api")
    private Long ocrIdApi;
    @Column(name = "ocr_criacao")
    @Temporal(TemporalType.TIMESTAMP)
    private Date ocrCriacao;
    // Free-form occurrence text; stored as a LOB.
    @Lob
    @Column(name = "ocr_texto")
    private String ocrTexto;
    // Author (user) attributes.
    @Column(name = "ocr_usu_id")
    private Long ocrUsuId;
    @Column(name = "ocr_usu_nome")
    private String ocrUsuNome;
    @Column(name = "ocr_usu_screen_nome")
    private String ocrUsuScreenNome;
    @Column(name = "ocr_fonte")
    private String ocrFonte;
    @Column(name = "ocr_lingua")
    private String ocrLingua;
    // Location attributes.
    @Column(name = "ocr_pais_codigo")
    private String ocrPaisCodigo;
    @Column(name = "ocr_pais")
    private String ocrPais;
    @Column(name = "ocr_local")
    private String ocrLocal;
    @Column(name = "ocr_coordenadas")
    private String ocrCoordenadas;
    @Column(name = "ocr_favorite")
    private int ocrFavorite;
    @Column(name = "ocr_retweet")
    private int ocrRetweet;
    // Single-character flags; meanings are not visible here — confirm against the schema.
    @Column(name = "ocr_geo")
    private Character ocrGeo;
    @Column(name = "ocr_identificacao")
    private Character ocrIdentificacao;
    // NOTE(review): field name "ocrIdenper" drops a 't' relative to column "ocr_identper".
    @Column(name = "ocr_identper")
    private Float ocrIdenper;
    @Column(name = "ocr_gravado")
    private Character ocrGravado;
    @Column(name = "ocr_sentimento")
    private Character ocrSentimento;
    @Column(name = "ocr_tempo")
    private Character ocrTempo;
    public Ocorrencia() {
    }
    public Ocorrencia(Integer ocrId) {
        this.ocrId = ocrId;
    }
    public Ocorrencia(Integer ocrId, int criId, int capId, int apiId) {
        this.ocrId = ocrId;
        this.criId = criId;
        this.capId = capId;
        this.apiId = apiId;
    }
    public Integer getOcrId() {
        return ocrId;
    }
    public void setOcrId(Integer ocrId) {
        this.ocrId = ocrId;
    }
    public int getCriId() {
        return criId;
    }
    public void setCriId(int criId) {
        this.criId = criId;
    }
    public int getCapId() {
        return capId;
    }
    public void setCapId(int capId) {
        this.capId = capId;
    }
    public int getApiId() {
        return apiId;
    }
    public void setApiId(int apiId) {
        this.apiId = apiId;
    }
    public Long getOcrIdApi() {
        return ocrIdApi;
    }
    public void setOcrIdApi(Long ocrIdApi) {
        this.ocrIdApi = ocrIdApi;
    }
    public Date getOcrCriacao() {
        return ocrCriacao;
    }
    public void setOcrCriacao(Date ocrCriacao) {
        this.ocrCriacao = ocrCriacao;
    }
    public String getOcrTexto() {
        return ocrTexto;
    }
    public void setOcrTexto(String ocrTexto) {
        this.ocrTexto = ocrTexto;
    }
    public Long getOcrUsuId() {
        return ocrUsuId;
    }
    public void setOcrUsuId(Long ocrUsuId) {
        this.ocrUsuId = ocrUsuId;
    }
    public String getOcrUsuNome() {
        return ocrUsuNome;
    }
    public void setOcrUsuNome(String ocrUsuNome) {
        this.ocrUsuNome = ocrUsuNome;
    }
    public String getOcrUsuScreenNome() {
        return ocrUsuScreenNome;
    }
    public void setOcrUsuScreenNome(String ocrUsuScreenNome) {
        this.ocrUsuScreenNome = ocrUsuScreenNome;
    }
    public String getOcrFonte() {
        return ocrFonte;
    }
    public void setOcrFonte(String ocrFonte) {
        this.ocrFonte = ocrFonte;
    }
    public String getOcrLingua() {
        return ocrLingua;
    }
    public void setOcrLingua(String ocrLingua) {
        this.ocrLingua = ocrLingua;
    }
    public String getOcrPaisCodigo() {
        return ocrPaisCodigo;
    }
    public void setOcrPaisCodigo(String ocrPaisCodigo) {
        this.ocrPaisCodigo = ocrPaisCodigo;
    }
    public String getOcrPais() {
        return ocrPais;
    }
    public void setOcrPais(String ocrPais) {
        this.ocrPais = ocrPais;
    }
    public String getOcrLocal() {
        return ocrLocal;
    }
    public void setOcrLocal(String ocrLocal) {
        this.ocrLocal = ocrLocal;
    }
    public String getOcrCoordenadas() {
        return ocrCoordenadas;
    }
    public void setOcrCoordenadas(String ocrCoordenadas) {
        this.ocrCoordenadas = ocrCoordenadas;
    }
    public Character getOcrGeo() {
        return ocrGeo;
    }
    public void setOcrGeo(Character ocrGeo) {
        this.ocrGeo = ocrGeo;
    }
    public Character getOcrIdentificacao() {
        return ocrIdentificacao;
    }
    public void setOcrIdentificacao(Character ocrIdentificacao) {
        this.ocrIdentificacao = ocrIdentificacao;
    }
    public Character getOcrSentimento() {
        return ocrSentimento;
    }
    public void setOcrSentimento(Character ocrSentimento) {
        this.ocrSentimento = ocrSentimento;
    }
    public Character getOcrTempo() {
        return ocrTempo;
    }
    public void setOcrTempo(Character ocrTempo) {
        this.ocrTempo = ocrTempo;
    }
    // Identity is based solely on the ocrId primary key (standard generated-JPA pattern).
    @Override
    public int hashCode() {
        int hash = 0;
        hash += (ocrId != null ? ocrId.hashCode() : 0);
        return hash;
    }
    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Ocorrencia)) {
            return false;
        }
        Ocorrencia other = (Ocorrencia) object;
        if ((this.ocrId == null && other.ocrId != null) || (this.ocrId != null && !this.ocrId.equals(other.ocrId))) {
            return false;
        }
        return true;
    }
    @Override
    public String toString() {
        return "bditac.Ocorrencia[ ocrId=" + ocrId + " ]";
    }
    public Float getOcrIdenper() {
        return ocrIdenper;
    }
    public void setOcrIdenper(Float ocrIdenper) {
        this.ocrIdenper = ocrIdenper;
    }
    public Character getOcrGravado() {
        return ocrGravado;
    }
    public void setOcrGravado(Character ocrGravado) {
        this.ocrGravado = ocrGravado;
    }
    public int getOcrFavorite() {
        return ocrFavorite;
    }
    public void setOcrFavorite(int ocrFavorite) {
        this.ocrFavorite = ocrFavorite;
    }
    public int getOcrRetweet() {
        return ocrRetweet;
    }
    public void setOcrRetweet(int ocrRetweet) {
        this.ocrRetweet = ocrRetweet;
    }
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python;
import com.intellij.codeInsight.completion.impl.CamelHumpMatcher;
import com.intellij.testFramework.LightProjectDescriptor;
import com.jetbrains.python.fixtures.PyTestCase;
import com.jetbrains.python.psi.LanguageLevel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
 * Code-completion tests that run against the Python 3 mock SDK
 * ({@link #getProjectDescriptor()} returns {@code ourPy3Descriptor}).
 *
 * @author yole
 */
public class Py3CompletionTest extends PyTestCase {
  @Override
  protected LightProjectDescriptor getProjectDescriptor() {
    return ourPy3Descriptor;
  }
  public void testPropertyDecorator() {
    doTest();
  }
  public void testPropertyAfterAccessor() {   // PY-5951
    doTest();
  }
  public void testNamedTuple() {
    final String testName = getTestName(true);
    myFixture.configureByFile(testName + ".py");
    myFixture.completeBasic();
    final List<String> strings = myFixture.getLookupElementStrings();
    assertNotNull(strings);
    // The namedtuple's field names must appear among the completion variants.
    assertTrue(strings.contains("lat"));
    assertTrue(strings.contains("long"));
  }
  public void testNamedTupleBaseClass() {
    doTest();
  }
  // PY-13157
  public void testMetaClass() {
    doTestByText("class C(meta<caret>):\n" +
                 "  pass\n");
    myFixture.checkResult("class C(metaclass=):\n" +
                          "  pass\n");
  }
  /**
   * Completes in {@code <testName>.py} and compares the result with
   * {@code <testName>.after.py}, using camel-hump prefix matching.
   */
  private void doTest() {
    CamelHumpMatcher.forceStartMatching(myFixture.getTestRootDisposable());
    final String testName = getTestName(true);
    myFixture.configureByFile(testName + ".py");
    myFixture.completeBasic();
    myFixture.checkResultByFile(testName + ".after.py");
  }
  /**
   * Copies the {@code <testName>} directory into the project, completes in
   * {@code a.py} and checks against {@code <testName>/a.after.py}.
   */
  private void doMultiFileTest() {
    myFixture.copyDirectoryToProject(getTestName(true), "");
    myFixture.configureByFile("a.py");
    myFixture.completeBasic();
    myFixture.checkResultByFile(getTestName(true) + "/a.after.py");
  }
  /**
   * Completes at the caret in the given text and returns the lookup strings.
   * May be null — presumably when the single variant was inserted automatically
   * (see {@link #testMetaClass()}, which checks the document instead).
   */
  @Nullable
  private List<String> doTestByText(@NotNull String text) {
    myFixture.configureByText(PythonFileType.INSTANCE, text);
    myFixture.completeBasic();
    return myFixture.getLookupElementStrings();
  }
  // PY-4073
  public void testSpecialFunctionAttributesPy3() {
    runWithLanguageLevel(
      LanguageLevel.PYTHON32,
      () -> {
        // Python-2 style func_* attributes must not be suggested in Python 3 ...
        List<String> suggested = doTestByText("def func(): pass; func.func_<caret>");
        assertNotNull(suggested);
        assertEmpty(suggested);
        // ... but the dunder equivalents (and the Py3-only ones) must be.
        suggested = doTestByText("def func(): pass; func.__<caret>");
        assertNotNull(suggested);
        assertContainsElements(suggested, "__defaults__", "__globals__", "__closure__",
                               "__code__", "__name__", "__doc__", "__dict__", "__module__");
        assertContainsElements(suggested, "__annotations__", "__kwdefaults__");
      }
    );
  }
  // PY-7375
  public void testImportNamespacePackage() {
    doMultiFileTest();
  }
  // PY-5422
  public void testImportQualifiedNamespacePackage() {
    doMultiFileTest();
  }
  // PY-6477
  public void testFromQualifiedNamespacePackageImport() {
    doMultiFileTest();
  }
  public void testImportNestedQualifiedNamespacePackage() {
    doMultiFileTest();
  }
  // PY-7376
  public void testRelativeFromImportInNamespacePackage() {
    doMultiFileTestInsideNamespacePackage();
  }
  // PY-7376
  public void testRelativeFromImportInNamespacePackage2() {
    doMultiFileTestInsideNamespacePackage();
  }
  /** Like {@link #doMultiFileTest()} but completes inside {@code nspkg1/a.py}. */
  private void doMultiFileTestInsideNamespacePackage() {
    myFixture.copyDirectoryToProject(getTestName(true), "");
    myFixture.configureByFile("nspkg1/a.py");
    myFixture.completeBasic();
    myFixture.checkResultByFile(getTestName(true) + "/nspkg1/a.after.py");
  }
  // PY-14385
  public void testNotImportedSubmodulesOfNamespacePackage() {
    doMultiFileTest();
  }
  // PY-15390
  public void testMatMul() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-11214
  public void testDunderNext() {
    doTest();
  }
  public void testAsync() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  public void testAwait() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-17828
  public void testDunderPrepare() {
    runWithLanguageLevel(LanguageLevel.PYTHON30, this::doTest);
  }
  // PY-20279
  // __class__ must complete inside instance and static methods, but not in the
  // class body itself nor inside a plain top-level function.
  public void testImplicitDunderClass() {
    doTestByText("class First:\n" +
                 "  def foo(self):\n" +
                 "    print(__cl<caret>)");
    myFixture.checkResult("class First:\n" +
                          "  def foo(self):\n" +
                          "    print(__class__)");
    doTestByText("class First:\n" +
                 "  @staticmethod\n" +
                 "  def foo():\n" +
                 "    print(__cl<caret>)");
    myFixture.checkResult("class First:\n" +
                          "  @staticmethod\n" +
                          "  def foo():\n" +
                          "    print(__class__)");
    doTestByText("class First:\n" +
                 "  print(__cl<caret>)");
    myFixture.checkResult("class First:\n" +
                          "  print(__cl)");
    doTestByText("def abc():\n" +
                 "  print(__cl<caret>)");
    myFixture.checkResult("def abc():\n" +
                          "  print(__cl)");
  }
  // PY-11208
  public void testMockPatchObject1() {
    doMultiFileTest();
  }
  // PY-11208
  public void testMockPatchObject2() {
    doMultiFileTest();
  }
  // PY-11208
  public void testMockPatchObject3() {
    doMultiFileTest();
  }
  // PY-21060
  public void testGenericTypeInheritor() {
    myFixture.copyDirectoryToProject("../typing", "");
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-19702
  public void testMetaclassAttributeOnDefinition() {
    final List<String> suggested = doTestByText("class Meta(type):\n" +
                                                "  def __init__(self, what, bases, dict):\n" +
                                                "    self.meta_attr = \"attr\"\n" +
                                                "    super().__init__(what, bases, dict)\n" +
                                                "class A(metaclass=Meta):\n" +
                                                "  pass\n" +
                                                "print(A.<caret>)");
    assertNotNull(suggested);
    assertContainsElements(suggested, "meta_attr");
  }
  // PY-19702
  public void testMetaclassAttributeOnInstance() {
    final List<String> suggested = doTestByText("class Meta(type):\n" +
                                                "  def __init__(self, what, bases, dict):\n" +
                                                "    self.meta_attr = \"attr\"\n" +
                                                "    super().__init__(what, bases, dict)\n" +
                                                "class A(metaclass=Meta):\n" +
                                                "  pass\n" +
                                                "print(A().<caret>)");
    assertNotNull(suggested);
    assertContainsElements(suggested, "meta_attr");
  }
  public void testMetaclassMethodOnDefinition() {
    final List<String> suggested = doTestByText("class Meta(type):\n" +
                                                "  def meta_method(cls):\n" +
                                                "    pass\n" +
                                                "class A(metaclass=Meta):\n" +
                                                "  pass\n" +
                                                "print(A.<caret>)");
    assertNotNull(suggested);
    assertContainsElements(suggested, "meta_method");
  }
  public void testMetaclassMethodOnInstance() {
    final List<String> suggested = doTestByText("class Meta(type):\n" +
                                                "  def meta_method(cls):\n" +
                                                "    pass\n" +
                                                "class A(metaclass=Meta):\n" +
                                                "  pass\n" +
                                                "print(A().<caret>)");
    assertNotNull(suggested);
    // Metaclass methods are visible on the class object, not on its instances.
    assertDoesntContain(suggested, "meta_method");
  }
  @Override
  protected String getTestDataPath() {
    return super.getTestDataPath() + "/completion";
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import static org.apache.hadoop.hbase.ipc.IPCUtil.buildRequestHeader;
import static org.apache.hadoop.hbase.ipc.IPCUtil.createRemoteException;
import static org.apache.hadoop.hbase.ipc.IPCUtil.getTotalSizeWhenWrittenDelimited;
import static org.apache.hadoop.hbase.ipc.IPCUtil.isFatalConnectionException;
import static org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled;
import static org.apache.hadoop.hbase.ipc.IPCUtil.write;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayDeque;
import java.util.Locale;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ThreadLocalRandom;
import javax.security.sasl.SaslException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
/**
* Thread that reads responses and notifies callers. Each connection owns a socket connected to a
* remote address. Calls are multiplexed through this socket: responses may be delivered out of
* order.
*/
@InterfaceAudience.Private
class BlockingRpcConnection extends RpcConnection implements Runnable {
private static final Log LOG = LogFactory.getLog(BlockingRpcConnection.class);
private final BlockingRpcClient rpcClient;
private final String threadName;
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "IS2_INCONSISTENT_SYNC",
justification = "We are always under lock actually")
private Thread thread;
// connected socket. protected for writing UT.
protected Socket socket = null;
private DataInputStream in;
private DataOutputStream out;
private HBaseSaslRpcClient saslRpcClient;
// currently active calls
private final ConcurrentMap<Integer, Call> calls = new ConcurrentHashMap<>();
private final CallSender callSender;
private boolean closed = false;
private byte[] connectionHeaderPreamble;
private byte[] connectionHeaderWithLength;
private boolean waitingConnectionHeaderResponse = false;
  /**
   * If the client wants to interrupt its calls easily (i.e. call Thread#interrupt), it gets into a
   * java issue: an interruption during a write closes the socket/channel. A way to avoid this is to
   * use a different thread for writing. This way, on interruptions, we either cancel the writes or
   * ignore the answer if the write is already done, but we don't stop the write in the middle. This
   * adds a thread per region server in the client, so it's kept as an option.
   * <p>
   * The implementation is simple: the client threads adds their call to the queue, and then wait
   * for an answer. The CallSender blocks on the queue, and writes the calls one after the other. On
   * interruption, the client cancels its call. The CallSender checks that the call has not been
   * canceled before writing it.
   * </p>
   * <p>
   * When the connection closes, all the calls not yet sent are dismissed. The client thread is
   * notified with an appropriate exception, as if the call was already sent but the answer not yet
   * received.
   * </p>
   */
  private class CallSender extends Thread {
    private final Queue<Call> callsToWrite;
    private final int maxQueueSize;
    public CallSender(String name, Configuration conf) {
      int queueSize = conf.getInt("hbase.ipc.client.write.queueSize", 1000);
      callsToWrite = new ArrayDeque<>(queueSize);
      this.maxQueueSize = queueSize;
      setDaemon(true);
      setName(name + " - writer");
    }
    // Enqueues a call for the writer thread and wakes it up. NOTE(review):
    // notifyAll() requires the caller to already hold the BlockingRpcConnection
    // monitor, otherwise it throws IllegalMonitorStateException — callers
    // presumably invoke this under synchronized(BlockingRpcConnection.this);
    // confirm at the call sites.
    public void sendCall(final Call call) throws IOException {
      if (callsToWrite.size() >= maxQueueSize) {
        throw new IOException("Can't add the call " + call.id
            + " to the write queue. callsToWrite.size()=" + callsToWrite.size());
      }
      callsToWrite.offer(call);
      BlockingRpcConnection.this.notifyAll();
    }
    // Cancels a queued call: drops it from both the write queue and the pending-call
    // map, and fails it with CallCancelledException.
    public void remove(Call call) {
      callsToWrite.remove(call);
      // By removing the call from the expected call list, we make the list smaller, but
      // it means as well that we don't know how many calls we cancelled.
      calls.remove(call.id);
      call.setException(new CallCancelledException("Call id=" + call.id + ", waitTime="
          + (EnvironmentEdgeManager.currentTime() - call.getStartTime()) + ", rpcTimeout="
          + call.timeout));
    }
    /**
     * Reads the call from the queue, write them on the socket.
     */
    @Override
    public void run() {
      synchronized (BlockingRpcConnection.this) {
        while (!closed) {
          if (callsToWrite.isEmpty()) {
            // We should use another monitor object here for better performance since the read
            // thread also uses ConnectionImpl.this. But this makes the locking schema more
            // complicated, can do it later as an optimization.
            try {
              BlockingRpcConnection.this.wait();
            } catch (InterruptedException e) {
            }
            // check if we need to quit, so continue the main loop instead of fallback.
            continue;
          }
          Call call = callsToWrite.poll();
          if (call.isDone()) {
            // Already completed (e.g. cancelled) while queued; nothing to write.
            continue;
          }
          try {
            tracedWriteRequest(call);
          } catch (IOException e) {
            // exception here means the call has not been added to the pendingCalls yet, so we need
            // to fail it by our own.
            if (LOG.isDebugEnabled()) {
              LOG.debug("call write error for call #" + call.id, e);
            }
            call.setException(e);
            closeConn(e);
          }
        }
      }
    }
    /**
     * Cleans the call not yet sent when we finish.
     */
    public void cleanup(IOException e) {
      // NOTE(review): the incoming exception 'e' is unused; every queued call is
      // failed with a generic ConnectionClosingException instead. Consider chaining
      // 'e' as the cause so callers can see why the connection went down.
      IOException ie = new ConnectionClosingException(
          "Connection to " + remoteId.address + " is closing.");
      for (Call call : callsToWrite) {
        call.setException(ie);
      }
      callsToWrite.clear();
    }
  }
BlockingRpcConnection(BlockingRpcClient rpcClient, ConnectionId remoteId) throws IOException {
  super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId,
      rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor);
  this.rpcClient = rpcClient;
  if (remoteId.getAddress().isUnresolved()) {
    throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName());
  }

  this.connectionHeaderPreamble = getConnectionHeaderPreamble();
  // Pre-serialize the connection header as <4-byte length><protobuf body> so it
  // can later be written to the socket in a single call (writeConnectionHeader).
  ConnectionHeader header = getConnectionHeader();
  ByteArrayOutputStream baos = new ByteArrayOutputStream(4 + header.getSerializedSize());
  DataOutputStream dos = new DataOutputStream(baos);
  dos.writeInt(header.getSerializedSize());
  header.writeTo(dos);
  assert baos.size() == 4 + header.getSerializedSize();
  this.connectionHeaderWithLength = baos.getBuffer();

  UserGroupInformation ticket = remoteId.ticket.getUGI();
  this.threadName = "IPC Client (" + this.rpcClient.socketFactory.hashCode() + ") connection to "
      + remoteId.getAddress().toString()
      + ((ticket == null) ? " from an unknown user" : (" from " + ticket.getUserName()));

  // Optional dedicated writer thread: lets a caller be interrupted without
  // aborting a socket write already in progress (see CallSender above).
  if (this.rpcClient.conf.getBoolean(BlockingRpcClient.SPECIFIC_WRITE_THREAD, false)) {
    callSender = new CallSender(threadName, this.rpcClient.conf);
    callSender.start();
  } else {
    callSender = null;
  }
}
// protected (rather than private) so unit tests can override it.
/**
 * Creates, configures and connects the socket, retrying on failure.
 * Timeout failures and other I/O failures are counted independently;
 * handleConnectionFailure rethrows the failure once rpcClient.maxRetries is
 * reached, which is what terminates this loop on persistent errors.
 */
protected void setupConnection() throws IOException {
  short ioFailures = 0;
  short timeoutFailures = 0;
  while (true) {
    try {
      this.socket = this.rpcClient.socketFactory.createSocket();
      this.socket.setTcpNoDelay(this.rpcClient.isTcpNoDelay());
      this.socket.setKeepAlive(this.rpcClient.tcpKeepAlive);
      if (this.rpcClient.localAddr != null) {
        this.socket.bind(this.rpcClient.localAddr);
      }
      NetUtils.connect(this.socket, remoteId.getAddress(), this.rpcClient.connectTO);
      this.socket.setSoTimeout(this.rpcClient.readTO);
      return;
    } catch (SocketTimeoutException toe) {
      /*
       * NOTE(review): total retry time depends on rpcClient.maxRetries and
       * rpcClient.failureSleep (see handleConnectionFailure); the old "45
       * retries = 15 minutes" figure did not match the configurable values.
       */
      handleConnectionFailure(timeoutFailures++, this.rpcClient.maxRetries, toe);
    } catch (IOException ie) {
      handleConnectionFailure(ioFailures++, this.rpcClient.maxRetries, ie);
    }
  }
}
/**
 * Handles one connection failure: rethrows the failure once the retry budget
 * is exhausted (or the failure was an interrupt), otherwise sleeps for the
 * configured backoff so the caller can try connecting again. Only called from
 * inside setupIOstreams(), which is synchronized — so the sleep happens with
 * the locks retained, which is intentional.
 *
 * @param curRetries number of retries attempted so far
 * @param maxRetries maximum number of retries allowed
 * @param ioe the failure that triggered this call
 * @throws IOException {@code ioe} itself, once retries are exhausted
 */
private void handleConnectionFailure(int curRetries, int maxRetries, IOException ioe)
    throws IOException {
  closeSocket();
  // Give up immediately on interrupt, or when the retry budget is spent.
  boolean retriesExhausted = curRetries >= maxRetries;
  if (retriesExhausted || ExceptionUtil.isInterrupt(ioe)) {
    throw ioe;
  }
  // Back off before the caller retries.
  try {
    Thread.sleep(this.rpcClient.failureSleep);
  } catch (InterruptedException ie) {
    ExceptionUtil.rethrowIfInterrupt(ie);
  }
  LOG.info("Retrying connect to server: " + remoteId.getAddress() + " after sleeping "
      + this.rpcClient.failureSleep + "ms. Already tried " + curRetries + " time(s).");
}
/*
 * wait till someone signals us to start reading RPC response or it is idle too long, it is marked
 * as to be closed, or the client is marked as not running.
 * @return true if it is time to read a response; false otherwise.
 */
private synchronized boolean waitForWork() {
  // beware of the concurrent access to the calls list: we can add calls, but as well
  // remove them.
  long waitUntil = EnvironmentEdgeManager.currentTime() + this.rpcClient.minIdleTimeBeforeClose;
  for (;;) {
    if (thread == null) {
      // closeConn() nulled the reader thread: the connection is shutting down.
      return false;
    }
    if (!calls.isEmpty()) {
      // At least one pending call — a response may be on the wire.
      return true;
    }
    if (EnvironmentEdgeManager.currentTime() >= waitUntil) {
      // Idle longer than minIdleTimeBeforeClose: close the connection and stop.
      closeConn(
          new IOException("idle connection closed with " + calls.size() + " pending request(s)"));
      return false;
    }
    try {
      // Bounded wait (at most 1s) so the idle deadline above is re-checked
      // periodically even if nobody notifies us.
      wait(Math.min(this.rpcClient.minIdleTimeBeforeClose, 1000));
    } catch (InterruptedException e) {
      // Spurious or deliberate wake-up; the loop re-evaluates the exit conditions.
    }
  }
}
/**
 * Reader-thread main loop: keeps reading responses for as long as
 * waitForWork() says there is (or may be) work; exits when the connection is
 * closed or goes idle.
 */
@Override
public void run() {
  if (LOG.isTraceEnabled()) {
    LOG.trace(threadName + ": starting, connections " + this.rpcClient.connections.size());
  }
  for (;;) {
    if (!waitForWork()) {
      break;
    }
    readResponse();
  }
  if (LOG.isTraceEnabled()) {
    LOG.trace(threadName + ": stopped, connections " + this.rpcClient.connections.size());
  }
}
/** Releases the SASL client, if any, so a fresh negotiation can start later. */
private void disposeSasl() {
  if (saslRpcClient == null) {
    return;
  }
  saslRpcClient.dispose();
  saslRpcClient = null;
}
/**
 * Creates the SASL client and runs the SASL handshake over the given streams.
 *
 * @param in2  raw socket input stream to negotiate over
 * @param out2 raw socket output stream to negotiate over
 * @return the result of saslConnect — per the caller in setupIOstreams, false
 *         means the server told us to fall back to simple auth
 */
private boolean setupSaslConnection(final InputStream in2, final OutputStream out2)
    throws IOException {
  saslRpcClient = new HBaseSaslRpcClient(authMethod, token, serverPrincipal,
      this.rpcClient.fallbackAllowed, this.rpcClient.conf.get("hbase.rpc.protection",
      QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)),
      this.rpcClient.conf.getBoolean(CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT));
  return saslRpcClient.saslConnect(in2, out2);
}
/**
 * If multiple clients with the same principal try to connect to the same server at the same time,
 * the server assumes a replay attack is in progress. This is a feature of kerberos. In order to
 * work around this, what is done is that the client backs off randomly and tries to initiate the
 * connection again. The other problem is to do with ticket expiry. To handle that, a relogin is
 * attempted.
 * <p>
 * The retry logic is governed by the {@link #shouldAuthenticateOverKrb} method. In case when the
 * user doesn't have valid credentials, we don't need to retry (from cache or ticket). In such
 * cases, it is prudent to throw a runtime exception when we receive a SaslException from the
 * underlying authentication implementation, so there is no retry from other high level (for eg,
 * HCM or HBaseAdmin).
 * </p>
 */
private void handleSaslConnectionFailure(final int currRetries, final int maxRetries,
    final Exception ex, final UserGroupInformation user)
    throws IOException, InterruptedException {
  closeSocket();
  // Run the retry/relogin decision as the connecting user so relogin() acts on
  // the right credentials.
  user.doAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws IOException, InterruptedException {
      if (shouldAuthenticateOverKrb()) {
        if (currRetries < maxRetries) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Exception encountered while connecting to " + "the server : " + ex);
          }
          // try re-login
          relogin();
          disposeSasl();
          // have granularity of milliseconds
          // we are sleeping with the Connection lock held but since this
          // connection instance is being used for connecting to the server
          // in question, it is okay
          Thread.sleep(ThreadLocalRandom.current().nextInt(reloginMaxBackoff) + 1);
          // Returning null (instead of throwing) lets the caller retry the connect.
          return null;
        } else {
          String msg = "Couldn't setup connection for "
              + UserGroupInformation.getLoginUser().getUserName() + " to " + serverPrincipal;
          LOG.warn(msg, ex);
          throw (IOException) new IOException(msg).initCause(ex);
        }
      } else {
        LOG.warn("Exception encountered while connecting to " + "the server : " + ex);
      }
      // Not retrying: surface the failure in the most specific form available.
      if (ex instanceof RemoteException) {
        throw (RemoteException) ex;
      }
      if (ex instanceof SaslException) {
        String msg = "SASL authentication failed."
            + " The most likely cause is missing or invalid credentials." + " Consider 'kinit'.";
        LOG.fatal(msg, ex);
        // RuntimeException on purpose: see the class javadoc above — prevents
        // higher layers from retrying a credentials problem.
        throw new RuntimeException(msg, ex);
      }
      throw new IOException(ex);
    }
  });
}
/**
 * Establishes the connection: opens the socket, writes the preamble, runs the
 * optional SASL handshake, writes the connection header, processes the
 * header response if one is expected, and finally starts the reader thread.
 * A no-op when the socket already exists. On failure the server is recorded
 * in the failed-servers list and an IOException is thrown.
 */
private void setupIOstreams() throws IOException {
  if (socket != null) {
    // The connection is already available. Perfect.
    return;
  }

  if (this.rpcClient.failedServers.isFailedServer(remoteId.getAddress())) {
    // Fail fast rather than re-probing a server that recently failed.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Not trying to connect to " + remoteId.address
          + " this server is in the failed servers list");
    }
    throw new FailedServerException(
        "This server is in the failed servers list: " + remoteId.address);
  }

  try {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Connecting to " + remoteId.address);
    }

    // Retry budget for SASL failures only; socket-level retries are handled
    // inside setupConnection()/handleConnectionFailure().
    short numRetries = 0;
    final short MAX_RETRIES = 5;
    while (true) {
      setupConnection();
      InputStream inStream = NetUtils.getInputStream(socket);
      // This creates a socket with a write timeout. This timeout cannot be changed.
      OutputStream outStream = NetUtils.getOutputStream(socket, this.rpcClient.writeTO);
      // Write out the preamble -- MAGIC, version, and auth to use.
      writeConnectionHeaderPreamble(outStream);
      if (useSasl) {
        final InputStream in2 = inStream;
        final OutputStream out2 = outStream;
        UserGroupInformation ticket = getUGI();
        boolean continueSasl;
        if (ticket == null) {
          throw new FatalConnectionException("ticket/user is null");
        }
        try {
          continueSasl = ticket.doAs(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() throws IOException {
              return setupSaslConnection(in2, out2);
            }
          });
        } catch (Exception ex) {
          ExceptionUtil.rethrowIfInterrupt(ex);
          // handleSaslConnectionFailure either sleeps (so we retry below) or throws.
          handleSaslConnectionFailure(numRetries++, MAX_RETRIES, ex, ticket);
          continue;
        }
        if (continueSasl) {
          // Sasl connect is successful. Let's set up Sasl i/o streams.
          inStream = saslRpcClient.getInputStream();
          outStream = saslRpcClient.getOutputStream();
        } else {
          // fall back to simple auth because server told us so.
          // do not change authMethod and useSasl here, we should start from secure when
          // reconnecting because regionserver may change its sasl config after restart.
        }
      }
      this.in = new DataInputStream(new BufferedInputStream(inStream));
      this.out = new DataOutputStream(new BufferedOutputStream(outStream));
      // Now write out the connection header
      writeConnectionHeader();
      // process the response from server for connection header if necessary
      processResponseForConnectionHeader();

      break;
    }
  } catch (Throwable t) {
    closeSocket();
    IOException e = ExceptionUtil.asInterrupt(t);
    if (e == null) {
      // Non-interrupt failure: remember this server so subsequent attempts
      // fail fast (see the failed-servers check above).
      this.rpcClient.failedServers.addToFailedServers(remoteId.address, t);
      if (t instanceof LinkageError) {
        // probably the hbase hadoop version does not match the running hadoop version
        e = new DoNotRetryIOException(t);
      } else if (t instanceof IOException) {
        e = (IOException) t;
      } else {
        e = new IOException("Could not set up IO Streams to " + remoteId.address, t);
      }
    }
    throw e;
  }

  // start the receiver thread after the socket connection has been set up
  thread = new Thread(this, threadName);
  thread.setDaemon(true);
  thread.start();
}
/**
 * Write the RPC header: {@code <MAGIC WORD -- 'HBas'> <ONEBYTE_VERSION> <ONEBYTE_AUTH_TYPE>}
 * The bytes were pre-built in the constructor (connectionHeaderPreamble);
 * flushed immediately so the server can respond before anything else is sent.
 */
private void writeConnectionHeaderPreamble(OutputStream out) throws IOException {
  out.write(connectionHeaderPreamble);
  out.flush();
}
/**
 * Write the connection header (pre-serialized in the constructor as
 * length-prefixed protobuf). If Crypto AES is negotiated, also flags that a
 * header response is expected from the server (consumed by
 * processResponseForConnectionHeader()).
 */
private void writeConnectionHeader() throws IOException {
  boolean isCryptoAesEnable = false;
  // check if Crypto AES is enabled: requires SASL with QoP "privacy" plus the
  // crypto-AES config switch.
  if (saslRpcClient != null) {
    boolean saslEncryptionEnabled = SaslUtil.QualityOfProtection.PRIVACY.
        getSaslQop().equalsIgnoreCase(saslRpcClient.getSaslQOP());
    isCryptoAesEnable = saslEncryptionEnabled && conf.getBoolean(
        CRYPTO_AES_ENABLED_KEY, CRYPTO_AES_ENABLED_DEFAULT);
  }

  // if Crypto AES is enabled, set transformation and negotiate with server
  if (isCryptoAesEnable) {
    waitingConnectionHeaderResponse = true;
  }
  this.out.write(connectionHeaderWithLength);
  this.out.flush();
}
/**
 * Reads and processes the server's response to the connection header, if one
 * is expected (only when Crypto AES negotiation is pending — see
 * writeConnectionHeader()). A no-op otherwise.
 *
 * @throws IOException on a malformed response, or wrapping a read timeout
 *         while waiting for the response
 */
private void processResponseForConnectionHeader() throws IOException {
  // if no response expected, return
  if (!waitingConnectionHeaderResponse) {
    return;
  }
  try {
    // read the ConnectionHeaderResponse from server
    int len = this.in.readInt();
    byte[] buff = new byte[len];
    // readFully: a plain read() may return fewer than len bytes (short read),
    // which would leave the buffer partially filled and corrupt the protobuf
    // parse below.
    this.in.readFully(buff);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Length of response for connection header:" + len);
    }

    RPCProtos.ConnectionHeaderResponse connectionHeaderResponse =
        RPCProtos.ConnectionHeaderResponse.parseFrom(buff);

    // Get the CryptoCipherMeta, update the HBaseSaslRpcClient for Crypto Cipher
    if (connectionHeaderResponse.hasCryptoCipherMeta()) {
      negotiateCryptoAes(connectionHeaderResponse.getCryptoCipherMeta());
    }
    waitingConnectionHeaderResponse = false;
  } catch (SocketTimeoutException ste) {
    LOG.fatal("Can't get the connection header response for rpc timeout, please check if" +
        " server has the correct configuration to support the additional function.", ste);
    // timeout when waiting the connection header response, ignore the additional function
    throw new IOException("Timeout while waiting connection header response", ste);
  }
}
/**
 * Completes Crypto AES negotiation: initializes the cipher on the SASL client
 * from the server-provided metadata, then rewraps this connection's streams
 * so subsequent traffic goes through the encrypting streams.
 */
private void negotiateCryptoAes(RPCProtos.CryptoCipherMeta cryptoCipherMeta)
    throws IOException {
  // initialize the Crypto AES with CryptoCipherMeta
  saslRpcClient.initCryptoCipher(cryptoCipherMeta, this.rpcClient.conf);
  // reset the inputStream/outputStream for Crypto AES encryption
  this.in = new DataInputStream(new BufferedInputStream(saslRpcClient.getInputStream()));
  this.out = new DataOutputStream(new BufferedOutputStream(saslRpcClient.getOutputStream()));
}
/** Wraps writeRequest(call) in a trace span, continuing the call's own span. */
private void tracedWriteRequest(Call call) throws IOException {
  try (TraceScope ignored = Trace.startSpan("RpcClientImpl.tracedWriteRequest", call.span)) {
    writeRequest(call);
  }
}
/**
 * Initiates a call by sending the parameter to the remote server. Note: this is not called from
 * the Connection thread, but by other threads.
 * Callers hold the lock on this connection (sendRequest is synchronized, and
 * CallSender.run() synchronizes on BlockingRpcConnection.this), which is what
 * makes the notifyAll() at the end legal.
 * @see #readResponse()
 */
private void writeRequest(Call call) throws IOException {
  ByteBuffer cellBlock = this.rpcClient.cellBlockBuilder.buildCellBlock(this.codec,
      this.compressor, call.cells);
  CellBlockMeta cellBlockMeta;
  if (cellBlock != null) {
    cellBlockMeta = CellBlockMeta.newBuilder().setLength(cellBlock.limit()).build();
  } else {
    cellBlockMeta = null;
  }
  RequestHeader requestHeader = buildRequestHeader(call, cellBlockMeta);

  // Lazily connects if needed; throws before the call is tracked on failure.
  setupIOstreams();

  // Now we're going to write the call. We take the lock, then check that the connection
  // is still valid, and, if so we do the write to the socket. If the write fails, we don't
  // know where we stand, we have to close the connection.
  if (Thread.interrupted()) {
    throw new InterruptedIOException();
  }

  calls.put(call.id, call); // We put first as we don't want the connection to become idle.
  // from here, we do not throw any exception to upper layer as the call has been tracked in the
  // pending calls map.
  try {
    call.callStats.setRequestSizeBytes(write(this.out, requestHeader, call.param, cellBlock));
  } catch (IOException e) {
    // closeConn fails the tracked call (and all others) with e; do not rethrow.
    closeConn(e);
    return;
  }
  // Wake waitForWork() on the reader thread: there is now a pending call.
  notifyAll();
}
/*
 * Receive a response. Because only one receiver, so no synchronization on in.
 */
private void readResponse() {
  Call call = null;
  boolean expectedCall = false;
  try {
    // See HBaseServer.Call.setResponse for where we write out the response.
    // Total size of the response. Unused. But have to read it in anyways.
    int totalSize = in.readInt();

    // Read the header
    ResponseHeader responseHeader = ResponseHeader.parseDelimitedFrom(in);
    int id = responseHeader.getCallId();
    call = calls.remove(id); // call.done have to be set before leaving this method
    expectedCall = (call != null && !call.isDone());
    if (!expectedCall) {
      // So we got a response for which we have no corresponding 'call' here on the client-side.
      // We probably timed out waiting, cleaned up all references, and now the server decides
      // to return a response. There is nothing we can do w/ the response at this stage. Clean
      // out the wire of the response so its out of the way and we can get other responses on
      // this connection.
      int readSoFar = getTotalSizeWhenWrittenDelimited(responseHeader);
      int whatIsLeftToRead = totalSize - readSoFar;
      IOUtils.skipFully(in, whatIsLeftToRead);
      if (call != null) {
        call.callStats.setResponseSizeBytes(totalSize);
        call.callStats
            .setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.callStats.getStartTime());
      }
      return;
    }
    if (responseHeader.hasException()) {
      // Server-side failure: deliver it to the caller as a RemoteException.
      ExceptionResponse exceptionResponse = responseHeader.getException();
      RemoteException re = createRemoteException(exceptionResponse);
      call.setException(re);
      call.callStats.setResponseSizeBytes(totalSize);
      call.callStats
          .setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.callStats.getStartTime());
      if (isFatalConnectionException(exceptionResponse)) {
        // Connection-level failure (not just this call): tear the connection down.
        synchronized (this) {
          closeConn(re);
        }
      }
    } else {
      // Normal response: optional protobuf value followed by an optional cell block.
      Message value = null;
      if (call.responseDefaultType != null) {
        Builder builder = call.responseDefaultType.newBuilderForType();
        ProtobufUtil.mergeDelimitedFrom(builder, in);
        value = builder.build();
      }
      CellScanner cellBlockScanner = null;
      if (responseHeader.hasCellBlockMeta()) {
        int size = responseHeader.getCellBlockMeta().getLength();
        byte[] cellBlock = new byte[size];
        IOUtils.readFully(this.in, cellBlock, 0, cellBlock.length);
        cellBlockScanner = this.rpcClient.cellBlockBuilder.createCellScanner(this.codec,
            this.compressor, cellBlock);
      }
      call.setResponse(value, cellBlockScanner);
      call.callStats.setResponseSizeBytes(totalSize);
      call.callStats
          .setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.callStats.getStartTime());
    }
  } catch (IOException e) {
    if (expectedCall) {
      call.setException(e);
    }
    if (e instanceof SocketTimeoutException) {
      // Clean up open calls but don't treat this as a fatal condition,
      // since we expect certain responses to not make it by the specified
      // {@link ConnectionId#rpcTimeout}.
      if (LOG.isTraceEnabled()) {
        LOG.trace("ignored", e);
      }
    } else {
      // Mid-stream read failure: the wire position is unknown, close the connection.
      synchronized (this) {
        closeConn(e);
      }
    }
  }
}
/**
 * Invoked when a call's timeout fires: drop it from the pending map so a late
 * response from the server is ignored by readResponse(). NOTE(review): a call
 * still queued in the CallSender is not removed here — presumably the
 * isDone() check in CallSender.run() covers that case; confirm.
 */
@Override
protected synchronized void callTimeout(Call call) {
  calls.remove(call.id);
}
// just close socket input and output, then the socket itself; null the fields
// so setupIOstreams() knows to reconnect. Idempotent (IOUtils tolerates nulls).
private void closeSocket() {
  IOUtils.closeStream(out);
  IOUtils.closeStream(in);
  IOUtils.closeSocket(socket);
  out = null;
  in = null;
  socket = null;
}
// close socket, reader, and clean up all pending calls: stops the reader
// thread, closes the socket, fails every queued and pending call with e.
// No-op if the connection is already closed (thread == null).
private void closeConn(IOException e) {
  if (thread == null) {
    return;
  }
  thread.interrupt();
  thread = null;
  closeSocket();
  if (callSender != null) {
    // Fail calls still waiting in the write queue.
    callSender.cleanup(e);
  }
  // Fail calls already sent but not yet answered.
  for (Call call : calls.values()) {
    call.setException(e);
  }
  calls.clear();
}
// release all resources, the connection will not be used any more: marks the
// connection closed (stops CallSender.run()'s loop), interrupts the writer
// thread, and closes the connection, failing all outstanding calls.
@Override
public synchronized void shutdown() {
  closed = true;
  if (callSender != null) {
    callSender.interrupt();
  }
  closeConn(new IOException("connection to " + remoteId.address + " closed"));
}
/** No per-connection cleanup needed for the blocking client; shutdown() does it all. */
@Override
public void cleanupConnection() {
  // do nothing
}
/**
 * Entry point for issuing a call on this connection. Registers a cancellation
 * callback, then either hands the call to the dedicated writer thread
 * (callSender) or writes it inline on the caller's thread. Synchronized: the
 * connection monitor protects the call queue and makes CallSender.sendCall's
 * notifyAll() legal.
 */
@Override
public synchronized void sendRequest(final Call call, HBaseRpcController pcrc)
    throws IOException {
  // Fired if the caller cancels: remove the call from whichever structure
  // currently tracks it (writer queue when using CallSender, else the pending map).
  pcrc.notifyOnCancel(new RpcCallback<Object>() {

    @Override
    public void run(Object parameter) {
      setCancelled(call);
      synchronized (BlockingRpcConnection.this) {
        if (callSender != null) {
          callSender.remove(call);
        } else {
          calls.remove(call.id);
        }
      }
    }
  }, new CancellationCallback() {

    @Override
    public void run(boolean cancelled) throws IOException {
      if (cancelled) {
        // Cancelled before we even started: never write it.
        setCancelled(call);
        return;
      }
      scheduleTimeoutTask(call);
      if (callSender != null) {
        // Dedicated-writer mode: queue and let CallSender do the socket write.
        callSender.sendCall(call);
      } else {
        // Inline mode: write on the caller's thread.
        tracedWriteRequest(call);
      }
    }
  });
}
/** A connection is active while its reader thread exists (see closeConn()). */
@Override
public synchronized boolean isActive() {
  return thread != null;
}
}
| |
/*
* Copyright (c) 1997, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.plaf.basic;
import java.awt.*;
import java.awt.event.*;
import java.beans.*;
import java.net.URL;
import java.net.MalformedURLException;
import javax.swing.*;
import javax.swing.text.*;
import javax.swing.text.html.*;
import javax.swing.plaf.*;
import javax.swing.border.*;
/**
* Provides the look and feel for a JEditorPane.
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans<sup><font size="-2">TM</font></sup>
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @author Timothy Prinzing
*/
public class BasicEditorPaneUI extends BasicTextUI {

    /**
     * Creates a UI for the JEditorPane.
     *
     * @param c the JEditorPane component
     * @return the UI
     */
    public static ComponentUI createUI(JComponent c) {
        return new BasicEditorPaneUI();
    }

    /**
     * Creates a new BasicEditorPaneUI.
     */
    public BasicEditorPaneUI() {
        super();
    }

    /**
     * Fetches the name used as a key to lookup properties through the
     * UIManager.  This is used as a prefix to all the standard
     * text properties.
     *
     * @return the name ("EditorPane")
     */
    protected String getPropertyPrefix() {
        return "EditorPane";
    }

    /**
     *{@inheritDoc}
     *
     * @since 1.5
     */
    public void installUI(JComponent c) {
        super.installUI(c);
        // Apply the component's font/foreground to the document styles,
        // honoring the JEditorPane client properties (see updateDisplayProperties).
        updateDisplayProperties(c.getFont(),
                                c.getForeground());
    }

    /**
     *{@inheritDoc}
     *
     * @since 1.5
     */
    public void uninstallUI(JComponent c) {
        // Remove the style/stylesheet artifacts installed by updateDisplayProperties
        // before the superclass tears down the rest of the UI.
        cleanDisplayProperties();
        super.uninstallUI(c);
    }

    /**
     * Fetches the EditorKit for the UI.  This is whatever is
     * currently set in the associated JEditorPane.
     *
     * NOTE(review): the {@code tc} parameter is ignored; the kit is always
     * taken from this UI's own component via getComponent().
     *
     * @return the editor capabilities
     * @see TextUI#getEditorKit
     */
    public EditorKit getEditorKit(JTextComponent tc) {
        JEditorPane pane = (JEditorPane) getComponent();
        return pane.getEditorKit();
    }

    /**
     * Fetch an action map to use.  The map for a JEditorPane
     * is not shared because it changes with the EditorKit.
     */
    ActionMap getActionMap() {
        ActionMap am = new ActionMapUIResource();
        am.put("requestFocus", new FocusAction());
        // Pull in whatever actions the current editor kit provides.
        EditorKit editorKit = getEditorKit(getComponent());
        if (editorKit != null) {
            Action[] actions = editorKit.getActions();
            if (actions != null) {
                addActions(am, actions);
            }
        }
        // Standard cut/copy/paste via TransferHandler.
        am.put(TransferHandler.getCutAction().getValue(Action.NAME),
                TransferHandler.getCutAction());
        am.put(TransferHandler.getCopyAction().getValue(Action.NAME),
                TransferHandler.getCopyAction());
        am.put(TransferHandler.getPasteAction().getValue(Action.NAME),
                TransferHandler.getPasteAction());
        return am;
    }

    /**
     * This method gets called when a bound property is changed
     * on the associated JTextComponent.  This is a hook
     * which UI implementations may change to reflect how the
     * UI displays bound properties of JTextComponent subclasses.
     * This is implemented to rebuild the ActionMap based upon an
     * EditorKit change.
     *
     * @param evt the property change event
     */
    protected void propertyChange(PropertyChangeEvent evt) {
        super.propertyChange(evt);
        String name = evt.getPropertyName();
        if ("editorKit".equals(name)) {
            // Swap the old kit's actions out of the UI action map and the new
            // kit's actions in.
            ActionMap map = SwingUtilities.getUIActionMap(getComponent());
            if (map != null) {
                Object oldValue = evt.getOldValue();
                if (oldValue instanceof EditorKit) {
                    Action[] actions = ((EditorKit)oldValue).getActions();
                    if (actions != null) {
                        removeActions(map, actions);
                    }
                }
                Object newValue = evt.getNewValue();
                if (newValue instanceof EditorKit) {
                    Action[] actions = ((EditorKit)newValue).getActions();
                    if (actions != null) {
                        addActions(map, actions);
                    }
                }
            }
            updateFocusTraversalKeys();
        } else if ("editable".equals(name)) {
            updateFocusTraversalKeys();
        } else if ("foreground".equals(name)
                   || "font".equals(name)
                   || "document".equals(name)
                   || JEditorPane.W3C_LENGTH_UNITS.equals(name)
                   || JEditorPane.HONOR_DISPLAY_PROPERTIES.equals(name)
                   ) {
            // Any of these invalidates the derived display properties.
            JComponent c = getComponent();
            updateDisplayProperties(c.getFont(), c.getForeground());
            if ( JEditorPane.W3C_LENGTH_UNITS.equals(name)
                 || JEditorPane.HONOR_DISPLAY_PROPERTIES.equals(name) ) {
                modelChanged();
            }
            if ("foreground".equals(name)) {
                // A foreground change only requires a model rebuild when the
                // component honors display properties.
                Object honorDisplayPropertiesObject = c.
                    getClientProperty(JEditorPane.HONOR_DISPLAY_PROPERTIES);
                boolean honorDisplayProperties = false;
                if (honorDisplayPropertiesObject instanceof Boolean) {
                    honorDisplayProperties =
                        ((Boolean)honorDisplayPropertiesObject).booleanValue();
                }
                if (honorDisplayProperties) {
                    modelChanged();
                }
            }
        }
    }

    /** Removes each action from the map, keyed by the action's NAME value. */
    void removeActions(ActionMap map, Action[] actions) {
        int n = actions.length;
        for (int i = 0; i < n; i++) {
            Action a = actions[i];
            map.remove(a.getValue(Action.NAME));
        }
    }

    /** Adds each action to the map, keyed by the action's NAME value. */
    void addActions(ActionMap map, Action[] actions) {
        int n = actions.length;
        for (int i = 0; i < n; i++) {
            Action a = actions[i];
            map.put(a.getValue(Action.NAME), a);
        }
    }

    /**
     * Pushes the component's font and foreground into the document, driven by
     * the HONOR_DISPLAY_PROPERTIES and W3C_LENGTH_UNITS client properties:
     * HTML documents get CSS rules, other styled documents get style
     * attributes, and everything is cleaned out when neither applies.
     */
    void updateDisplayProperties(Font font, Color fg) {
        JComponent c = getComponent();
        Object honorDisplayPropertiesObject = c.
            getClientProperty(JEditorPane.HONOR_DISPLAY_PROPERTIES);
        boolean honorDisplayProperties = false;
        Object w3cLengthUnitsObject = c.getClientProperty(JEditorPane.
                                                          W3C_LENGTH_UNITS);
        boolean w3cLengthUnits = false;
        if (honorDisplayPropertiesObject instanceof Boolean) {
            honorDisplayProperties =
                ((Boolean)honorDisplayPropertiesObject).booleanValue();
        }

        if (w3cLengthUnitsObject instanceof Boolean) {
            w3cLengthUnits = ((Boolean)w3cLengthUnitsObject).booleanValue();
        }

        if (this instanceof BasicTextPaneUI
            || honorDisplayProperties) {
             //using equals because can not use UIResource for Boolean
            Document doc = getComponent().getDocument();
            if (doc instanceof StyledDocument) {
                if (doc instanceof HTMLDocument
                    && honorDisplayProperties) {
                    updateCSS(font, fg);
                } else {
                    updateStyle(font, fg);
                }
            }
        } else {
            cleanDisplayProperties();
        }

        if ( w3cLengthUnits ) {
            Document doc = getComponent().getDocument();
            if (doc instanceof HTMLDocument) {
                StyleSheet documentStyleSheet =
                    ((HTMLDocument)doc).getStyleSheet();
                documentStyleSheet.addRule("W3C_LENGTH_UNITS_ENABLE");
            }
        } else {
            Document doc = getComponent().getDocument();
            if (doc instanceof HTMLDocument) {
                StyleSheet documentStyleSheet =
                    ((HTMLDocument)doc).getStyleSheet();
                documentStyleSheet.addRule("W3C_LENGTH_UNITS_DISABLE");
            }

        }
    }

    /**
     * Attribute key to reference the default font.
     * used in javax.swing.text.StyleContext.getFont
     * to resolve the default font.
     */
    private static final String FONT_ATTRIBUTE_KEY = "FONT_ATTRIBUTE_KEY";

    /**
     * Removes the artifacts installed by updateCSS/updateStyle from an HTML
     * document: the UI-owned stylesheet and the default-style font attribute.
     */
    void cleanDisplayProperties() {
        Document document = getComponent().getDocument();
        if (document instanceof HTMLDocument) {
            StyleSheet documentStyleSheet =
                ((HTMLDocument)document).getStyleSheet();
            StyleSheet[] styleSheets = documentStyleSheet.getStyleSheets();
            if (styleSheets != null) {
                for (StyleSheet s : styleSheets) {
                    // Only remove sheets this UI installed (marked UIResource).
                    if (s instanceof StyleSheetUIResource) {
                        documentStyleSheet.removeStyleSheet(s);
                        documentStyleSheet.addRule("BASE_SIZE_DISABLE");
                        break;
                    }
                }
            }
            Style style = ((StyledDocument) document).getStyle(StyleContext.DEFAULT_STYLE);
            if (style.getAttribute(FONT_ATTRIBUTE_KEY) != null) {
                style.removeAttribute(FONT_ATTRIBUTE_KEY);
            }
        }
    }

    /**
     * Marker stylesheet so this UI can recognize (and later remove) the
     * sheets it installed, without touching application-added sheets.
     */
    static class StyleSheetUIResource extends StyleSheet implements UIResource {
    }

    /**
     * Installs a UI-owned stylesheet on an HTML document that reflects the
     * given font and foreground, replacing any previous UI-owned sheet, and
     * records the font on the default style under FONT_ATTRIBUTE_KEY.
     */
    private void updateCSS(Font font, Color fg) {
        JTextComponent component = getComponent();
        Document document = component.getDocument();
        if (document instanceof HTMLDocument) {
            StyleSheet styleSheet = new StyleSheetUIResource();
            StyleSheet documentStyleSheet =
                ((HTMLDocument)document).getStyleSheet();
            StyleSheet[] styleSheets = documentStyleSheet.getStyleSheets();
            if (styleSheets != null) {
                for (StyleSheet s : styleSheets) {
                    if (s instanceof StyleSheetUIResource) {
                        documentStyleSheet.removeStyleSheet(s);
                    }
                }
            }
            String cssRule = sun.swing.
                SwingUtilities2.displayPropertiesToCSS(font,
                                                       fg);
            styleSheet.addRule(cssRule);
            documentStyleSheet.addStyleSheet(styleSheet);
            documentStyleSheet.addRule("BASE_SIZE " +
                                       component.getFont().getSize());
            Style style = ((StyledDocument) document).getStyle(StyleContext.DEFAULT_STYLE);
            if (! font.equals(style.getAttribute(FONT_ATTRIBUTE_KEY))) {
                style.addAttribute(FONT_ATTRIBUTE_KEY, font);
            }
        }
    }

    /** Applies font and foreground to the document's default style. */
    private void updateStyle(Font font, Color fg) {
        updateFont(font);
        updateForeground(fg);
    }

    /**
     * Update the color in the default style of the document.
     *
     * @param color the new color to use or null to remove the color attribute
     *              from the document's style
     */
    private void updateForeground(Color color) {
        StyledDocument doc = (StyledDocument)getComponent().getDocument();
        Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);

        if (style == null) {
            return;
        }

        if (color == null) {
            if (style.getAttribute(StyleConstants.Foreground) != null) {
                style.removeAttribute(StyleConstants.Foreground);
            }
        } else {
            // Only write when the value actually changes, to avoid spurious
            // document events.
            if (! color.equals(StyleConstants.getForeground(style))) {
                StyleConstants.setForeground(style, color);
            }
        }
    }

    /**
     * Update the font in the default style of the document.
     *
     * @param font the new font to use or null to remove the font attribute
     *             from the document's style
     */
    private void updateFont(Font font) {
        StyledDocument doc = (StyledDocument)getComponent().getDocument();
        Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);

        if (style == null) {
            return;
        }

        String fontFamily = (String) style.getAttribute(StyleConstants.FontFamily);
        Integer fontSize = (Integer) style.getAttribute(StyleConstants.FontSize);
        Boolean isBold = (Boolean) style.getAttribute(StyleConstants.Bold);
        Boolean isItalic = (Boolean) style.getAttribute(StyleConstants.Italic);
        Font  fontAttribute = (Font) style.getAttribute(FONT_ATTRIBUTE_KEY);
        if (font == null) {
            // Strip every font-related attribute this UI may have set.
            if (fontFamily != null) {
                style.removeAttribute(StyleConstants.FontFamily);
            }
            if (fontSize != null) {
                style.removeAttribute(StyleConstants.FontSize);
            }
            if (isBold != null) {
                style.removeAttribute(StyleConstants.Bold);
            }
            if (isItalic != null) {
                style.removeAttribute(StyleConstants.Italic);
            }
            if (fontAttribute != null) {
                style.removeAttribute(FONT_ATTRIBUTE_KEY);
            }
        } else {
            // Write each attribute only when it differs from the current value,
            // to avoid spurious document events.
            if (! font.getName().equals(fontFamily)) {
                StyleConstants.setFontFamily(style, font.getName());
            }
            if (fontSize == null
                  || fontSize.intValue() != font.getSize()) {
                StyleConstants.setFontSize(style, font.getSize());
            }
            if (isBold == null
                  || isBold.booleanValue() != font.isBold()) {
                StyleConstants.setBold(style, font.isBold());
            }
            if (isItalic == null
                  || isItalic.booleanValue() != font.isItalic()) {
                StyleConstants.setItalic(style, font.isItalic());
            }
            if (! font.equals(fontAttribute)) {
                style.addAttribute(FONT_ATTRIBUTE_KEY, font);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.resultset;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kBindings ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kBnode ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kBoolean ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kDatatype ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kHead ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kLink ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kLiteral ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kResults ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kType ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kTypedLiteral ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kUri ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kValue ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kVars ;
import static org.apache.jena.sparql.resultset.JSONResultsKW.kXmlLang ;
import java.io.InputStream ;
import java.util.* ;
import org.apache.jena.atlas.json.JSON ;
import org.apache.jena.atlas.json.JsonArray ;
import org.apache.jena.atlas.json.JsonObject ;
import org.apache.jena.atlas.json.JsonValue ;
import org.apache.jena.atlas.logging.Log ;
import org.apache.jena.datatypes.RDFDatatype ;
import org.apache.jena.datatypes.TypeMapper ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.NodeFactory ;
import org.apache.jena.query.ResultSet ;
import org.apache.jena.rdf.model.Model ;
import org.apache.jena.riot.lang.LabelToNode ;
import org.apache.jena.riot.system.SyntaxLabels ;
import org.apache.jena.sparql.core.Var ;
import org.apache.jena.sparql.engine.QueryIterator ;
import org.apache.jena.sparql.engine.ResultSetStream ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.engine.binding.BindingFactory ;
import org.apache.jena.sparql.engine.binding.BindingMap ;
import org.apache.jena.sparql.engine.iterator.QueryIterPlainWrapper ;
import org.apache.jena.sparql.graph.GraphFactory ;
public class JSONInput extends SPARQLResult
{
    /** Read a SPARQL SELECT result set, in the SPARQL JSON results format, from {@code input}. */
    public static ResultSet fromJSON(InputStream input)
    {
        SPARQLResult r = new JSONInput().process(input, null) ;
        return r.getResultSet() ;
    }

    /** Read a SPARQL ASK (boolean) result, in the SPARQL JSON results format, from {@code input}. */
    public static boolean booleanFromJSON(InputStream input)
    {
        SPARQLResult r = new JSONInput().process(input, null) ;
        return r.getBooleanResult() ;
    }

    /** Read either form of result (result set or boolean) from {@code input}. */
    public static SPARQLResult make(InputStream input)
    {
        return make(input, null) ;
    }

    /**
     * Read either form of result (result set or boolean) from {@code input}.
     *
     * @param model model backing the result set; a fresh default model is created if null.
     */
    public static SPARQLResult make(InputStream input, Model model)
    {
        return new JSONInput().process(input, model) ;
    }

    public JSONInput() {}

    public JSONInput(InputStream in)
    {
        this(in, null) ;
    }

    // See also XMLInputSAX for design structure.
    public JSONInput(InputStream in, Model model)
    {
        if ( model == null )
            model = GraphFactory.makeJenaDefaultModel() ;
        process(in, model) ;
    }

    Boolean booleanResult = null ;  // Valid if rows is null (ASK result).
    List<Binding> rows = null ;     // Valid if booleanResult is null (SELECT result).
    List<Var> vars = null ;

    // TODO Streaming version of JSON Result set processing

    /**
     * Parse the stream and install either a ResultSet or a boolean into this SPARQLResult.
     */
    private SPARQLResult process(InputStream in, Model model)
    {
        parse(in) ;
        if ( model == null )
            model = GraphFactory.makeJenaDefaultModel() ;
        if ( rows != null )
        {
            QueryIterator qIter = new QueryIterPlainWrapper(rows.iterator()) ;
            ResultSet rs = new ResultSetStream(Var.varNames(vars), model, qIter) ;
            super.set(rs) ;
        }
        else
            super.set(booleanResult) ;
        return this ;
    }

    /**
     * Parse a SPARQL JSON results document, filling in either
     * {@link #booleanResult} (ASK form) or {@link #vars}/{@link #rows} (SELECT form).
     */
    private void parse(InputStream in)
    {
        JsonObject obj = JSON.parse(in) ;

        // ASK form: { "head": {...}, "boolean": true|false }
        if ( obj.hasKey(kBoolean) )
        {
            checkContains(obj, true, true, kHead, kBoolean) ;
            booleanResult = obj.get(kBoolean).getAsBoolean().value() ;
            rows = null ;
            return ;
        }

        // SELECT form: { "head": {...}, "results": { "bindings": [...] } }
        rows = new ArrayList<>(1000) ;

        checkContains(obj, true, true, kHead, kResults) ;

        // ---- Head
        if ( ! obj.get(kHead).isObject() )
            throw new ResultSetException("Key 'head' must have a JSON object as value: found: "+obj.get(kHead)) ;
        JsonObject head = obj.get(kHead).getAsObject() ;

        // -- Link - array of strings. Validated here; the link values are not otherwise used.
        if ( head.hasKey(kLink) )
        {
            List<String> links = new ArrayList<>() ;
            if ( head.get(kLink).isString() )
            {
                Log.warn(this, "Link field is a string, should be an array of strings") ;
                links.add(head.get(kLink).getAsString().value()) ;
            }
            else
            {
                // Bug fix: the offending value lives in the head object, not the top-level
                // object, so report head.get(kLink) (the old message printed null).
                if ( ! head.get(kLink).isArray() )
                    throw new ResultSetException("Key 'link' must be an array: found: "+head.get(kLink)) ;

                for ( JsonValue v : head.get(kLink).getAsArray() )
                {
                    if ( ! v.isString() )
                        throw new ResultSetException("Key 'link' must be an array of strings: found: "+v) ;
                    links.add(v.getAsString().value()) ;
                }
            }
        }

        // -- Vars
        vars = parseVars(head) ;

        // ---- Results
        JsonObject results = obj.get(kResults).getAsObject() ;
        if ( ! results.get(kBindings).isArray() )
            throw new ResultSetException("'bindings' must be an array") ;
        JsonArray array = results.get(kBindings).getAsArray() ;
        for ( JsonValue v : array )
        {
            if ( ! v.isObject() )
                throw new ResultSetException("Entry in 'bindings' array must be an object {}") ;
            BindingMap b = BindingFactory.create() ;
            JsonObject x = v.getAsObject() ;
            for ( String vn : x.keys() )
            {
                // Variables not declared in the head are still accepted here.
                JsonValue vt = x.get(vn) ;
                if ( ! vt.isObject() )
                    throw new ResultSetException("Binding for variable '"+vn+"' is not a JSON object: "+vt) ;
                Node n = parseOneTerm(vt.getAsObject()) ;
                b.add(Var.alloc(vn), n) ;
            }
            rows.add(b) ;
        }
    }

    /**
     * Parse the head's "vars" member: a JSON array of variable-name strings.
     */
    private List<Var> parseVars(JsonObject obj)
    {
        if ( ! obj.get(kVars).isArray() )
            throw new ResultSetException("Key 'vars' must be a JSON array") ;
        List<Var> vars = new ArrayList<>() ;
        for ( JsonValue v : obj.get(kVars).getAsArray() )
        {
            if ( ! v.isString() )
                throw new ResultSetException("Entries in vars array must be strings") ;
            vars.add(Var.alloc(v.getAsString().value())) ;
        }
        return vars ;
    }

    // Maps document-scoped blank node labels to nodes, so the same label
    // yields the same blank node throughout one parse.
    LabelToNode labelMap = SyntaxLabels.createLabelToNode() ;

    /**
     * Parse one RDF term object: {"type": "uri"|"literal"|"typed-literal"|"bnode",
     * "value": ..., optionally "xml:lang" or "datatype"}.
     */
    private Node parseOneTerm(JsonObject term)
    {
        checkContains(term, false, false, kType, kValue, kXmlLang, kDatatype) ;

        String type = stringOrNull(term, kType) ;
        String v = stringOrNull(term, kValue) ;

        if ( kUri.equals(type) )
        {
            checkContains(term, false, true, kType, kValue) ;
            return NodeFactory.createURI(v) ;
        }

        if ( kLiteral.equals(type) || kTypedLiteral.equals(type) )
        {
            String lang = stringOrNull(term, kXmlLang) ;
            String dtStr = stringOrNull(term, kDatatype) ;
            if ( lang != null && dtStr != null )
                throw new ResultSetException("Both language and datatype defined: "+term) ;
            // getSafeTypeByName tolerates a null datatype string.
            RDFDatatype dt = TypeMapper.getInstance().getSafeTypeByName(dtStr) ;
            return NodeFactory.createLiteral(v, lang, dt) ;
        }

        if ( kBnode.equals(type) )
            return labelMap.get(null, v) ;

        throw new ResultSetException("Object key not recognized as valid for an RDF term: "+term) ;
    }

    /** Returns the string value for {@code key}, or null if absent; throws if present but not a string. */
    private static String stringOrNull(JsonObject obj, String key)
    {
        JsonValue v = obj.get(key) ;
        if ( v == null ) return null ;
        if ( ! v.isString() )
            throw new ResultSetException("Not a string: key: "+key) ;
        return v.getAsString().value();
    }

    /**
     * Validate the keys of {@code term} against an expected set.
     *
     * @param allowUndefinedKeys if false, any key outside {@code keys} is an error.
     * @param requireAllExpectedKeys if true, every key in {@code keys} must be present.
     */
    private static void checkContains(JsonObject term, boolean allowUndefinedKeys, boolean requireAllExpectedKeys, String...keys)
    {
        List<String> expectedKeys = Arrays.asList(keys) ;
        Set<String> declared = new HashSet<>();
        for ( String k : term.keys() )
        {
            if ( !expectedKeys.contains(k) && !allowUndefinedKeys )
                throw new ResultSetException("Expected only object keys "+expectedKeys+" but encountered '"+k+"'") ;
            if (expectedKeys.contains(k))
                declared.add(k);
        }
        if (requireAllExpectedKeys && declared.size() < expectedKeys.size())
            throw new ResultSetException("One or more of the required keys " + expectedKeys + " was not found");
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
/*
* Contains code from GNU Trove having the license below.
*
* Copyright (c) 2001, Eric D. Friedman All Rights Reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package com.gemstone.gemfire.internal.cache;
import com.gemstone.gnu.trove.*;
import java.util.Arrays;
/**
 * An open addressed hashing implementation for int primitives.
 *
 * <p>Open addressing with double hashing: a single flat {@code int[]} holds the
 * elements, and a caller-supplied "free" sentinel value marks empty slots.
 * Consequently the sentinel value itself can never be stored in the set.
 *
 * @author darrel
 */
abstract public class TStatelessIntHash extends TStatelessPrimitiveHash
  implements TIntHashingStrategy {

  /** the set of ints */
  protected transient int[] _set;

  /** sentinel marking an empty slot; effectively final after construction */
  protected /*final*/ int _FREE;

  /** strategy used to hash values in this collection */
  protected TIntHashingStrategy _hashingStrategy;

  /**
   * Creates a new <code>TStatelessIntHash</code> instance with the default
   * capacity and load factor.
   *
   * @param freeValue sentinel value used to mark empty slots (cannot be stored)
   */
  public TStatelessIntHash(int freeValue) {
    super();
    // this instance is its own hashing strategy (see computeHashCode below)
    this._hashingStrategy = this;
    this._FREE = freeValue;
  }

  /**
   * Creates a new <code>TStatelessIntHash</code> instance whose capacity
   * is the next highest prime above <tt>initialCapacity + 1</tt>
   * unless that value is already prime.
   *
   * @param freeValue sentinel value used to mark empty slots
   * @param initialCapacity an <code>int</code> value
   */
  public TStatelessIntHash(int freeValue, int initialCapacity) {
    super(initialCapacity);
    this._hashingStrategy = this;
    this._FREE = freeValue;
  }

  /**
   * Creates a new <code>TStatelessIntHash</code> instance with a prime
   * value at or near the specified capacity and load factor.
   *
   * @param freeValue sentinel value used to mark empty slots
   * @param initialCapacity used to find a prime capacity for the table.
   * @param loadFactor used to calculate the threshold over which
   * rehashing takes place.
   */
  public TStatelessIntHash(int freeValue, int initialCapacity, float loadFactor) {
    super(initialCapacity, loadFactor);
    this._hashingStrategy = this;
    this._FREE = freeValue;
  }

  /**
   * Creates a new <code>TStatelessIntHash</code> instance with the default
   * capacity and load factor.
   *
   * @param freeValue sentinel value used to mark empty slots
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHash(int freeValue, TIntHashingStrategy strategy) {
    super();
    this._hashingStrategy = strategy;
    this._FREE = freeValue;
  }

  /**
   * Creates a new <code>TStatelessIntHash</code> instance whose capacity
   * is the next highest prime above <tt>initialCapacity + 1</tt>
   * unless that value is already prime.
   *
   * @param freeValue sentinel value used to mark empty slots
   * @param initialCapacity an <code>int</code> value
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHash(int freeValue, int initialCapacity, TIntHashingStrategy strategy) {
    super(initialCapacity);
    this._hashingStrategy = strategy;
    this._FREE = freeValue;
  }

  /**
   * Creates a new <code>TStatelessIntHash</code> instance with a prime
   * value at or near the specified capacity and load factor.
   *
   * @param freeValue sentinel value used to mark empty slots
   * @param initialCapacity used to find a prime capacity for the table.
   * @param loadFactor used to calculate the threshold over which
   * rehashing takes place.
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHash(int freeValue, int initialCapacity, float loadFactor, TIntHashingStrategy strategy) {
    super(initialCapacity, loadFactor);
    this._hashingStrategy = strategy;
    this._FREE = freeValue;
  }

  /** Returns the sentinel value used to mark empty slots. */
  public int getFreeValue() {
    return this._FREE;
  }

  /**
   * @return a deep clone of this collection
   */
  @Override
  public Object clone() {
    TStatelessIntHash h = (TStatelessIntHash)super.clone();
    // copy the backing array so the clone's slots are independent
    h._set = this._set.clone();
    h._FREE = this._FREE;
    return h;
  }

  /**
   * Returns the capacity of the hash table. This is the true
   * physical capacity, without adjusting for the load factor.
   *
   * @return the physical capacity of the hash table.
   */
  @Override
  protected int capacity() {
    return _set.length;
  }

  /**
   * initializes the hashtable to a prime capacity which is at least
   * <tt>initialCapacity + 1</tt>.
   *
   * @param initialCapacity an <code>int</code> value
   * @return the actual capacity chosen
   */
  @Override
  protected int setUp(int initialCapacity) {
    int capacity;
    capacity = super.setUp(initialCapacity);
    _set = new int[capacity];
    // new int[] is already zero-filled, so only fill explicitly for a non-zero sentinel
    if (this._FREE != 0) {
      Arrays.fill(_set, this._FREE);
    }
    return capacity;
  }

  /**
   * Searches the set for <tt>val</tt>
   *
   * @param val an <code>int</code> value
   * @return a <code>boolean</code> value
   */
  public boolean contains(int val) {
    return index(val) >= 0;
  }

  /**
   * Executes <tt>procedure</tt> for each element in the set.
   *
   * @param procedure a <code>TObjectProcedure</code> value
   * @return false if the loop over the set terminated because
   * the procedure returned false for some value.
   */
  public boolean forEach(TIntProcedure procedure) {
    int[] set = _set;
    for (int i = set.length; i-- > 0;) {
      // skip empty slots; stop early if the procedure vetoes
      if (set[i] != this._FREE && ! procedure.execute(set[i])) {
        return false;
      }
    }
    return true;
  }

  //   /**
  //    * Releases the element currently stored at <tt>index</tt>.
  //    *
  //    * @param index an <code>int</code> value
  //    */
  //   @Override
  //   protected void removeAt(int index) {
  //     super.removeAt(index);
  //     _set[index] = this._FREE;
  //   }

  /**
   * Locates the index of <tt>val</tt>.
   *
   * @param val an <code>int</code> value
   * @return the index of <tt>val</tt> or -1 if it isn't in the set.
   */
  protected int index(int val) {
    int hash, probe, index, length;
    int[] set;
    // the sentinel itself can never be a member
    if (val == this._FREE) {
      return -1;
    }
    set = _set;
    length = set.length;
    // mask the sign bit so the modulo result is non-negative
    hash = _hashingStrategy.computeHashCode(val) & 0x7fffffff;
    index = hash % length;
    if (set[index] != this._FREE && set[index] != val) {
      // collision: double hashing — step size derived from the hash,
      // see Knuth, p. 529
      probe = 1 + (hash % (length - 2));
      do {
        index -= probe;
        if (index < 0) {
          index += length;
        }
      } while (set[index] != this._FREE && set[index] != val);
    }
    // landed on either the value or an empty slot
    return set[index] == this._FREE ? -1 : index;
  }

  /**
   * Locates the index at which <tt>val</tt> can be inserted. if
   * there is already a value equal()ing <tt>val</tt> in the set,
   * returns that value as a negative integer.
   *
   * @param val an <code>int</code> value
   * @return an <code>int</code> value
   */
  protected int insertionIndex(int val) {
    int hash, probe, index, length;
    int[] set;
    // inserting the sentinel would be indistinguishable from an empty slot
    if (val == this._FREE) {
      throw new IllegalArgumentException("can not add the value " + val);
    }
    set = _set;
    length = set.length;
    hash = _hashingStrategy.computeHashCode(val) & 0x7fffffff;
    index = hash % length;
    if (set[index] == this._FREE) {
      return index;          // empty, all done
    } else if (set[index] == val) {
      return -index -1;      // already stored
    } else {                 // already FULL or REMOVED, must probe
      // compute the double hash
      probe = 1 + (hash % (length - 2));
      // if the slot we landed on is not FREE, probe
      // until we find an empty slot or an element
      // equal to the one we are trying to insert.
      // finding an empty slot means that the value is not present
      // and that we should use that slot as the insertion point;
      // finding a matching value means that we've found that our desired
      // key is already in the table
      // starting at the natural offset, probe until we find an
      // offset that isn't full.
      do {
        index -= probe;
        if (index < 0) {
          index += length;
        }
      } while (set[index] != this._FREE && set[index] != val);
      // if it's not free, the key is already stored
      return set[index] != this._FREE ? -index -1 : index;
    }
  }

  /**
   * Default implementation of TIntHashingStrategy:
   * delegates hashing to HashFunctions.hash(int).
   *
   * @param val the value to hash
   * @return the hashcode.
   */
  public final int computeHashCode(int val) {
    return HashFunctions.hash(val);
  }
} // TStatelessIntHash
| |
package it.neokree.materialnavigationdrawer.elements;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.Point;
import android.graphics.Typeface;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.view.View;
import it.neokree.materialnavigationdrawer.elements.listeners.MaterialSectionListener;
import it.neokree.materialnavigationdrawer.util.Utils;
/**
 * Model for a single account entry shown in the navigation drawer.
 *
 * <p>Holds the account's title, subtitle, photo and header background, and
 * resizes/crops the supplied bitmaps or resources asynchronously via
 * {@link AsyncTask}s. The registered {@link OnAccountDataLoaded} listener is
 * notified when the photo or background has finished loading.
 *
 * <p>NOTE(review): {@code circularPhoto} is assigned from {@code doInBackground}
 * (a background thread) while other fields are set on the UI thread in
 * {@code onPostExecute} — confirm callers only read it after the listener fires.
 */
public class MaterialAccount {
    // data
    private Drawable photo;
    private Drawable background;
    private Drawable circularPhoto;
    private String title;
    private String subTitle;
    private int accountNumber;
    private String notifications;       // badge text; clamped to "99+" / "0"
    private boolean hasNotifications;
    private Resources resources;
    private OnAccountDataLoaded listener;
    private MaterialSection sectionView; // lazily created drawer row for this account

    public static final int FIRST_ACCOUNT = 0;
    public static final int SECOND_ACCOUNT = 1;
    public static final int THIRD_ACCOUNT = 2;

    // constructors

    /** Photo from a drawable resource id, background from a (nullable) bitmap. */
    public MaterialAccount(Resources resources, String title, String subTitle, int photo, Bitmap background) {
        this.title = title;
        this.subTitle = subTitle;
        this.resources = resources;
        // resize and caching bitmap
        new ResizePhotoResource().execute(photo);
        if (background != null) {
            new ResizeBackgroundBitmap().execute(background);
        }
    }

    /** Photo and background both from drawable resource ids. */
    public MaterialAccount(Resources resources, String title, String subTitle, int photo, int background) {
        this.title = title;
        this.subTitle = subTitle;
        this.resources = resources;
        // resize and caching bitmap
        new ResizePhotoResource().execute(photo);
        new ResizeBackgroundResource().execute(background);
    }

    /** Photo from a (nullable) bitmap, background from a drawable resource id. */
    public MaterialAccount(Resources resources, String title, String subTitle, Bitmap photo, int background) {
        this.title = title;
        this.subTitle = subTitle;
        this.resources = resources;
        // resize and caching bitmap
        if (photo != null) {
            new ResizePhotoBitmap().execute(photo);
        }
        new ResizeBackgroundResource().execute(background);
    }

    /** Photo and background both from (nullable) bitmaps. */
    public MaterialAccount(Resources resources, String title, String subTitle, Bitmap photo, Bitmap background) {
        this.title = title;
        this.subTitle = subTitle;
        this.resources = resources;
        // resize and caching bitmap
        if (photo != null) {
            new ResizePhotoBitmap().execute(photo);
        }
        if (background != null) {
            new ResizeBackgroundBitmap().execute(background);
        }
    }

    // setter

    /** Replaces the photo, loading it asynchronously from a drawable resource. */
    public void setPhoto(int photo) {
        new ResizePhotoResource().execute(photo);
    }

    /** Replaces the photo, resizing the bitmap asynchronously. */
    public void setPhoto(Bitmap photo) {
        new ResizePhotoBitmap().execute(photo);
    }

    /** Replaces the background, resizing the bitmap asynchronously. */
    public void setBackground(Bitmap background) {
        new ResizeBackgroundBitmap().execute(background);
    }

    /** Replaces the background, loading it asynchronously from a drawable resource. */
    public void setBackground(int background) {
        new ResizeBackgroundResource().execute(background);
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public void setSubTitle(String subTitle) {
        this.subTitle = subTitle;
    }

    public void setAccountNumber(int number) {
        this.accountNumber = number;
    }

    /** Registers the listener notified when photo/background loading completes. */
    public void setAccountListener(OnAccountDataLoaded listener) {
        this.listener = listener;
    }

    /**
     * Sets the notification badge count. Values >= 100 display as "99+",
     * negative values display as "0".
     *
     * @return this account, for chaining.
     */
    public MaterialAccount setNotifications(int number) {
        hasNotifications = true;
        notifications = String.valueOf(number);
        if (number >= 100) {
            notifications = "99+";
        }
        if (number < 0) {
            notifications = "0";
        }
        return this;
    }

    // getter
    public Drawable getPhoto() {
        return photo;
    }

    public Drawable getBackground() {
        return background;
    }

    public Drawable getCircularPhoto() {
        return circularPhoto;
    }

    public String getTitle() {
        return title;
    }

    public String getSubTitle() {
        return subTitle;
    }

    public int getAccountNumber() {
        return accountNumber;
    }

    /**
     * Returns (creating lazily on first call) the drawer row view representing
     * this account, updated with the current photo, title and badge.
     */
    public View getSectionView(Context ctx, Typeface font, MaterialSectionListener listener, boolean rippleSupport, boolean rippleDelayClick, int position) {
        if (sectionView == null) {
            sectionView = new MaterialSection(ctx, MaterialSection.ICON_40DP, rippleSupport, rippleDelayClick, MaterialSection.TARGET_LISTENER);
            sectionView.useRealColor();
        }
        // apply the passed-in data
        sectionView.setTypeface(font);
        sectionView.setOnClickListener(listener);
        // apply this account's data
        sectionView.setIcon(getCircularPhoto());
        sectionView.setTitle(getTitle());
        if (hasNotifications) {
            sectionView.setNotificationsText(notifications);
        }
        sectionView.setAccountPosition(position);
        return sectionView.getView();
    }

    // custom

    /** Recycles the bitmaps backing all three drawables to release memory. */
    public void recycle() {
        Utils.recycleDrawable(photo);
        Utils.recycleDrawable(circularPhoto);
        Utils.recycleDrawable(background);
    }

    /** Callbacks fired (on the UI thread) when async image loading completes. */
    public interface OnAccountDataLoaded {
        void onUserPhotoLoaded(MaterialAccount account);
        void onBackgroundLoaded(MaterialAccount account);
    }

    // asynctasks

    /** Loads the photo from a resource id, producing both the square and circular drawables. */
    private class ResizePhotoResource extends AsyncTask<Integer, Void, BitmapDrawable> {
        @Override
        protected BitmapDrawable doInBackground(Integer... params) {
            Point photoSize = Utils.getUserPhotoSize(resources);
            Bitmap photo = Utils.resizeBitmapFromResource(resources, params[0], photoSize.x, photoSize.y);
            // NOTE(review): field written from the background thread — see class note.
            circularPhoto = new BitmapDrawable(resources, Utils.getCroppedBitmapDrawable(photo));
            return new BitmapDrawable(resources, photo);
        }

        @Override
        protected void onPostExecute(BitmapDrawable drawable) {
            photo = drawable;
            if (listener != null) {
                listener.onUserPhotoLoaded(MaterialAccount.this);
            }
        }
    }

    /** Resizes and circular-crops a user-supplied photo bitmap. */
    private class ResizePhotoBitmap extends AsyncTask<Bitmap, Void, BitmapDrawable> {
        @Override
        protected BitmapDrawable doInBackground(Bitmap... params) {
            Point photoSize = Utils.getUserPhotoSize(resources);
            Bitmap photo = Utils.resizeBitmap(params[0], photoSize.x, photoSize.y);
            Bitmap croppedBitmap;
            try {
                croppedBitmap = Utils.getCroppedBitmapDrawable(photo);
            } catch (RuntimeException e) {
                // this will just show an empty bitmap
                return null;
            }
            circularPhoto = new BitmapDrawable(resources, croppedBitmap);
            return new BitmapDrawable(resources, photo);
        }

        @Override
        protected void onPostExecute(BitmapDrawable drawable) {
            photo = drawable;
            if (listener != null) {
                listener.onUserPhotoLoaded(MaterialAccount.this);
            }
        }
    }

    /** Loads and resizes the header background from a resource id. */
    private class ResizeBackgroundResource extends AsyncTask<Integer, Void, BitmapDrawable> {
        @Override
        protected BitmapDrawable doInBackground(Integer... params) {
            Point backSize = Utils.getBackgroundSize(resources);
            Bitmap back = Utils.resizeBitmapFromResource(resources, params[0], backSize.x, backSize.y);
            return new BitmapDrawable(resources, back);
        }

        @Override
        protected void onPostExecute(BitmapDrawable drawable) {
            background = drawable;
            if (listener != null) {
                listener.onBackgroundLoaded(MaterialAccount.this);
            }
        }
    }

    /** Resizes a user-supplied header background bitmap. */
    private class ResizeBackgroundBitmap extends AsyncTask<Bitmap, Void, BitmapDrawable> {
        @Override
        protected BitmapDrawable doInBackground(Bitmap... params) {
            Point backSize = Utils.getBackgroundSize(resources);
            Bitmap back = Utils.resizeBitmap(params[0], backSize.x, backSize.y);
            return new BitmapDrawable(resources, back);
        }

        @Override
        protected void onPostExecute(BitmapDrawable drawable) {
            background = drawable;
            if (listener != null) {
                listener.onBackgroundLoaded(MaterialAccount.this);
            }
        }
    }
}
| |
package org.apache.pdfbox.pdmodel.font;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import org.apache.fontbox.afm.FontMetrics;
import org.apache.fontbox.cmap.CMap;
import org.apache.fontbox.util.BoundingBox;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSNumber;
import org.apache.pdfbox.cos.COSStream;
import org.apache.pdfbox.io.IOUtils;
import org.apache.pdfbox.pdmodel.common.COSArrayList;
import org.apache.pdfbox.pdmodel.common.COSObjectable;
import org.apache.pdfbox.pdmodel.font.encoding.GlyphList;
import org.apache.pdfbox.util.Matrix;
import org.apache.pdfbox.util.Vector;
import android.util.Log;
/**
* This is the base class for all PDF fonts.
*
* @author Ben Litchfield
*/
public abstract class PDFont implements COSObjectable, PDFontLike
{
protected static final Matrix DEFAULT_FONT_MATRIX = new Matrix(0.001f, 0, 0, 0.001f, 0, 0);
protected final COSDictionary dict;
private final CMap toUnicodeCMap;
private final FontMetrics afmStandard14; // AFM for standard 14 fonts
private PDFontDescriptor fontDescriptor;
private List<Integer> widths;
private float avgFontWidth;
private float fontWidthOfSpace = -1f;
    /**
     * Constructor for embedding.
     *
     * <p>Creates a fresh font dictionary with /Type /Font; the embedding
     * subclass is responsible for populating the remaining entries.
     */
    PDFont()
    {
        dict = new COSDictionary();
        dict.setItem(COSName.TYPE, COSName.FONT);
        toUnicodeCMap = null;
        fontDescriptor = null;
        afmStandard14 = null;
    }
    /**
     * Constructor for Standard 14.
     *
     * @param baseFont name of one of the 14 standard fonts (e.g. "Helvetica").
     * @throws IllegalArgumentException if no AFM metrics exist for {@code baseFont}.
     */
    PDFont(String baseFont)
    {
        dict = new COSDictionary();
        toUnicodeCMap = null;
        afmStandard14 = Standard14Fonts.getAFM(baseFont);
        if (afmStandard14 == null)
        {
            throw new IllegalArgumentException("No AFM for font " + baseFont);
        }
        // derive the descriptor from the AFM metrics, since there is no dictionary
        fontDescriptor = PDType1FontEmbedder.buildFontDescriptor(afmStandard14);
    }
    /**
     * Constructor.
     *
     * <p>Reads the font descriptor and optional ToUnicode CMap from the given
     * font dictionary; falls back to Standard-14 AFM metrics when available.
     *
     * @param fontDictionary Font dictionary.
     * @throws IOException if the ToUnicode CMap cannot be read.
     */
    protected PDFont(COSDictionary fontDictionary) throws IOException
    {
        dict = fontDictionary;

        // standard 14 fonts use an AFM
        afmStandard14 = Standard14Fonts.getAFM(getName()); // may be null (it usually is)

        // font descriptor
        COSDictionary fd = (COSDictionary) dict.getDictionaryObject(COSName.FONT_DESC);
        if (fd != null)
        {
            fontDescriptor = new PDFontDescriptor(fd);
        }
        else if (afmStandard14 != null)
        {
            // build font descriptor from the AFM
            fontDescriptor = PDType1FontEmbedder.buildFontDescriptor(afmStandard14);
        }
        else
        {
            fontDescriptor = null;
        }

        // ToUnicode CMap
        COSBase toUnicode = dict.getDictionaryObject(COSName.TO_UNICODE);
        if (toUnicode != null)
        {
            toUnicodeCMap = readCMap(toUnicode);
            if (toUnicodeCMap != null && !toUnicodeCMap.hasUnicodeMappings())
            {
                // keep the CMap anyway; just warn that it contains no mappings
                Log.w("PdfBoxAndroid", "Invalid ToUnicode CMap in font " + getName());
            }
        }
        else
        {
            toUnicodeCMap = null;
        }
    }
    /**
     * Returns the AFM if this is a Standard 14 font, otherwise null.
     */
    protected final FontMetrics getStandard14AFM()
    {
        return afmStandard14;
    }
    /** Returns the font descriptor, or null if none is available. */
    @Override
    public PDFontDescriptor getFontDescriptor()
    {
        return fontDescriptor;
    }
    /**
     * Sets the font descriptor when embedding a font.
     *
     * @param fontDescriptor descriptor built by the embedding subclass.
     */
    protected final void setFontDescriptor(PDFontDescriptor fontDescriptor)
    {
        this.fontDescriptor = fontDescriptor;
    }
    /**
     * Reads a CMap given a COS Stream or Name. May return null if a predefined CMap does not exist.
     *
     * @param base COSName (predefined CMap) or COSStream (embedded CMap)
     * @return the parsed CMap, or null if a predefined CMap of that name does not exist.
     * @throws IOException if {@code base} is neither a Name nor a Stream, or parsing fails.
     */
    protected final CMap readCMap(COSBase base) throws IOException
    {
        if (base instanceof COSName)
        {
            // predefined CMap
            String name = ((COSName)base).getName();
            return CMapManager.getPredefinedCMap(name);
        }
        else if (base instanceof COSStream)
        {
            // embedded CMap
            InputStream input = null;
            try
            {
                input = ((COSStream)base).getUnfilteredStream();
                return CMapManager.parseCMap(input);
            }
            finally
            {
                // always release the stream, even if parsing throws
                IOUtils.closeQuietly(input);
            }
        }
        else
        {
            throw new IOException("Expected Name or Stream");
        }
    }
    /** Returns the underlying COS font dictionary. */
    @Override
    public COSDictionary getCOSObject()
    {
        return dict;
    }
    /**
     * Position vector for vertical writing mode; this base class supports
     * horizontal fonts only, so subclasses with vertical support must override.
     *
     * @throws UnsupportedOperationException always, in this base implementation.
     */
    @Override
    public Vector getPositionVector(int code)
    {
        throw new UnsupportedOperationException("Horizontal fonts have no position vector");
    }
    /**
     * Returns the displacement vector (w0, w1) in text space, for the given character.
     * For horizontal text only the x component is used, for vertical text only the y component.
     *
     * <p>This base implementation is horizontal-only: y is always 0 and x is the
     * glyph width scaled from 1/1000 units into text space.
     *
     * @param code character code
     * @return displacement vector
     */
    public Vector getDisplacement(int code) throws IOException
    {
        return new Vector(getWidth(code) / 1000, 0);
    }
    /**
     * Returns the advance width of the given character code, in 1/1000 text-space units.
     * Widths from the font dictionary override the font program's own widths.
     */
    @Override
    public float getWidth(int code) throws IOException
    {
        // Acrobat overrides the widths in the font program on the conforming reader's system with
        // the widths specified in the font dictionary." (Adobe Supplement to the ISO 32000)
        //
        // Note: The Adobe Supplement says that the override happens "If the font program is not
        // embedded", however PDFBOX-427 shows that it also applies to embedded fonts.

        // Type1, Type1C, Type3
        if (dict.containsKey(COSName.WIDTHS) || dict.containsKey(COSName.MISSING_WIDTH))
        {
            int firstChar = dict.getInt(COSName.FIRST_CHAR, -1);
            int lastChar = dict.getInt(COSName.LAST_CHAR, -1);
            // /Widths covers codes [firstChar, lastChar]; index into it when in range
            if (getWidths().size() > 0 && code >= firstChar && code <= lastChar)
            {
                return getWidths().get(code - firstChar).floatValue();
            }

            // code outside the /Widths range: use the descriptor's /MissingWidth
            PDFontDescriptor fd = getFontDescriptor();
            if (fd != null)
            {
                // if there's nothing to override with, then obviously we fall back to the font
                return fd.getMissingWidth(); // default is 0
            }
        }

        // if there's nothing to override with, then obviously we fall back to the font
        return getWidthFromFont(code);
    }
// @Override TODO
// public abstract float getWidthFromFont(int code) throws IOException;
@Override
public abstract boolean isEmbedded();
// @Override TODO
// public abstract float getHeight(int code) throws IOException;
/**
* Encodes the given string for use in a PDF content stream.
*
* @param text Any Unicode text.
* @return Array of PDF content stream bytes.
* @throws IOException If the text could not be encoded.
*/
public final byte[] encode(String text) throws IOException
{
ByteArrayOutputStream out = new ByteArrayOutputStream();
for (int offset = 0; offset < text.length(); )
{
int codePoint = text.codePointAt(offset);
// multi-byte encoding with 1 to 4 bytes
byte[] bytes = encode(codePoint);
out.write(bytes);
offset += Character.charCount(codePoint);
}
return out.toByteArray();
}
/**
* Encodes the given Unicode code point for use in a PDF content stream.
* Content streams use a multi-byte encoding with 1 to 4 bytes.
*
* <p>This method is called when embedding text in PDFs and when filling in fields.
*
* @param unicode Unicode code point.
* @return Array of 1 to 4 PDF content stream bytes.
* @throws IOException If the text could not be encoded.
*/
protected abstract byte[] encode(int unicode) throws IOException;
    /**
     * Returns the width of the given Unicode string.
     *
     * @param text The text to get the width of.
     * @return The width of the string in 1/1000 units of text space
     * @throws IOException If there is an error getting the width information.
     */
    public float getStringWidth(String text) throws IOException
    {
        // encode to content-stream bytes, then decode code-by-code and sum widths
        byte[] bytes = encode(text);
        ByteArrayInputStream in = new ByteArrayInputStream(bytes);

        float width = 0;
        while (in.available() > 0)
        {
            int code = readCode(in);
            width += getWidth(code);
        }

        return width;
    }
/**
* This will get the average font width for all characters.
*
* @return The width is in 1000 unit of text space, ie 333 or 777
*/
// todo: this method is highly suspicious, the average glyph width is not usually a good metric
@Override
public float getAverageFontWidth()
{
float average;
if (avgFontWidth != 0.0f)
{
average = avgFontWidth;
}
else
{
float totalWidth = 0.0f;
float characterCount = 0.0f;
COSArray widths = (COSArray) dict.getDictionaryObject(COSName.WIDTHS);
if (widths != null)
{
for (int i = 0; i < widths.size(); i++)
{
COSNumber fontWidth = (COSNumber) widths.getObject(i);
if (fontWidth.floatValue() > 0)
{
totalWidth += fontWidth.floatValue();
characterCount += 1;
}
}
}
if (totalWidth > 0)
{
average = totalWidth / characterCount;
}
else
{
average = 0;
}
avgFontWidth = average;
}
return average;
}
/**
* Reads a character code from a content stream string. Codes may be up to 4 bytes long.
*
* @param in string stream
* @return character code
* @throws IOException if the CMap or stream cannot be read
*/
public abstract int readCode(InputStream in) throws IOException;
    /**
     * Returns the Unicode character sequence which corresponds to the given character code.
     *
     * <p>This base implementation ignores {@code customGlyphList}; subclasses that
     * resolve glyph names override this to make use of it.
     *
     * @param code character code
     * @param customGlyphList a custom glyph list to use instead of the Adobe Glyph List
     * @return Unicode character(s)
     */
    public String toUnicode(int code, GlyphList customGlyphList) throws IOException
    {
        return toUnicode(code);
    }
    /**
     * Returns the Unicode character sequence which corresponds to the given character code.
     *
     * @param code character code
     * @return Unicode character(s), or null if no mapping is known
     */
    public String toUnicode(int code) throws IOException
    {
        // if the font dictionary contains a ToUnicode CMap, use that CMap
        if (toUnicodeCMap != null)
        {
            if (toUnicodeCMap.getName() != null && toUnicodeCMap.getName().startsWith("Identity-"))
            {
                // handle the undocumented case of using Identity-H/V as a ToUnicode CMap, this
                // isn't actually valid as the Identity-x CMaps are code->CID maps, not
                // code->Unicode maps. See sample_fonts_solidconvertor.pdf for an example.
                return new String(new char[] { (char) code });
            }
            else
            {
                // proceed as normal
                return toUnicodeCMap.toUnicode(code);
            }
        }

        // if no value has been produced, there is no way to obtain Unicode for the character.
        // this behaviour can be overridden in subclasses, but this method *must* return null here
        return null;
    }
/**
 * This will always return "Font" for fonts.
 *
 * @return The type of object that this is, as read from the dictionary's Type entry.
 */
public String getType()
{
    return dict.getNameAsString(COSName.TYPE);
}
/**
 * This will get the subtype of font.
 *
 * @return the value of the dictionary's Subtype entry
 */
public String getSubType()
{
    return dict.getNameAsString(COSName.SUBTYPE);
}
/**
 * Returns the name of this font.
 */
@Override
public abstract String getName();
/**
 * Returns the bounding box of this font.
 *
 * @throws IOException if the font data cannot be read
 */
@Override
public abstract BoundingBox getBoundingBox() throws IOException;
/**
 * The widths of the characters, read lazily from the dictionary's Widths array.
 *
 * @return The widths of the characters, or an empty list when the dictionary
 *         has no Widths entry (e.g. for the standard 14 fonts).
 */
protected final List<Integer> getWidths()
{
    if (widths != null)
    {
        return widths;
    }
    COSArray array = (COSArray) dict.getDictionaryObject(COSName.WIDTHS);
    widths = array != null
            ? COSArrayList.convertIntegerCOSArrayToList(array)
            : Collections.<Integer>emptyList();
    return widths;
}
/**
 * Returns the font matrix; this base implementation returns the shared default matrix.
 */
@Override
public Matrix getFontMatrix()
{
    return DEFAULT_FONT_MATRIX;
}
/**
 * Determines the width of the space character, computed once and cached.
 *
 * @return the width of the space character
 */
public float getSpaceWidth()
{
    if (fontWidthOfSpace == -1f)
    {
        try
        {
            // BUGFIX: the previous code only checked that the dictionary *had* a
            // ToUnicode entry and then dereferenced toUnicodeCMap unconditionally,
            // so a present-but-unparseable ToUnicode stream triggered a
            // NullPointerException (swallowed below) and forced the 250 fallback.
            // Check the parsed CMap itself instead.
            if (toUnicodeCMap != null && dict.containsKey(COSName.TO_UNICODE))
            {
                int spaceMapping = toUnicodeCMap.getSpaceMapping();
                if (spaceMapping > -1)
                {
                    fontWidthOfSpace = getWidth(spaceMapping);
                }
            }
            else
            {
                // no usable ToUnicode CMap: assume code 32 maps to the space glyph
                fontWidthOfSpace = getWidth(32);
            }
            // use the average font width as fall back
            if (fontWidthOfSpace <= 0)
            {
                fontWidthOfSpace = getAverageFontWidth();
            }
        }
        catch (Exception e)
        {
            Log.e("PdfBoxAndroid", "Can't determine the width of the space character, assuming 250", e);
            fontWidthOfSpace = 250f;
        }
    }
    return fontWidthOfSpace;
}
/**
 * Returns true if the font uses vertical writing mode.
 *
 * @return true for vertical writing mode, false for horizontal
 */
public abstract boolean isVertical();
/**
 * Returns true if this font is one of the "Standard 14" fonts and receives special handling.
 *
 * @return true when the font is not embedded and its name matches a Standard 14 font
 */
public boolean isStandard14()
{
    // this logic is based on Acrobat's behaviour, see PDFBOX-2372:
    // embedded fonts never get special treatment; otherwise a matching
    // name makes this a Standard 14 font
    return !isEmbedded() && Standard14Fonts.containsName(getName());
}
/**
 * Adds the given Unicode point to the subset.
 *
 * @param codePoint Unicode code point
 */
public abstract void addToSubset(int codePoint);
/**
 * Replaces this font with a subset containing only the given Unicode characters.
 *
 * @throws IOException if the subset could not be written
 */
public abstract void subset() throws IOException;
/**
 * Returns true if this font will be subset when embedded.
 */
public abstract boolean willBeSubset();
/**
 * Returns true if the font is damaged.
 */
@Override
public abstract boolean isDamaged();
@Override
public boolean equals(Object other)
{
    if (!(other instanceof PDFont))
    {
        return false;
    }
    // two fonts are equal when they wrap the very same COS object
    return ((PDFont) other).getCOSObject() == getCOSObject();
}
@Override
public int hashCode()
{
    // consistent with equals: based on the wrapped COS object
    return getCOSObject().hashCode();
}
@Override
public String toString()
{
    return getClass().getSimpleName() + " " + getName();
}
}
| |
package generator.randomisers;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Vector;
import cern.jet.random.Normal;
import org.apache.log4j.Logger;
import generator.extenders.IRandomiserFunctionality;
import generator.extenders.RandomiserInstance;
import generator.misc.Utils;
/**
 * Randomiser that samples a normally distributed value whose mean and spread are
 * looked up, per record, from a tab-separated statistics file. The lookup key is a
 * composite of one or more columns: KeyPos* give the key columns in the stats file,
 * RefPos* give the corresponding columns in the generated record.
 */
public class ReferentialStdDeviationRandomiser implements IRandomiserFunctionality {

    Logger logger = Logger.getLogger(ReferentialStdDeviationRandomiser.class);

    // column positions (stored as strings) in the generated record used to build the key
    List<String> RefPos = new ArrayList<String>();
    // column positions in the stats file used to build the same key
    List<String> KeyPos = new ArrayList<String>();
    int amavgPos;   // stats-file column holding the mean
    int amstdPos;   // stats-file column holding the spread (sqrt is applied before sampling)
    int rangesNum;  // number of key columns
    // composite key -> [mean, spread] entries read from the stats file
    HashMap<String, List<String>> tmpMap = new HashMap<String, List<String>>();
    Vector<String> vItems;

    /**
     * Configures this randomiser from the instance properties and loads the
     * statistics file into the composite-key lookup map.
     *
     * @param ri randomiser instance carrying the properties
     *           (inputFile, rangesNum, amavgPos, amstdPos, RefPos0.., KeyPos0..)
     */
    public void setRandomiserInstance(RandomiserInstance ri) {
        LinkedHashMap<String, String> hashMap = ri.getProperties();
        String inputFile = (String) hashMap.get("inputFile");
        String sRangesNum = (String) hashMap.get("rangesNum");
        String samavgPos = (String) hashMap.get("amavgPos");
        String samstdPos = (String) hashMap.get("amstdPos");
        logger.debug("Params : " + sRangesNum + " " + samavgPos + " " + samstdPos);
        try {
            rangesNum = Integer.parseInt(sRangesNum);
            amavgPos = Integer.parseInt(samavgPos);
            amstdPos = Integer.parseInt(samstdPos);
        } catch (Exception e) {
            logger.warn(ri.getName() + ": Error setting the numerical values (1 - init)", e);
        }
        logger.debug("HashMap : " + hashMap.toString());
        // properties are zero-indexed: RefPos0..RefPos(rangesNum-1), same for KeyPos
        for (int i = 1; i <= rangesNum; i++) {
            try {
                String sItem = (String) hashMap.get("RefPos" + (i - 1));
                logger.debug("sItem : " + sItem + " " + sItem.getClass());
                this.RefPos.add(sItem);
                logger.debug("Added : " + sItem + " at " + i);
                sItem = (String) hashMap.get("KeyPos" + (i - 1));
                this.KeyPos.add(sItem);
                logger.debug("Added : " + sItem + " at " + i);
            } catch (Exception e) {
                logger.warn(ri.getName() + ": Error setting values (2 - Loop, index=" + i + ")", e);
            }
        }
        // Read the stats file.
        Utils utils = new Utils();
        try {
            vItems = utils.readFile(inputFile);
        } catch (Exception e) {
            logger.error(ri.getName() + ": could not locate file:" + inputFile, e);
            // BUGFIX: vItems previously stayed null here, so the size() call below
            // threw a NullPointerException; fall back to an empty vector instead.
            vItems = new Vector<String>();
        }
        if (vItems.size() == 0) {
            vItems.add("ERROR");
            logger.warn(ri.getName() + ": Vector size is 0:");
        }
        // Build the composite-key -> [mean, spread] map, one entry per stats line.
        Iterator<String> ittr = vItems.iterator();
        while (ittr.hasNext()) {
            String line = ittr.next();
            List<String> valList = new ArrayList<String>();
            logger.debug("Line : " + line);
            // TODO: [TM] fix the separator.
            String[] lineList = line.split("\t");
            logger.debug("lineList " + lineList[0]);
            String compositeKey = "";
            // spaces are replaced by underscores so keys are single tokens
            for (int i = 0; i < rangesNum; i++) {
                compositeKey = compositeKey + lineList[Integer.parseInt(KeyPos.get(i))].replace(" ", "_");
                logger.debug("Key , Pos : " + compositeKey + "," + i);
            }
            valList.add(lineList[amavgPos]);
            valList.add(lineList[amstdPos]);
            tmpMap.put(compositeKey, valList);
        }
    }

    /**
     * Not used: this randomiser only works from a record,
     * see {@link #generatefromlist(int, long, List)}.
     *
     * @return always null
     */
    public Object generate() {
        return null;
    }

    public void destroy() {
        // nothing to release
    }

    public boolean isListCompatible() {
        return true;
    }

    /**
     * Samples a normally distributed value using the mean/spread looked up from
     * the stats file via the composite key built from the given record.
     *
     * @param pos     position of this field (unused)
     * @param numrecs total number of records (unused)
     * @param dslist  the generated record; key columns are read from it via RefPos
     * @return the sampled value rounded to two decimal places, or null when no
     *         stats entry matches the record's composite key
     */
    public Object generatefromlist(int pos, long numrecs, List<String> dslist) {
        double amavg = 0;
        double amstd = 0;
        double amtdivide = 100;
        logger.debug("Begin");
        String compositeKey = "";
        for (int i = 0; i < rangesNum; i++) {
            compositeKey = compositeKey + dslist.get(Integer.parseInt(RefPos.get(i))).replace(" ", "_");
            logger.debug("Key , Pos : " + compositeKey + "," + i);
        }
        logger.debug("Next");
        logger.debug("compositeKey " + compositeKey);
        logger.debug("tmpMap " + tmpMap.toString());
        if (this.tmpMap.containsKey(compositeKey)) {
            logger.debug("compositeKey Values" + this.tmpMap.get(compositeKey).toString());
        } else {
            logger.error("Cannot find values for " + compositeKey + " in " + tmpMap.toString());
            return null;
        }
        List<String> statlist = this.tmpMap.get(compositeKey);
        logger.debug("statlist " + statlist.toString());
        String samavgPos = statlist.get(0);
        String samstdPos = statlist.get(1);
        try {
            amavg = Double.parseDouble(samavgPos);
            amstd = Double.parseDouble(samstdPos);
        } catch (Exception e) {
            logger.error("Could not cast input values to doubles : ", e);
        }
        double am;
        logger.debug("Input Values " + amavg + " " + amstd);
        // sqrt is applied, so the stats file presumably stores the variance
        // rather than the standard deviation -- TODO confirm against the file format
        am = Normal.staticNextDouble(amavg, Math.sqrt(amstd));
        logger.debug("Output " + am);
        // round to two decimal places
        am = Math.round(am * amtdivide) / amtdivide;
        return am;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.api.AggrStats;
import org.apache.hadoop.hive.metastore.api.GetTableRequest;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.PartitionsByExprRequest;
import org.apache.hadoop.hive.metastore.api.PartitionsByExprResult;
import org.apache.hadoop.hive.metastore.api.PartitionsSpecByExprResult;
import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.ql.util.IncrementalObjectSizeEstimator;
import org.apache.hadoop.hive.ql.util.IncrementalObjectSizeEstimator.ObjectEstimator;
import org.apache.thrift.TException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Objects;
/**
* This class introduces a caching layer in HS2 for metadata for some selected query APIs. It extends
* HiveMetaStoreClient, and overrides some of its methods to add this feature.
* Its design is simple, relying on snapshot information being queried to cache and invalidate the metadata.
* It helps to reduce the time spent in compilation by using HS2 memory more effectively, and it allows to
* improve HMS throughput for multi-tenant workloads by reducing the number of calls it needs to serve.
*/
public class HiveMetaStoreClientWithLocalCache extends HiveMetaStoreClient {
private static Cache<CacheKey, Object> mscLocalCache = null;
private static boolean IS_CACHE_ENABLED;
private static long MAX_SIZE;
private static boolean RECORD_STATS;
private static HashMap<Class<?>, ObjectEstimator> sizeEstimator = null;
private static String cacheObjName = null;
public static synchronized void init() {
if (mscLocalCache != null) return; // init cache only once
Configuration metaConf = MetastoreConf.newMetastoreConf();
LOG.debug("Initializing local cache in HiveMetaStoreClient...");
MAX_SIZE = MetastoreConf.getSizeVar(metaConf, MetastoreConf.ConfVars.MSC_CACHE_MAX_SIZE);
IS_CACHE_ENABLED = MetastoreConf.getBoolVar(metaConf, MetastoreConf.ConfVars.MSC_CACHE_ENABLED);
RECORD_STATS = MetastoreConf.getBoolVar(metaConf, MetastoreConf.ConfVars.MSC_CACHE_RECORD_STATS);
initSizeEstimator();
initCache();
LOG.debug("Local cache initialized in HiveMetaStoreClient: " + mscLocalCache);
}
public HiveMetaStoreClientWithLocalCache(Configuration conf) throws MetaException {
this(conf, null, true);
}
public HiveMetaStoreClientWithLocalCache(Configuration conf, HiveMetaHookLoader hookLoader) throws MetaException {
this(conf, hookLoader, true);
}
public HiveMetaStoreClientWithLocalCache(Configuration conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded) throws MetaException {
super(conf, hookLoader, allowEmbedded);
}
private static void initSizeEstimator() {
sizeEstimator = new HashMap<>();
IncrementalObjectSizeEstimator.createEstimators(CacheKey.class, sizeEstimator);
Arrays.stream(KeyType.values()).forEach(e -> {
IncrementalObjectSizeEstimator.createEstimators(e.keyClass, sizeEstimator);
IncrementalObjectSizeEstimator.createEstimators(e.valueClass, sizeEstimator);}
);
}
/**
* KeyType is used to differentiate the request types. More types can be added in future.
*/
public enum KeyType {
PARTITIONS_BY_EXPR(PartitionsByExprRequest.class, PartitionsByExprResult.class),
PARTITIONS_SPEC_BY_EXPR(PartitionsByExprRequest.class, PartitionsSpecByExprResult.class),
AGGR_COL_STATS(PartitionsStatsRequest.class, AggrStats.class);
private final Class<?> keyClass;
private final Class<?> valueClass;
KeyType(Class<?> keyClass, Class<?> valueClass) {
this.keyClass = keyClass;
this.valueClass = valueClass;
}
}
/**
* CacheKey objects are used as key for the cache.
*/
public static class CacheKey{
KeyType IDENTIFIER;
Object obj;
public CacheKey(KeyType IDENTIFIER, Object obj) {
this.IDENTIFIER = IDENTIFIER;
this.obj = obj;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CacheKey cacheKey = (CacheKey) o;
return IDENTIFIER == cacheKey.IDENTIFIER &&
Objects.equals(obj, cacheKey.obj);
}
@Override
public int hashCode() {
return Objects.hash(IDENTIFIER, obj);
}
}
public static class PartitionsStatsCustomRequest {
PartitionsStatsRequest request;
String validWriteIdList;
long tableId;
public PartitionsStatsCustomRequest(PartitionsStatsRequest req, String validWriteIdList, long tableId) {
this.request = req;
this.validWriteIdList = validWriteIdList;
this.tableId = tableId;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PartitionsStatsCustomRequest that = (PartitionsStatsCustomRequest) o;
return tableId == that.tableId &&
Objects.equals(request, that.request) &&
Objects.equals(validWriteIdList, that.validWriteIdList);
}
@Override
public int hashCode() {
return Objects.hash(request, validWriteIdList, tableId);
}
}
private static int getWeight(CacheKey key, Object val) {
ObjectEstimator keySizeEstimator = sizeEstimator.get(key.getClass());
ObjectEstimator valSizeEstimator = sizeEstimator.get(key.IDENTIFIER.valueClass);
int keySize = keySizeEstimator.estimate(key, sizeEstimator);
int valSize = valSizeEstimator.estimate(val, sizeEstimator);
if (LOG.isDebugEnabled()) {
LOG.debug("Cache entry weight - key: {}, value: {}, total: {}", keySize, valSize, keySize + valSize);
}
return keySize + valSize;
}
private Object load(CacheKey key) {
try {
return getResultObject(key);
} catch (TException e) {
throw new UncheckedCacheException(e);
}
}
/**
* Initializes the cache
*/
private static void initCache() {
int initSize = 100;
Caffeine<CacheKey, Object> cacheBuilder = Caffeine.newBuilder()
.initialCapacity(initSize)
.maximumWeight(MAX_SIZE)
.weigher(HiveMetaStoreClientWithLocalCache::getWeight)
.removalListener((key, val, cause) -> {
if (LOG.isDebugEnabled()) {
LOG.debug("Caffeine - ({}, {}) was removed ({})", key, val, cause);
}});
if (RECORD_STATS) {
cacheBuilder.recordStats();
}
mscLocalCache = cacheBuilder.build();
cacheObjName = mscLocalCache.toString().substring(mscLocalCache.toString().indexOf("Cache@"));
}
/**
* This method is used to load the cache by calling relevant APIs, depending on the type of the request.
*
* @param cacheKey key of the cache, containing an identifier and a request object
* @return Result object / null
* @throws TException
*/
private Object getResultObject(CacheKey cacheKey) throws TException {
Object result = null;
switch (cacheKey.IDENTIFIER) {
case PARTITIONS_BY_EXPR:
result = super.getPartitionsByExprResult((PartitionsByExprRequest)cacheKey.obj);
break;
case PARTITIONS_SPEC_BY_EXPR:
result = super.getPartitionsSpecByExprResult((PartitionsByExprRequest)cacheKey.obj);
break;
case AGGR_COL_STATS:
PartitionsStatsCustomRequest customRequest = (PartitionsStatsCustomRequest) cacheKey.obj;
result = super.getAggrStatsFor(customRequest.request);
break;
default:
break;
}
return result;
}
@Override
protected PartitionsByExprResult getPartitionsByExprResult(PartitionsByExprRequest req) throws TException {
PartitionsByExprResult r;
// table should be transactional to get responses from the cache
if (isCacheEnabledAndInitialized() && isRequestCacheable(req, KeyType.PARTITIONS_BY_EXPR)) {
CacheKey cacheKey = new CacheKey(KeyType.PARTITIONS_BY_EXPR, req);
try {
r = (PartitionsByExprResult) mscLocalCache.get(cacheKey, this::load); // get either the result or an Exception
if (LOG.isDebugEnabled() && RECORD_STATS) {
LOG.debug(cacheObjName + ": " + mscLocalCache.stats().toString());
}
} catch (UncheckedCacheException e) {
if (e.getCause() instanceof MetaException) {
throw (MetaException) e.getCause();
} else if (e.getCause() instanceof TException) {
throw (TException) e.getCause();
} else {
throw new TException(e.getCause());
}
}
} else {
r = client.get_partitions_by_expr(req);
}
return r;
}
@Override
protected PartitionsSpecByExprResult getPartitionsSpecByExprResult(PartitionsByExprRequest req) throws TException {
PartitionsSpecByExprResult r;
// table should be transactional to get responses from the cache
if (isCacheEnabledAndInitialized() && isRequestCacheable(req, KeyType.PARTITIONS_SPEC_BY_EXPR)) {
CacheKey cacheKey = new CacheKey(KeyType.PARTITIONS_SPEC_BY_EXPR, req);
try {
r = (PartitionsSpecByExprResult) mscLocalCache.get(cacheKey, this::load); // get either the result or an Exception
if (LOG.isDebugEnabled() && RECORD_STATS) {
LOG.debug(cacheObjName + ": " + mscLocalCache.stats().toString());
}
} catch (UncheckedCacheException e) {
if (e.getCause() instanceof MetaException) {
throw (MetaException) e.getCause();
} else if (e.getCause() instanceof TException) {
throw (TException) e.getCause();
} else {
throw new TException(e.getCause());
}
}
} else {
r = client.get_partitions_spec_by_expr(req);
}
return r;
}
@Override
protected AggrStats getAggrStatsFor(PartitionsStatsRequest req) throws TException {
AggrStats r;
Table tbl = getTable(req.getDbName(), req.getTblName());
PartitionsStatsCustomRequest customRequest = new PartitionsStatsCustomRequest(req,
getValidWriteIdList(TableName.getDbTable(req.getDbName(), req.getTblName())), tbl.getId());
if (isCacheEnabledAndInitialized() && isRequestCacheable(customRequest, KeyType.AGGR_COL_STATS)) {
CacheKey cacheKey = new CacheKey(KeyType.AGGR_COL_STATS, customRequest);
try {
r = (AggrStats) mscLocalCache.get(cacheKey, this::load);
if (LOG.isDebugEnabled() && RECORD_STATS) {
LOG.debug(cacheObjName + ": " + mscLocalCache.stats().toString());
}
} catch (UncheckedCacheException e) {
if (e.getCause() instanceof MetaException) {
throw (MetaException) e.getCause();
} else if (e.getCause() instanceof TException) {
throw (TException) e.getCause();
} else {
throw new TException(e.getCause());
}
}
} else {
r = super.getAggrStatsFor(req);
}
return r;
}
/**
* This method determines if the request should be cached.
* @param request Request object
* @return boolean
*/
private boolean isRequestCacheable(Object request, KeyType keyType) {
switch (keyType) {
//cache only requests for transactional tables, with a valid table id
case PARTITIONS_BY_EXPR:
case PARTITIONS_SPEC_BY_EXPR:
PartitionsByExprRequest req = (PartitionsByExprRequest) request;
return req.getValidWriteIdList() != null && req.getId() != -1;
case AGGR_COL_STATS:
PartitionsStatsCustomRequest customRequest = (PartitionsStatsCustomRequest) request;
return customRequest.tableId != -1 && customRequest.validWriteIdList != null;
// Requests of other types can have different conditions and should be added here.
default:
return false;
}
}
/**
* Checks if cache is enabled and initialized
*
* @return boolean
*/
private boolean isCacheEnabledAndInitialized() {
return IS_CACHE_ENABLED && mscLocalCache != null;
}
}
/**
 * Unchecked wrapper used by the cache loader: the Caffeine mapping function is a
 * functional interface that cannot declare checked exceptions, so the checked
 * cause is wrapped here and unwrapped again by the caller.
 */
class UncheckedCacheException extends RuntimeException {
  public UncheckedCacheException(Throwable t) {
    super(t);
  }
}
| |
package org.apereo.cas.support.events.dao;
import org.apereo.cas.authentication.adaptive.geo.GeoLocationRequest;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.annotation.Id;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.JoinColumn;
import javax.persistence.MapKeyColumn;
import javax.persistence.MappedSuperclass;
import javax.persistence.Transient;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * This is {@link CasEvent}, which represents a single event stored in the events repository.
 * Fixed attributes (type, principal, creation time) are first-class columns; everything
 * else (timestamps, ids, addresses, geolocation) is kept in the {@code properties} map.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@MappedSuperclass
@ToString
@Getter
@Setter
@AllArgsConstructor
public class CasEvent implements Serializable {
    private static final long serialVersionUID = -4206712375316470417L;
    // id is @Transient here; the -1 default is replaced by the no-arg constructor below
    @Id
    @JsonProperty
    @Transient
    private long id = -1;
    @JsonProperty("type")
    @Column(nullable = false)
    private String type;
    @JsonProperty("principalId")
    @Column(nullable = false)
    private String principalId;
    @JsonProperty("creationTime")
    @Column(nullable = false)
    private String creationTime;
    // free-form event attributes, persisted as name/value rows in events_properties
    @JsonProperty("properties")
    @ElementCollection
    @MapKeyColumn(name = "name")
    @Column(name = "value")
    @CollectionTable(name = "events_properties", joinColumns = @JoinColumn(name = "eventId"))
    private Map<String, String> properties = new HashMap<>(0);
    /**
     * Instantiates a new CAS event, using the current time in milliseconds as the id.
     */
    public CasEvent() {
        this.id = System.currentTimeMillis();
    }
    /**
     * Put timestamp.
     *
     * @param time the time; must not be null
     */
    public void putTimestamp(final Long time) {
        put("timestamp", time.toString());
    }
    /**
     * Put id.
     *
     * @param eventId the id
     */
    public void putEventId(final String eventId) {
        put("eventId", eventId);
    }
    /**
     * Put client ip.
     *
     * @param loc the loc
     */
    public void putClientIpAddress(final String loc) {
        put("clientip", loc);
    }
    /**
     * Put server ip.
     *
     * @param loc the loc
     */
    public void putServerIpAddress(final String loc) {
        put("serverip", loc);
    }
    /**
     * Put agent.
     *
     * @param dev the dev
     */
    public void putAgent(final String dev) {
        put("agent", dev);
    }
    // NOTE(review): throws NumberFormatException/NullPointerException when the
    // "timestamp" property is absent or non-numeric -- confirm callers always set it
    @JsonIgnore
    public Long getTimestamp() {
        return Long.valueOf(get("timestamp"));
    }
    @JsonIgnore
    public String getAgent() {
        return get("agent");
    }
    @JsonIgnore
    public String getEventId() {
        return get("eventId");
    }
    @JsonIgnore
    public String getClientIpAddress() {
        return get("clientip");
    }
    @JsonIgnore
    public String getServerIpAddress() {
        return get("serverip");
    }
    /**
     * Put property. A blank value removes the key instead of storing it.
     *
     * @param key   the key
     * @param value the value
     */
    public void put(final String key, final String value) {
        if (StringUtils.isBlank(value)) {
            this.properties.remove(key);
        } else {
            this.properties.put(key, value);
        }
    }
    /**
     * Get property.
     *
     * @param key the key
     * @return the string, or null when the key is absent
     */
    public String get(final String key) {
        return this.properties.get(key);
    }
    /**
     * Put geo latitude.
     *
     * @param s the latitude
     */
    private void putGeoLatitude(final String s) {
        put("geoLatitude", s);
    }
    /**
     * Put geo longitude.
     *
     * @param s the longitude
     */
    private void putGeoLongitude(final String s) {
        put("geoLongitude", s);
    }
    /**
     * Put geo accuracy.
     *
     * @param s the accuracy
     */
    private void putGeoAccuracy(final String s) {
        put("geoAccuracy", s);
    }
    /**
     * Put geo timestamp.
     *
     * @param s the timestamp
     */
    private void putGeoTimestamp(final String s) {
        put("geoTimestamp", s);
    }
    /**
     * Put geo location, splitting it into the four geo* properties.
     *
     * @param location the location
     */
    public void putGeoLocation(final GeoLocationRequest location) {
        putGeoAccuracy(location.getAccuracy());
        putGeoLatitude(location.getLatitude());
        putGeoLongitude(location.getLongitude());
        putGeoTimestamp(location.getTimestamp());
    }
    /**
     * Gets geo location, rebuilt from the four geo* properties.
     *
     * @return the geo location
     */
    @JsonIgnore
    public GeoLocationRequest getGeoLocation() {
        val request = new GeoLocationRequest();
        request.setAccuracy(get("geoAccuracy"));
        request.setTimestamp(get("geoTimestamp"));
        request.setLongitude(get("geoLongitude"));
        request.setLatitude(get("geoLatitude"));
        return request;
    }
}
| |
/*
* Copyright (c) 2008-2013 EMC Corporation
* All Rights Reserved
*/
package com.emc.storageos.model.vpool;
import org.codehaus.jackson.annotate.JsonProperty;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.HashSet;
import java.util.Set;
/**
 * Parameter bean for the "find matching pools" API: each non-null attribute
 * further constrains which storage pools are matched.
 */
@XmlRootElement(name = "find_matching_pools")
public class VirtualPoolAttributesParam {
    private Set<String> protocols;
    private Integer maxPaths;
    private Integer minPaths;
    private Integer pathsPerInitiator;
    private Set<String> virtualArrays;
    private VirtualPoolProtectionParam protection;
    private String provisionType;
    private VirtualPoolHighAvailabilityParam highAvailability;
    private String systemType;
    private Set<String> raidLevels;
    private String autoTieringPolicyName;
    private String driveType;
    private Boolean multiVolumeConsistency;
    public VirtualPoolAttributesParam() {
    }
    /**
     * The protocols for a virtual pool.
     * Valid values:
     * FC = Fibre Channel (block)
     * ISCSI = Internet Small Computer System Interface (block)
     * FCoE = Fibre Channel over Ethernet (block)
     * NFS = Network File System (file)
     * NFSV4 = Network File System Version 4 (file)
     * CIFS = Common Internet File System (file)
     *
     * NOTE(review): unlike getVirtualArrays/getRaidLevels this getter does NOT
     * lazily initialize and may return null -- confirm whether that is intended.
     */
    @XmlElementWrapper(name = "protocols")
    @XmlElement(name = "protocol")
    public Set<String> getProtocols() {
        return protocols;
    }
    public void setProtocols(Set<String> protocols) {
        if (protocols == null) {
            protocols = new HashSet<String>();
        }
        this.protocols = protocols;
    }
    /**
     * The maximum number of paths to a given storage system.
     *
     */
    @XmlElement(name = "max_paths")
    public Integer getMaxPaths() {
        return maxPaths;
    }
    public void setMaxPaths(Integer maxPaths) {
        this.maxPaths = maxPaths;
    }
    /**
     * The minimum number of paths to a given storage system for export.
     *
     */
    @XmlElement(name = "min_paths")
    public Integer getMinPaths() {
        return minPaths;
    }
    public void setMinPaths(Integer minPaths) {
        this.minPaths = minPaths;
    }
    /**
     * @deprecated use getMaxPaths instead of getNumPaths
     */
    @Deprecated
    @XmlElement(name = "num_paths")
    public Integer getNumPaths() {
        return maxPaths;
    }
    /**
     * @deprecated use setMaxPaths instead of setNumPaths
     */
    @Deprecated
    public void setNumPaths(Integer numPaths) {
        this.maxPaths = numPaths;
    }
    /**
     * The maximum number of paths to a given storage system for the initiator.
     */
    @XmlElement(name = "paths_per_initiator")
    public Integer getPathsPerInitiator() {
        return pathsPerInitiator;
    }
    public void setPathsPerInitiator(Integer pathsPerInitiator) {
        this.pathsPerInitiator = pathsPerInitiator;
    }
    /**
     * The virtual arrays for the virtual pool. Lazily initialized: never returns null.
     *
     */
    @XmlElementWrapper(name = "varrays")
    @XmlElement(name = "varray")
    @JsonProperty("varrays")
    public Set<String> getVirtualArrays() {
        if (virtualArrays == null) {
            virtualArrays = new HashSet<String>();
        }
        return virtualArrays;
    }
    public void setVirtualArrays(Set<String> virtualArrays) {
        this.virtualArrays = virtualArrays;
    }
    /**
     * The protection settings for the virtual pool.
     *
     */
    @XmlElement(name = "protection")
    public VirtualPoolProtectionParam getProtection() {
        return protection;
    }
    public void setProtection(VirtualPoolProtectionParam protection) {
        this.protection = protection;
    }
    /**
     * The provisioning type for the virtual pool.
     * Valid values:
     * NONE
     * Thin
     * Thick
     *
     */
    @XmlElement(name = "provisioning_type", required = false)
    public String getProvisionType() {
        return provisionType;
    }
    public void setProvisionType(String provisionType) {
        this.provisionType = provisionType;
    }
    /**
     * The high availability settings for the virtual pool.
     *
     */
    @XmlElement(name = "high_availability")
    public VirtualPoolHighAvailabilityParam getHighAvailability() {
        return highAvailability;
    }
    public void setHighAvailability(
            VirtualPoolHighAvailabilityParam highAvailability) {
        this.highAvailability = highAvailability;
    }
    /**
     * The system type for the virtual pool.
     * Valid values:
     * vnxblock
     * vmax
     * vnxfile
     * isilon
     * netapp
     *
     */
    @XmlElement(name = "system_type")
    public String getSystemType() {
        return systemType;
    }
    public void setSystemType(String systemType) {
        this.systemType = systemType;
    }
    /**
     * The desired RAID levels. Only supported for VMAX and VNX storage systems.
     * When specified, only storage pools that support the specified RAID levels
     * are matched. During volume creation, a specific RAID level to be used may
     * be specified. This RAID level must be supported by the virtual pool.
     *
     * RAID levels set the amount of redundancy and striping.
     * Here is a quick definition of the various RAID levels.
     *
     * RAID 0 is a striped set of disks without parity.
     * RAID 1 is a mirror copy on two disks.
     * RAID 2 is a stripe at the bit level rather than the block level. Rarely used or supported.
     * RAID 3 is a byte level striping with a dedicated parity disk.
     * RAID 4 is block level striping with a dedicated parity disk.
     * RAID 5 is block level striping with the parity data distributed across all disks.
     * RAID 6 extends RAID 5 by adding an additional parity block;
     * thus it uses block level striping with two parity blocks.
     * RAID 10 is a stripe of mirrors, i.e. a RAID 0 combination of RAID 1 drives.
     * Valid values:
     * RAID0
     * RAID1
     * RAID2
     * RAID3
     * RAID4
     * RAID5
     * RAID6
     * RAID10
     *
     * Lazily initialized: never returns null.
     */
    @XmlElementWrapper(name = "raid_levels")
    @XmlElement(name = "raid_level")
    public Set<String> getRaidLevels() {
        if (raidLevels == null) {
            raidLevels = new HashSet<String>();
        }
        return raidLevels;
    }
    public void setRaidLevels(Set<String> raidLevels) {
        this.raidLevels = raidLevels;
    }
    /**
     * The auto tier policy name. Only supported for VMAX and VNX storage
     * systems. If auto_tiering_policy_name is specified, then on VNX, a ranking
     * algorithm is applied to get matching pools. On VMAX, only pools
     * associated with VMAX Auto Tier Policies are matched.
     *
     */
    @XmlElement(name = "auto_tiering_policy_name")
    public String getAutoTieringPolicyName() {
        return autoTieringPolicyName;
    }
    public void setAutoTieringPolicyName(String autoTieringPolicyName) {
        this.autoTieringPolicyName = autoTieringPolicyName;
    }
    /**
     * The supported drive type. When specified, only storage pools that are
     * comprised of the specified drive type are matched.
     * Valid values:
     * FC
     * ISCSI
     * FCoE
     * NFS
     * NFSV4
     * CIFS
     *
     */
    @XmlElement(name = "drive_type")
    public String getDriveType() {
        return driveType;
    }
    public void setDriveType(String driveType) {
        this.driveType = driveType;
    }
    /**
     * Specifies whether or not the virtual pool supports multi-volume
     * consistency. When specified for a virtual pool, volumes created using the
     * virtual pool can be created in consistency groups.
     *
     */
    @XmlElement(name = "multi_volume_consistency")
    public Boolean getMultiVolumeConsistency() {
        return multiVolumeConsistency;
    }
    public void setMultiVolumeConsistency(Boolean multiVolumeConsistency) {
        this.multiVolumeConsistency = multiVolumeConsistency;
    }
}
| |
/*
* Copyright 2015 Kejun Xia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.shipdream.lib.android.mvc.samples.note.controller.internal;
import com.shipdream.lib.android.mvc.MvcGraph;
import com.shipdream.lib.android.mvc.controller.NavigationController;
import com.shipdream.lib.android.mvc.samples.note.LocId;
import com.shipdream.lib.android.mvc.samples.note.controller.AppController;
import com.shipdream.lib.android.mvc.samples.note.controller.NoteController;
import com.shipdream.lib.android.mvc.samples.note.model.NoteModel;
import com.shipdream.lib.android.mvc.samples.note.service.android.PreferenceService;
import com.shipdream.lib.poke.Component;
import com.shipdream.lib.poke.Provides;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Singleton;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code NoteController}.
 *
 * Test cases in this class do not cover all possible scenarios but just show examples.
 */
public class TestNoteController extends TestControllerBase<NoteController> {
    private AppController appControllerMock;
    private PreferenceService preferenceServiceMock;
    private PreferenceService.Editor editorMock;

    /**
     * Poke component that exposes this test's mocks to the dependency graph so
     * the controller under test is injected with mocked collaborators.
     */
    public static class TestComp extends Component {
        TestNoteController testNoteController;

        @Singleton
        @Provides
        public PreferenceService providePreferenceService() {
            return testNoteController.preferenceServiceMock;
        }

        @Provides
        public AppController provideAppController() {
            return testNoteController.appControllerMock;
        }
    }

    @Override
    protected void registerDependencies(MvcGraph mvcGraph) {
        appControllerMock = mock(AppController.class);
        preferenceServiceMock = mock(PreferenceService.class);
        editorMock = mock(PreferenceService.Editor.class);
        // The editor is a fluent API: putString(...) returns the editor itself.
        when(editorMock.putString(anyString(), anyString())).thenReturn(editorMock);
        when(preferenceServiceMock.edit()).thenReturn(editorMock);
        TestComp testComp = new TestComp();
        testComp.testNoteController = this;
        mvcGraph.register(testComp);
    }

    @Override
    protected NoteController createTestingController() {
        return new NoteControllerImpl();
    }

    @Test
    public void shouldNavigateToNewNoteViewByCalling_ToCreateNewNote() {
        //arrange
        class Monitor {
            public void onEvent(NavigationController.EventC2V.OnLocationForward event) {
            }
        }
        Monitor monitor = mock(Monitor.class);
        eventBusC2V.register(monitor);
        //act
        controllerToTest.toCreateNote();
        //verify
        ArgumentCaptor<NavigationController.EventC2V.OnLocationForward> navEvent
                = ArgumentCaptor.forClass(NavigationController.EventC2V.OnLocationForward.class);
        //Navigation event should be raised exactly once
        verify(monitor, times(1)).onEvent(navEvent.capture());
        //It should go to new note view.
        //Fixed: JUnit's assertEquals takes (expected, actual); the arguments were
        //reversed, which produced misleading failure messages.
        Assert.assertEquals(LocId.NEW_NOTE, navEvent.getValue().getCurrentValue().getLocationId());
    }

    //NOTE(review): the method name says "should be in selection mode only when the
    //list is null or empty" but the test verifies the controller is NOT in selection
    //mode in those cases - consider renaming for clarity.
    @Test
    public void shouldBeInSelectionModeOnlyWhenSelectedNoteIdListIsNullOrEmpty() {
        //pre-check
        Assert.assertFalse(controllerToTest.inSelectionMode());
        //arrange: null selection list means no selection mode
        NoteModel model = new NoteModel();
        controllerToTest.bindModel(this, model);
        model.setSelectedNoteIds(null);
        //assert
        Assert.assertFalse(controllerToTest.inSelectionMode());
        //arrange: empty selection list also means no selection mode
        model.setSelectedNoteIds(new ArrayList<Long>());
        //assert
        Assert.assertFalse(controllerToTest.inSelectionMode());
        //arrange: a non-empty selection list turns selection mode on
        List<Long> selectedIds = new ArrayList<>();
        selectedIds.add(1L);
        model.setSelectedNoteIds(selectedIds);
        //assert
        Assert.assertTrue(controllerToTest.inSelectionMode());
    }

    @Test
    public void shouldNavigateToNoteDetailViewWhenSelectANoteInNonSelectionModeWhenInPortraitMode() {
        //arrange
        class Monitor {
            public void onEvent(NavigationController.EventC2V.OnLocationForward event) {
            }
        }
        Monitor monitor = mock(Monitor.class);
        eventBusC2V.register(monitor);
        Assert.assertFalse(controllerToTest.inSelectionMode());
        when(appControllerMock.getCurrentOrientation()).thenReturn(AppController.Orientation.PORTRAIT);
        //act
        controllerToTest.selectNote(5);
        //verify
        ArgumentCaptor<NavigationController.EventC2V.OnLocationForward> navEvent
                = ArgumentCaptor.forClass(NavigationController.EventC2V.OnLocationForward.class);
        //Navigation event should be raised exactly once
        verify(monitor, times(1)).onEvent(navEvent.capture());
        //It should go to the handset note detail view.
        //Fixed: expected value goes first in assertEquals (see note in the first test).
        Assert.assertEquals(LocId.NOTE_HANDSET_DETAIL, navEvent.getValue().getCurrentValue().getLocationId());
    }

    @Test
    public void shouldNotNavigateToNoteDetailViewWhenSelectANoteInNonSelectionModeWhenInLandscapeMode() {
        //arrange
        class Monitor {
            public void onEvent(NavigationController.EventC2V.OnLocationForward event) {
            }
        }
        Monitor monitor = mock(Monitor.class);
        eventBusC2V.register(monitor);
        Assert.assertFalse(controllerToTest.inSelectionMode());
        when(appControllerMock.getCurrentOrientation()).thenReturn(AppController.Orientation.LANDSCAPE);
        //act
        controllerToTest.selectNote(5);
        //verify
        ArgumentCaptor<NavigationController.EventC2V.OnLocationForward> navEvent
                = ArgumentCaptor.forClass(NavigationController.EventC2V.OnLocationForward.class);
        //No navigation event should be raised in landscape mode
        //(fixed comment: it previously claimed the opposite of what is verified)
        verify(monitor, times(0)).onEvent(navEvent.capture());
    }

    @Test
    public void shouldNotNavigateToNoteDetailViewWhenItIsInSelectionMode() {
        //arrange
        class Monitor {
            public void onEvent(NavigationController.EventC2V.OnLocationForward event) {
            }
        }
        Monitor monitor = mock(Monitor.class);
        eventBusC2V.register(monitor);
        Assert.assertFalse(controllerToTest.inSelectionMode());
        //arrange: put the controller into selection mode
        NoteModel model = new NoteModel();
        controllerToTest.bindModel(this, model);
        List<Long> selectedIds = new ArrayList<>();
        selectedIds.add(1L);
        model.setSelectedNoteIds(selectedIds);
        //now controller is in note selection mode
        Assert.assertTrue(controllerToTest.inSelectionMode());
        //act
        controllerToTest.selectNote(1);
        //verify
        ArgumentCaptor<NavigationController.EventC2V.OnLocationForward> navEvent
                = ArgumentCaptor.forClass(NavigationController.EventC2V.OnLocationForward.class);
        //No navigation event should be raised while in selection mode
        //(fixed comment: it previously claimed the opposite of what is verified)
        verify(monitor, times(0)).onEvent(navEvent.capture());
    }
}
| |
package com.nightonke.boommenu;
import com.nightonke.boommenu.Types.PlaceType;
/**
 * Computes the final on-screen positions ("end locations") of boom buttons for
 * each supported {@code PlaceType} layout.
 *
 * Created by Weiping on 2016/4/1.
 */
public class EndLocationsFactory {

    /**
     * Returns the top-left corner coordinates at which each button comes to rest.
     *
     * Throughout: {@code screenX / 2 - buttonWidth / 2} centers a button on an
     * axis, and {@code buttonWidth * 9 / 8} is the standard center-to-center
     * spacing between adjacent circle buttons.
     *
     * @param placeType    the layout pattern to compute positions for
     * @param screenWidth  available width in pixels
     * @param screenHeight available height in pixels
     * @param buttonWidth  button width in pixels (CIRCLE_* layouts treat buttons
     *                     as squares and use only this dimension)
     * @param buttonHeight button height in pixels (used by HAM_* layouts only)
     * @return an array of {@code [x, y]} pairs of length
     *         {@code BoomMenuButton.MAX_CIRCLE_BUTTON_NUMBER}; entries beyond the
     *         layout's button count remain {@code [0, 0]}
     */
    public static int[][] getEndLocations(
            PlaceType placeType,
            int screenWidth,
            int screenHeight,
            int buttonWidth,
            int buttonHeight) {
        int[][] endLocations = new int[BoomMenuButton.MAX_CIRCLE_BUTTON_NUMBER][2];
        if (placeType.equals(PlaceType.CIRCLE_1_1)) {
            // Single button centered on screen.
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_2_1)) {
            // Two buttons side by side at thirds of the width.
            endLocations[0][0] = screenWidth / 3 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth * 2 / 3 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_2_2)) {
            // Two buttons stacked vertically at thirds of the height.
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 3 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight * 2 / 3 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_3_1)) {
            // Horizontal row of three.
            int dis = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_3_2)) {
            // Vertical column of three.
            int dis = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + dis - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_3_3)) {
            // Equilateral triangle pointing up (a, b, c from triangle geometry).
            int b = screenWidth / 6;
            int c = (int)(2 * b / Math.sqrt(3));
            int a = c / 2;
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - c - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - b - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 + a - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + b - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + a - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_3_4)) {
            // Equilateral triangle pointing down.
            int b = screenWidth / 6;
            int c = (int)(2 * b / Math.sqrt(3));
            int a = c / 2;
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 + c - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - b - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - a - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + b - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - a - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_4_1)) {
            // 2x2 grid.
            endLocations[0][0] = screenWidth / 3 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - screenWidth / 6 - buttonWidth / 2;
            endLocations[1][0] = screenWidth * 2 / 3 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - screenWidth / 6 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 3 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + screenWidth / 6 - buttonWidth / 2;
            endLocations[3][0] = screenWidth * 2 / 3 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + screenWidth / 6 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_4_2)) {
            // Diamond: top, right, bottom, left of center.
            double s2 = Math.sqrt(2);
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = (int) (screenHeight / 2 - screenWidth / 3 / s2 - buttonWidth / 2);
            endLocations[1][0] = (int) (screenWidth / 2 + screenWidth / 3 / s2 - buttonWidth / 2);
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[2][1] = (int) (screenHeight / 2 + screenWidth / 3 / s2 - buttonWidth / 2);
            endLocations[3][0] = (int) (screenWidth / 2 - screenWidth / 3 / s2 - buttonWidth / 2);
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_5_1)) {
            // Row of three over a row of two.
            double s3 = Math.sqrt(3);
            int h = screenHeight / 2;
            endLocations[0][0] = screenWidth / 4 - buttonWidth / 2;
            endLocations[0][1] = h - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = h - buttonWidth / 2;
            endLocations[2][0] = screenWidth * 3 / 4 - buttonWidth / 2;
            endLocations[2][1] = h - buttonWidth / 2;
            endLocations[3][0] = screenWidth * 3 / 8 - buttonWidth / 2;
            endLocations[3][1] = (int) (h + s3 / 8 * screenWidth - buttonWidth / 2);
            endLocations[4][0] = screenWidth * 5 / 8 - buttonWidth / 2;
            endLocations[4][1] = (int) (h + s3 / 8 * screenWidth - buttonWidth / 2);
        }
        else if (placeType.equals(PlaceType.CIRCLE_5_2)) {
            // Row of two over a row of three.
            double s3 = Math.sqrt(3);
            int h = screenHeight / 2;
            endLocations[0][0] = screenWidth * 3 / 8 - buttonWidth / 2;
            endLocations[0][1] = h - buttonWidth / 2;
            endLocations[1][0] = screenWidth * 5 / 8 - buttonWidth / 2;
            endLocations[1][1] = h - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 4 - buttonWidth / 2;
            endLocations[2][1] = (int) (h + s3 / 8 * screenWidth - buttonWidth / 2);
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = (int) (h + s3 / 8 * screenWidth - buttonWidth / 2);
            endLocations[4][0] = screenWidth * 3 / 4 - buttonWidth / 2;
            endLocations[4][1] = (int) (h + s3 / 8 * screenWidth - buttonWidth / 2);
        }
        else if (placeType.equals(PlaceType.CIRCLE_5_3)) {
            // Center plus four diagonal corners.
            int dis = (int) ((buttonWidth * 9 / 8) / Math.sqrt(2));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + dis - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_5_4)) {
            // Plus sign: center plus left/right/top/bottom.
            int dis = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_6_1)) {
            // 3x2 grid.
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = buttonWidth * 9 / 16;
            endLocations[0][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_6_2)) {
            // 2x3 grid.
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = buttonWidth * 9 / 16;
            endLocations[0][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_6_3)) {
            // Hexagon ring, flat orientation.
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = (int) (dis1 / 2 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_6_4)) {
            // Hexagon ring, pointy orientation.
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = (int) (dis1 / 2 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_6_5)) {
            // 1-2-3 pyramid pointing up.
            int dis1 = buttonWidth * 9 / 16;
            int dis2 = (int) (dis1 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - dis1 * 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis1 * 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_6_6)) {
            // 3-2-1 pyramid pointing down (mirror of CIRCLE_6_5).
            int dis1 = buttonWidth * 9 / 16;
            int dis2 = (int) (dis1 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - dis1 * 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis1 * 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_7_1)) {
            // 1-3-3: single on top, then two rows of three.
            int dis = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 + dis - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_7_2)) {
            // Vertical mirror of CIRCLE_7_1: single at the bottom.
            int dis = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 + dis - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - dis - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 - dis - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 + dis - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 - dis - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_7_3)) {
            // Center plus flat hexagon ring (see CIRCLE_6_3).
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = (int) (dis1 / 2 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_7_4)) {
            // Center plus pointy hexagon ring (see CIRCLE_6_4).
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = (int) (dis1 / 2 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_8_1)) {
            // 3-2-3 arrangement.
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = (int) (dis1 / 2 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
            endLocations[7][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[7][1] = screenHeight / 2 + dis2 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_8_2)) {
            // Columns of 3-2-3.
            int dis1 = buttonWidth * 9 / 8;
            int dis2 = (int) (dis1 / 2 * Math.sqrt(3));
            endLocations[0][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - dis2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[7][0] = screenWidth / 2 + dis2 - buttonWidth / 2;
            endLocations[7][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_8_3)) {
            // 3x3 grid with the center cell left empty.
            int dis1 = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[7][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[7][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_9_1)) {
            // Full 3x3 grid.
            int dis1 = buttonWidth * 9 / 8;
            endLocations[0][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[7][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[7][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[8][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[8][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.CIRCLE_9_2)) {
            // Diamond (rotated 3x3 grid).
            // NOTE(review): "8 / 8" is a no-op, so dis1 == buttonWidth * sqrt(2);
            // every other layout uses a "9 / 8" spacing factor - confirm intended.
            int dis1 = (int) (buttonWidth * 8 / 8 * Math.sqrt(2));
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - dis1 - buttonWidth / 2;
            endLocations[1][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[2][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[3][0] = screenWidth / 2 - dis1 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[4][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[4][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[5][0] = screenWidth / 2 + dis1 - buttonWidth / 2;
            endLocations[5][1] = screenHeight / 2 - buttonWidth / 2;
            endLocations[6][0] = screenWidth / 2 - dis1 / 2 - buttonWidth / 2;
            endLocations[6][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[7][0] = screenWidth / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[7][1] = screenHeight / 2 + dis1 / 2 - buttonWidth / 2;
            endLocations[8][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[8][1] = screenHeight / 2 + dis1 - buttonWidth / 2;
        }
        else if (placeType.equals(PlaceType.HAM_1_1)) {
            // Hamburger (rectangular) layouts: single centered button.
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonHeight / 2;
        }
        else if (placeType.equals(PlaceType.HAM_2_1)) {
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonHeight;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 + buttonHeight;
        }
        else if (placeType.equals(PlaceType.HAM_3_1)) {
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonHeight * 7 / 4;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonHeight / 2;
            endLocations[2][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + buttonHeight * 3 / 4;
        }
        else if (placeType.equals(PlaceType.HAM_4_1)) {
            endLocations[0][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[0][1] = screenHeight / 2 - buttonHeight * 23 / 10;
            endLocations[1][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[1][1] = screenHeight / 2 - buttonHeight * 11 / 10;
            endLocations[2][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[2][1] = screenHeight / 2 + buttonHeight / 10;
            endLocations[3][0] = screenWidth / 2 - buttonWidth / 2;
            endLocations[3][1] = screenHeight / 2 + buttonHeight * 13 / 10;
        } else if (PlaceType.SHARE_3_1.v <= placeType.v && placeType.v <= PlaceType.SHARE_9_2.v) {
            // The end locations of share layouts are the same as the circle ones.
            // Fixed: the last argument used to pass buttonWidth for the buttonHeight
            // parameter. This is harmless today (circle layouts never read
            // buttonHeight) but misleading, so forward buttonHeight as declared.
            endLocations = getEndLocations(
                    PlaceType.valueOf(placeType.v -
                            (PlaceType.SHARE_3_1.v - PlaceType.CIRCLE_3_1.v)),
                    screenWidth, screenHeight, buttonWidth, buttonHeight);
        }
        return endLocations;
    }
}
| |
/*
* Orika - simpler, better and faster Java bean mapping
*
* Copyright (C) 2011-2013 Orika authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ma.glasnost.orika.impl.generator;
import ma.glasnost.orika.MapEntry;
import ma.glasnost.orika.impl.util.StringUtil;
import ma.glasnost.orika.metadata.FieldMap;
import ma.glasnost.orika.metadata.Property;
import ma.glasnost.orika.metadata.Type;
import ma.glasnost.orika.metadata.TypeFactory;
import java.util.*;
/**
 * A node in a tree of nested multi-occurrence (array / collection / map)
 * property mappings. The code generator uses one tree per side (source or
 * destination) of a mapping; each node tracks the generated variable
 * references needed to iterate and populate that level of nesting.
 */
public class Node {
    /** Parent node, or null when this node is a root of the tree. */
    public final Node parent;
    /** True when this node describes the source side of the mapping; false for the destination side. */
    private final boolean isSource;
    /** The property this node represents. */
    public final Property property;
    /** The field map carried by this node; null for intermediate container nodes created while building a path. */
    public final FieldMap value;
    /** Child nodes nested beneath this one. */
    public final NodeList children;
    // The variable refs below are only initialized when 'property' is
    // multi-occurrence (see the constructor); otherwise they remain null.
    public MultiOccurrenceVariableRef multiOccurrenceVar;
    public MultiOccurrenceVariableRef newDestination;
    public VariableRef elementRef;
    public VariableRef nullCheckFlag;
    public VariableRef shouldAddToCollectorFlag;
    public boolean addedToCollector;
    /**
     * Main constructor; the other constructors delegate here.
     * <p>
     * Note the side effect at the end: the new node registers itself either
     * with the supplied {@code nodes} list, or with {@code parent.children},
     * or (when both are null) stands alone with a fresh child list.
     *
     * @param property    the property this node represents
     * @param fieldMap    the field map for this node; may be null for intermediate nodes
     * @param parent      the parent node; may be null
     * @param nodes       an explicit node list to register with; may be null
     * @param isSource    whether this node belongs to the source side
     * @param uniqueIndex suffix used to keep generated variable names unique
     */
    private Node(Property property, FieldMap fieldMap, Node parent, NodeList nodes, boolean isSource, int uniqueIndex) {
        this.isSource = isSource;
        String name = (isSource ? "source" : "destination");
        String propertySuffix = StringUtil.capitalize(property.getName());
        this.value = fieldMap;
        this.parent = parent;
        this.property = property;
        if (property.isMultiOccurrence()) {
            /*
             * Use a List for storing elements intended for an Array; this allows flexibility in case we
             * can't (or it's too difficult to) determine the total size up front.
             *
             * Also, use a List (of Map.Entry) for elements intended for a Map; since we need to add the
             * Entry as soon as it's created (while it has null key and value), we can't put() it into
             * it's destination map until the other properties have been given values.
             */
            Type<?> elementType = buildElementType(property, isSource);
            Type<?> destinationType = buildDestinationType(property, elementType);
            this.newDestination = new MultiOccurrenceVariableRef(destinationType, "new_" + name + propertySuffix + uniqueIndex);
            String multiOccurrenceName;
            if (parent != null) {
                // Reuse the parent's element variable name when it has one.
                multiOccurrenceName = name(parent.elementRef.name(),name + propertySuffix);
            } else {
                multiOccurrenceName = name;
            }
            this.multiOccurrenceVar = new MultiOccurrenceVariableRef(property, multiOccurrenceName);
            this.elementRef = new VariableRef(elementType, property.getName() + "_" + name + uniqueIndex + "Element");
            if (elementType != null && elementType.isPrimitive()) {
                // Primitives can't be null themselves, so a separate flag tracks "no value".
                this.nullCheckFlag = new VariableRef(TypeFactory.valueOf(Boolean.TYPE), property.getName() + "_" + name+ uniqueIndex + "ElementIsNull");
            }
            this.shouldAddToCollectorFlag = new VariableRef(TypeFactory.valueOf(Boolean.TYPE),
                    property.getName() + "_" + name+ uniqueIndex + "ElementShouldBeAddedToCollector");
        }
        // Register this node in the tree (exactly one of these applies).
        if (nodes != null) {
            nodes.add(this);
            this.children = new NodeList(nodes);
        } else if (parent != null) {
            parent.children.add(this);
            this.children = new NodeList(parent.children);
        } else {
            this.children = new NodeList();
        }
    }
    /**
     * Returns the wrapper type for a primitive type (lists can't hold primitives);
     * returns the type unchanged otherwise.
     */
    private Type<?> primitiveSafeListType(Type<?> type) {
        if (type.isPrimitive()) {
            return type.getWrapperType();
        } else {
            return type;
        }
    }
    /** Creates an intermediate node (no field map) under the given parent. */
    private Node(Property property, Node parent, boolean isSource, int uniqueIndex) {
        this(property, null, parent, null, isSource, uniqueIndex);
    }
    /** Creates a node carrying a field map under the given parent. */
    private Node(Property property, FieldMap fieldMap, Node parent, boolean isSource, int uniqueIndex) {
        this(property, fieldMap, parent, null, isSource, uniqueIndex);
    }
    /** Creates a root-level node carrying a field map, registered with the given node list. */
    private Node(Property property, FieldMap fieldMap, NodeList nodes, boolean isSource, int uniqueIndex) {
        this(property, fieldMap, null, nodes, isSource, uniqueIndex);
    }
    /** Returns {@code value1} unless it is null or empty, in which case {@code defaultValue} is returned. */
    private String name(String value1, String defaultValue) {
        if (value1 != null && !"".equals(value1)) {
            return value1;
        } else {
            return defaultValue;
        }
    }
    /**
     * @return true if this Node has no children
     */
    public boolean isLeaf() {
        return children.isEmpty();
    }
    /**
     * @return true if this Node has no parent
     */
    public boolean isRoot() {
        return parent == null;
    }
    /**
     * Returns the field map of the child whose property is least deeply
     * nested (smallest container depth), or null when no child carries a
     * field map. Depth is measured by walking {@code getContainer()} links
     * on the source or destination property, depending on {@link #isSource}.
     */
    public FieldMap getMap() {
        TreeMap<Integer, FieldMap> nodes = new TreeMap<Integer, FieldMap>();
        for (Node child: children) {
            if (child.value != null) {
                int depth = 0;
                FieldMap value = child.value;
                Property prop = isSource ? value.getSource() : value.getDestination();
                while (prop.getContainer() != null) {
                    ++depth;
                    prop = prop.getContainer();
                }
                // Keep only the first field map seen at each depth.
                if (!nodes.containsKey(depth)) {
                    nodes.put(depth, value);
                }
            }
        }
        if (!nodes.isEmpty()) {
            return nodes.get(nodes.firstKey());
        } else {
            return null;
        }
    }
    public String toString() {
        return toString("");
    }
    /** Renders this node and its children as an indented tree for debugging. */
    private String toString(String indent) {
        StringBuilder out = new StringBuilder();
        out.append(indent + this.property.toString());
        if (!this.children.isEmpty()) {
            out.append(" {");
            for (Node child: children) {
                out.append("\n" + child.toString("  " + indent));
            }
            out.append("\n" + indent + "}");
        }
        return out.toString();
    }
    /**
     * Locates the node holding the given field map within the tree, by first
     * following the container path of the map's source (or destination)
     * property and then scanning the resulting children for a node whose
     * value equals the map. Returns null when not found.
     *
     * @param map       the field map to locate
     * @param nodes     the root node list to search
     * @param useSource whether to follow the source-side property path
     */
    public static Node findFieldMap(final FieldMap map, final NodeList nodes, boolean useSource) {
        LinkedList<Property> path = new LinkedList<Property>();
        Property container = useSource ? map.getSource() : map.getDestination();
        // Build the container path from outermost to innermost.
        while (container.getContainer() != null) {
            path.addFirst(container.getContainer());
            container = container.getContainer();
        }
        Node currentNode = null;
        NodeList children = nodes;
        for (Property pathElement : path) {
            currentNode = null;
            for (Node node : children) {
                if (node.property.equals(pathElement)) {
                    currentNode = node;
                    children = currentNode.children;
                    break;
                }
            }
            if (currentNode == null) {
                // Some path element is missing from the tree.
                return null;
            }
        }
        for (Node node: children) {
            if (map.equals(node.value)) {
                return node;
            }
        }
        return null;
    }
    /**
     * Determines the element type for a multi-occurrence property:
     * a Map.Entry type for maps (abstract entry on the source side, concrete
     * on the destination side), the element type for collections, or the
     * component type for arrays. Returns null for anything else.
     */
    private Type<?> buildElementType(Property property, boolean isSource) {
        Type<?> elementType = null;
        if (property.isMap()) {
            if (isSource) {
                @SuppressWarnings("unchecked")
                Type<?> entryType = MapEntry.entryType((Type<Map<Object,Object>>) property.getType());
                elementType = entryType;
            } else {
                @SuppressWarnings("unchecked")
                Type<?> entryType = MapEntry.concreteEntryType((Type<Map<Object,Object>>) property.getType());
                elementType = entryType;
            }
        } else if (property.isCollection()) {
            elementType = property.getElementType();
        } else if (property.isArray()) {
            elementType = property.getType().getComponentType();
        }
        return elementType;
    }
    /**
     * Determines the intermediate destination type used while collecting
     * elements: an ArrayList of the (wrapper-safe) component type for arrays,
     * an ArrayList of entries for maps, or the property type itself otherwise.
     * See the comment in the main constructor for why lists are used.
     */
    private Type<?> buildDestinationType(Property property, Type<?> elementType) {
        if (property.getType().isArray()) {
            return TypeFactory.valueOf(ArrayList.class, primitiveSafeListType(property.getType().getComponentType()));
        } else if (property.getType().isMap()) {
            return TypeFactory.valueOf(ArrayList.class, elementType);
        } else {
            return property.getType();
        }
    }
    /**
     * A list of sibling {@link Node}s. Each list keeps a running
     * {@code totalNodes} count that is propagated to parent lists; the count
     * is used as the uniqueness suffix for generated variable names.
     */
    public static class NodeList extends ArrayList<Node> {
        private static final long serialVersionUID = 1L;
        // Running count of nodes added to this list or any descendant list;
        // used as the uniqueIndex passed to new Nodes.
        private int totalNodes = 0;
        private final NodeList parent;
        public NodeList() {
            this.parent = null;
        }
        private NodeList(NodeList parent) {
            this.parent = parent;
        }
        /**
         * Adds a node for the given field map, creating any missing
         * intermediate container nodes along the property's container path.
         *
         * @param map       the field map to place in the tree
         * @param useSource whether to follow the source-side property path
         * @return the newly created leaf node holding {@code map}
         */
        public Node addFieldMap(final FieldMap map, boolean useSource) {
            LinkedList<Property> path = new LinkedList<Property>();
            Property root = useSource ? map.getSource() : map.getDestination();
            Property container = root;
            // Build the container path from outermost to innermost.
            while (container.getContainer() != null) {
                path.addFirst(container.getContainer());
                container = container.getContainer();
            }
            /*
             * Attempt to locate the path within the tree of nodes
             * under which this fieldMap should be placed
             */
            Node currentNode = null;
            Node parentNode = null;
            NodeList children = this;
            for(int p = 0, len=path.size(); p < len; ++p) {
                Property pathElement = path.get(p);
                for (Node node: children) {
                    if (node.property.equals(pathElement)) {
                        currentNode = node;
                        children = currentNode.children;
                        break;
                    }
                }
                if (currentNode == null) {
                    // Path element missing: create it, then create the rest of
                    // the path in the inner loop (which advances p to len).
                    currentNode = new Node(pathElement, parentNode, useSource, totalNodes);
                    if (parentNode == null) {
                        children.add(currentNode);
                    }
                    parentNode = currentNode;
                    for (p+=1; p < len; ++p) {
                        currentNode = new Node(path.get(p), parentNode, useSource, totalNodes);
                        parentNode = currentNode;
                    }
                } else {
                    parentNode = currentNode;
                    currentNode = null;
                }
            }
            /*
             * Finally add a node for the fieldMap at the end
             */
            if (parentNode == null) {
                root = innermostElement(root);
                currentNode = new Node(root, map, this, useSource, totalNodes);
            } else {
                root = innermostElement(root);
                currentNode = new Node(root, map, parentNode, useSource, totalNodes);
            }
            return currentNode;
        }
        /** Follows {@code getElement()} links to the innermost element property. */
        private Property innermostElement(final Property p) {
            Property result = p;
            while (result.getElement() != null) {
                result = result.getElement();
            }
            return result;
        }
        public String toString() {
            StringBuilder out = new StringBuilder();
            out.append("{");
            if (!isEmpty()) {
                for (Node node: this) {
                    out.append("\n" + node.toString("  "));
                }
                out.append("\n}");
            } else {
                out.append("}");
            }
            return out.toString();
        }
        /** Increments the node count on this list and every ancestor list. */
        private void incrementTotalNodes() {
            if (parent != null) {
                parent.incrementTotalNodes();
            }
            ++totalNodes;
        }
        public boolean add(Node node) {
            incrementTotalNodes();
            return super.add(node);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ObjectMapper;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
/**
 * Query builder for the {@code nested} query: runs an inner query against
 * nested documents under a given path and joins matching children back to
 * their parent documents using the configured {@link ScoreMode}.
 */
public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder> {
    public static final String NAME = "nested";
    /**
     * The default value for ignore_unmapped.
     */
    public static final boolean DEFAULT_IGNORE_UNMAPPED = false;
    private static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
    private static final ParseField PATH_FIELD = new ParseField("path");
    private static final ParseField QUERY_FIELD = new ParseField("query");
    private static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
    private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
    private final String path;
    private final ScoreMode scoreMode;
    private final QueryBuilder query;
    private InnerHitBuilder innerHitBuilder;
    private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
    /**
     * @param path      the nested object path to query under
     * @param query     the query to run against the nested documents
     * @param scoreMode how child scores are mapped onto the parent
     */
    public NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode) {
        this(path, query, scoreMode, null);
    }
    // All three required fields are validated here; requireValue throws when
    // the value is missing.
    private NestedQueryBuilder(String path, QueryBuilder query, ScoreMode scoreMode, InnerHitBuilder innerHitBuilder) {
        this.path = requireValue(path, "[" + NAME + "] requires 'path' field");
        this.query = requireValue(query, "[" + NAME + "] requires 'query' field");
        this.scoreMode = requireValue(scoreMode, "[" + NAME + "] requires 'score_mode' field");
        this.innerHitBuilder = innerHitBuilder;
    }
    /**
     * Read from a stream.
     */
    public NestedQueryBuilder(StreamInput in) throws IOException {
        super(in);
        path = in.readString();
        scoreMode = ScoreMode.values()[in.readVInt()];
        query = in.readNamedWriteable(QueryBuilder.class);
        innerHitBuilder = in.readOptionalWriteable(InnerHitBuilder::new);
        ignoreUnmapped = in.readBoolean();
    }
    // NOTE: the write order here must exactly mirror the read order in the
    // StreamInput constructor above.
    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeString(path);
        out.writeVInt(scoreMode.ordinal());
        out.writeNamedWriteable(query);
        out.writeOptionalWriteable(innerHitBuilder);
        out.writeBoolean(ignoreUnmapped);
    }
    /**
     * Returns the nested query to execute.
     */
    public QueryBuilder query() {
        return query;
    }
    /**
     * Returns inner hit definition in the scope of this query and reusing the defined type and query.
     */
    public InnerHitBuilder innerHit() {
        return innerHitBuilder;
    }
    /**
     * Sets the inner hits definition; the given builder is copied and bound
     * to this query's path and inner query.
     */
    public NestedQueryBuilder innerHit(InnerHitBuilder innerHit) {
        this.innerHitBuilder = new InnerHitBuilder(innerHit, path, query);
        return this;
    }
    /**
     * Returns how the scores from the matching child documents are mapped into the nested parent document.
     */
    public ScoreMode scoreMode() {
        return scoreMode;
    }
    /**
     * Sets whether the query builder should ignore unmapped paths (and run a
     * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
     * the path is unmapped.
     */
    public NestedQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
        this.ignoreUnmapped = ignoreUnmapped;
        return this;
    }
    /**
     * Gets whether the query builder will ignore unmapped fields (and run a
     * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
     * the path is unmapped.
     */
    public boolean ignoreUnmapped() {
        return ignoreUnmapped;
    }
    /** Renders this query as XContent (JSON) under the "nested" object key. */
    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.field(QUERY_FIELD.getPreferredName());
        query.toXContent(builder, params);
        builder.field(PATH_FIELD.getPreferredName(), path);
        builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
        if (scoreMode != null) {
            builder.field(SCORE_MODE_FIELD.getPreferredName(), HasChildQueryBuilder.scoreModeAsString(scoreMode));
        }
        printBoostAndQueryName(builder);
        if (innerHitBuilder != null) {
            builder.field(INNER_HITS_FIELD.getPreferredName(), innerHitBuilder, params);
        }
        builder.endObject();
    }
    /**
     * Parses a nested query from XContent. Object-valued fields ("query",
     * "inner_hits") and simple-valued fields ("path", "boost",
     * "ignore_unmapped", "score_mode", "_name") are handled separately;
     * unknown fields raise a {@link ParsingException}.
     */
    public static NestedQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        ScoreMode scoreMode = ScoreMode.Avg;
        String queryName = null;
        QueryBuilder query = null;
        String path = null;
        String currentFieldName = null;
        InnerHitBuilder innerHitBuilder = null;
        boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
                    query = parseContext.parseInnerQueryBuilder();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
                    innerHitBuilder = InnerHitBuilder.fromXContent(parseContext);
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if (parseContext.getParseFieldMatcher().match(currentFieldName, PATH_FIELD)) {
                    path = parser.text();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
                    boost = parser.floatValue();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
                    ignoreUnmapped = parser.booleanValue();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
                    scoreMode = HasChildQueryBuilder.parseScoreMode(parser.text());
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
                    queryName = parser.text();
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
                }
            }
        }
        NestedQueryBuilder queryBuilder =  new NestedQueryBuilder(path, query, scoreMode)
            .ignoreUnmapped(ignoreUnmapped)
            .queryName(queryName)
            .boost(boost);
        if (innerHitBuilder != null) {
            queryBuilder.innerHit(innerHitBuilder);
        }
        return queryBuilder;
    }
    @Override
    public final String getWriteableName() {
        return NAME;
    }
    @Override
    protected boolean doEquals(NestedQueryBuilder that) {
        return Objects.equals(query, that.query)
                && Objects.equals(path, that.path)
                && Objects.equals(scoreMode, that.scoreMode)
                && Objects.equals(innerHitBuilder, that.innerHitBuilder)
                && Objects.equals(ignoreUnmapped, that.ignoreUnmapped);
    }
    @Override
    protected int doHashCode() {
        return Objects.hash(query, path, scoreMode, innerHitBuilder, ignoreUnmapped);
    }
    /**
     * Builds the Lucene query: resolves the nested object mapper for the
     * path (honoring ignoreUnmapped), then wraps the inner query in a
     * {@link ToParentBlockJoinQuery} joining children to their parents.
     */
    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        ObjectMapper nestedObjectMapper = context.getObjectMapper(path);
        if (nestedObjectMapper == null) {
            if (ignoreUnmapped) {
                return new MatchNoDocsQuery();
            } else {
                throw new IllegalStateException("[" + NAME + "] failed to find nested object under path [" + path + "]");
            }
        }
        if (!nestedObjectMapper.nested().isNested()) {
            throw new IllegalStateException("[" + NAME + "] nested object under path [" + path + "] is not of nested type");
        }
        final BitSetProducer parentFilter;
        final Query childFilter;
        final Query innerQuery;
        // The parent filter depends on whether we are already inside another
        // nested scope (nested-within-nested) or at the top level.
        ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
        if (objectMapper == null) {
            parentFilter = context.bitsetFilter(Queries.newNonNestedFilter());
        } else {
            parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter());
        }
        childFilter = nestedObjectMapper.nestedTypeFilter();
        // Push the nested scope while building the inner query, and always
        // pop it again even if the inner query fails to build.
        try {
            context.nestedScope().nextLevel(nestedObjectMapper);
            innerQuery = this.query.toQuery(context);
        } finally {
            context.nestedScope().previousLevel();
        }
        return new ToParentBlockJoinQuery(Queries.filtered(innerQuery, childFilter), parentFilter, scoreMode);
    }
    // Only allocate a new builder when the inner query actually rewrote;
    // otherwise return this unchanged (required by the rewrite contract).
    @Override
    protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
        QueryBuilder rewrittenQuery = query.rewrite(queryRewriteContext);
        if (rewrittenQuery != query) {
            InnerHitBuilder rewrittenInnerHit = InnerHitBuilder.rewrite(innerHitBuilder, rewrittenQuery);
            return new NestedQueryBuilder(path, rewrittenQuery, scoreMode, rewrittenInnerHit);
        }
        return this;
    }
    /** Collects this query's inner-hit definition (if any) into the given map. */
    @Override
    protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
        if (innerHitBuilder != null) {
            innerHitBuilder.inlineInnerHits(innerHits);
        }
    }
}
| |
package water.util;
import water.api.RequestBuilders;
/**
* Helper class to plot simple 2D scatter plots.
* Input: x and y are two equal-sized float arrays with X and Y coordinates to be plotted.
*/
public class D3Plot {
private float[] x;
private float[] y;
private String xaxislabel = "x axis";
private String yaxislabel = "y axis";
private String title = "Missing Title";
private boolean ordinal_interpolation = false;
private boolean hide_toggle = true;
// default values are usually fine - might want to add a formatting method later
private String link = "Toggle view of plot";
private int width = 1000;
private int height = 400;
private int padding = 40;
private int font_size = 11;
public D3Plot(float[] x, float[] y, String xaxislabel, String yaxislabel, String title, boolean ordinal_interpolation, boolean hide_toggle) {
this.yaxislabel = yaxislabel;
this.xaxislabel = xaxislabel;
this.title = title;
this.x = x;
this.y = y;
this.link = "Toggle view of plot of " + title;
assert(x.length == y.length);
this.ordinal_interpolation = ordinal_interpolation;
this.hide_toggle = hide_toggle;
}
public D3Plot(float[] x, float[] y, String xaxislabel, String yaxislabel, String title) {
this.yaxislabel = yaxislabel;
this.xaxislabel = xaxislabel;
this.title = title;
this.x = x;
this.y = y;
this.link = "Toggle view of plot of " + title;
assert(x.length == y.length);
}
// populate the StringBuilder object with the Javascript code to display a 2D plot in a HTML page
public void generate(StringBuilder sb) {
final String plot = title.replaceAll(" ", "");
sb.append("<script type=\"text/javascript\" src='/h2o/js/d3.v3.min.js'></script>");
sb.append("<div>");
sb.append("<script>\n");
sb.append("$(document).on(\"click\", \"#pl" + plot + "\", function() { $(\"#plot" + plot + "\").toggleClass(\"hide\");});\n");
sb.append("</script>\n");
if (hide_toggle) {
sb.append("<button class = 'btn btn-inverse btn-mini' id = \"pl" + plot +"\">" + link + "</button>\n");
sb.append("<div class=\"hide\" id=\"" + "plot" + plot + "\">");
} else {
sb.append("<div id=\"" + "plot" + plot + "\">");
}
sb.append("<style type=\"text/css\">");
sb.append(".axis path," +
".axis line {\n" +
"fill: none;\n" +
"stroke: black;\n" +
"shape-rendering: crispEdges;\n" +
"}\n" +
".axis text {\n" +
"font-family: sans-serif;\n" +
"font-size: " + font_size + "px;\n" +
"}\n");
sb.append("</style>");
sb.append("<div id=\"" + "plot" + plot + "\" style=\"display:inline;\">");
sb.append("<script type=\"text/javascript\">");
sb.append("//Width and height\n");
sb.append("var w = " + width + ";\n"+
"var h = " + height + ";\n"+
"var padding = " + padding + ";\n"
);
sb.append("var dataset = [");
for(int c = 0; c < x.length; c++) {
if (c == 0) {
sb.append("["+String.valueOf(x[c])+",").append(RequestBuilders.ElementBuilder.format(y[c])).append("]");
}
sb.append(", ["+String.valueOf(x[c])+",").append(RequestBuilders.ElementBuilder.format(y[c])).append("]");
}
sb.append("];");
sb.append(
"//Create scale functions\n"+
"var xScale = d3.scale.linear()\n"+
".domain([0, d3.max(dataset, function(d) { return d[0]; })])\n"+
".range([padding, w - padding * 2]);\n"+
"var yScale = d3.scale.linear()"+
".domain([0, d3.max(dataset, function(d) { return d[1]; })])\n"+
".range([h - padding, padding]);\n"+
"var rScale = d3.scale.linear()"+
".domain([0, d3.max(dataset, function(d) { return d[1]; })])\n"+
".range([2, 5]);\n"+
"var lineFunction = d3.svg.line().interpolate(\"ordinal\")\n"+
".x(function(d) {return xScale(d[0]); })\n"+
".y(function(d) { return yScale(d[1]); });\n"+
"//Define X axis\n"+
"var xAxis = d3.svg.axis()\n"+
".scale(xScale)\n"+
".orient(\"bottom\")\n"+
".ticks(5);\n"+
"//Define Y axis\n"+
"var yAxis = d3.svg.axis()\n"+
".scale(yScale)\n"+
".orient(\"left\")\n"+
".ticks(5);\n"+
"//Create SVG element\n"+
"var svg = d3.select(\"#" + "plot" + plot + "\")\n"+
".append(\"svg\")\n"+
".attr(\"width\", w)\n"+
".attr(\"height\", h);\n"+
"//Create circles\n"+
"svg.selectAll(\"circle\")\n"+
".data(dataset)\n"+
".enter()\n"+
".append(\"circle\")\n"+
".attr(\"cx\", function(d) {\n"+
"return xScale(d[0]);\n"+
"})\n"+
".attr(\"cy\", function(d) {\n"+
"return yScale(d[1]);\n"+
"})\n"+
".attr(\"r\", function(d) {\n"+
"return 2;\n"+//rScale(d[1]);\n"+
"});\n"+
"/*"+
"//Create labels\n"+
"svg.selectAll(\"text\")"+
".data(dataset)"+
".enter()"+
".append(\"text\")"+
".text(function(d) {"+
"return d[0] + \",\" + d[1];"+
"})"+
".attr(\"x\", function(d) {"+
"return xScale(d[0]);"+
"})"+
".attr(\"y\", function(d) {"+
"return yScale(d[1]);"+
"})"+
".attr(\"font-family\", \"sans-serif\")"+
".attr(\"font-size\", \"11px\")"+
".attr(\"fill\", \"red\");"+
"*/\n"+
"//Create X axis\n"+
"svg.append(\"g\")"+
".attr(\"class\", \"axis\")"+
".attr(\"transform\", \"translate(0,\" + (h - padding) + \")\")"+
".call(xAxis);\n"+
"//X axis label\n"+
"d3.select('#" + "plot" + plot + " svg')"+
".append(\"text\")"+
".attr(\"x\",w/2)"+
".attr(\"y\",h - 5)"+
".attr(\"text-anchor\", \"middle\")"+
".text(\"" + xaxislabel + "\");\n"+
"//Create Y axis\n"+
"svg.append(\"g\")"+
".attr(\"class\", \"axis\")"+
".attr(\"transform\", \"translate(\" + padding + \",0)\")"+
".call(yAxis);\n"+
"//Y axis label\n"+
"d3.select('#" + "plot" + plot + " svg')"+
".append(\"text\")"+
".attr(\"x\",150)"+
".attr(\"y\",-2)"+
".attr(\"transform\", \"rotate(90)\")"+
//".attr(\"transform\", \"translate(0,\" + (h - padding) + \")\")"+
".attr(\"text-anchor\", \"middle\")"+
".text(\"" + yaxislabel + "\");\n"+
"//Title\n"+
"d3.select('#" + "plot" + plot + " svg')"+
".append(\"text\")"+
".attr(\"x\",w/2)"+
".attr(\"y\",padding - 20)"+
".attr(\"text-anchor\", \"middle\")"+
".text(\"" + title + "\");\n");
if (ordinal_interpolation) {
sb.append("var linesGroup = svg.append(\"g\").attr(\"class\", \"line\");\n"+
"linesGroup.append(\"path\")\n"+
".attr(\"d\", lineFunction(dataset))\n"+
".attr(\"class\", \"lines\")\n"+
".attr(\"fill\", \"none\")\n"+
".attr(\"stroke\", function(d, i) {\n"+
"return linedata.color;\n"+
"});\n");
}
sb.append("</script>");
sb.append("</div>");
sb.append("</script>");
sb.append("</div>");
sb.append("<style>");
sb.append(".line {\n" +
" fill: none;\n" +
" stroke: steelblue;\n" +
" stroke-width: 1.5px;\n" +
" }");
sb.append("</style>");
sb.append("</div>");
}
}
| |
/*
* Copyright (C) 2008 The University of Chicago
*
*
*
****************************************************************
* Source code information
* -----------------------
* Filename $RCSfile: CaGridServiceQueryUtility.java,v $
* Revision $Revision: 1.2 $
* Release status $State: Exp $
* Last modified on $Date: 2008-08-04 18:36:10 $
* by $Author: tanw $
* Created on 01-Dec-2007
*****************************************************************/
package net.sf.taverna.t2.activities.cagrid.query;
//Comments: do not use CaGrid client API, use pure WS client instead
import gov.nih.nci.cagrid.discovery.client.DiscoveryClient;
import gov.nih.nci.cagrid.metadata.MetadataUtils;
import gov.nih.nci.cagrid.metadata.ServiceMetadata;
import gov.nih.nci.cagrid.metadata.ServiceMetadataServiceDescription;
import gov.nih.nci.cagrid.metadata.common.ResearchCenterPointOfContactCollection;
import gov.nih.nci.cagrid.metadata.common.PointOfContact;
import gov.nih.nci.cagrid.metadata.common.UMLClass;
import gov.nih.nci.cagrid.metadata.exceptions.QueryInvalidException;
import gov.nih.nci.cagrid.metadata.exceptions.RemoteResourcePropertyRetrievalException;
import gov.nih.nci.cagrid.metadata.exceptions.ResourcePropertyRetrievalException;
import gov.nih.nci.cagrid.metadata.service.Operation;
//import gov.nih.nci.cagrid.metadata.service.OperationInputParameterCollection;
import gov.nih.nci.cagrid.metadata.service.ServiceContext;
import gov.nih.nci.cagrid.metadata.service.ServicePointOfContactCollection;
import gov.nih.nci.cagrid.metadata.service.ServiceServiceContextCollection;
import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;
import org.apache.axis.message.addressing.EndpointReferenceType;
import org.apache.axis.types.URI.MalformedURIException;
import org.apache.log4j.Logger;
/**
 * An agent that queries a GT4 index server to determine the available
 * categories and services.
 * @author sowen
 *
 */
public class CaGridServiceQueryUtility {
private static Logger logger = Logger.getLogger(CaGridServiceQueryUtility.class);
/**
* Returns a list of GT4 services, containing a list of their operations.
* Throws Exception if a service cannot be found.
*/
public static List<CaGridService> load(String indexURL, ServiceQuery[] sq) throws Exception{
List<CaGridService> services=new ArrayList<CaGridService>();
// Get the categories for this installation
boolean findServices = loadServices(indexURL,sq,services);
if (!findServices) {
throw new Exception("Unable to locate a GT4 index at \n" + indexURL);
}
return services;
}
//load services & operations by caGrid discovery service API
private static boolean loadServices(String indexURL, ServiceQuery[] sq, List<CaGridService> services) throws Exception{
boolean foundSome = false;
System.out.println("==================================================");
System.out.println("Start to generate Scavenger");
EndpointReferenceType[] servicesList = null;
servicesList = getEPRListByServiceQueryArray(indexURL,sq);
System.out.println("DiscoveryClient loaded and EPR to services returned.");
for (EndpointReferenceType epr : servicesList) {
if(epr!=null){
foundSome = true;
//add a service node
String serviceAddress = epr.getAddress().toString();
//TODO add more metadata to s -- like research institute, operation class?
CaGridService s = new CaGridService(serviceAddress+"?wsdl",serviceAddress);
services.add(s);
System.out.println(serviceAddress+"?wsdl");
try{
ServiceMetadata serviceMetadata = MetadataUtils.getServiceMetadata(epr);
ServiceMetadataServiceDescription serviceDes = serviceMetadata.getServiceDescription();
//ServiceContextOperationCollection s =
//serviceDes.getService().getServiceContextCollection().getServiceContext(0).getOperationCollection();
ServiceServiceContextCollection srvContxCol = serviceDes.getService().getServiceContextCollection();
ServiceContext [] srvContxs =srvContxCol.getServiceContext();
s.setResearchCenterName(serviceMetadata.getHostingResearchCenter().getResearchCenter().getDisplayName());
for (ServiceContext srvcontx:srvContxs)
{
Operation [] ops = srvcontx.getOperationCollection().getOperation();
//TODO: portType is no longer needed??
for (Operation op :ops){
//add an operation node
//print out the name of an operation
String operationName = op.getName();
//OperationInputParameterCollection opp = op.getInputParameterCollection();
s.addOperation(operationName);
System.out.println(operationName);
}
}
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
return foundSome;
}
public static EndpointReferenceType[] getEPRListByServiceQuery(String indexURL, ServiceQuery sq){
EndpointReferenceType[] servicesList = null;
DiscoveryClient client = null;
try {
client = new DiscoveryClient(indexURL);
} catch (MalformedURIException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if(sq==null){
System.out.println("Retrieving all services from the index: "+ indexURL);
System.out.println("==================================================");
try {
servicesList = client.getAllServices(true);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else {
System.out.println("Service Index URL: "+indexURL);
//semanticQueryingClause = indexURL.substring(n1+2);
System.out.println("Service Query: " + sq.queryCriteria + " == "+ sq.queryValue);
System.out.println("==================================================");
//TODO: semantic based service searching
//query by Search String
if(sq.queryCriteria.equals("Search String")){
try {
servicesList = client.discoverServicesBySearchString(sq.queryCriteria);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Research Center Name
else if(sq.queryCriteria.equals("Research Center")){
try {
servicesList = client.discoverServicesByResearchCenter(sq.queryValue);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Point of Contact
else if(sq.queryCriteria.equals("Point Of Contact")){
PointOfContact poc = new PointOfContact();
int n3 = sq.queryValue.indexOf(" ");
String firstName = sq.queryValue.substring(0,n3);
String lastName = sq.queryValue.substring(n3+1);
poc.setFirstName(firstName);
poc.setLastName(lastName);
try {
servicesList = client.discoverServicesByPointOfContact(poc);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Service Name
else if(sq.queryCriteria.equals("Service Name")){
try {
servicesList = client.discoverServicesByName(sq.queryValue);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Operation Name
else if(sq.queryCriteria.equals("Operation Name")){
try {
servicesList = client.discoverServicesByOperationName(sq.queryValue);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Operation Input
else if(sq.queryCriteria.equals("Operation Input")){
UMLClass umlClass = new UMLClass();
umlClass.setClassName(sq.queryValue);
try {
servicesList = client.discoverServicesByOperationInput(umlClass);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Operation Output
else if(sq.queryCriteria.equals("Operation Output")){
UMLClass umlClass = new UMLClass();
umlClass.setClassName(sq.queryValue);
try {
servicesList = client.discoverServicesByOperationOutput(umlClass);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//query by Operation Class
else if(sq.queryCriteria.equals("Operation Class")){
UMLClass umlClass = new UMLClass();
umlClass.setClassName(sq.queryValue);
try {
servicesList = client.discoverServicesByOperationClass(umlClass);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//discoverServicesByConceptCode("C43418")
else if(sq.queryCriteria.equals("Concept Code")){
try {
servicesList = client.discoverServicesByConceptCode(sq.queryValue);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//discoverServicesByOperationConceptCode
//discoverServicesByDataConceptCode
//discoverServicesByPermissibleValue
//getAllDataServices
//discoverDataServicesByDomainModel("caCore")
else if(sq.queryCriteria.equals("Domain Model for Data Services")){
try {
servicesList = client.discoverDataServicesByDomainModel(sq.queryValue);
} catch (RemoteResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (QueryInvalidException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ResourcePropertyRetrievalException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//discoverDataServicesByModelConceptCode
//discoverDataServicesByExposedClass
//discoverDataServicesByPermissibleValue
//discoverDataServicesByAssociationsWithClass
//discoverByFilter
}
return servicesList;
}
/**
 * Resolves an array of service queries against the given index service.
 * A null array or a single query delegates straight to
 * {@link #getEPRListByServiceQuery}; multiple queries are resolved one by
 * one and their results intersected by endpoint address.
 *
 * @param indexURL URL of the index service to query
 * @param sq       queries to resolve; may be null
 * @return the matching endpoint references, or null for an empty query array
 */
public static EndpointReferenceType[] getEPRListByServiceQueryArray(String indexURL, ServiceQuery sq[]){
    if (sq == null) {
        return getEPRListByServiceQuery(indexURL, null);
    }
    if (sq.length == 1) {
        return getEPRListByServiceQuery(indexURL, sq[0]);
    }
    if (sq.length > 1) {
        // Resolve each query independently, then keep only the services
        // that every query agreed on.
        EndpointReferenceType[][] perQueryResults = new EndpointReferenceType[sq.length][];
        for (int queryIndex = 0; queryIndex < sq.length; queryIndex++) {
            perQueryResults[queryIndex] = getEPRListByServiceQuery(indexURL, sq[queryIndex]);
        }
        return CombineEPRList(perQueryResults);
    }
    // Zero-length query array: nothing to resolve.
    return null;
}
/**
 * Intersects several per-query EPR result sets by endpoint address and
 * returns the entries of the first result set whose address appears in
 * every other set.
 *
 * <p>Robustness fix: {@code getEPRListByServiceQuery} can return null, and a
 * null sub-array previously caused a NullPointerException here; it is now
 * treated as an empty result set, making the intersection empty.
 *
 * @param tempEPRList one EPR array per executed query
 * @return the EPRs common to all result sets (taken from the first set)
 */
public static EndpointReferenceType[] CombineEPRList(EndpointReferenceType[][] tempEPRList){
    for (int i = 0; i < tempEPRList.length; i++) {
        if (tempEPRList[i] == null) {
            return new EndpointReferenceType[0];
        }
    }
    // Collect the string form of every address, per result set.
    String[][] addressList = new String[tempEPRList.length][];
    for (int i = 0; i < tempEPRList.length; i++) {
        addressList[i] = new String[tempEPRList[i].length];
        for (int j = 0; j < tempEPRList[i].length; j++) {
            addressList[i][j] = tempEPRList[i][j].getAddress().toString();
        }
    }
    // Start from the first set's addresses and retain only those present in
    // all other sets. (Typed List<String> replaces the original raw List.)
    List<String> common = new ArrayList<String>(Arrays.asList(addressList[0]));
    for (int i = 1; i < tempEPRList.length; i++) {
        common.retainAll(Arrays.asList(addressList[i]));
    }
    // Flag the surviving entries of the first set, then copy them out in order.
    int count = 0;
    int[] flag = new int[tempEPRList[0].length];
    for (int i = 0; i < tempEPRList[0].length; i++) {
        if (common.contains(tempEPRList[0][i].getAddress().toString())) {
            count++;
            flag[i] = 1;
        }
    }
    EndpointReferenceType[] servicesList = new EndpointReferenceType[count];
    int j = 0;
    for (int i = 0; i < tempEPRList[0].length; i++) {
        if (flag[i] == 1) {
            servicesList[j++] = tempEPRList[0][i];
        }
    }
    return servicesList;
}
}
/**
 * Simple value holder describing discovered services: one address per
 * service and, per service, the names of its operations.
 */
class ServiceMetaData{
    // Address (URL) of each discovered service.
    String [] serviceAddress;
    // Operation names, indexed as [service][operation].
    String [][] operationName;
    ServiceMetaData(){
        // Bug fix: the original constructor declared local variables that
        // shadowed the fields, so its assignments had no effect. The fields
        // already default to null; no explicit initialization is needed.
    }
}
| |
package net.londatiga.android;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.PopupWindow;
import com.limemobile.app.blog.R;
/**
 * Custom popup window: wraps an Android {@link PopupWindow}, dismisses on
 * outside touch, auto-positions above or below an anchor, and picks a grow
 * animation based on where the anchor sits on screen.
 *
 * <p>Review fixes: {@code show(View, Rect)} double-counted the parent's
 * screen X when testing for right-edge overflow ({@code anchorRect.left}
 * already includes {@code parentXPos}); two empty
 * {@code if (rootHeight > dyBottom)} blocks (dead code) and the unused
 * {@code height}/{@code arrowPos} locals were removed.
 *
 * @author Lorensius W. L. T <lorenz@londatiga.net>
 */
public class PopupWindows {
    protected Context mContext;
    protected PopupWindow mWindow;
    protected View mRootView;
    protected Drawable mBackground = null;
    protected WindowManager mWindowManager;
    protected boolean mDidAction;
    protected int mAnimStyle;
    protected int rootWidth = 0;

    public static final int ANIM_GROW_FROM_LEFT = 1;
    public static final int ANIM_GROW_FROM_RIGHT = 2;
    public static final int ANIM_GROW_FROM_CENTER = 3;
    public static final int ANIM_REFLECT = 4;
    public static final int ANIM_AUTO = 5;

    /**
     * Constructor.
     *
     * @param context Context
     */
    public PopupWindows(Context context) {
        mContext = context;
        mWindow = new PopupWindow(context);
        // Dismiss when the user touches outside the popup's bounds.
        mWindow.setTouchInterceptor(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                if (event.getAction() == MotionEvent.ACTION_OUTSIDE) {
                    mWindow.dismiss();
                    return true;
                }
                return false;
            }
        });
        mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        mAnimStyle = ANIM_AUTO;
    }

    /**
     * Set animation style.
     *
     * @param mAnimStyle animation style, default is set to ANIM_AUTO
     */
    public void setAnimStyle(int mAnimStyle) {
        this.mAnimStyle = mAnimStyle;
    }

    /**
     * On dismiss. Hook for subclasses; default does nothing.
     */
    protected void onDismiss() {
    }

    /**
     * On show. Hook for subclasses; default does nothing.
     */
    protected void onShow() {
    }

    /**
     * On pre show: validates the content view and configures the window
     * (background, wrap-content size, touch/focus behavior) before display.
     *
     * @throws IllegalStateException if setContentView was never called
     */
    protected void preShow() {
        if (mRootView == null)
            throw new IllegalStateException("setContentView was not called with a view to display.");
        onShow();
        // A background drawable is required for outside-touch dismissal to work.
        if (mBackground == null)
            mWindow.setBackgroundDrawable(new BitmapDrawable());
        else
            mWindow.setBackgroundDrawable(mBackground);
        mWindow.setWidth(WindowManager.LayoutParams.WRAP_CONTENT);
        mWindow.setHeight(WindowManager.LayoutParams.WRAP_CONTENT);
        mWindow.setTouchable(true);
        mWindow.setFocusable(true);
        mWindow.setOutsideTouchable(true);
        mWindow.setContentView(mRootView);
    }

    /**
     * Set background drawable.
     *
     * @param background Background drawable
     */
    public void setBackgroundDrawable(Drawable background) {
        mBackground = background;
    }

    /**
     * Set content view.
     *
     * @param root Root view
     */
    public void setContentView(View root) {
        mRootView = root;
        mWindow.setContentView(root);
    }

    /**
     * Set content view from a layout resource.
     *
     * @param layoutResID Resource id
     */
    public void setContentView(int layoutResID) {
        LayoutInflater inflator = (LayoutInflater) mContext
                .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        setContentView(inflator.inflate(layoutResID, null));
    }

    /**
     * Set listener on window dismissed.
     *
     * @param listener dismiss listener
     */
    public void setOnDismissListener(PopupWindow.OnDismissListener listener) {
        mWindow.setOnDismissListener(listener);
    }

    /**
     * Dismiss the popup window.
     */
    public void dismiss() {
        mWindow.dismiss();
    }

    /**
     * Shows the popup using the given Rect (in the parent's coordinate space)
     * as the anchor.
     *
     * @param parent view whose window the popup attaches to
     * @param rect   anchor rectangle, relative to {@code parent}
     */
    public void show(View parent, Rect rect){
        preShow();
        int xPos, yPos;
        mDidAction = false;
        // Translate the rect into screen coordinates.
        int[] location = new int[2];
        parent.getLocationOnScreen(location);
        int parentXPos = location[0];
        int parentYPos = location[1];
        Rect anchorRect = new Rect(parentXPos + rect.left, parentYPos + rect.top, parentXPos + rect.left + rect.width(), parentYPos + rect.top
                + rect.height());
        int width = anchorRect.width();
        mRootView.measure(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        int rootHeight = mRootView.getMeasuredHeight();
        if (rootWidth == 0) {
            rootWidth = mRootView.getMeasuredWidth();
        }
        int screenWidth = mWindowManager.getDefaultDisplay().getWidth();
        int screenHeight = mWindowManager.getDefaultDisplay().getHeight();
        // Automatically get X coord of popup (top left). Fix: anchorRect.left
        // is already in screen coordinates, so parentXPos must not be added
        // again when checking for right-edge overflow.
        if ((anchorRect.left + rootWidth) > screenWidth) {
            xPos = anchorRect.left - (rootWidth - width);
            xPos = (xPos < 0) ? 0 : xPos;
        } else {
            if (width > rootWidth) {
                xPos = anchorRect.centerX() - (rootWidth / 2);
            } else {
                xPos = anchorRect.left;
            }
        }
        // Show above the anchor when there is more room there than below.
        int dyTop = anchorRect.top;
        int dyBottom = screenHeight - anchorRect.bottom;
        boolean onTop = (dyTop > dyBottom);
        if (onTop) {
            if (rootHeight > dyTop) {
                // Popup is taller than the space above: pin near the top edge.
                yPos = 15;
            } else {
                yPos = anchorRect.top - rootHeight;
            }
        } else {
            yPos = anchorRect.bottom;
            // NOTE(review): the original had an empty "if (rootHeight > dyBottom)"
            // here; the popup is not clipped when it overflows the bottom edge.
        }
        setAnimationStyle(screenWidth, anchorRect.centerX(), onTop);
        mWindow.showAtLocation(parent, Gravity.NO_GRAVITY, xPos, yPos);
    }

    /**
     * Popup is automatically positioned, on top or bottom of anchor view.
     *
     * @param anchor the view to anchor the popup to
     */
    public void show(View anchor) {
        preShow();
        int xPos, yPos;
        mDidAction = false;
        int[] location = new int[2];
        anchor.getLocationOnScreen(location);
        Rect anchorRect = new Rect(location[0], location[1], location[0] + anchor.getWidth(),
                location[1] + anchor.getHeight());
        mRootView.measure(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
        int rootHeight = mRootView.getMeasuredHeight();
        if (rootWidth == 0) {
            rootWidth = mRootView.getMeasuredWidth();
        }
        int screenWidth = mWindowManager.getDefaultDisplay().getWidth();
        int screenHeight = mWindowManager.getDefaultDisplay().getHeight();
        // Automatically get X coord of popup (top left).
        if ((anchorRect.left + rootWidth) > screenWidth) {
            xPos = anchorRect.left - (rootWidth - anchor.getWidth());
            xPos = (xPos < 0) ? 0 : xPos;
        } else {
            if (anchor.getWidth() > rootWidth) {
                xPos = anchorRect.centerX() - (rootWidth / 2);
            } else {
                xPos = anchorRect.left;
            }
        }
        // Show above the anchor when there is more room there than below.
        int dyTop = anchorRect.top;
        int dyBottom = screenHeight - anchorRect.bottom;
        boolean onTop = (dyTop > dyBottom);
        if (onTop) {
            if (rootHeight > dyTop) {
                yPos = 15;
            } else {
                yPos = anchorRect.top - rootHeight;
            }
        } else {
            yPos = anchorRect.bottom;
            // NOTE(review): overflow below the screen is not clipped (the
            // original contained an empty conditional here).
        }
        setAnimationStyle(screenWidth, anchorRect.centerX(), onTop);
        mWindow.showAtLocation(anchor, Gravity.NO_GRAVITY, xPos, yPos);
    }

    /**
     * Set animation style.
     *
     * @param screenWidth screen width
     * @param requestedX  distance from left edge
     * @param onTop       flag to indicate where the popup should be displayed. Set
     *                    TRUE if displayed on top of anchor view and vice versa
     */
    private void setAnimationStyle(int screenWidth, int requestedX, boolean onTop) {
        int arrowPos = requestedX;
        switch (mAnimStyle) {
        case ANIM_GROW_FROM_LEFT:
            mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Left
                    : R.style.Animations_PopDownMenu_Left);
            break;
        case ANIM_GROW_FROM_RIGHT:
            mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Right
                    : R.style.Animations_PopDownMenu_Right);
            break;
        case ANIM_GROW_FROM_CENTER:
            mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Center
                    : R.style.Animations_PopDownMenu_Center);
            break;
        case ANIM_REFLECT:
            mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Reflect
                    : R.style.Animations_PopDownMenu_Reflect);
            break;
        case ANIM_AUTO:
            // Pick left/center/right grow based on which third of the screen
            // the anchor's center falls in.
            if (arrowPos <= screenWidth / 4) {
                mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Left
                        : R.style.Animations_PopDownMenu_Left);
            } else if (arrowPos > screenWidth / 4 && arrowPos < 3 * (screenWidth / 4)) {
                mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Center
                        : R.style.Animations_PopDownMenu_Center);
            } else {
                mWindow.setAnimationStyle((onTop) ? R.style.Animations_PopUpMenu_Right
                        : R.style.Animations_PopDownMenu_Right);
            }
            break;
        }
    }
}
| |
package io.cattle.platform.api.resource;
import io.cattle.platform.api.action.ActionHandler;
import io.cattle.platform.api.auth.Policy;
import io.cattle.platform.api.link.LinkHandler;
import io.cattle.platform.api.utils.ApiUtils;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.engine.manager.ProcessNotFoundException;
import io.cattle.platform.engine.process.ExitReason;
import io.cattle.platform.engine.process.ProcessInstanceException;
import io.cattle.platform.engine.process.impl.ProcessCancelException;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.meta.ActionDefinition;
import io.cattle.platform.object.meta.MapRelationship;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.meta.Relationship;
import io.cattle.platform.object.process.ObjectProcessManager;
import io.cattle.platform.object.process.StandardProcess;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.util.type.CollectionUtils;
import io.cattle.platform.util.type.InitializationTask;
import io.cattle.platform.util.type.NamedUtils;
import io.github.ibuildthecloud.gdapi.condition.Condition;
import io.github.ibuildthecloud.gdapi.condition.ConditionType;
import io.github.ibuildthecloud.gdapi.context.ApiContext;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import io.github.ibuildthecloud.gdapi.id.IdFormatter;
import io.github.ibuildthecloud.gdapi.id.IdentityFormatter;
import io.github.ibuildthecloud.gdapi.model.Action;
import io.github.ibuildthecloud.gdapi.model.Field;
import io.github.ibuildthecloud.gdapi.model.Include;
import io.github.ibuildthecloud.gdapi.model.ListOptions;
import io.github.ibuildthecloud.gdapi.model.Resource;
import io.github.ibuildthecloud.gdapi.model.Schema;
import io.github.ibuildthecloud.gdapi.model.Schema.Method;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.request.resource.ResourceManager;
import io.github.ibuildthecloud.gdapi.request.resource.impl.AbstractBaseResourceManager;
import io.github.ibuildthecloud.gdapi.url.UrlBuilder;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.config.DynamicIntProperty;
/**
 * Base {@link ResourceManager} for API resources backed by the object/process
 * framework. CRUD operations are delegated to {@link ObjectManager}; lifecycle
 * transitions (create/update/remove and custom actions) are scheduled through
 * {@link ObjectProcessManager}. Subclasses supply the backing store lookup and
 * raw removal.
 *
 * <p>Review fix: {@link #removedTime()} previously multiplied the configured
 * delay by {@code 1000} in int arithmetic, which overflows for large delays;
 * the multiplication is now done in long arithmetic.
 */
public abstract class AbstractObjectResourceManager extends AbstractBaseResourceManager implements InitializationTask {

    private static final Logger log = LoggerFactory.getLogger(AbstractObjectResourceManager.class);

    /** Field attribute flag: updating the field must schedule an UPDATE process. */
    public static final String SCHEDULE_UPDATE = "scheduleUpdate";

    /** How long (seconds) removed resources remain visible in list results. */
    private static final DynamicIntProperty REMOVE_DELAY = ArchaiusUtil.getInt("api.show.removed.for.seconds");
    /** Pass-through formatter used to read raw (unobfuscated) field values. */
    private static final IdFormatter IDENTITY_FORMATTER = new IdentityFormatter();

    ObjectManager objectManager;
    ObjectProcessManager objectProcessManager;
    ObjectMetaDataManager metaDataManager;
    Map<String, ActionHandler> actionHandlersMap;
    List<ActionHandler> actionHandlers;
    Map<String, List<LinkHandler>> linkHandlersMap;
    List<LinkHandler> linkHandlers;

    @Override
    protected Object authorize(Object object) {
        return ApiUtils.authorize(object);
    }

    /**
     * Creates a resource of the requested type from the request body.
     * Returns null when the type has no backing class.
     */
    @Override
    protected Object createInternal(String type, ApiRequest request) {
        Class<?> clz = getClassForType(request.getSchemaFactory(), type);
        if (clz == null) {
            return null;
        }
        return doCreate(type, clz, CollectionUtils.toMap(request.getRequestObject()));
    }

    /**
     * Converts the raw request map to object properties (defaulting the kind
     * field to the API type) and creates + schedules the object.
     */
    protected <T> T doCreate(String type, Class<T> clz, Map<Object, Object> data) {
        Map<String, Object> properties = getObjectManager().convertToPropertiesFor(clz, data);
        if (!properties.containsKey(ObjectMetaDataManager.KIND_FIELD)) {
            properties.put(ObjectMetaDataManager.KIND_FIELD, type);
        }
        return createAndScheduleObject(clz, properties);
    }

    /**
     * Persists the object and schedules its CREATE process. If no CREATE
     * process is registered for the type, the object is simply left created.
     */
    @SuppressWarnings("unchecked")
    protected <T> T createAndScheduleObject(Class<T> clz, Map<String, Object> properties) {
        Object result = objectManager.create(clz, properties);
        try {
            scheduleProcess(StandardProcess.CREATE, result, properties);
            result = objectManager.reload(result);
        } catch (ProcessNotFoundException e) {
            // Intentional: types without a create process are returned as-is.
        }
        return (T) result;
    }

    /**
     * Resolves the backing class for a type, walking up the schema parent
     * chain when the type itself has no class.
     */
    protected Class<?> getClassForType(SchemaFactory schemaFactory, String type) {
        Class<?> clz = schemaFactory.getSchemaClass(type);
        if (clz == null) {
            Schema schema = schemaFactory.getSchema(type);
            if (schema != null && schema.getParent() != null) {
                return getClassForType(schemaFactory, schema.getParent());
            }
        }
        return clz;
    }

    /**
     * Deletes by scheduling the REMOVE process; a cancelled process maps to
     * 405, and a type without a remove process falls back to direct removal
     * from the store.
     */
    @Override
    protected Object deleteInternal(String type, String id, Object obj, ApiRequest request) {
        try {
            scheduleProcess(StandardProcess.REMOVE, obj, null);
            return objectManager.reload(obj);
        } catch (ProcessCancelException e) {
            throw new ClientVisibleException(ResponseCodes.METHOD_NOT_ALLOWED);
        } catch (ProcessNotFoundException e) {
            return removeFromStore(type, id, obj, request);
        }
    }

    /** Removes the object directly from the backing store (no process). */
    protected abstract Object removeFromStore(String type, String id, Object obj, ApiRequest request);

    /**
     * Applies only the changed fields from the request; if any changed field
     * is flagged {@link #SCHEDULE_UPDATE}, an UPDATE process is scheduled with
     * the previous values recorded under "old".
     */
    @Override
    protected Object updateInternal(String type, String id, Object obj, ApiRequest request) {
        Map<String, Object> updates = CollectionUtils.toMap(request.getRequestObject());
        Map<String, Object> existingValues = new HashMap<>();
        Map<String, Object> filteredUpdates = new HashMap<>();
        // Read current values through the identity formatter so comparisons
        // are against raw (unobfuscated) ids.
        Map<String, Object> existing = createResource(obj, IDENTITY_FORMATTER, request).getFields();
        Schema schema = request.getSchemaFactory().getSchema(type);
        Map<String, Field> fields = schema.getResourceFields();
        boolean schedule = false;
        for (Map.Entry<String, Object> entry : updates.entrySet()) {
            String key = entry.getKey();
            Object existingValue = existing.get(key);
            if (!Objects.equals(existingValue, entry.getValue())) {
                filteredUpdates.put(key, entry.getValue());
                existingValues.put(key, existingValue);
                Field field = fields.get(key);
                if (field != null) {
                    schedule |= Boolean.TRUE.equals(field.getAttributes().get(SCHEDULE_UPDATE));
                }
            }
        }
        Object result = objectManager.setFields(schema, obj, filteredUpdates);
        if (schedule) {
            filteredUpdates.put("old", existingValues);
            objectProcessManager.scheduleStandardProcess(StandardProcess.UPDATE, obj, filteredUpdates);
            result = objectManager.reload(result);
        }
        return result;
    }

    /**
     * Resolves a link on a resource: first via registered {@link LinkHandler}s,
     * then via declared relationships (child / reference / map).
     */
    @Override
    protected Object getLinkInternal(String type, String id, String link, ApiRequest request) {
        List<LinkHandler> linkHandlers = linkHandlersMap.get(type);
        if (linkHandlers != null) {
            for (LinkHandler linkHandler : linkHandlers) {
                if (linkHandler.handles(type, id, link, request)) {
                    Object currentObject = getById(type, id, new ListOptions(request));
                    if (currentObject == null) {
                        return null;
                    }
                    try {
                        return linkHandler.link(link, currentObject, request);
                    } catch (IOException e) {
                        log.error("Failed to process link [{}] for [{}:{}]", link, type, id, e);
                        return null;
                    }
                }
            }
        }
        Class<?> clz = request.getSchemaFactory().getSchemaClass(type, true);
        Relationship relationship;
        if (clz != null) {
            relationship = getRelationship(clz, link);
        } else {
            relationship = getRelationship(type, link);
        }
        if (relationship == null) {
            return null;
        }
        switch (relationship.getRelationshipType()) {
        case CHILD:
            return getChildLink(type, id, relationship, request);
        case REFERENCE:
            return getReferenceLink(type, id, relationship, request);
        case MAP:
            return getMapLink(type, id, (MapRelationship) relationship, request);
        }
        return null;
    }

    /** Map-relationship links are unsupported here; subclasses may override. */
    protected Object getMapLink(String type, String id, MapRelationship relationship, ApiRequest request) {
        return null;
    }

    /**
     * Lists children of the resource: all objects of the related type whose
     * back-reference property points at this id. Also seeds create defaults
     * so a POST on the collection links back to this parent.
     */
    protected Object getChildLink(String type, String id, Relationship relationship, ApiRequest request) {
        Schema otherSchema = request.getSchemaFactory().getSchema(relationship.getObjectType());
        if (otherSchema == null) {
            return Collections.EMPTY_LIST;
        }
        Object currentObject = getById(type, id, new ListOptions(request));
        if (currentObject == null) {
            return null;
        }
        String otherType = otherSchema.getId();
        Field field = otherSchema.getResourceFields().get(relationship.getPropertyName());
        if (field == null) {
            return Collections.EMPTY_LIST;
        }
        if (otherSchema.getCollectionMethods().contains(Method.POST.toString())) {
            Map<String, Object> createDefaults = new HashMap<>();
            IdFormatter idFormatter = ApiContext.getContext().getIdFormatter();
            createDefaults.put(relationship.getPropertyName(), idFormatter.formatId(type, id));
            request.setCreateDefaults(createDefaults);
        }
        Map<Object, Object> criteria = getDefaultCriteria(false, true, otherType);
        criteria.put(relationship.getPropertyName(), id);
        ResourceManager resourceManager = locator.getResourceManagerByType(otherType);
        return resourceManager.list(otherType, criteria, null);
    }

    /**
     * Resolves a reference link: reads the foreign-key field off this
     * resource and fetches the single referenced object.
     */
    protected Object getReferenceLink(String type, String id, Relationship relationship, ApiRequest request) {
        SchemaFactory schemaFactory = request.getSchemaFactory();
        Schema schema = schemaFactory.getSchema(type);
        Schema otherSchema = schemaFactory.getSchema(relationship.getObjectType());
        Field field = schema.getResourceFields().get(relationship.getPropertyName());
        if (field == null || otherSchema == null) {
            return null;
        }
        ListOptions options = new ListOptions(request);
        Object currentObject = getById(type, id, options);
        if (currentObject == null) {
            return null;
        }
        Object fieldValue = field.getValue(currentObject);
        if (fieldValue == null) {
            return null;
        }
        Map<Object, Object> criteria = getDefaultCriteria(false, true, otherSchema.getId());
        criteria.put(ObjectMetaDataManager.ID_FIELD, fieldValue);
        ResourceManager resourceManager = locator.getResourceManagerByType(otherSchema.getId());
        return ApiUtils.getFirstFromList(resourceManager.list(otherSchema.getId(), criteria, options));
    }

    /**
     * Returns the declared link relationships restricted to those named in
     * the request's include parameter (case-insensitive).
     */
    protected Map<String, Relationship> getLinkRelationships(SchemaFactory schemaFactory, String type, Include include) {
        if (include == null)
            return Collections.emptyMap();
        Map<String, Relationship> result = new HashMap<>();
        Map<String, Relationship> links = metaDataManager.getLinkRelationships(schemaFactory, type);
        for (String link : include.getLinks()) {
            link = link.toLowerCase();
            if (links.containsKey(link)) {
                result.put(link, links.get(link));
            }
        }
        return result;
    }

    /**
     * Adds account scoping plus, for list queries, visibility filters that
     * keep recently removed rows visible for {@link #REMOVE_DELAY} seconds.
     */
    @Override
    protected Map<Object, Object> getDefaultCriteria(boolean byId, boolean byLink, String type) {
        Map<Object, Object> criteria = super.getDefaultCriteria(byId, byLink, type);
        Policy policy = ApiUtils.getPolicy();
        addAccountAuthorization(byId, byLink, type, criteria, policy);
        if (!showRemoved() && !byId) {
            /* removed is null or removed >= (NOW() - delay) */
            Condition or = new Condition(new Condition(ConditionType.NULL), new Condition(ConditionType.GTE, removedTime()));
            criteria.put(ObjectMetaDataManager.REMOVED_FIELD, or);
            /* remove_time is null or remove_time > NOW() */
            or = new Condition(new Condition(ConditionType.NULL), new Condition(ConditionType.GT, new Date()));
            criteria.put(ObjectMetaDataManager.REMOVE_TIME_FIELD, or);
        }
        return criteria;
    }

    /** True when the request explicitly asked to include removed resources. */
    protected boolean showRemoved() {
        ApiRequest request = ApiContext.getContext().getApiRequest();
        if (request == null) {
            return false;
        }
        return request.getOptions().containsKey("_removed");
    }

    /** Earliest "removed" timestamp still visible in list results. */
    protected Date removedTime() {
        // Fix: multiply in long arithmetic — the original int multiplication
        // (seconds * 1000) overflows for delays over ~24.8 days.
        return new Date(System.currentTimeMillis() - REMOVE_DELAY.get() * 1000L);
    }

    /**
     * Scopes criteria to the caller's account unless the policy allows
     * listing all accounts; the "account" type filters by id itself.
     */
    protected void addAccountAuthorization(boolean byId, boolean byLink, String type, Map<Object, Object> criteria, Policy policy) {
        if (!policy.isOption(Policy.LIST_ALL_ACCOUNTS)) {
            if (policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS) && (byId || byLink)) {
                return;
            }
            if ("account".equals(type)) {
                criteria.put(ObjectMetaDataManager.ID_FIELD, policy.getAccountId());
            } else {
                criteria.put(ObjectMetaDataManager.ACCOUNT_FIELD, policy.getAccountId());
            }
        }
    }

    /**
     * Collection type is the request type itself, or — for a link request —
     * the related object's schema name.
     */
    @Override
    protected String getCollectionType(List<?> list, ApiRequest request) {
        String link = request.getLink();
        if (link == null) {
            return request.getType();
        } else {
            // NOTE(review): getRelationship may return null for an unknown
            // link, which would NPE below — confirm callers only reach this
            // after link resolution succeeded.
            Relationship relationship = getRelationship(request.getSchemaFactory()
                    .getSchemaClass(request.getType(), true), link);
            return request.getSchemaFactory().getSchemaName(relationship.getObjectType());
        }
    }

    /**
     * Picks the display schema for an object, falling back to the core schema
     * id when the request's factory doesn't authorize the parent type.
     */
    @Override
    protected Schema getSchemaForDisplay(SchemaFactory schemaFactory, Object obj) {
        String schemaId = ApiUtils.getSchemaIdForDisplay(schemaFactory, obj);
        Schema schema = schemaFactory.getSchema(schemaId);
        if (schema == null) {
            /* Check core schema because the parent might not be authorized */
            schemaId = ApiUtils.getSchemaIdForDisplay(getObjectManager().getSchemaFactory(), obj);
            /* Still get schema from request's schemaFactory */
            schema = schemaFactory.getSchema(schemaId);
        }
        return schema;
    }

    @Override
    protected Resource constructResource(IdFormatter idFormatter, SchemaFactory schemaFactory, Schema schema, Object obj, ApiRequest apiRequest) {
        Map<String, Object> transitioningFields = metaDataManager.getTransitionFields(schema, obj);
        return ApiUtils.createResourceWithAttachments(this, apiRequest, idFormatter, schemaFactory, schema, obj, transitioningFields);
    }

    /**
     * Executes a resource action: a registered {@link ActionHandler} wins;
     * otherwise the matching process is scheduled and 202 Accepted returned.
     * An unknown process maps to 404.
     */
    @Override
    protected Object resourceActionInternal(Object obj, ApiRequest request) {
        String processName = getProcessName(obj, request);
        ActionHandler handler = actionHandlersMap.get(processName);
        if (handler != null) {
            return handler.perform(processName, obj, request);
        }
        Map<String, Object> data = CollectionUtils.toMap(request.getRequestObject());
        try {
            scheduleProcess(getProcessName(obj, request), obj, data);
        } catch (ProcessNotFoundException e) {
            throw new ClientVisibleException(ResponseCodes.NOT_FOUND);
        }
        request.setResponseCode(ResponseCodes.ACCEPTED);
        return objectManager.reload(obj);
    }

    /** Process name convention: "&lt;baseType&gt;.&lt;action&gt;" in lower case. */
    protected String getProcessName(Object obj, ApiRequest request) {
        String baseType = request.getSchemaFactory().getBaseType(request.getType());
        return String.format("%s.%s", baseType == null ? request.getType() : baseType, request.getAction()).toLowerCase();
    }

    protected void scheduleProcess(final String processName, final Object resource, final Map<String, Object> data) {
        scheduleProcess(new Runnable() {
            @Override
            public void run() {
                objectProcessManager.scheduleProcessInstance(processName, resource, data);
            }
        });
    }

    protected void scheduleProcess(final StandardProcess process, final Object resource, final Map<String, Object> data) {
        scheduleProcess(new Runnable() {
            @Override
            public void run() {
                objectProcessManager.scheduleStandardProcess(process, resource, data);
            }
        });
    }

    /**
     * Runs a scheduling task, translating busy/cancelled process failures
     * into 409 Conflict; other process failures propagate unchanged.
     */
    protected void scheduleProcess(Runnable run) {
        try {
            run.run();
        } catch (ProcessInstanceException e) {
            if (e.getExitReason() == ExitReason.RESOURCE_BUSY || e.getExitReason() == ExitReason.CANCELED) {
                throw new ClientVisibleException(ResponseCodes.CONFLICT);
            } else {
                throw e;
            }
        }
    }

    /**
     * Populates a resource's action links, filtered by the object's current
     * state against each action's declared valid states; "restore" is never
     * exposed.
     */
    @Override
    protected void addActions(Object obj, SchemaFactory schemaFactory, Schema schema, Resource resource) {
        Object state = resource.getFields().get(ObjectMetaDataManager.STATE_FIELD);
        Map<String, ActionDefinition> defs = metaDataManager.getActionDefinitions(obj);
        if (state == null || defs == null) {
            super.addActions(obj, schemaFactory, schema, resource);
            return;
        }
        Map<String, Action> actions = schema.getResourceActions();
        if (actions == null || actions.size() == 0) {
            return;
        }
        UrlBuilder urlBuilder = ApiContext.getUrlBuilder();
        for (Map.Entry<String, Action> entry : actions.entrySet()) {
            String name = entry.getKey();
            Action action = entry.getValue();
            if ("restore".equals(name) || !isValidAction(obj, action)) {
                continue;
            }
            ActionDefinition def = defs.get(name);
            if (def == null || def.getValidStates().contains(state)) {
                resource.getActions().put(name, urlBuilder.actionLink(resource, name));
            }
        }
    }

    /**
     * Checks an action's attribute-based constraints (required capability,
     * required state, or a list of valid states) against the object.
     */
    @SuppressWarnings("unchecked")
    protected boolean isValidAction(Object obj, Action action) {
        Map<String, Object> attributes = action.getAttributes();
        if (attributes == null || attributes.size() == 0) {
            return true;
        }
        String capability = Objects.toString(attributes.get("capability"), null);
        String state = Objects.toString(attributes.get(ObjectMetaDataManager.STATE_FIELD), null);
        String currentState = io.cattle.platform.object.util.ObjectUtils.getState(obj);
        if (!StringUtils.isBlank(capability) && !(ApiContext.getContext().getCapabilities(obj) != null ?
                ApiContext.getContext().getCapabilities(obj).contains(capability) :
                DataAccessor.fieldStringList(obj, ObjectMetaDataManager.CAPABILITIES_FIELD).contains(capability))) {
            return false;
        }
        if (!StringUtils.isBlank(state) && !state.equals(currentState)) {
            return false;
        }
        List<String> states = ((List<String>) attributes.get(ObjectMetaDataManager.STATES_FIELD));
        if (states != null && !states.contains(currentState)){
            return false;
        }
        return true;
    }

    /** Builds the name→handler indexes once the injected lists are available. */
    @Override
    public void start() {
        actionHandlersMap = NamedUtils.createMapByName(actionHandlers);
        linkHandlersMap = new HashMap<>();
        for (LinkHandler handler : linkHandlers) {
            for (String type : handler.getTypes()) {
                CollectionUtils.addToMap(linkHandlersMap, type, handler, ArrayList.class);
            }
        }
    }

    protected Relationship getRelationship(String type, String linkName) {
        return metaDataManager.getRelationship(type, linkName);
    }

    protected Relationship getRelationship(Class<?> clz, String linkName) {
        return metaDataManager.getRelationship(clz, linkName);
    }

    /** Collection-level actions are not supported by this base class. */
    @Override
    protected Object collectionActionInternal(Object resources, ApiRequest request) {
        return null;
    }

    @Override
    protected Map<String, String> getLinks(SchemaFactory schemaFactory, Resource resource) {
        return metaDataManager.getLinks(schemaFactory, resource.getType());
    }

    public ObjectManager getObjectManager() {
        return objectManager;
    }

    @Inject
    public void setObjectManager(ObjectManager objectManager) {
        this.objectManager = objectManager;
    }

    public ObjectMetaDataManager getMetaDataManager() {
        return metaDataManager;
    }

    @Inject
    public void setMetaDataManager(ObjectMetaDataManager metaDataManager) {
        this.metaDataManager = metaDataManager;
    }

    public ObjectProcessManager getObjectProcessManager() {
        return objectProcessManager;
    }

    @Inject
    public void setObjectProcessManager(ObjectProcessManager objectProcessManager) {
        this.objectProcessManager = objectProcessManager;
    }

    public List<ActionHandler> getActionHandlers() {
        return actionHandlers;
    }

    @Inject
    public void setActionHandlers(List<ActionHandler> actionHandlers) {
        this.actionHandlers = actionHandlers;
    }

    public List<LinkHandler> getLinkHandlers() {
        return linkHandlers;
    }

    @Inject
    public void setLinkHandlers(List<LinkHandler> linkHandlers) {
        this.linkHandlers = linkHandlers;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.functions.source;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.io.CheckpointableInputFormat;
import org.apache.flink.api.common.io.InputFormat;
import org.apache.flink.api.common.io.RichInputFormat;
import org.apache.flink.api.common.operators.MailboxExecutor;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.Counter;
import org.apache.flink.runtime.state.JavaSerializer;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.OutputTypeConfigurable;
import org.apache.flink.streaming.api.operators.StreamSourceContexts;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;
import org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorImpl;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.Preconditions;
import org.apache.flink.util.function.RunnableWithException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Set;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;
/**
* The operator that reads the {@link TimestampedFileInputSplit splits} received from the preceding
* {@link ContinuousFileMonitoringFunction}. Contrary to the {@link
* ContinuousFileMonitoringFunction} which has a parallelism of 1, this operator can have DOP > 1.
*
* <p>This implementation uses {@link MailboxExecutor} to execute each action and state machine
* approach. The workflow is the following:
*
* <ol>
* <li>start in {@link ReaderState#IDLE IDLE}
* <li>upon receiving a split add it to the queue, switch to {@link ReaderState#OPENING OPENING}
* and enqueue a {@link org.apache.flink.streaming.runtime.tasks.mailbox.Mail mail} to process
* it
* <li>open file, switch to {@link ReaderState#READING READING}, read one record, re-enqueue self
* <li>if no more records or splits available, switch back to {@link ReaderState#IDLE IDLE}
* </ol>
*
* <p>On close:
*
* <ol>
* <li>if {@link ReaderState#IDLE IDLE} then close immediately
 *   <li>otherwise switch to {@link ReaderState#FINISHING FINISHING}, call {@link
 *       MailboxExecutor#yield() yield} in a loop until state is {@link ReaderState#FINISHED FINISHED}
* <li>{@link MailboxExecutor#yield() yield()} causes remaining records (and splits) to be
* processed in the same way as above
* </ol>
*
* <p>Using {@link MailboxExecutor} allows to avoid explicit synchronization. At most one mail
* should be enqueued at any given time.
*
* <p>Using FSM approach allows to explicitly define states and enforce {@link
* ReaderState#VALID_TRANSITIONS transitions} between them.
*/
@Internal
public class ContinuousFileReaderOperator<OUT, T extends TimestampedInputSplit>
        extends AbstractStreamOperator<OUT>
        implements OneInputStreamOperator<T, OUT>, OutputTypeConfigurable<OUT> {

    private static final long serialVersionUID = 1L;

    private static final Logger LOG = LoggerFactory.getLogger(ContinuousFileReaderOperator.class);

    /**
     * States of the reader's finite state machine; legal transitions are restricted to {@link
     * #VALID_TRANSITIONS}. See the class-level javadoc for the overall workflow.
     */
    private enum ReaderState {
        IDLE {
            @Override
            public <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                    ContinuousFileReaderOperator<?, T> op) throws IOException {
                throw new IllegalStateException("not processing any records in IDLE state");
            }
        },
        /** A message is enqueued to process split, but no split is opened. */
        OPENING { // the split was added and message to itself was enqueued to process it
            @Override
            public <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                    ContinuousFileReaderOperator<?, T> op) throws IOException {
                if (op.splits.isEmpty()) {
                    // the enqueued mail found no split to open (nothing to do): go back to IDLE
                    op.switchState(ReaderState.IDLE);
                    return false;
                } else {
                    op.loadSplit(op.splits.poll());
                    op.switchState(ReaderState.READING);
                    return true;
                }
            }
        },
        /** A message is enqueued to process split and its processing was started. */
        READING {
            @Override
            public <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                    ContinuousFileReaderOperator<?, T> op) throws IOException {
                return true;
            }

            @Override
            public void onNoMoreData(ContinuousFileReaderOperator<?, ?> op) {
                op.switchState(ReaderState.IDLE);
            }
        },
        /**
         * No further processing can be done; only state disposal transition to {@link #FINISHED}
         * allowed.
         */
        FAILED {
            @Override
            public <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                    ContinuousFileReaderOperator<?, T> op) throws IOException {
                throw new IllegalStateException("not processing any records in FAILED state");
            }
        },
        /**
         * {@link #close()} was called but unprocessed data (records and splits) remains and needs
         * to be processed. {@link #close()} caller is blocked.
         */
        FINISHING {
            @Override
            public <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                    ContinuousFileReaderOperator<?, T> op) throws IOException {
                if (op.currentSplit == null && !op.splits.isEmpty()) {
                    op.loadSplit(op.splits.poll());
                }
                return true;
            }

            @Override
            public void onNoMoreData(ContinuousFileReaderOperator<?, ?> op) {
                // need one more mail to unblock possible yield() in close() method (todo: wait with
                // timeout in yield)
                op.enqueueProcessRecord();
                op.switchState(FINISHED);
            }
        },
        FINISHED {
            @Override
            public <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                    ContinuousFileReaderOperator<?, T> op) {
                LOG.warn("not processing any records while closed");
                return false;
            }
        };

        /** States in which new splits may still be offered via {@code processElement}. */
        private static final Set<ReaderState> ACCEPT_SPLITS = EnumSet.of(IDLE, OPENING, READING);

        /** Possible transition FROM each state. */
        private static final Map<ReaderState, Set<ReaderState>> VALID_TRANSITIONS;

        static {
            Map<ReaderState, Set<ReaderState>> tmpTransitions = new HashMap<>();
            tmpTransitions.put(IDLE, EnumSet.of(OPENING, FINISHED, FAILED));
            tmpTransitions.put(OPENING, EnumSet.of(READING, FINISHING, FAILED));
            tmpTransitions.put(READING, EnumSet.of(IDLE, OPENING, FINISHING, FAILED));
            tmpTransitions.put(FINISHING, EnumSet.of(FINISHED, FAILED));
            tmpTransitions.put(FAILED, EnumSet.of(FINISHED));
            tmpTransitions.put(FINISHED, EnumSet.noneOf(ReaderState.class));
            VALID_TRANSITIONS = new EnumMap<>(tmpTransitions);
        }

        public boolean isAcceptingSplits() {
            return ACCEPT_SPLITS.contains(this);
        }

        public final boolean isTerminal() {
            return this == FINISHED;
        }

        public boolean canSwitchTo(ReaderState next) {
            return VALID_TRANSITIONS
                    .getOrDefault(this, EnumSet.noneOf(ReaderState.class))
                    .contains(next);
        }

        /**
         * Prepare to process new record OR split.
         *
         * @return true if should read the record
         */
        public abstract <T extends TimestampedInputSplit> boolean prepareToProcessRecord(
                ContinuousFileReaderOperator<?, T> op) throws IOException;

        /** Hook invoked when the current split is exhausted and no further splits are queued. */
        public void onNoMoreData(ContinuousFileReaderOperator<?, ?> op) {}
    }

    private transient InputFormat<OUT, ? super T> format;
    private TypeSerializer<OUT> serializer;
    private transient MailboxExecutorImpl executor;
    private transient OUT reusedRecord;
    private transient SourceFunction.SourceContext<OUT> sourceContext;
    private transient ListState<T> checkpointedState;

    /** MUST only be changed via {@link #switchState(ReaderState) switchState}. */
    private transient ReaderState state = ReaderState.IDLE;

    private transient PriorityQueue<T> splits = new PriorityQueue<>();
    private transient T
            currentSplit; // can't work just on queue tail because it can change because it's PQ
    private transient Counter completedSplitsCounter;

    /** Action executed through the mailbox; any failure moves the FSM to FAILED. */
    private final transient RunnableWithException processRecordAction =
            () -> {
                try {
                    processRecord();
                } catch (Exception e) {
                    switchState(ReaderState.FAILED);
                    throw e;
                }
            };

    ContinuousFileReaderOperator(
            InputFormat<OUT, ? super T> format,
            ProcessingTimeService processingTimeService,
            MailboxExecutor mailboxExecutor) {
        this.format = checkNotNull(format);
        this.processingTimeService = checkNotNull(processingTimeService);
        this.executor = (MailboxExecutorImpl) checkNotNull(mailboxExecutor);
    }

    @Override
    public void initializeState(StateInitializationContext context) throws Exception {
        super.initializeState(context);
        checkState(checkpointedState == null, "The reader state has already been initialized.");
        // We are using JavaSerializer from the flink-runtime module here. This is very naughty and
        // we shouldn't be doing it because ideally nothing in the API modules/connector depends
        // directly on flink-runtime. We are doing it here because we need to maintain backwards
        // compatibility with old state and because we will have to rework/remove this code soon.
        checkpointedState =
                context.getOperatorStateStore()
                        .getListState(new ListStateDescriptor<>("splits", new JavaSerializer<>()));
        int subtaskIdx = getRuntimeContext().getIndexOfThisSubtask();
        if (!context.isRestored()) {
            LOG.info(
                    "No state to restore for the {} (taskIdx={}).",
                    getClass().getSimpleName(),
                    subtaskIdx);
            return;
        }
        LOG.info(
                "Restoring state for the {} (taskIdx={}).", getClass().getSimpleName(), subtaskIdx);
        splits = splits == null ? new PriorityQueue<>() : splits;
        for (T split : checkpointedState.get()) {
            splits.add(split);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(
                    "{} (taskIdx={}) restored {}.", getClass().getSimpleName(), subtaskIdx, splits);
        }
    }

    @Override
    public void open() throws Exception {
        super.open();
        checkState(
                this.serializer != null,
                "The serializer has not been set. "
                        + "Probably the setOutputType() was not called. Please report it.");
        this.state = ReaderState.IDLE;
        if (this.format instanceof RichInputFormat) {
            ((RichInputFormat<?, ?>) this.format).setRuntimeContext(getRuntimeContext());
        }
        this.format.configure(new Configuration());
        this.sourceContext =
                StreamSourceContexts.getSourceContext(
                        getOperatorConfig().getTimeCharacteristic(),
                        getProcessingTimeService(),
                        new Object(), // no actual locking needed
                        output,
                        getRuntimeContext().getExecutionConfig().getAutoWatermarkInterval(),
                        -1,
                        true);
        this.reusedRecord = serializer.createInstance();
        this.completedSplitsCounter = getMetricGroup().counter("numSplitsProcessed");
        this.splits = this.splits == null ? new PriorityQueue<>() : this.splits;
        // splits restored in initializeState must be processed without waiting for new elements
        if (!splits.isEmpty()) {
            enqueueProcessRecord();
        }
    }

    @Override
    public void processElement(StreamRecord<T> element) throws Exception {
        Preconditions.checkState(state.isAcceptingSplits());
        splits.offer(element.getValue());
        if (state == ReaderState.IDLE) {
            enqueueProcessRecord();
        }
    }

    /** Enqueues exactly one mail to (re)start record processing; IDLE becomes OPENING. */
    private void enqueueProcessRecord() {
        Preconditions.checkState(
                !state.isTerminal(), "can't enqueue mail in terminal state %s", state);
        executor.execute(processRecordAction, "ContinuousFileReaderOperator");
        if (state == ReaderState.IDLE) {
            switchState(ReaderState.OPENING);
        }
    }

    /**
     * Reads records from the current split while the mailbox is otherwise idle; re-enqueues itself
     * when other mails are pending so that checkpoints etc. can interleave.
     */
    private void processRecord() throws IOException {
        do {
            if (!state.prepareToProcessRecord(this)) {
                return;
            }
            readAndCollectRecord();
            if (format.reachedEnd()) {
                onSplitProcessed();
                return;
            }
        } while (executor
                .isIdle()); // todo: consider moving this loop into MailboxProcessor (return boolean
        // "re-execute" from enqueued action)
        enqueueProcessRecord();
    }

    private void onSplitProcessed() throws IOException {
        completedSplitsCounter.inc();
        LOG.debug("split {} processed: {}", completedSplitsCounter.getCount(), currentSplit);
        format.close();
        currentSplit = null;
        if (splits.isEmpty()) {
            state.onNoMoreData(this);
            return;
        }
        if (state == ReaderState.READING) {
            switchState(ReaderState.OPENING);
        }
        enqueueProcessRecord();
    }

    /** Reads a single record from the current split and emits it (null records are skipped). */
    private void readAndCollectRecord() throws IOException {
        Preconditions.checkState(
                state == ReaderState.READING || state == ReaderState.FINISHING,
                "can't process record in state %s",
                state);
        if (format.reachedEnd()) {
            return;
        }
        OUT out = format.nextRecord(this.reusedRecord);
        if (out != null) {
            sourceContext.collect(out);
        }
    }

    /** Opens the given split, resuming from its checkpointed offset if the format supports it. */
    private void loadSplit(T split) throws IOException {
        Preconditions.checkState(
                state != ReaderState.READING && state != ReaderState.FINISHED,
                "can't load split in state %s",
                state);
        Preconditions.checkNotNull(split, "split is null");
        LOG.debug("load split: {}", split);
        currentSplit = split;
        if (this.format instanceof RichInputFormat) {
            ((RichInputFormat<?, ?>) this.format).openInputFormat();
        }
        if (format instanceof CheckpointableInputFormat && currentSplit.getSplitState() != null) {
            // recovering after a node failure with an input
            // format that supports resetting the offset
            ((CheckpointableInputFormat<T, Serializable>) format)
                    .reopen(currentSplit, currentSplit.getSplitState());
        } else {
            // we either have a new split, or we recovered from a node
            // failure but the input format does not support resetting the offset.
            format.open(currentSplit);
        }
        // reset the restored state to null for the next iteration
        currentSplit.resetSplitState();
    }

    /** The only place where {@link #state} may be mutated; enforces the transition table. */
    private void switchState(ReaderState newState) {
        if (state != newState) {
            Preconditions.checkState(
                    state.canSwitchTo(newState),
                    "can't switch state from %s to %s",
                    state,
                    newState);
            LOG.debug("switch state: {} -> {}", state, newState);
            state = newState;
        }
    }

    @Override
    public void processWatermark(Watermark mark) throws Exception {
        // we do nothing because we emit our own watermarks if needed.
    }

    @Override
    public void finish() throws Exception {
        LOG.debug("finishing");
        super.finish();
        switch (state) {
            case IDLE:
                switchState(ReaderState.FINISHED);
                break;
            case FINISHED:
                LOG.warn("operator is already closed, doing nothing");
                return;
            default:
                switchState(ReaderState.FINISHING);
                // drain remaining records/splits; each yield() processes one enqueued mail
                while (!state.isTerminal()) {
                    executor.yield();
                }
        }
        try {
            sourceContext.emitWatermark(Watermark.MAX_WATERMARK);
        } catch (Exception e) {
            LOG.warn("unable to emit watermark while closing", e);
        }
    }

    @Override
    public void close() throws Exception {
        Exception e = null;
        try {
            cleanUp();
        } catch (Exception ex) {
            e = ex;
        }
        // drop all references so everything can be GC'd promptly after close
        {
            checkpointedState = null;
            completedSplitsCounter = null;
            currentSplit = null;
            executor = null;
            format = null;
            sourceContext = null;
            reusedRecord = null;
            serializer = null;
            splits = null;
        }
        try {
            super.close();
        } catch (Exception ex) {
            e = ExceptionUtils.firstOrSuppressed(ex, e);
        }
        if (e != null) {
            throw e;
        }
    }

    /** Closes the source context and input format; the first exception wins, later ones are suppressed. */
    private void cleanUp() throws Exception {
        LOG.debug("cleanup, state={}", state);
        RunnableWithException[] runClose = {
            () -> sourceContext.close(),
            () -> format.close(),
            () -> {
                if (this.format instanceof RichInputFormat) {
                    ((RichInputFormat<?, ?>) this.format).closeInputFormat();
                }
            }
        };
        Exception firstException = null;
        for (RunnableWithException r : runClose) {
            try {
                r.run();
            } catch (Exception e) {
                firstException = ExceptionUtils.firstOrSuppressed(e, firstException);
            }
        }
        currentSplit = null;
        if (firstException != null) {
            throw firstException;
        }
    }

    @Override
    public void snapshotState(StateSnapshotContext context) throws Exception {
        super.snapshotState(context);
        checkState(
                checkpointedState != null, "The operator state has not been properly initialized.");
        int subtaskIdx = getRuntimeContext().getIndexOfThisSubtask();
        checkpointedState.clear();
        List<T> readerState = getReaderState();
        try {
            for (T split : readerState) {
                checkpointedState.add(split);
            }
        } catch (Exception e) {
            checkpointedState.clear();
            throw new Exception(
                    "Could not add timestamped file input splits to operator "
                            + "state backend of operator "
                            + getOperatorName()
                            + '.',
                    e);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug(
                    "{} (taskIdx={}) checkpointed {} splits: {}.",
                    getClass().getSimpleName(),
                    subtaskIdx,
                    readerState.size(),
                    readerState);
        }
    }

    /**
     * Snapshot of the current split (with its reader offset, when checkpointable) followed by all
     * pending splits.
     */
    private List<T> getReaderState() throws IOException {
        List<T> snapshot = new ArrayList<>(splits.size());
        if (currentSplit != null) {
            if (this.format instanceof CheckpointableInputFormat && state == ReaderState.READING) {
                Serializable formatState =
                        ((CheckpointableInputFormat<T, Serializable>) this.format)
                                .getCurrentState();
                this.currentSplit.setSplitState(formatState);
            }
            snapshot.add(this.currentSplit);
        }
        snapshot.addAll(splits);
        return snapshot;
    }

    @Override
    public void setOutputType(TypeInformation<OUT> outTypeInfo, ExecutionConfig executionConfig) {
        this.serializer = outTypeInfo.createSerializer(executionConfig);
    }
}
| |
package io.zrz.graphql.zulu.engine;
import java.util.Objects;
import javax.annotation.Nullable;
import io.zrz.graphql.core.parser.GQLException;
import io.zrz.graphql.core.parser.GQLSourceLocation;
import io.zrz.graphql.core.parser.Lexer.LineInfo;
import io.zrz.graphql.core.parser.SyntaxErrorException;
import io.zrz.graphql.zulu.doc.DefaultGQLPreparedOperation.OpInputField;
import io.zrz.graphql.zulu.doc.GQLPreparedDocument;
import io.zrz.graphql.zulu.doc.GQLPreparedSelection;
import io.zrz.graphql.zulu.executable.ExecutableElement;
import io.zrz.graphql.zulu.executable.ExecutableOutputField;
import io.zrz.graphql.zulu.executable.ExecutableOutputFieldParam;
import io.zrz.graphql.zulu.executable.ExecutableReceiverType;
import io.zrz.graphql.zulu.executable.ExecutableType;
import io.zrz.zulu.types.ZField;
/**
 * A warning or error produced while preparing or executing a GraphQL operation. Implementations
 * carry the warning kind, an optional source location/selection, and an optional cause.
 */
public interface ZuluWarning {

  /** Raised when the provided input value's type is incompatible with the parameter's type. */
  public class IncompatibleTypes extends AbstractWarning<ExecutableOutputFieldParam> {

    private final ZField provided;

    public IncompatibleTypes(final ExecutableOutputFieldParam param, final ZField provided, final GQLPreparedSelection sel) {
      super(ZuluWarningKind.INCOMPATIBLE_TYPE, param, sel);
      this.provided = provided;
    }

    @Override
    public String detail() {
      return "input of type " + provided.fieldType() + " for argument '" + type.fieldName() + "' of "
          + type.enclosingType().typeName()
          + "."
          + type.enclosingField().fieldName()
          + " is incompatible with required type "
          + type.fieldType().logicalType();
    }

    @Override
    public ExecutableType context() {
      return this.type.enclosingType();
    }

  }

  /** Raised when a type name referenced by the document is not known to the schema. */
  public class UnknownTypeSymbol extends AbstractWarning<ExecutableElement> {

    private final String symbol;

    public UnknownTypeSymbol(final ExecutableElement context, final String symbol, final GQLPreparedSelection sel) {
      super(ZuluWarningKind.UNKNOWN_TYPE, context, sel);
      this.symbol = symbol;
    }

    @Override
    public String detail() {
      return "type '" + symbol + "' is unknown";
    }

    @Override
    public ExecutableType context() {
      return null;
    }

  }

  /** A document-level warning (no selection or source location available). */
  public class DocumentWarning implements ZuluWarning {

    private final GQLPreparedDocument doc;
    private final ZuluWarningKind kind;
    private final String operationName;

    public DocumentWarning(final ZuluWarningKind kind, final GQLPreparedDocument doc, final String operationName) {
      this.doc = doc;
      this.kind = kind;
      this.operationName = operationName;
    }

    public DocumentWarning(final ZuluWarningKind kind, final GQLPreparedDocument doc) {
      this(kind, doc, null);
    }

    @Override
    public String detail() {
      return kind.detail(this);
    }

    /** The operation this warning refers to; may be null when the document has a single operation. */
    public String operationName() {
      return this.operationName;
    }

    @Override
    public ExecutableType context() {
      return null;
    }

    @Override
    public ZuluWarningKind warningKind() {
      return this.kind;
    }

    @Override
    public GQLSourceLocation sourceLocation() {
      return null;
    }

    @Override
    public GQLPreparedSelection selection() {
      return null;
    }

    @Override
    public Throwable cause() {
      return null;
    }

  }

  /** A warning produced while parsing raw query text; wraps the parser exception. */
  public class ParseWarning implements ZuluWarning {

    private final ZuluWarningKind kind;
    private final String input;
    private final GQLException error;

    public ParseWarning(final ZuluWarningKind kind, final String input, final GQLException error) {
      this.error = error;
      this.input = input;
      this.kind = kind;
    }

    @Override
    public String detail() {
      return kind.detail(this);
    }

    /** The raw query text that failed to parse. */
    public String input() {
      return this.input;
    }

    @Override
    public ExecutableType context() {
      return null;
    }

    @Override
    public ZuluWarningKind warningKind() {
      return this.kind;
    }

    @Override
    public GQLSourceLocation sourceLocation() {
      // only syntax errors carry line information; other parse failures have no location
      if (error instanceof SyntaxErrorException) {
        final SyntaxErrorException err = (SyntaxErrorException) error;
        final LineInfo info = err.lineInfo();
        if (info == null) {
          return null;
        }
        return GQLSourceLocation.builder()
            .lineNumber(info.lineNumber())
            .lineOffset(info.lineOffset() + 1)
            .input(info.source())
            .sourceOffset(0)
            .build();
      }
      return null;
    }

    @Override
    public GQLPreparedSelection selection() {
      return null;
    }

    @Override
    public Throwable cause() {
      return error;
    }

    @Override
    public String toString() {
      // separator between input and cause added; previously they were fused together
      return "ParseWarning[" + kind + "](" + detail() + "): " + input + ", cause: " + cause();
    }

  }

  ZuluWarningKind warningKind();

  String detail();

  @Nullable
  GQLSourceLocation sourceLocation();

  ExecutableElement context();

  GQLPreparedSelection selection();

  Throwable cause();

  /** Base class carrying the kind, the offending element, an optional selection and cause. */
  abstract class AbstractWarning<T extends ExecutableElement> implements ZuluWarning {

    protected ZuluWarningKind kind;
    protected GQLPreparedSelection sel;
    protected T type;
    protected Throwable cause;

    public AbstractWarning(final ZuluWarningKind kind, final T type) {
      this.kind = kind;
      this.type = type;
    }

    public AbstractWarning(final ZuluWarningKind kind, final T type, final GQLPreparedSelection sel) {
      this.kind = kind;
      this.sel = sel;
      this.type = type;
    }

    public AbstractWarning(final ZuluWarningKind kind, final ZuluSelection sel, final T type) {
      this.kind = kind;
      this.sel = sel;
      this.type = type;
    }

    @Override
    public GQLSourceLocation sourceLocation() {
      if (sel != null) {
        return sel.sourceLocation();
      }
      return null;
    }

    /** The schema element this warning is attached to. */
    public T element() {
      return this.type;
    }

    @Override
    public ZuluWarningKind warningKind() {
      return kind;
    }

    @Override
    public String detail() {
      return kind.toString();
    }

    @Override
    public GQLPreparedSelection selection() {
      return sel;
    }

    @Override
    public Throwable cause() {
      return cause;
    }

    @Override
    public String toString() {
      final StringBuilder sb = new StringBuilder();
      if (sourceLocation() != null) {
        sb.append("(line ");
        sb.append(sourceLocation().lineNumber());
        sb.append(", col ");
        sb.append(sourceLocation().lineOffset());
        sb.append(", selection ");
        sb.append(selection().path());
        sb.append("): ");
      }
      sb.append(detail());
      return sb.toString();
    }

  }

  /** A warning attached to an output (receiver) type. */
  public class OutputTypeWarning extends AbstractWarning<ExecutableReceiverType> {

    private final ExecutableReceiverType type;

    public OutputTypeWarning(final ZuluWarningKind kind, final ExecutableReceiverType type, final GQLPreparedSelection sel) {
      super(kind, type, sel);
      this.type = type;
    }

    @Override
    public ExecutableType context() {
      return this.type;
    }

    @Override
    public String detail() {
      return kind.detail(this);
    }

  }

  /** A warning attached to an output field, with an optional free-form message override. */
  public class OutputFieldWarning extends AbstractWarning<ExecutableOutputField> {

    private String message;

    public OutputFieldWarning(final ZuluWarningKind kind, final ExecutableOutputField field, final GQLPreparedSelection sel) {
      super(kind, field, sel);
    }

    public OutputFieldWarning(final ZuluWarningKind kind, final ExecutableOutputField field, final GQLPreparedSelection sel, final String message) {
      super(kind, field, sel);
      this.message = message;
    }

    @Override
    public ExecutableType context() {
      return this.type.receiverType();
    }

    @Override
    public String detail() {
      if (this.message != null) {
        return this.message;
      }
      return kind.detail(this);
    }

  }

  /** Raised when a selection names a field that does not exist on the receiver type. */
  public class MissingField extends AbstractWarning<ExecutableReceiverType> {

    public MissingField(final ExecutableReceiverType type, final GQLPreparedSelection sel) {
      super(ZuluWarningKind.UNKNOWN_FIELD, type, sel);
    }

    @Override
    public String detail() {
      return "field " + type.typeName() + "." + sel.fieldName() + " doesn't exist";
    }

    @Override
    public ExecutableType context() {
      return this.type;
    }

    @Override
    public GQLPreparedSelection selection() {
      return sel;
    }

  }

  /** Raised when a required field argument was not supplied. */
  public class MissingRequiredParameter extends AbstractWarning<ExecutableOutputFieldParam> {

    private final ExecutableOutputFieldParam param;

    public MissingRequiredParameter(final ExecutableOutputFieldParam param, final GQLPreparedSelection sel) {
      super(ZuluWarningKind.MISSING_PARAMETER, param, sel);
      this.param = param;
    }

    @Override
    public String detail() {
      return "missing required parameter '" + param.fieldName() + "' for "
          + param.enclosingType().typeName()
          + "."
          + param.enclosingField().fieldName();
    }

    @Override
    public ExecutableType context() {
      return this.param.enclosingType();
    }

  }

  /** Raised when a required operation variable was not bound. */
  public class MissingRequiredVariable extends AbstractWarning<ZuluExecutable> {

    private final OpInputField param;

    public MissingRequiredVariable(final OpInputField param, final ZuluExecutable executable) {
      super(ZuluWarningKind.MISSING_VARIABLE, executable);
      this.param = param;
    }

    @Override
    public String detail() {
      return "missing required input variable '" + param.fieldName() + "'";
    }

    @Override
    public ExecutableElement context() {
      return null;
    }

  }

  /** An exception thrown by a resolver while executing a selection. */
  public class ExecutionError extends AbstractWarning<ExecutableElement> {

    private final Throwable error;
    private final Object context;
    private final ExecutableReceiverType type;
    private final ZuluSelection selection;

    public ExecutionError(final ZuluLeafSelection leaf, final Throwable ex, final Object context) {
      super(ZuluWarningKind.INVOCATION_EXCEPTION, leaf, leaf.element());
      this.error = ex;
      this.context = context;
      this.type = leaf.contextType();
      this.selection = leaf;
    }

    public ExecutionError(final ZuluContainerSelection container, final Throwable ex, final Object context) {
      super(ZuluWarningKind.INVOCATION_EXCEPTION, container, container.element());
      this.error = ex;
      this.context = context;
      this.type = container.contextType();
      this.selection = container;
    }

    @Override
    public ZuluSelection selection() {
      return this.selection;
    }

    @Override
    public ExecutableType context() {
      return this.type;
    }

    @Override
    public Throwable cause() {
      return this.error;
    }

    @Override
    public String detail() {
      if (error.getMessage() == null) {
        return error.toString();
      }
      return error.getMessage();
    }

  }

  /** An internal engine error; records the construction-site stack trace for diagnostics. */
  public class InternalError extends AbstractWarning<ExecutableElement> {

    private final Throwable error;
    private final String detail;
    private final ExecutableReceiverType etype;

    public InternalError(final ExecutableOutputFieldParam param, final GQLPreparedSelection sel, final String message) {
      super(ZuluWarningKind.INTERNAL_ERROR, param, sel);
      this.etype = param.enclosingType();
      this.detail = Objects.requireNonNull(message);
      // capture the stack trace at the point of construction; instantiating the exception
      // directly is equivalent to the previous throw/catch dance but clearer.
      this.error = new RuntimeException();
    }

    @Override
    public ExecutableType context() {
      return this.etype;
    }

    @Override
    public Throwable cause() {
      return this.error;
    }

    @Override
    public String detail() {
      if (this.detail != null) {
        return this.detail;
      }
      if (error.getMessage() == null) {
        return error.toString();
      }
      return error.getMessage();
    }

  }

}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* %%Ignore-License
*/
package com.spotify.apollo;
import java.net.URI;
import java.net.URLDecoder;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Splits an HTTP query string into a path string and key-value parameter pairs.
* This decoder is for one time use only. Create a new instance for each URI:
* <pre>
* {@link QueryStringDecoder} decoder = new {@link QueryStringDecoder}("/hello?recipient=world&x=1;y=2");
* assert decoder.path().equals("/hello");
* assert decoder.parameters().get("recipient").get(0).equals("world");
* assert decoder.parameters().get("x").get(0).equals("1");
* assert decoder.parameters().get("y").get(0).equals("2");
* </pre>
*
* This decoder can also decode the content of an HTTP POST request whose
* content type is <tt>application/x-www-form-urlencoded</tt>:
* <pre>
* {@link QueryStringDecoder} decoder = new {@link QueryStringDecoder}("recipient=world&x=1;y=2", false);
* ...
* </pre>
*
* <h3>HashDOS vulnerability fix</h3>
*
* As a workaround to the <a href="http://netty.io/s/hashdos">HashDOS</a> vulnerability, the decoder
* limits the maximum number of decoded key-value parameter pairs, up to {@literal 1024} by
* default, and you can configure it when you construct the decoder by passing an additional
* integer parameter.
*/
class QueryStringDecoder {
private static final int DEFAULT_MAX_PARAMS = 1024;
private static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");
private final Charset charset;
private final String uri;
private final boolean hasPath;
private final int maxParams;
private String path;
private Map<String, List<String>> params;
private int nParams;
    /**
     * Creates a new decoder that decodes the specified URI. The decoder will
     * assume that the query string is encoded in UTF-8.
     */
    public QueryStringDecoder(String uri) {
        this(uri, DEFAULT_CHARSET);
    }
    /**
     * Creates a new decoder that decodes the specified URI, assuming UTF-8
     * encoding. {@code hasPath} indicates whether the given string starts with
     * a path component (true) or is a bare query string (false).
     */
    public QueryStringDecoder(String uri, boolean hasPath) {
        this(uri, DEFAULT_CHARSET, hasPath);
    }
    /**
     * Creates a new decoder that decodes the specified URI encoded in the
     * specified charset.
     */
    public QueryStringDecoder(String uri, Charset charset) {
        this(uri, charset, true);
    }
    /**
     * Creates a new decoder that decodes the specified URI encoded in the
     * specified charset. {@code hasPath} indicates whether the given string
     * starts with a path component (true) or is a bare query string (false).
     */
    public QueryStringDecoder(String uri, Charset charset, boolean hasPath) {
        this(uri, charset, hasPath, DEFAULT_MAX_PARAMS);
    }
/**
* Creates a new decoder that decodes the specified URI encoded in the
* specified charset.
*/
public QueryStringDecoder(String uri, Charset charset, boolean hasPath, int maxParams) {
if (uri == null) {
throw new NullPointerException("getUri");
}
if (charset == null) {
throw new NullPointerException("charset");
}
if (maxParams <= 0) {
throw new IllegalArgumentException(
"maxParams: " + maxParams + " (expected: a positive integer)");
}
this.uri = uri;
this.charset = charset;
this.maxParams = maxParams;
this.hasPath = hasPath;
}
    /**
     * Creates a new decoder that decodes the specified URI. The decoder will
     * assume that the query string is encoded in UTF-8.
     */
    public QueryStringDecoder(URI uri) {
        this(uri, DEFAULT_CHARSET);
    }
    /**
     * Creates a new decoder that decodes the specified URI encoded in the
     * specified charset.
     */
    public QueryStringDecoder(URI uri, Charset charset) {
        this(uri, charset, DEFAULT_MAX_PARAMS);
    }
/**
* Creates a new decoder that decodes the specified URI encoded in the
* specified charset.
*/
public QueryStringDecoder(URI uri, Charset charset, int maxParams) {
if (uri == null) {
throw new NullPointerException("getUri");
}
if (charset == null) {
throw new NullPointerException("charset");
}
if (maxParams <= 0) {
throw new IllegalArgumentException(
"maxParams: " + maxParams + " (expected: a positive integer)");
}
String rawPath = uri.getRawPath();
if (rawPath != null) {
hasPath = true;
} else {
rawPath = "";
hasPath = false;
}
// Also take care of cut of things like "http://localhost"
this.uri = rawPath + (uri.getRawQuery() == null? "" : '?' + uri.getRawQuery());
this.charset = charset;
this.maxParams = maxParams;
}
    /**
     * Returns the uri used to initialize this {@link QueryStringDecoder}.
     * For the {@link URI}-based constructors this is the reconstructed
     * raw-path + raw-query form, not the original input.
     */
    public String uri() {
        return uri;
    }
/**
* Returns the decoded path string of the URI.
*/
public String path() {
if (path == null) {
if (!hasPath) {
return path = "";
}
int pathEndPos = uri.indexOf('?');
if (pathEndPos < 0) {
path = uri;
} else {
return path = uri.substring(0, pathEndPos);
}
}
return path;
}
/**
* Returns the decoded key-value parameter pairs of the URI.
*/
public Map<String, List<String>> parameters() {
if (params == null) {
if (hasPath) {
int pathLength = path().length();
if (uri.length() == pathLength) {
return Collections.emptyMap();
}
decodeParams(uri.substring(pathLength + 1));
} else {
if (uri.isEmpty()) {
return Collections.emptyMap();
}
decodeParams(uri);
}
}
return params;
}
/**
 * Splits the given query string into the {@link #params} map. Pairs are
 * separated by '&amp;' or ';' (see the W3C note linked below); a name that
 * appears without '=' is stored with an empty value. Decoding stops
 * silently once {@code maxParams} entries have been added (see
 * {@link #addParam}).
 */
private void decodeParams(String s) {
    Map<String, List<String>> params = this.params = new LinkedHashMap<String, List<String>>();
    nParams = 0;
    String name = null;
    int pos = 0; // Beginning of the unprocessed region
    int i; // End of the unprocessed region
    char c; // Current character
    for (i = 0; i < s.length(); i++) {
        c = s.charAt(i);
        if (c == '=' && name == null) {
            if (pos != i) {
                name = decodeComponent(s.substring(pos, i), charset);
            }
            pos = i + 1;
            // http://www.w3.org/TR/html401/appendix/notes.html#h-B.2.2
        } else if (c == '&' || c == ';') {
            if (name == null && pos != i) {
                // We haven't seen an `=' so far but moved forward.
                // Must be a param of the form '&a&' so add it with
                // an empty value.
                if (!addParam(params, decodeComponent(s.substring(pos, i), charset), "")) {
                    return; // maxParams reached - stop decoding
                }
            } else if (name != null) {
                if (!addParam(params, name, decodeComponent(s.substring(pos, i), charset))) {
                    return; // maxParams reached - stop decoding
                }
                name = null;
            }
            pos = i + 1;
        }
    }
    // Handle the trailing pair (the string does not end with a separator).
    if (pos != i) { // Are there characters we haven't dealt with?
        if (name == null) { // Yes and we haven't seen any `='.
            addParam(params, decodeComponent(s.substring(pos, i), charset), "");
        } else { // Yes and this must be the last value.
            addParam(params, name, decodeComponent(s.substring(pos, i), charset));
        }
    } else if (name != null) { // Have we seen a name without value?
        addParam(params, name, "");
    }
}
/**
 * Appends {@code value} to the list of values registered for {@code name},
 * creating the list if necessary.
 *
 * @return {@code false} once the {@code maxParams} limit has been reached
 *         (the pair is then NOT added), {@code true} otherwise
 */
private boolean addParam(Map<String, List<String>> params, String name, String value) {
    // Refuse to grow past the configured limit.
    if (nParams >= maxParams) {
        return false;
    }
    List<String> bucket = params.get(name);
    if (bucket == null) {
        // Most parameters appear only once, so start with capacity 1.
        bucket = new ArrayList<String>(1);
        params.put(name, bucket);
    }
    bucket.add(value);
    nParams++;
    return true;
}
/**
 * Decodes a bit of an URL encoded by a browser.
 * <p>
 * This is equivalent to calling {@link #decodeComponent(String, Charset)}
 * with the UTF-8 charset (recommended to comply with RFC 3986, Section 2).
 * @param s The string to decode (can be empty).
 * @return The decoded string, or {@code s} if there's nothing to decode.
 * If the string to decode is {@code null}, returns an empty string.
 * @throws IllegalArgumentException if the string contains a malformed
 * escape sequence.
 */
public static String decodeComponent(final String s) {
    return decodeComponent(s, DEFAULT_CHARSET);
}
/**
 * Decodes a bit of an URL encoded by a browser.
 * <p>
 * The string is expected to be encoded as per RFC 3986, Section 2.
 * This is the encoding used by JavaScript functions {@code encodeURI}
 * and {@code encodeURIComponent}, but not {@code escape}. For example
 * in this encoding, é (in Unicode {@code U+00E9} or in UTF-8
 * {@code 0xC3 0xA9}) is encoded as {@code %C3%A9} or {@code %c3%a9}.
 * <p>
 * This is essentially equivalent to calling
 * {@link URLDecoder#decode(String, String) URLDecoder.decode(s, charset.name())}
 * except that it's over 2x faster and generates less garbage for the GC.
 * Actually this function doesn't allocate any memory if there's nothing
 * to decode, the argument itself is returned.
 * @param s The string to decode (can be empty).
 * @param charset The charset to use to decode the string (should really
 * be {@code UTF_8}).
 * @return The decoded string, or {@code s} if there's nothing to decode.
 * If the string to decode is {@code null}, returns an empty string.
 * @throws IllegalArgumentException if the string contains a malformed
 * escape sequence.
 */
public static String decodeComponent(final String s, final Charset charset) {
    if (s == null) {
        return "";
    }
    final int size = s.length();
    boolean modified = false;
    // Fast path: scan for characters that need decoding; if none are found,
    // return the input unchanged without allocating anything.
    for (int i = 0; i < size; i++) {
        final char c = s.charAt(i);
        if (c == '%' || c == '+') {
            modified = true;
            break;
        }
    }
    if (!modified) {
        return s;
    }
    // Decode into a byte buffer first; the bytes are re-interpreted in the
    // requested charset at the end (handles multi-byte escapes like %C3%A9).
    final byte[] buf = new byte[size];
    int pos = 0; // position in `buf'.
    for (int i = 0; i < size; i++) {
        char c = s.charAt(i);
        switch (c) {
            case '+':
                buf[pos++] = ' '; // "+" -> " "
                break;
            case '%':
                if (i == size - 1) {
                    throw new IllegalArgumentException("unterminated escape"
                            + " sequence at end of string: " + s);
                }
                c = s.charAt(++i);
                if (c == '%') {
                    buf[pos++] = '%'; // "%%" -> "%"
                    break;
                }
                if (i == size - 1) {
                    throw new IllegalArgumentException("partial escape"
                            + " sequence at end of string: " + s);
                }
                c = decodeHexNibble(c);
                final char c2 = decodeHexNibble(s.charAt(++i));
                if (c == Character.MAX_VALUE || c2 == Character.MAX_VALUE) {
                    throw new IllegalArgumentException(
                            "invalid escape sequence `%" + s.charAt(i - 1)
                            + s.charAt(i) + "' at index " + (i - 2)
                            + " of: " + s);
                }
                // Combine the two nibbles into one byte, then store it via the
                // default case below.
                c = (char) (c * 16 + c2);
                // Fall through.
            default:
                buf[pos++] = (byte) c;
                break;
        }
    }
    return new String(buf, 0, pos, charset);
}
/**
 * Helper to decode half of a hexadecimal number from a string.
 *
 * @param c the ASCII character of the hexadecimal digit to decode;
 *          expected to be in the range {@code [0-9a-fA-F]}
 * @return the value of the hexadecimal digit, or
 *         {@link Character#MAX_VALUE} if the character is not a valid digit
 */
private static char decodeHexNibble(final char c) {
    if (c >= '0' && c <= '9') {
        return (char) (c - '0');
    }
    if (c >= 'a' && c <= 'f') {
        return (char) (c - 'a' + 10);
    }
    if (c >= 'A' && c <= 'F') {
        return (char) (c - 'A' + 10);
    }
    // Not a hex digit - signal the error to the caller.
    return Character.MAX_VALUE;
}
}
| |
/*
* Copyright 2006 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.intelliLang.util;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiConstantEvaluationHelperImpl;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashSet;
import java.util.Set;
/**
 * Contains some extended utility functions for dealing with annotations.
 */
public class AnnotationUtilEx {
    private static final PsiConstantEvaluationHelperImpl CONSTANT_EVALUATION_HELPER = new PsiConstantEvaluationHelperImpl();

    // Utility class: no instances.
    private AnnotationUtilEx() {
    }

    /**
     * Controls how {@link #getAnnotatedElementFor} resolves an element: via its
     * declaration (reference resolution), via its usage context, or both.
     *
     * @see AnnotationUtilEx#getAnnotatedElementFor(com.intellij.psi.PsiElement, LookupType)
     */
    public enum LookupType {
        PREFER_CONTEXT, PREFER_DECLARATION, CONTEXT_ONLY, DECLARATION_ONLY
    }

    /**
     * Determines the PsiModifierListOwner for the passed element depending of the specified LookupType. The LookupType
     * decides whether to prefer the element a reference expressions resolves to, or the element that is implied by the
     * usage context ("expected type").
     */
    @Nullable
    public static PsiModifierListOwner getAnnotatedElementFor(@Nullable PsiElement element, LookupType type) {
        while (element != null) {
            // Declaration lookup first: resolve a reference expression to its declaration.
            if (type == LookupType.PREFER_DECLARATION || type == LookupType.DECLARATION_ONLY) {
                if (element instanceof PsiReferenceExpression) {
                    final PsiElement e = ((PsiReferenceExpression)element).resolve();
                    if (e instanceof PsiModifierListOwner) {
                        return (PsiModifierListOwner)e;
                    }
                    if (type == LookupType.DECLARATION_ONLY) {
                        return null;
                    }
                }
            }
            element = ContextComputationProcessor.getTopLevelInjectionTarget(element);
            final PsiElement parent = element.getParent();
            // For "s += ..." the annotation of interest lives on the assignment target.
            if (element instanceof PsiAssignmentExpression && ((PsiAssignmentExpression)element).getOperationTokenType() == JavaTokenType.PLUSEQ) {
                element = ((PsiAssignmentExpression)element).getLExpression();
                continue;
            }
            else if (parent instanceof PsiAssignmentExpression) {
                final PsiAssignmentExpression p = (PsiAssignmentExpression)parent;
                if (p.getRExpression() == element) {
                    // Assigned value: continue the search with the assignment target.
                    element = p.getLExpression();
                    continue;
                }
            }
            else if (parent instanceof PsiReturnStatement) {
                // Returned value: the enclosing method carries the annotation.
                final PsiMethod m = PsiTreeUtil.getParentOfType(parent, PsiMethod.class);
                if (m != null) {
                    return m;
                }
            }
            else if (parent instanceof PsiModifierListOwner) {
                return (PsiModifierListOwner)parent;
            }
            else if (parent instanceof PsiArrayInitializerMemberValue) {
                // Element of an annotation attribute's array initializer: the
                // annotation method defining that attribute is the owner.
                final PsiArrayInitializerMemberValue value = (PsiArrayInitializerMemberValue)parent;
                final PsiElement pair = value.getParent();
                if (pair instanceof PsiNameValuePair) {
                    return AnnotationUtil.getAnnotationMethod((PsiNameValuePair)pair);
                }
            }
            else if (parent instanceof PsiNameValuePair) {
                return AnnotationUtil.getAnnotationMethod((PsiNameValuePair)parent);
            }
            else {
                // Fallback: treat the element as a call argument and look up the
                // corresponding method parameter (may be null).
                return PsiUtilEx.getParameterForArgument(element);
            }
            // If no annotation has been found through the usage context, check if the element
            // (i.e. the element the reference refers to) is annotated itself
            if (type != LookupType.DECLARATION_ONLY) {
                if (element instanceof PsiReferenceExpression) {
                    final PsiElement e = ((PsiReferenceExpression)element).resolve();
                    if (e instanceof PsiModifierListOwner) {
                        return (PsiModifierListOwner)e;
                    }
                }
            }
            // NOTE(review): every branch above either returns or continues, so the
            // loop only repeats via the explicit `continue` statements.
            return null;
        }
        return null;
    }

    /**
     * Callback interface for {@link #visitAnnotatedElements}; each method may
     * return {@code false} to stop the traversal.
     */
    public interface AnnotatedElementVisitor {
        boolean visitMethodParameter(PsiExpression expression, PsiCallExpression psiCallExpression);
        boolean visitMethodReturnStatement(PsiReturnStatement parent, PsiMethod method);
        boolean visitVariable(PsiVariable variable);
        boolean visitAnnotationParameter(PsiNameValuePair nameValuePair, PsiAnnotation psiAnnotation);
        boolean visitReference(PsiReferenceExpression expression);
    }

    /**
     * Walks up the PSI tree from the (top-level injection target of the) given
     * element, notifying the visitor about each annotation-relevant ancestor
     * until the visitor aborts or the tree root is reached.
     */
    public static void visitAnnotatedElements(@Nullable PsiElement element, AnnotatedElementVisitor visitor) {
        if (element == null) return;
        for (PsiElement cur = ContextComputationProcessor.getTopLevelInjectionTarget(element); cur != null; cur = cur.getParent()) {
            if (!visitAnnotatedElementInner(cur, visitor)) return;
        }
    }

    // Dispatches a single element/parent pair to the matching visitor callback.
    // Returns false to stop the upward traversal in visitAnnotatedElements().
    private static boolean visitAnnotatedElementInner(PsiElement element, AnnotatedElementVisitor visitor) {
        final PsiElement parent = element.getParent();
        if (element instanceof PsiReferenceExpression) {
            if (!visitor.visitReference((PsiReferenceExpression)element)) return false;
        }
        else if (element instanceof PsiNameValuePair && parent != null && parent.getParent() instanceof PsiAnnotation) {
            return visitor.visitAnnotationParameter((PsiNameValuePair)element, (PsiAnnotation)parent.getParent());
        }
        if (parent instanceof PsiAssignmentExpression) {
            final PsiAssignmentExpression p = (PsiAssignmentExpression)parent;
            if (p.getRExpression() == element || p.getOperationTokenType() == JavaTokenType.PLUSEQ) {
                final PsiExpression left = p.getLExpression();
                if (left instanceof PsiReferenceExpression) {
                    if (!visitor.visitReference((PsiReferenceExpression)left)) return false;
                }
            }
        }
        else if (parent instanceof PsiConditionalExpression && ((PsiConditionalExpression)parent).getCondition() == element) {
            // The condition of "cond ? a : b" is never annotation-relevant - stop here.
            return false;
        }
        else if (parent instanceof PsiReturnStatement) {
            final PsiMethod m = PsiTreeUtil.getParentOfType(parent, PsiMethod.class);
            if (m != null) {
                if (!visitor.visitMethodReturnStatement((PsiReturnStatement)parent, m)) return false;
            }
        }
        else if (parent instanceof PsiVariable) {
            return visitor.visitVariable((PsiVariable)parent);
        }
        else if (parent instanceof PsiModifierListOwner) {
            return false; // PsiClass/PsiClassInitializer/PsiCodeBlock
        }
        else if (parent instanceof PsiArrayInitializerMemberValue || parent instanceof PsiNameValuePair) {
            // Keep climbing: the annotation parameter is handled one level up.
            return true;
        }
        else if (parent instanceof PsiExpressionList && parent.getParent() instanceof PsiCallExpression) {
            return visitor.visitMethodParameter((PsiExpression)element, (PsiCallExpression)parent.getParent());
        }
        return true;
    }

    /**
     * Utility method to obtain annotations of a specific type from the supplied PsiModifierListOwner.
     * For optimization reasons, this method only looks at elements of type java.lang.String.
     * <p/>
     * The parameter <code>allowIndirect</code> determines if the method should look for indirect annotations, i.e.
     * annotations which have themselves been annotated by the supplied annotation name. Currently, this only allows
     * one level of indirection and returns an array of [base-annotation, indirect annotation]
     * <p/>
     * The <code>annotationName</code> parameter is a pair of the target annotation class' fully qualified name as a
     * String and as a Set. This is done for performance reasons because the Set is required by the
     * {@link com.intellij.codeInsight.AnnotationUtil} utility class and allows to avoid unnecessary object constructions.
     */
    @NotNull
    public static PsiAnnotation[] getAnnotationFrom(PsiModifierListOwner owner,
                                                    Pair<String, ? extends Set<String>> annotationName,
                                                    boolean allowIndirect,
                                                    boolean inHierarchy) {
        if (!PsiUtilEx.isLanguageAnnotationTarget(owner)) return PsiAnnotation.EMPTY_ARRAY;
        return getAnnotationsFromImpl(owner, annotationName, allowIndirect, inHierarchy);
    }

    /**
     * The parameter <code>allowIndirect</code> determines if the method should look for indirect annotations, i.e.
     * annotations which have themselves been annotated by the supplied annotation name. Currently, this only allows
     * one level of indirection and returns an array of [base-annotation, indirect annotation]
     * <p/>
     * The <code>annotationName</code> parameter is a pair of the target annotation class' fully qualified name as a
     * String and as a Set. This is done for performance reasons because the Set is required by the
     * {@link com.intellij.codeInsight.AnnotationUtil} utility class and allows to avoid unnecessary object constructions.
     */
    public static PsiAnnotation[] getAnnotationsFromImpl(PsiModifierListOwner owner,
                                                         Pair<String, ? extends Set<String>> annotationName,
                                                         boolean allowIndirect, boolean inHierarchy) {
        // Direct hit: the annotation is present on the owner (or its hierarchy).
        final PsiAnnotation directAnnotation = inHierarchy?
            AnnotationUtil.findAnnotationInHierarchy(owner, annotationName.second) :
            AnnotationUtil.findAnnotation(owner, annotationName.second);
        if (directAnnotation != null) {
            return new PsiAnnotation[]{directAnnotation};
        }
        if (allowIndirect) {
            // Indirect hit: one of the owner's annotations is itself annotated
            // with the target annotation (one level of indirection only).
            final PsiAnnotation[] annotations = getAnnotations(owner, inHierarchy);
            for (PsiAnnotation annotation : annotations) {
                PsiJavaCodeReferenceElement nameReference = annotation.getNameReferenceElement();
                if (nameReference == null) continue;
                PsiElement resolved = nameReference.resolve();
                if (resolved instanceof PsiClass) {
                    final PsiAnnotation psiAnnotation = AnnotationUtil.findAnnotationInHierarchy((PsiModifierListOwner)resolved, annotationName.second);
                    if (psiAnnotation != null) {
                        return new PsiAnnotation[]{psiAnnotation, annotation};
                    }
                }
            }
        }
        return PsiAnnotation.EMPTY_ARRAY;
    }

    /**
     * Convenience overload of
     * {@link #getAnnotationFrom(PsiModifierListOwner, Pair, boolean, boolean)}
     * that always searches the hierarchy.
     */
    public static PsiAnnotation[] getAnnotationFrom(@NotNull PsiModifierListOwner owner,
                                                    @NotNull Pair<String, ? extends Set<String>> annotationName,
                                                    boolean allowIndirect) {
        return getAnnotationFrom(owner, annotationName, allowIndirect, true);
    }

    /**
     * Calculates the value of the annotation's attribute referenced by the <code>attr</code> parameter by trying to
     * find the attribute in the supplied list of annotations and calculating the constant value for the first attribute
     * it finds.
     */
    @Nullable
    public static String calcAnnotationValue(PsiAnnotation[] annotation, @NonNls String attr) {
        for (PsiAnnotation psiAnnotation : annotation) {
            final String value = calcAnnotationValue(psiAnnotation, attr);
            if (value != null) return value;
        }
        return null;
    }

    /**
     * Evaluates the given attribute of the annotation as a compile-time constant;
     * returns {@code null} unless the result is a String.
     */
    @Nullable
    public static String calcAnnotationValue(@NotNull PsiAnnotation annotation, @NonNls String attr) {
        PsiElement value = annotation.findAttributeValue(attr);
        Object o = CONSTANT_EVALUATION_HELPER.computeConstantExpression(value);
        if (o instanceof String) {
            return (String)o;
        }
        return null;
    }

    /**
     * Returns all annotations for <code>listOwner</code>, possibly walking up the method hierarchy.
     *
     * @see com.intellij.codeInsight.AnnotationUtil#isAnnotated(com.intellij.psi.PsiModifierListOwner, java.lang.String, boolean)
     */
    private static PsiAnnotation[] getAnnotations(@NotNull final PsiModifierListOwner listOwner, final boolean inHierarchy) {
        final PsiModifierList modifierList = listOwner.getModifierList();
        if (!inHierarchy) {
            return modifierList == null ? PsiAnnotation.EMPTY_ARRAY : modifierList.getAnnotations();
        }
        // The hierarchy walk is expensive - cache it, invalidated on any PSI change.
        return CachedValuesManager.getCachedValue(listOwner, new CachedValueProvider<PsiAnnotation[]>() {
            @Nullable
            @Override
            public Result<PsiAnnotation[]> compute() {
                return Result.create(getHierarchyAnnotations(listOwner), PsiModificationTracker.MODIFICATION_COUNT);
            }
        });
    }

    // Collects the owner's annotations plus those of its super declarations.
    private static PsiAnnotation[] getHierarchyAnnotations(PsiModifierListOwner listOwner) {
        final Set<PsiAnnotation> all = new HashSet<PsiAnnotation>() {
            public boolean add(PsiAnnotation o) {
                // don't overwrite "higher level" annotations
                return !contains(o) && super.add(o);
            }
        };
        PsiModifierList modifierList = listOwner.getModifierList();
        if (modifierList != null) {
            ContainerUtil.addAll(all, modifierList.getAnnotations());
        }
        for (PsiModifierListOwner superOwner : AnnotationUtil.getSuperAnnotationOwners(listOwner)) {
            modifierList = superOwner.getModifierList();
            if (modifierList != null) {
                ContainerUtil.addAll(all, modifierList.getAnnotations());
            }
        }
        return all.isEmpty() ? PsiAnnotation.EMPTY_ARRAY : all.toArray(new PsiAnnotation[all.size()]);
    }
}
| |
package com.composum.nodes.setup.impl;
import org.apache.jackrabbit.vault.fs.api.ImportMode;
import org.apache.jackrabbit.vault.fs.io.ImportOptions;
import org.apache.jackrabbit.vault.packaging.JcrPackage;
import org.apache.jackrabbit.vault.packaging.JcrPackageManager;
import org.apache.jackrabbit.vault.packaging.PackageException;
import org.apache.jackrabbit.vault.packaging.PackageId;
import org.apache.jackrabbit.vault.packaging.Packaging;
import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.commons.classloader.DynamicClassLoaderManager;
import org.apache.sling.event.jobs.Job;
import org.apache.sling.event.jobs.JobManager;
import org.apache.sling.event.jobs.consumer.JobConsumer;
import org.jetbrains.annotations.NotNull;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jcr.Node;
import javax.jcr.NodeIterator;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.osgi.framework.Bundle.STARTING;
import static org.osgi.framework.Bundle.UNINSTALLED;
/**
* Ensures orderly removal of obsolete (e.g. Composum Core V1) bundles / configurations and replacing / updating
* new Composum Nodes bundles and replacing the content in {@value #NODES_CONTENT_PATH}.
* The service registers itself as a job (to be executed after the bundle is active) for this. The process is as follows:
* <ol>
* <li>Remove all obsolete and to be replaced bundles / configurations from {@value #NODES_BUNDLES_PATH}</li>
* <li>Moves all bundles in {@value #SETUP_BUNDLES_PATH} to {@value #NODES_BUNDLES_PATH} to be picked up by the OSGI installer and then removes {@value #SETUP_BUNDLES_PATH}</li>
* <li>Replaces the content of {@value #NODES_CONTENT_PATH} with the content of {@value #SETUP_NODES_FOLDER}</li>
* <li>Installs the composum-nodes-jslibs-package again to make sure it wasn't trashed by a bundle removal</li>
* <li>Removes the bundle containing this and its LoginAdminWhitelist configuration (even if something previously failed).</li>
* </ol>
* It's sensible to check whether there were any errors logged after this (search for "Composum Nodes setup").
*/
@Component(
immediate = true,
property = {
JobConsumer.PROPERTY_TOPICS + "=" + NodesSetupService.JOB_TOPIC
}
)
public class NodesSetupService implements JobConsumer {
private static final Logger LOG = LoggerFactory.getLogger(NodesSetupService.class);
public static final String JOB_TOPIC = "com/composum/nodes/setup";
public static final String SETUP_EXT = ".setup";
public static final String INSTALL_FOLDER = "install";
public static final String UPLOAD_FOLDER = "upload";
public static final String NODES_CONTENT_PATH = "/libs/composum/nodes";
/** {@value #NODES_BUNDLES_PATH} - the path where artifacts to install are put for automatic installation and from where obsolete / replaced bundles are deleted. */
public static final String NODES_BUNDLES_PATH = NODES_CONTENT_PATH + "/" + INSTALL_FOLDER;
/** {@value #SETUP_NODES_FOLDER} - temporary place for new content to replace the stuff in {@value #NODES_CONTENT_PATH} . */
public static final String SETUP_NODES_FOLDER = NODES_CONTENT_PATH + SETUP_EXT;
/** {@value #SETUP_BUNDLES_PATH} a place the sling package puts the new bundles temporarily, to be moved by this service. */
public static final String SETUP_BUNDLES_PATH = SETUP_NODES_FOLDER + "/" + UPLOAD_FOLDER;
public static final String NODES_PACKAGES_PATH = "com/composum/nodes/";
public static final Pattern VERSION_PATTERN = Pattern.compile(
"^(.*/)?composum-nodes-package-setup-bundle-(?<version>(.+))\\.jar$");
/** Matches the artifacts belonging to this setup bundle, which have to be removed later (used only for installation). */
public static final Pattern[] INSTALL_ARTIFACTS = new Pattern[]{
Pattern.compile("^composum-nodes-package-setup-bundle-(.+)\\.jar$"),
Pattern.compile("^.*\\.LoginAdminWhitelist\\.fragment-composum_nodes_setup\\.config$")
};
/** Matches the old (Composum Core) bundles to be removed and the new (Composum Nodes) bundles to be replaced. */
public static final Pattern[] BUNDLES_TO_UNINSTALL = new Pattern[]{
Pattern.compile("^(.*/)?composum-nodes-jslibs-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-sling-core-jslibs-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-nodes-usermgr-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-sling-user-management-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-nodes-pckgmgr-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-sling-package-manager-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-nodes-console-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-sling-core-console-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-nodes-config-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-sling-core-config-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-nodes-commons-.*\\.jar$"),
Pattern.compile("^(.*/)?composum-sling-core-commons-.*\\.jar$")
};
protected static final String REP_POLICY = "rep:policy";
protected static final String REP_ACCESS_CONTROLLABLE = "rep:AccessControllable";
@Reference
private DynamicClassLoaderManager dynamicClassLoaderManager;
@Reference
private ResourceResolverFactory resolverFactory;
@Reference
protected JobManager jobManager;
@Reference
private Packaging packaging;
private volatile BundleContext bundleContext;
@Activate
protected void activate(BundleContext bundleContext) {
    this.bundleContext = bundleContext;
    LOG.info("Composum Nodes setup.activate...");
    // Queue the setup work as a job so it runs once this bundle is fully active.
    final Job setupJob = jobManager.createJob(JOB_TOPIC).add();
    LOG.info("Composum Nodes setup: job created: [ {} ]", setupJob.getId());
}
@Deactivate
protected void deactivate() {
    LOG.info("Composum Nodes setup.deactivate...");
    // Clearing the context signals a running setup job that this bundle has
    // been stopped / restarted meanwhile (cf. the origBundleContext checks in process()).
    bundleContext = null;
}
/**
 * Executes the setup job: removes obsolete / replaced bundles, installs the new
 * Nodes bundles and content, and finally removes the setup artifacts themselves
 * (in the 'finally' block, even if a previous step failed).
 *
 * @param job the job created in {@link #activate(BundleContext)}
 * @return always {@code JobResult.OK} - the job must never be retried automatically
 */
@Override
public JobResult process(Job job) {
    wait(1);
    LOG.info("\n\nComposum Nodes setup: processing...\n");
    try (ResourceResolver resolver = resolverFactory.getAdministrativeResourceResolver(null)) {
        Session session = resolver.adaptTo(Session.class);
        if (session != null) {
            // remember the context to detect a restart of this bundle during setup
            BundleContext origBundleContext = this.bundleContext;
            try {
                // check for the setup folder and perform installation if found
                // (getNode() throws PathNotFoundException if it is absent)
                Node nodesSetupFolder = session.getNode(SETUP_NODES_FOLDER);
                if (removeNodesBundles(session)) {
                    wait(3); // wait if bundles has been removed; maybe this bundle is also restarted...
                }
                // check for new activation of the setup bundle during bundle removal
                if (origBundleContext.equals(this.bundleContext)) {
                    setupNodesBundlesAndContent(session);
                    session.save();
                    LOG.info("\n\nComposum Nodes setup: SUCCESS.\n");
                } else { // the bundle might have been restarted during the uninstallation.
                    LOG.info("\n\nComposum Nodes setup: process stopped - waiting for a following job.\n");
                }
            } catch (PathNotFoundException ignore) {
                LOG.info("\n\nComposum Nodes setup: no Nodes install folder found [ {} ]\n", SETUP_NODES_FOLDER);
            } catch (RepositoryException | PackageException | IOException | RuntimeException ex) {
                LOG.error("Composum Nodes setup: " + ex.getMessage(), ex);
                LOG.error("\n\nComposum Nodes setup: process FAILED! Please retry or continue manually.\n");
            } finally {
                if (origBundleContext.equals(this.bundleContext)) { // only remove ourselves if we haven't been restarted
                    try {
                        // final removal of the Nodes setup artifacts (setup-bundle and the configuration)
                        session.refresh(false); // remove any half-committed changes that might block this
                        removeInstallArtifacts(session);
                        session.save();
                    } catch (RepositoryException exx) {
                        LOG.error("Composum Nodes setup: setup bundle + conf could not be removed - please remove manually.", exx);
                    }
                }
            }
        } else {
            LOG.error("Composum Nodes setup: can't adapt to Session, failed!");
        }
    } catch (LoginException lex) {
        // fixed: the message and exception text were string-concatenated onto the
        // '{}' placeholder; pass them as SLF4J arguments instead
        LOG.error("Composum Nodes setup: cannot get administrative resolver - {}", lex.getMessage(), lex);
        LOG.error("\n\nComposum Nodes setup: process failed!\n");
    }
    return JobResult.OK; // always OK, no retry!
}
/**
 * Removes the previously uploaded bundle files and uninstalls the obsolete /
 * to-be-replaced OSGi bundles, but only if the setup folder is present
 * (i.e. the setup package has actually placed content to install).
 *
 * @param session the JCR session of the executed setup job
 * @return 'true' if at least one bundle file or OSGi bundle has been removed
 * @throws RepositoryException on general repository problems
 */
protected boolean removeNodesBundles(@NotNull final Session session)
        throws RepositoryException {
    boolean bundlesRemoved = false;
    try {
        // check for the setup folder and perform installation only if found
        // (getNode() acts as an existence probe - PathNotFoundException otherwise)
        Node nodesSetupFolder = session.getNode(SETUP_NODES_FOLDER);
        // remove existing bundles and install new bundles
        bundlesRemoved = removeUploadedBundles(session);
        if (bundlesRemoved) {
            // pause after removing files; this bundle itself might get restarted (cf. process())
            wait(3);
        }
        bundlesRemoved = uninstallBundles() || bundlesRemoved;
    } catch (PathNotFoundException ignore) {
        LOG.info("\n\nComposum Nodes setup: no Nodes install folder found [ {} ]\n", SETUP_NODES_FOLDER);
    }
    return bundlesRemoved;
}
/**
 * setup new Nodes bundles and content installed in the setup folder during package install:
 * moves the uploaded bundles into the install folder, replaces the Nodes content
 * (preserving existing ACLs) and reinstalls the jslibs subpackage.
 *
 * @param session the JCR session of the executed setup job
 * @throws RepositoryException on general repository problems
 * @throws PackageException   if the jslibs package installation fails
 * @throws IOException        if reading the jslibs package fails
 */
protected void setupNodesBundlesAndContent(@NotNull final Session session)
        throws RepositoryException, PackageException, IOException {
    try {
        // check for the setup folder and perform installation if found
        Node nodesSetupFolder = session.getNode(SETUP_NODES_FOLDER);
        try {
            Node nodesSetupBundles = session.getNode(SETUP_BUNDLES_PATH);
            // setup of the new nodes bundles (move from 'upload' to the 'install' folder)
            NodeIterator iterator = nodesSetupBundles.getNodes();
            while (iterator.hasNext()) {
                Node node = iterator.nextNode();
                String targetPath = NODES_BUNDLES_PATH + "/" + node.getName();
                LOG.info("Composum Nodes setup: installing node [ {} ]", targetPath);
                session.move(node.getPath(), targetPath);
            }
            session.save();
            // remove the empty 'upload' folder
            nodesSetupBundles.remove();
            session.save();
        } catch (PathNotFoundException ignore) {
            // no uploaded bundles present - nothing to move
        }
        // preserve ACLs of the current content tree before it is replaced
        moveAcl(session.getNode(NODES_CONTENT_PATH), nodesSetupFolder, false);
        // remove the content resources
        removeNodesContent(session);
        // setup of the new Nodes content resources (move from 'nodes.setup' to 'nodes')
        NodeIterator iterator = nodesSetupFolder.getNodes();
        while (iterator.hasNext()) {
            Node node = iterator.nextNode();
            String targetPath = NODES_CONTENT_PATH + "/" + node.getName();
            LOG.info("Composum Nodes setup: installing node [ {} ]", targetPath);
            session.move(node.getPath(), targetPath);
        }
        session.save();
        // remove the empty 'nodes.setup' folder
        nodesSetupFolder.remove();
        session.save();
        // it's possible that 'initial content' has been deleted triggered by a bundle removal
        // reinstall the libraries content subpackage if a bundle has been removed...
        Matcher matcher = VERSION_PATTERN.matcher(bundleContext.getBundle().getLocation());
        if (matcher.matches()) {
            wait(3);
            String version = matcher.group("version");
            installPackage(session, NODES_PACKAGES_PATH + "composum-nodes-jslibs-package-" + version + ".zip");
        } else {
            LOG.warn("Composum Nodes setup: Couldn't determine our version from {}", bundleContext.getBundle().getLocation());
        }
    } catch (PathNotFoundException ignore) {
        LOG.info("\n\nComposum Nodes setup: no Nodes install folder found [ {} ]\n", SETUP_NODES_FOLDER);
    }
}
/**
 * This moves any ACL restrictions from the subnodes of src to the nodes of target. This violates the constraints for the
 * src since the rep:policy nodes are removed, but src will be deleted, anyway.
 *
 * @param src       the tree whose ACLs are harvested (about to be deleted)
 * @param target    the corresponding tree that receives the ACLs
 * @param isSubnode 'false' for the top-level call: the root's own ACL is left in place,
 *                  only the recursion into matching children moves policies
 */
protected void moveAcl(Node src, Node target, boolean isSubnode) throws RepositoryException {
    // recurse into children that exist under the same name in both trees
    for (NodeIterator it = src.getNodes(); it.hasNext(); ) {
        Node srcChild = it.nextNode();
        String name = srcChild.getName();
        if (!REP_POLICY.equals(name) && target.hasNode(name)) {
            Node targetChild = target.getNode(name);
            moveAcl(srcChild, targetChild, true);
        }
    }
    if (isSubnode && src.isNodeType(REP_ACCESS_CONTROLLABLE)) {
        // make the target accept a rep:policy child, then move the policy over
        target.addMixin(REP_ACCESS_CONTROLLABLE);
        LOG.info("Composum Nodes setup: keeping ACL on {}", src.getPath());
        src.getSession().move(src.getNode(REP_POLICY).getPath(), target.getPath() + '/' + REP_POLICY);
    }
}
/**
 * (re-)install a package or subpackage if subpackages are specified
 *
 * @param session the current session
 * @param packagePath the relative path of the package to install (below the package root)
 * @param subpckgPatterns a set of subpackage name patterns, each matching subpackage will be installed;
 *                        if none are given the package itself is installed
 * @throws RepositoryException on general repository problems
 * @throws PackageException   if the package installation fails
 * @throws IOException        if reading the package fails
 */
protected void installPackage(@NotNull final Session session, @NotNull final String packagePath,
                              @NotNull final Pattern... subpckgPatterns)
        throws RepositoryException, PackageException, IOException {
    JcrPackageManager manager = packaging.getPackageManager(session);
    Node pckgRoot = manager.getPackageRoot();
    // NOTE(review): JCR getNode() throws PathNotFoundException rather than returning
    // null, so this null check looks purely defensive - confirm intended behavior.
    Node pckgNode = pckgRoot.getNode(packagePath);
    if (pckgNode != null) {
        JcrPackage jcrPackage = manager.open(pckgNode, true);
        if (jcrPackage != null) {
            ImportOptions importOptions = createPackageImportOptions();
            if (subpckgPatterns == null || subpckgPatterns.length < 1) {
                // no patterns: install the package itself
                LOG.info("Composum Nodes setup: package install [ {} ]", packagePath);
                jcrPackage.install(importOptions);
            } else {
                // patterns given: extract the subpackages and install each matching one
                PackageId[] subpackages = jcrPackage.extractSubpackages(importOptions);
                for (PackageId pckgId : subpackages) {
                    JcrPackage subpackage = manager.open(pckgId);
                    if (subpackage != null) {
                        for (Pattern pattern : subpckgPatterns) {
                            Node subpckgNode = subpackage.getNode();
                            if (subpckgNode != null) {
                                String subpckgName = subpckgNode.getName();
                                if (pattern.matcher(subpckgName).matches()) {
                                    LOG.info("Composum Nodes setup: subpackage install [ {} ]", subpckgName);
                                    subpackage.install(importOptions);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Builds the import options for the package (re-)installation: a real run (no dry
 * run) with auto-save, REPLACE import mode and the dynamic class loader for hooks.
 */
protected ImportOptions createPackageImportOptions() {
    final ImportOptions importOptions = new ImportOptions();
    importOptions.setHookClassLoader(dynamicClassLoaderManager.getDynamicClassLoader());
    importOptions.setImportMode(ImportMode.REPLACE);
    importOptions.setAutoSaveThreshold(1024);
    importOptions.setDryRun(false);
    return importOptions;
}
/**
 * remove the Nodes modules content resources to prepare a clean install
 * (keeps the 'install' folder and any rep:policy node in place)
 *
 * @param session the JCR session of the executed setup job
 * @throws RepositoryException on general repository problems
 */
protected void removeNodesContent(@NotNull final Session session)
        throws RepositoryException {
    try {
        Node nodesContent = session.getNode(NODES_CONTENT_PATH);
        // remove existing content
        NodeIterator iterator = nodesContent.getNodes();
        while (iterator.hasNext()) {
            Node node = iterator.nextNode();
            // keep the bundle install folder and the access control policy node
            if (!INSTALL_FOLDER.equals(node.getName()) && !REP_POLICY.equals(node.getName())) {
                LOG.info("Composum Nodes setup: removing node [ {} ]", node.getPath());
                node.remove();
            }
        }
        session.save();
    } catch (PathNotFoundException ignore) {
        // nothing to clean up if the content root does not exist
    }
}
/**
 * removes all matching artifacts in the bundle install repository folder (removes bundle files uploaded before);
 * the setup's own artifacts (see {@link #INSTALL_ARTIFACTS}) are kept for the final cleanup step
 *
 * @return true if at least one artifact has been removed
 */
protected boolean removeUploadedBundles(@NotNull final Session session) {
    boolean result = false;
    try {
        Node bundlesFolder = session.getNode(NODES_BUNDLES_PATH);
        NodeIterator iterator = bundlesFolder.getNodes();
        while (iterator.hasNext()) {
            // per-node try/catch: one failing node must not abort the whole sweep
            try {
                Node bundleNode = iterator.nextNode();
                if (!isInstallArtifact(bundleNode.getName())) {
                    LOG.info("Composum Nodes setup: removing node [ {} ]", bundleNode.getPath());
                    bundleNode.remove();
                    result = true;
                }
            } catch (RepositoryException ex) {
                LOG.error("Composum Nodes setup: could not remove bundles / configs from " + NODES_BUNDLES_PATH, ex);
            }
        }
        session.save();
    } catch (PathNotFoundException ignore) {
        // no install folder present - nothing to remove
    } catch (RepositoryException ex) {
        LOG.error("Composum Nodes setup: could not remove bundles / configs from " + NODES_BUNDLES_PATH, ex);
    }
    return result;
}
/**
* uninstalls all bundles matching to the unistall pattern list (removes bundles installed before)
*
* @return true if at least one bundle has been uninstalled
*/
protected boolean uninstallBundles() {
boolean result = false;
for (Pattern pattern : BUNDLES_TO_UNINSTALL) {
/* if setup bundle is deactivated while job processing the bundle content is 'null'! */
if (bundleContext != null) {
for (Bundle bundle : bundleContext.getBundles()) {
Matcher matcher = pattern.matcher(bundle.getLocation());
if (matcher.matches()) {
try {
int state = bundle.getState();
if (state != UNINSTALLED) {
result = true;
LOG.info("Composum Nodes setup: uninstalling bundle [ {} ]", bundle.getLocation());
if (state == Bundle.ACTIVE || state == STARTING) {
bundle.stop();
}
bundle.uninstall();
}
} catch (BundleException | IllegalStateException ex) {
LOG.error("Composum Nodes setup: trouble uninstalling bundle {}", bundle.getLocation(), ex);
}
}
}
}
}
return result;
}
/**
* @return 'true' if the name references an artifact od the setup itself
*/
protected boolean isInstallArtifact(@NotNull final String name) {
for (Pattern pattern : INSTALL_ARTIFACTS) {
Matcher matcher = pattern.matcher(name);
if (matcher.matches()) {
return true;
}
}
return false;
}
/**
* the final removal of the install artifacts (setup-bundle and the related configuration)
*/
protected void removeInstallArtifacts(@NotNull final Session session)
throws RepositoryException {
try {
Node bundlesFolder = session.getNode(NODES_BUNDLES_PATH);
NodeIterator iterator = bundlesFolder.getNodes();
while (iterator.hasNext()) {
try {
Node bundleNode = iterator.nextNode();
if (isInstallArtifact(bundleNode.getName())) {
LOG.info("Composum Nodes setup: removing install node [ {} ]", bundleNode.getPath());
bundleNode.remove();
}
} catch (RepositoryException ex) {
LOG.error("Composum Nodes setup: trouble cleaning up " + NODES_BUNDLES_PATH,ex);
}
}
} catch (PathNotFoundException ignore) {
}
}
protected void wait(int seconds) {
try {
TimeUnit.SECONDS.sleep(seconds);
} catch (InterruptedException ex) {
LOG.warn(ex.toString());
}
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ecs.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Container for the parameters of a request to register an EC2 instance as an
 * Amazon ECS container instance. All {@code withX} mutators return {@code this}
 * so calls can be chained.
 */
public class RegisterContainerInstanceRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /**
     * <p>
     * The short name or full Amazon Resource Name (ARN) of the cluster with
     * which to register your container instance. If you do not specify a
     * cluster, the default cluster is assumed.
     * </p>
     */
    private String cluster;
    /**
     * <p>
     * The instance identity document for the EC2 instance to register. This
     * document can be found by running the following command from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     * </p>
     */
    private String instanceIdentityDocument;
    /**
     * <p>
     * The instance identity document signature for the EC2 instance to
     * register. This signature can be found by running the following command
     * from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     * </p>
     */
    private String instanceIdentityDocumentSignature;
    /**
     * <p>
     * The resources available on the instance.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Resource> totalResources;
    /**
     * <p>
     * The version information for the Amazon ECS container agent and Docker
     * daemon running on the container instance.
     * </p>
     */
    private VersionInfo versionInfo;
    /**
     * <p>
     * The Amazon Resource Name (ARN) of the container instance (if it was
     * previously registered).
     * </p>
     */
    private String containerInstanceArn;
    /**
     * <p>
     * The container instance attributes that this container instance supports.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Attribute> attributes;

    /**
     * <p>
     * The short name or full Amazon Resource Name (ARN) of the cluster with
     * which to register your container instance. If you do not specify a
     * cluster, the default cluster is assumed.
     * </p>
     *
     * @param cluster
     *        The short name or full Amazon Resource Name (ARN) of the cluster
     *        with which to register your container instance. If you do not
     *        specify a cluster, the default cluster is assumed.
     */
    public void setCluster(String cluster) {
        this.cluster = cluster;
    }

    /**
     * <p>
     * The short name or full Amazon Resource Name (ARN) of the cluster with
     * which to register your container instance. If you do not specify a
     * cluster, the default cluster is assumed.
     * </p>
     *
     * @return The short name or full Amazon Resource Name (ARN) of the cluster
     *         with which to register your container instance. If you do not
     *         specify a cluster, the default cluster is assumed.
     */
    public String getCluster() {
        return this.cluster;
    }

    /**
     * <p>
     * The short name or full Amazon Resource Name (ARN) of the cluster with
     * which to register your container instance. If you do not specify a
     * cluster, the default cluster is assumed.
     * </p>
     *
     * @param cluster
     *        The short name or full Amazon Resource Name (ARN) of the cluster
     *        with which to register your container instance. If you do not
     *        specify a cluster, the default cluster is assumed.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withCluster(String cluster) {
        setCluster(cluster);
        return this;
    }

    /**
     * <p>
     * The instance identity document for the EC2 instance to register. This
     * document can be found by running the following command from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     * </p>
     *
     * @param instanceIdentityDocument
     *        The instance identity document for the EC2 instance to register.
     *        This document can be found by running the following command from
     *        the instance:
     *        <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     */
    public void setInstanceIdentityDocument(String instanceIdentityDocument) {
        this.instanceIdentityDocument = instanceIdentityDocument;
    }

    /**
     * <p>
     * The instance identity document for the EC2 instance to register. This
     * document can be found by running the following command from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     * </p>
     *
     * @return The instance identity document for the EC2 instance to register.
     *         This document can be found by running the following command from
     *         the instance:
     *         <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     */
    public String getInstanceIdentityDocument() {
        return this.instanceIdentityDocument;
    }

    /**
     * <p>
     * The instance identity document for the EC2 instance to register. This
     * document can be found by running the following command from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     * </p>
     *
     * @param instanceIdentityDocument
     *        The instance identity document for the EC2 instance to register.
     *        This document can be found by running the following command from
     *        the instance:
     *        <code>curl http://169.254.169.254/latest/dynamic/instance-identity/document/</code>
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withInstanceIdentityDocument(
            String instanceIdentityDocument) {
        setInstanceIdentityDocument(instanceIdentityDocument);
        return this;
    }

    /**
     * <p>
     * The instance identity document signature for the EC2 instance to
     * register. This signature can be found by running the following command
     * from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     * </p>
     *
     * @param instanceIdentityDocumentSignature
     *        The instance identity document signature for the EC2 instance to
     *        register. This signature can be found by running the following
     *        command from the instance:
     *        <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     */
    public void setInstanceIdentityDocumentSignature(
            String instanceIdentityDocumentSignature) {
        this.instanceIdentityDocumentSignature = instanceIdentityDocumentSignature;
    }

    /**
     * <p>
     * The instance identity document signature for the EC2 instance to
     * register. This signature can be found by running the following command
     * from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     * </p>
     *
     * @return The instance identity document signature for the EC2 instance to
     *         register. This signature can be found by running the following
     *         command from the instance:
     *         <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     */
    public String getInstanceIdentityDocumentSignature() {
        return this.instanceIdentityDocumentSignature;
    }

    /**
     * <p>
     * The instance identity document signature for the EC2 instance to
     * register. This signature can be found by running the following command
     * from the instance:
     * <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     * </p>
     *
     * @param instanceIdentityDocumentSignature
     *        The instance identity document signature for the EC2 instance to
     *        register. This signature can be found by running the following
     *        command from the instance:
     *        <code>curl http://169.254.169.254/latest/dynamic/instance-identity/signature/</code>
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withInstanceIdentityDocumentSignature(
            String instanceIdentityDocumentSignature) {
        setInstanceIdentityDocumentSignature(instanceIdentityDocumentSignature);
        return this;
    }

    /**
     * <p>
     * The resources available on the instance.
     * </p>
     *
     * @return The resources available on the instance.
     */
    public java.util.List<Resource> getTotalResources() {
        // lazily created so the getter never returns null
        if (totalResources == null) {
            totalResources = new com.amazonaws.internal.SdkInternalList<Resource>();
        }
        return totalResources;
    }

    /**
     * <p>
     * The resources available on the instance.
     * </p>
     *
     * @param totalResources
     *        The resources available on the instance.
     */
    public void setTotalResources(java.util.Collection<Resource> totalResources) {
        if (totalResources == null) {
            this.totalResources = null;
            return;
        }
        // defensive copy of the caller's collection
        this.totalResources = new com.amazonaws.internal.SdkInternalList<Resource>(
                totalResources);
    }

    /**
     * <p>
     * The resources available on the instance.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setTotalResources(java.util.Collection)} or
     * {@link #withTotalResources(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param totalResources
     *        The resources available on the instance.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withTotalResources(
            Resource... totalResources) {
        if (this.totalResources == null) {
            setTotalResources(new com.amazonaws.internal.SdkInternalList<Resource>(
                    totalResources.length));
        }
        for (Resource ele : totalResources) {
            this.totalResources.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The resources available on the instance.
     * </p>
     *
     * @param totalResources
     *        The resources available on the instance.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withTotalResources(
            java.util.Collection<Resource> totalResources) {
        setTotalResources(totalResources);
        return this;
    }

    /**
     * <p>
     * The version information for the Amazon ECS container agent and Docker
     * daemon running on the container instance.
     * </p>
     *
     * @param versionInfo
     *        The version information for the Amazon ECS container agent and
     *        Docker daemon running on the container instance.
     */
    public void setVersionInfo(VersionInfo versionInfo) {
        this.versionInfo = versionInfo;
    }

    /**
     * <p>
     * The version information for the Amazon ECS container agent and Docker
     * daemon running on the container instance.
     * </p>
     *
     * @return The version information for the Amazon ECS container agent and
     *         Docker daemon running on the container instance.
     */
    public VersionInfo getVersionInfo() {
        return this.versionInfo;
    }

    /**
     * <p>
     * The version information for the Amazon ECS container agent and Docker
     * daemon running on the container instance.
     * </p>
     *
     * @param versionInfo
     *        The version information for the Amazon ECS container agent and
     *        Docker daemon running on the container instance.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withVersionInfo(
            VersionInfo versionInfo) {
        setVersionInfo(versionInfo);
        return this;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the container instance (if it was
     * previously registered).
     * </p>
     *
     * @param containerInstanceArn
     *        The Amazon Resource Name (ARN) of the container instance (if it
     *        was previously registered).
     */
    public void setContainerInstanceArn(String containerInstanceArn) {
        this.containerInstanceArn = containerInstanceArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the container instance (if it was
     * previously registered).
     * </p>
     *
     * @return The Amazon Resource Name (ARN) of the container instance (if it
     *         was previously registered).
     */
    public String getContainerInstanceArn() {
        return this.containerInstanceArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the container instance (if it was
     * previously registered).
     * </p>
     *
     * @param containerInstanceArn
     *        The Amazon Resource Name (ARN) of the container instance (if it
     *        was previously registered).
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withContainerInstanceArn(
            String containerInstanceArn) {
        setContainerInstanceArn(containerInstanceArn);
        return this;
    }

    /**
     * <p>
     * The container instance attributes that this container instance supports.
     * </p>
     *
     * @return The container instance attributes that this container instance
     *         supports.
     */
    public java.util.List<Attribute> getAttributes() {
        // lazily created so the getter never returns null
        if (attributes == null) {
            attributes = new com.amazonaws.internal.SdkInternalList<Attribute>();
        }
        return attributes;
    }

    /**
     * <p>
     * The container instance attributes that this container instance supports.
     * </p>
     *
     * @param attributes
     *        The container instance attributes that this container instance
     *        supports.
     */
    public void setAttributes(java.util.Collection<Attribute> attributes) {
        if (attributes == null) {
            this.attributes = null;
            return;
        }
        // defensive copy of the caller's collection
        this.attributes = new com.amazonaws.internal.SdkInternalList<Attribute>(
                attributes);
    }

    /**
     * <p>
     * The container instance attributes that this container instance supports.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setAttributes(java.util.Collection)} or
     * {@link #withAttributes(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param attributes
     *        The container instance attributes that this container instance
     *        supports.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withAttributes(
            Attribute... attributes) {
        if (this.attributes == null) {
            setAttributes(new com.amazonaws.internal.SdkInternalList<Attribute>(
                    attributes.length));
        }
        for (Attribute ele : attributes) {
            this.attributes.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The container instance attributes that this container instance supports.
     * </p>
     *
     * @param attributes
     *        The container instance attributes that this container instance
     *        supports.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public RegisterContainerInstanceRequest withAttributes(
            java.util.Collection<Attribute> attributes) {
        setAttributes(attributes);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCluster() != null)
            sb.append("Cluster: " + getCluster() + ",");
        if (getInstanceIdentityDocument() != null)
            sb.append("InstanceIdentityDocument: "
                    + getInstanceIdentityDocument() + ",");
        if (getInstanceIdentityDocumentSignature() != null)
            sb.append("InstanceIdentityDocumentSignature: "
                    + getInstanceIdentityDocumentSignature() + ",");
        if (getTotalResources() != null)
            sb.append("TotalResources: " + getTotalResources() + ",");
        if (getVersionInfo() != null)
            sb.append("VersionInfo: " + getVersionInfo() + ",");
        if (getContainerInstanceArn() != null)
            sb.append("ContainerInstanceArn: " + getContainerInstanceArn()
                    + ",");
        if (getAttributes() != null)
            sb.append("Attributes: " + getAttributes());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof RegisterContainerInstanceRequest == false)
            return false;
        RegisterContainerInstanceRequest other = (RegisterContainerInstanceRequest) obj;
        // each property pair: XOR catches the one-null case, equals the rest
        if (other.getCluster() == null ^ this.getCluster() == null)
            return false;
        if (other.getCluster() != null
                && other.getCluster().equals(this.getCluster()) == false)
            return false;
        if (other.getInstanceIdentityDocument() == null
                ^ this.getInstanceIdentityDocument() == null)
            return false;
        if (other.getInstanceIdentityDocument() != null
                && other.getInstanceIdentityDocument().equals(
                        this.getInstanceIdentityDocument()) == false)
            return false;
        if (other.getInstanceIdentityDocumentSignature() == null
                ^ this.getInstanceIdentityDocumentSignature() == null)
            return false;
        if (other.getInstanceIdentityDocumentSignature() != null
                && other.getInstanceIdentityDocumentSignature().equals(
                        this.getInstanceIdentityDocumentSignature()) == false)
            return false;
        if (other.getTotalResources() == null
                ^ this.getTotalResources() == null)
            return false;
        if (other.getTotalResources() != null
                && other.getTotalResources().equals(this.getTotalResources()) == false)
            return false;
        if (other.getVersionInfo() == null ^ this.getVersionInfo() == null)
            return false;
        if (other.getVersionInfo() != null
                && other.getVersionInfo().equals(this.getVersionInfo()) == false)
            return false;
        if (other.getContainerInstanceArn() == null
                ^ this.getContainerInstanceArn() == null)
            return false;
        if (other.getContainerInstanceArn() != null
                && other.getContainerInstanceArn().equals(
                        this.getContainerInstanceArn()) == false)
            return false;
        if (other.getAttributes() == null ^ this.getAttributes() == null)
            return false;
        if (other.getAttributes() != null
                && other.getAttributes().equals(this.getAttributes()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getCluster() == null) ? 0 : getCluster().hashCode());
        hashCode = prime
                * hashCode
                + ((getInstanceIdentityDocument() == null) ? 0
                        : getInstanceIdentityDocument().hashCode());
        hashCode = prime
                * hashCode
                + ((getInstanceIdentityDocumentSignature() == null) ? 0
                        : getInstanceIdentityDocumentSignature().hashCode());
        hashCode = prime
                * hashCode
                + ((getTotalResources() == null) ? 0 : getTotalResources()
                        .hashCode());
        hashCode = prime
                * hashCode
                + ((getVersionInfo() == null) ? 0 : getVersionInfo().hashCode());
        hashCode = prime
                * hashCode
                + ((getContainerInstanceArn() == null) ? 0
                        : getContainerInstanceArn().hashCode());
        hashCode = prime * hashCode
                + ((getAttributes() == null) ? 0 : getAttributes().hashCode());
        return hashCode;
    }

    @Override
    public RegisterContainerInstanceRequest clone() {
        return (RegisterContainerInstanceRequest) super.clone();
    }
}
| |
/**
* Copyright 2013 Impetus Infotech.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.impetus.client.oraclenosql.datatypes.tests;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.persistence.Query;
import junit.framework.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.impetus.client.oraclenosql.datatypes.entities.StudentOracleNoSQLChar;
/**
* @author vivek.mishra
*
*/
public class StudentOracleNoSQLCharTest extends OracleNoSQLBase
{
private static final String keyspace = "KunderaTests";
private EntityManagerFactory emf;
    @Before
    public void setUp() throws Exception
    {
        // fresh factory per test; closed again in tearDown()
        emf = Persistence.createEntityManagerFactory("oracleNosqlDataTypeTest");
    }
    @After
    public void tearDown() throws Exception
    {
        // release the factory created in setUp()
        emf.close();
    }
    @Test
    public void testExecuteUseSameEm()
    {
        // the steps depend on each other (merge renames the max-key entity,
        // delete then asserts that rename) - keep this order
        testPersist(true);
        testFindById(true);
        testMerge(true);
        // testFindByQuery(true);
        // testNamedQueryUseSameEm(true);
        testDelete(true);
    }
    @Test
    public void testExecute()
    {
        // same CRUD sequence as testExecuteUseSameEm, but with a new
        // EntityManager between steps (useSameEm = false)
        testPersist(false);
        testFindById(false);
        testMerge(false);
        // testFindByQuery(false);
        // testNamedQuery(false);
        testDelete(false);
    }
public void testPersist(boolean useSameEm)
{
EntityManager em = emf.createEntityManager();
// Insert max value of char
StudentOracleNoSQLChar studentMax = new StudentOracleNoSQLChar();
studentMax.setAge((Short) getMaxValue(short.class));
studentMax.setId((Character) getMaxValue(char.class));
studentMax.setName((String) getMaxValue(String.class));
em.persist(studentMax);
// Insert min value of char
StudentOracleNoSQLChar studentMin = new StudentOracleNoSQLChar();
studentMin.setAge((Short) getMinValue(short.class));
studentMin.setId((Character) getMinValue(char.class));
studentMin.setName((String) getMinValue(String.class));
em.persist(studentMin);
// Insert random value of char
StudentOracleNoSQLChar student = new StudentOracleNoSQLChar();
student.setAge((Short) getRandomValue(short.class));
student.setId((Character) getRandomValue(char.class));
student.setName((String) getRandomValue(String.class));
em.persist(student);
em.close();
}
public void testFindById(boolean useSameEm)
{
EntityManager em = emf.createEntityManager();
StudentOracleNoSQLChar studentMax = em.find(StudentOracleNoSQLChar.class, getMaxValue(char.class));
Assert.assertNotNull(studentMax);
Assert.assertEquals(getMaxValue(short.class), studentMax.getAge());
Assert.assertEquals(getMaxValue(String.class), studentMax.getName());
if (!useSameEm)
{
em.close();
em = emf.createEntityManager();
}
StudentOracleNoSQLChar studentMin = em.find(StudentOracleNoSQLChar.class, getMinValue(char.class));
Assert.assertNotNull(studentMin);
Assert.assertEquals(getMinValue(short.class), studentMin.getAge());
Assert.assertEquals(getMinValue(String.class), studentMin.getName());
if (!useSameEm)
{
em.close();
em = emf.createEntityManager();
}
StudentOracleNoSQLChar student = em.find(StudentOracleNoSQLChar.class, getRandomValue(char.class));
Assert.assertNotNull(student);
Assert.assertEquals(getRandomValue(short.class), student.getAge());
Assert.assertEquals(getRandomValue(String.class), student.getName());
em.close();
}
public void testMerge(boolean useSameEm)
{
EntityManager em = emf.createEntityManager();
StudentOracleNoSQLChar student = em.find(StudentOracleNoSQLChar.class, getMaxValue(char.class));
Assert.assertNotNull(student);
Assert.assertEquals(getMaxValue(short.class), student.getAge());
Assert.assertEquals(getMaxValue(String.class), student.getName());
student.setName("Kuldeep");
em.merge(student);
if (!useSameEm)
{
em.close();
em = emf.createEntityManager();
}
StudentOracleNoSQLChar newStudent = em.find(StudentOracleNoSQLChar.class, getMaxValue(char.class));
Assert.assertNotNull(newStudent);
Assert.assertEquals(getMaxValue(short.class), newStudent.getAge());
Assert.assertEquals("Kuldeep", newStudent.getName());
}
    // Runs all JPQL query variants against the previously persisted entities.
    // (Currently commented out in the @Test methods.)
    public void testFindByQuery(boolean useSameEm)
    {
        findAllQuery();
        findByName();
        findByAge();
        findByNameAndAgeGTAndLT();
        findByNameAndAgeGTEQAndLTEQ();
        findByNameAndAgeGTAndLTEQ();
        findByNameAndAgeWithOrClause();
        findByAgeAndNameGTAndLT();
        findByNameAndAGEBetween();
        findByRange();
    }
private void findByAgeAndNameGTAndLT()
{
EntityManager em;
String query;
Query q;
List<StudentOracleNoSQLChar> students;
int count;
em = emf.createEntityManager();
query = "Select s From StudentOracleNoSQLChar s where s.age = " + getMinValue(short.class)
+ " and s.name > Amresh and s.name <= " + getMaxValue(String.class);
q = em.createQuery(query);
students = q.getResultList();
Assert.assertNotNull(students);
Assert.assertEquals(1, students.size());
count = 0;
for (StudentOracleNoSQLChar student : students)
{
Assert.assertEquals(getMinValue(char.class), student.getId());
Assert.assertEquals(getMinValue(short.class), student.getAge());
Assert.assertEquals(getMinValue(String.class), student.getName());
count++;
}
Assert.assertEquals(1, count);
em.close();
}
private void findByRange()
{
EntityManager em;
String query;
Query q;
List<StudentOracleNoSQLChar> students;
em = emf.createEntityManager();
query = "Select s From StudentOracleNoSQLChar s where s.id between ?1 and ?2";
q = em.createQuery(query);
q.setParameter(1, getMinValue(char.class));
q.setParameter(2, getMaxValue(char.class));
students = q.getResultList();
Assert.assertNotNull(students);
Assert.assertEquals(3, students.size());
int count = 0;
for (StudentOracleNoSQLChar student : students)
{
if (student.getId() == ((Character) getMaxValue(char.class)).charValue())
{
Assert.assertEquals(getMaxValue(short.class), student.getAge());
Assert.assertEquals("Kuldeep", student.getName());
count++;
}
else if (student.getId() == ((Character) getMinValue(char.class)).charValue())
{
Assert.assertEquals(getMinValue(short.class), student.getAge());
Assert.assertEquals(getMinValue(String.class), student.getName());
count++;
}
else
{
Assert.assertEquals(getRandomValue(char.class), student.getId());
Assert.assertEquals(getRandomValue(short.class), student.getAge());
Assert.assertEquals(getRandomValue(String.class), student.getName());
count++;
}
}
Assert.assertEquals(3, count);
em.close();
}
private void findByNameAndAgeWithOrClause()
{
EntityManager em;
String query;
Query q;
List<StudentOracleNoSQLChar> students;
int count;
em = emf.createEntityManager();
query = "Select s From StudentOracleNoSQLChar s where s.name = Kuldeep and s.age > " + getMinValue(short.class);
q = em.createQuery(query);
students = q.getResultList();
Assert.assertNotNull(students);
Assert.assertEquals(1, students.size());
count = 0;
for (StudentOracleNoSQLChar student : students)
{
Assert.assertEquals(getMaxValue(char.class), student.getId());
Assert.assertEquals(getMaxValue(short.class), student.getAge());
Assert.assertEquals("Kuldeep", student.getName());
count++;
}
Assert.assertEquals(1, count);
em.close();
}
private void findByNameAndAgeGTAndLTEQ()
{
EntityManager em;
String query;
Query q;
List<StudentOracleNoSQLChar> students;
int count;
em = emf.createEntityManager();
query = "Select s From StudentOracleNoSQLChar s where s.name = Kuldeep and s.age > " + getMinValue(short.class)
+ " and s.age <= " + getMaxValue(short.class);
q = em.createQuery(query);
students = q.getResultList();
Assert.assertNotNull(students);
Assert.assertEquals(1, students.size());
count = 0;
for (StudentOracleNoSQLChar student : students)
{
Assert.assertEquals(getMaxValue(char.class), student.getId());
Assert.assertEquals(getMaxValue(short.class), student.getAge());
Assert.assertEquals("Kuldeep", student.getName());
count++;
}
Assert.assertEquals(1, count);
em.close();
}
public void testNamedQueryUseSameEm(boolean useSameEm)
{
updateNamed(true);
deleteNamed(true);
}
public void testNamedQuery(boolean useSameEm)
{
updateNamed(false);
deleteNamed(false);
}
public void testDelete(boolean useSameEm)
{
EntityManager em = emf.createEntityManager();
StudentOracleNoSQLChar studentMax = em.find(StudentOracleNoSQLChar.class, getMaxValue(char.class));
Assert.assertNotNull(studentMax);
Assert.assertEquals(getMaxValue(short.class), studentMax.getAge());
Assert.assertEquals("Kuldeep", studentMax.getName());
em.remove(studentMax);
if (!useSameEm)
{
em.close();
em = emf.createEntityManager();
}
studentMax = em.find(StudentOracleNoSQLChar.class, getMaxValue(char.class));
Assert.assertNull(studentMax);
em.close();
}
/**
*
*/
private void deleteNamed(boolean useSameEm)
{
String deleteQuery = "Delete From StudentOracleNoSQLChar s where s.name=Vivek";
EntityManager em = emf.createEntityManager();
Query q = em.createQuery(deleteQuery);
q.executeUpdate();
if (!useSameEm)
{
em.close();
em = emf.createEntityManager();
}
StudentOracleNoSQLChar newStudent = em.find(StudentOracleNoSQLChar.class, getRandomValue(char.class));
Assert.assertNull(newStudent);
em.close();
}
/**
* @return
*/
private void updateNamed(boolean useSameEm)
{
EntityManager em = emf.createEntityManager();
String updateQuery = "Update StudentOracleNoSQLChar s SET s.name=Vivek where s.name=Amresh";
Query q = em.createQuery(updateQuery);
q.executeUpdate();
if (!useSameEm)
{
em.close();
em = emf.createEntityManager();
}
StudentOracleNoSQLChar newStudent = em.find(StudentOracleNoSQLChar.class, getRandomValue(char.class));
Assert.assertNotNull(newStudent);
Assert.assertEquals(getRandomValue(short.class), newStudent.getAge());
Assert.assertEquals("Vivek", newStudent.getName());
em.close();
}
private void findByNameAndAGEBetween()
{
EntityManager em;
String query;
Query q;
List<StudentOracleNoSQLChar> students;
int count;
em = emf.createEntityManager();
query = "Select s From StudentOracleNoSQLChar s where s.name = Amresh and s.age between "
+ getMinValue(short.class) + " and " + getMaxValue(short.class);
q = em.createQuery(query);
students = q.getResultList();
Assert.assertNotNull(students);
Assert.assertEquals(1, students.size());
count = 0;
for (StudentOracleNoSQLChar student : students)
{
Assert.assertEquals(getRandomValue(char.class), student.getId());
Assert.assertEquals(getRandomValue(short.class), student.getAge());
Assert.assertEquals(getRandomValue(String.class), student.getName());
count++;
}
Assert.assertEquals(1, count);
em.close();
}
/**
 * Queries by name plus strict age bounds (greater-than / less-than) and
 * verifies exactly one match: the record with the "random" key/age/name.
 */
private void findByNameAndAgeGTAndLT()
{
    EntityManager entityManager = emf.createEntityManager();
    String jpql = "Select s From StudentOracleNoSQLChar s where s.name = Amresh and s.age > " + getMinValue(short.class)
            + " and s.age < " + getMaxValue(short.class);
    List<StudentOracleNoSQLChar> students = entityManager.createQuery(jpql).getResultList();
    Assert.assertNotNull(students);
    Assert.assertEquals(1, students.size());
    int verified = 0;
    for (StudentOracleNoSQLChar student : students)
    {
        Assert.assertEquals(getRandomValue(char.class), student.getId());
        Assert.assertEquals(getRandomValue(short.class), student.getAge());
        Assert.assertEquals(getRandomValue(String.class), student.getName());
        verified++;
    }
    Assert.assertEquals(1, verified);
    entityManager.close();
}
/**
 * Queries by name plus inclusive age bounds (&gt;= / &lt;=) and verifies both
 * expected "Kuldeep" records: one keyed by the max char value, one by the min.
 */
private void findByNameAndAgeGTEQAndLTEQ()
{
    EntityManager entityManager = emf.createEntityManager();
    String jpql = "Select s From StudentOracleNoSQLChar s where s.name = Kuldeep and s.age >= "
            + getMinValue(short.class) + " and s.age <= " + getMaxValue(short.class);
    List<StudentOracleNoSQLChar> students = entityManager.createQuery(jpql).getResultList();
    Assert.assertNotNull(students);
    Assert.assertEquals(2, students.size());
    int verified = 0;
    for (StudentOracleNoSQLChar student : students)
    {
        if (student.getId() == ((Character) getMaxValue(char.class)).charValue())
        {
            // Record persisted with the maximum char key.
            Assert.assertEquals(getMaxValue(short.class), student.getAge());
            Assert.assertEquals("Kuldeep", student.getName());
        }
        else
        {
            // The only other match carries the minimum key/age/name values.
            Assert.assertEquals(getMinValue(char.class), student.getId());
            Assert.assertEquals(getMinValue(short.class), student.getAge());
            Assert.assertEquals(getMinValue(String.class), student.getName());
        }
        verified++;
    }
    Assert.assertEquals(2, verified);
    entityManager.close();
}
/**
 * Queries by the "random" age value and verifies exactly one match with the
 * matching random key and name.
 */
private void findByAge()
{
    EntityManager entityManager = emf.createEntityManager();
    String jpql = "Select s From StudentOracleNoSQLChar s where s.age = " + getRandomValue(short.class);
    List<StudentOracleNoSQLChar> students = entityManager.createQuery(jpql).getResultList();
    Assert.assertNotNull(students);
    Assert.assertEquals(1, students.size());
    int verified = 0;
    for (StudentOracleNoSQLChar student : students)
    {
        Assert.assertEquals(getRandomValue(char.class), student.getId());
        Assert.assertEquals(getRandomValue(short.class), student.getAge());
        Assert.assertEquals(getRandomValue(String.class), student.getName());
        verified++;
    }
    Assert.assertEquals(1, verified);
    entityManager.close();
}
/**
*
*/
/**
 * Queries for students named Kuldeep and verifies both expected matches:
 * the record keyed by the max char value and the record keyed by the min.
 */
private void findByName()
{
    EntityManager entityManager = emf.createEntityManager();
    Query nameQuery = entityManager.createQuery("Select s From StudentOracleNoSQLChar s where s.name = Kuldeep");
    List<StudentOracleNoSQLChar> students = nameQuery.getResultList();
    Assert.assertNotNull(students);
    Assert.assertEquals(2, students.size());
    int verified = 0;
    for (StudentOracleNoSQLChar student : students)
    {
        if (student.getId() == ((Character) getMaxValue(char.class)).charValue())
        {
            // Record persisted with the maximum char key.
            Assert.assertEquals(getMaxValue(short.class), student.getAge());
            Assert.assertEquals("Kuldeep", student.getName());
        }
        else
        {
            // The only other Kuldeep record carries the minimum values.
            Assert.assertEquals(getMinValue(char.class), student.getId());
            Assert.assertEquals(getMinValue(short.class), student.getAge());
            Assert.assertEquals(getMinValue(String.class), student.getName());
        }
        verified++;
    }
    Assert.assertEquals(2, verified);
    entityManager.close();
}
/**
*
*/
/**
 * Runs a select-all query and verifies the three persisted records
 * (max-keyed "Kuldeep", min-keyed, and the random-valued record).
 */
private void findAllQuery()
{
    EntityManager entityManager = emf.createEntityManager();
    // Select every StudentOracleNoSQLChar row (note: trailing space kept in the JPQL).
    Query selectAll = entityManager.createQuery("Select s From StudentOracleNoSQLChar s ");
    List<StudentOracleNoSQLChar> students = selectAll.getResultList();
    Assert.assertNotNull(students);
    Assert.assertEquals(3, students.size());
    int verified = 0;
    for (StudentOracleNoSQLChar student : students)
    {
        if (student.getId() == ((Character) getMaxValue(char.class)).charValue())
        {
            // Record persisted with the maximum char key.
            Assert.assertEquals(getMaxValue(short.class), student.getAge());
            Assert.assertEquals("Kuldeep", student.getName());
        }
        else if (student.getId() == ((Character) getMinValue(char.class)).charValue())
        {
            // Record persisted with the minimum char key.
            Assert.assertEquals(getMinValue(short.class), student.getAge());
            Assert.assertEquals(getMinValue(String.class), student.getName());
        }
        else
        {
            // Remaining record must be the random-valued one.
            Assert.assertEquals(getRandomValue(char.class), student.getId());
            Assert.assertEquals(getRandomValue(short.class), student.getAge());
            Assert.assertEquals(getRandomValue(String.class), student.getName());
        }
        verified++;
    }
    Assert.assertEquals(3, verified);
    entityManager.close();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.file;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.avatica.util.DateTimeUtils;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.ImmutableIntList;
import org.apache.calcite.util.ImmutableNullableList;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Source;
import org.apache.commons.lang3.time.FastDateFormat;
import au.com.bytecode.opencsv.CSVReader;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicBoolean;
/** Enumerator that reads from a CSV file.
*
* @param <E> Row type
*/
public class CsvEnumerator<E> implements Enumerator<E> {
// Underlying CSV reader; a CsvStreamReader when tailing a streaming source.
private final CSVReader reader;
// Optional per-column literal filters; a null entry means "no filter on that column".
// Null list means no filtering at all.
private final List<String> filterValues;
// Cooperative cancellation flag, polled once per row in moveNext().
private final AtomicBoolean cancelFlag;
// Converts a raw String[] CSV row into the enumerator's element type E.
private final RowConverter<E> rowConverter;
// Row most recently produced by moveNext(); stays null once input is exhausted.
private E current;
// Date/time parsers, fixed to GMT so parsed values are timezone-independent.
private static final FastDateFormat TIME_FORMAT_DATE;
private static final FastDateFormat TIME_FORMAT_TIME;
private static final FastDateFormat TIME_FORMAT_TIMESTAMP;
static {
final TimeZone gmt = TimeZone.getTimeZone("GMT");
TIME_FORMAT_DATE = FastDateFormat.getInstance("yyyy-MM-dd", gmt);
TIME_FORMAT_TIME = FastDateFormat.getInstance("HH:mm:ss", gmt);
TIME_FORMAT_TIMESTAMP =
FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss", gmt);
}
/** Creates an enumerator over a non-streaming CSV source, projecting the
 * given field ordinals with the given field types. */
public CsvEnumerator(Source source, AtomicBoolean cancelFlag,
List<CsvFieldType> fieldTypes, List<Integer> fields) {
//noinspection unchecked
this(source, cancelFlag, false, null,
(RowConverter<E>) converter(fieldTypes, fields));
}
/** Creates an enumerator. When {@code stream} is true the source is tailed
 * via {@code CsvStreamReader} instead of being read once. The header row is
 * consumed here so {@link #moveNext()} only ever sees data rows.
 *
 * @param filterValues per-column literal equality filters, or null for none */
public CsvEnumerator(Source source, AtomicBoolean cancelFlag, boolean stream,
String[] filterValues, RowConverter<E> rowConverter) {
this.cancelFlag = cancelFlag;
this.rowConverter = rowConverter;
this.filterValues = filterValues == null ? null
: ImmutableNullableList.copyOf(filterValues);
try {
if (stream) {
this.reader = new CsvStreamReader(source);
} else {
this.reader = openCsv(source);
}
this.reader.readNext(); // skip header row
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/** Chooses a row converter: a bare object per row when exactly one column is
 * projected, otherwise an {@code Object[]} per row. */
private static RowConverter<?> converter(List<CsvFieldType> fieldTypes,
List<Integer> fields) {
if (fields.size() == 1) {
final int field = fields.get(0);
return new SingleColumnRowConverter(fieldTypes.get(field), field);
} else {
return arrayConverter(fieldTypes, fields, false);
}
}
/** Returns a converter that yields one {@code Object[]} per row for the given
 * projected fields; in stream mode a leading ROWTIME value is prepended. */
public static RowConverter<Object[]> arrayConverter(
List<CsvFieldType> fieldTypes, List<Integer> fields, boolean stream) {
return new ArrayRowConverter(fieldTypes, fields, stream);
}
/** Deduces the names and types of a table's columns by reading the first line
 * of a CSV file. */
static RelDataType deduceRowType(JavaTypeFactory typeFactory, Source source,
List<CsvFieldType> fieldTypes) {
return deduceRowType(typeFactory, source, fieldTypes, false);
}
/** Deduces the names and types of a table's columns by reading the first line
 * of a CSV file. Header cells use the {@code name:type} convention; a missing
 * or unknown type falls back to VARCHAR. For streaming sources a leading
 * ROWTIME timestamp column is prepended. When {@code fieldTypes} is non-null
 * it is populated with one entry (possibly null) per column as a side effect. */
public static RelDataType deduceRowType(JavaTypeFactory typeFactory,
Source source, List<CsvFieldType> fieldTypes, Boolean stream) {
final List<RelDataType> types = new ArrayList<>();
final List<String> names = new ArrayList<>();
if (stream) {
names.add(FileSchemaFactory.ROWTIME_COLUMN_NAME);
types.add(typeFactory.createSqlType(SqlTypeName.TIMESTAMP));
}
try (CSVReader reader = openCsv(source)) {
String[] strings = reader.readNext();
if (strings == null) {
// Empty file: synthesize a single boolean column so the table still has a type.
strings = new String[]{"EmptyFileHasNoColumns:boolean"};
}
for (String string : strings) {
final String name;
final CsvFieldType fieldType;
final int colon = string.indexOf(':');
if (colon >= 0) {
name = string.substring(0, colon);
String typeString = string.substring(colon + 1);
fieldType = CsvFieldType.of(typeString);
if (fieldType == null) {
System.out.println("WARNING: Found unknown type: "
+ typeString + " in file: " + source.path()
+ " for column: " + name
+ ". Will assume the type of column is string");
}
} else {
// No ":type" suffix; type stays unknown and defaults to VARCHAR below.
name = string;
fieldType = null;
}
final RelDataType type;
if (fieldType == null) {
type = typeFactory.createSqlType(SqlTypeName.VARCHAR);
} else {
type = fieldType.toType(typeFactory);
}
names.add(name);
types.add(type);
if (fieldTypes != null) {
fieldTypes.add(fieldType);
}
}
} catch (IOException e) {
// ignore
// NOTE(review): an unreadable source silently degrades to the single
// "line" VARCHAR column below -- confirm this best-effort behavior is intended.
}
if (names.isEmpty()) {
names.add("line");
types.add(typeFactory.createSqlType(SqlTypeName.VARCHAR));
}
return typeFactory.createStructType(Pair.zip(names, types));
}
/** Opens a CSVReader over the source; caller is responsible for closing it. */
static CSVReader openCsv(Source source) throws IOException {
Objects.requireNonNull(source, "source");
return new CSVReader(source.reader());
}
@Override public E current() {
return current;
}
/** Advances to the next row that passes the filters. In stream mode a null
 * read means "no data yet" and the thread sleeps before retrying; otherwise
 * null means end-of-file and the reader is closed. Returns false on
 * cancellation or exhaustion. */
@Override public boolean moveNext() {
try {
outer:
for (;;) {
if (cancelFlag.get()) {
return false;
}
final String[] strings = reader.readNext();
if (strings == null) {
if (reader instanceof CsvStreamReader) {
try {
Thread.sleep(CsvStreamReader.DEFAULT_MONITOR_DELAY);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
continue;
}
current = null;
reader.close();
return false;
}
if (filterValues != null) {
// Labeled continue: a single mismatching column rejects the whole row.
for (int i = 0; i < strings.length; i++) {
String filterValue = filterValues.get(i);
if (filterValue != null) {
if (!filterValue.equals(strings[i])) {
continue outer;
}
}
}
}
current = rowConverter.convertRow(strings);
return true;
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Forward-only cursor; rewinding a CSV stream is not supported.
@Override public void reset() {
throw new UnsupportedOperationException();
}
@Override public void close() {
try {
reader.close();
} catch (IOException e) {
throw new RuntimeException("Error closing CSV reader", e);
}
}
/** Returns an array of integers {0, ..., n - 1}. */
public static int[] identityList(int n) {
int[] integers = new int[n];
for (int i = 0; i < n; i++) {
integers[i] = i;
}
return integers;
}
/** Row converter.
 *
 * @param <E> element type */
abstract static class RowConverter<E> {
abstract E convertRow(String[] rows);
/** Converts one CSV cell to a Java value for the given field type.
 * Empty strings become null for every non-string type. DATE is returned
 * as days since epoch, TIME as milliseconds since midnight (GMT), and
 * TIMESTAMP as milliseconds since epoch; unparseable temporals yield null. */
@SuppressWarnings("JdkObsolete")
protected Object convert(CsvFieldType fieldType, String string) {
if (fieldType == null) {
return string;
}
switch (fieldType) {
case BOOLEAN:
if (string.length() == 0) {
return null;
}
return Boolean.parseBoolean(string);
case BYTE:
if (string.length() == 0) {
return null;
}
return Byte.parseByte(string);
case SHORT:
if (string.length() == 0) {
return null;
}
return Short.parseShort(string);
case INT:
if (string.length() == 0) {
return null;
}
return Integer.parseInt(string);
case LONG:
if (string.length() == 0) {
return null;
}
return Long.parseLong(string);
case FLOAT:
if (string.length() == 0) {
return null;
}
return Float.parseFloat(string);
case DOUBLE:
if (string.length() == 0) {
return null;
}
return Double.parseDouble(string);
case DATE:
if (string.length() == 0) {
return null;
}
try {
Date date = TIME_FORMAT_DATE.parse(string);
// Days since epoch, per Calcite's internal DATE representation.
return (int) (date.getTime() / DateTimeUtils.MILLIS_PER_DAY);
} catch (ParseException e) {
return null;
}
case TIME:
if (string.length() == 0) {
return null;
}
try {
Date date = TIME_FORMAT_TIME.parse(string);
// Milliseconds since midnight GMT.
return (int) date.getTime();
} catch (ParseException e) {
return null;
}
case TIMESTAMP:
if (string.length() == 0) {
return null;
}
try {
Date date = TIME_FORMAT_TIMESTAMP.parse(string);
// Milliseconds since epoch.
return date.getTime();
} catch (ParseException e) {
return null;
}
case STRING:
default:
return string;
}
}
}
/** Array row converter. */
static class ArrayRowConverter extends RowConverter<Object[]> {
/** Field types. List must not be null, but any element may be null. */
private final List<CsvFieldType> fieldTypes;
// Ordinals of the source columns to project, in output order.
private final ImmutableIntList fields;
/** Whether the row to convert is from a stream. */
private final boolean stream;
ArrayRowConverter(List<CsvFieldType> fieldTypes, List<Integer> fields,
boolean stream) {
this.fieldTypes = ImmutableNullableList.copyOf(fieldTypes);
this.fields = ImmutableIntList.copyOf(fields);
this.stream = stream;
}
@Override public Object[] convertRow(String[] strings) {
if (stream) {
return convertStreamRow(strings);
} else {
return convertNormalRow(strings);
}
}
/** Converts a row by projecting and type-converting each requested field. */
public Object[] convertNormalRow(String[] strings) {
final Object[] objects = new Object[fields.size()];
for (int i = 0; i < fields.size(); i++) {
int field = fields.get(i);
objects[i] = convert(fieldTypes.get(field), strings[field]);
}
return objects;
}
/** Like convertNormalRow, but prepends the current wall-clock time as the
 * ROWTIME column (slot 0) for streaming tables. */
public Object[] convertStreamRow(String[] strings) {
final Object[] objects = new Object[fields.size() + 1];
objects[0] = System.currentTimeMillis();
for (int i = 0; i < fields.size(); i++) {
int field = fields.get(i);
objects[i + 1] = convert(fieldTypes.get(field), strings[field]);
}
return objects;
}
}
/** Single column row converter. */
private static class SingleColumnRowConverter extends RowConverter<Object> {
private final CsvFieldType fieldType;
private final int fieldIndex;
private SingleColumnRowConverter(CsvFieldType fieldType, int fieldIndex) {
this.fieldType = fieldType;
this.fieldIndex = fieldIndex;
}
// Projects exactly one column, returning the converted scalar (not an array).
@Override public Object convertRow(String[] strings) {
return convert(fieldType, strings[fieldIndex]);
}
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* ResetSnapshotAttributesGroup.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* ResetSnapshotAttributesGroup bean class
*/
public class ResetSnapshotAttributesGroup
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = ResetSnapshotAttributesGroup
Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
Namespace Prefix = ns1
*/
// NOTE(review): Axis2 ADB auto-generated bean -- hand edits will be discarded
// if the stub is regenerated from the WSDL; keep changes to comments only.
/** Returns the canonical "ns1" prefix for the EC2 namespace, or a fresh
 * unique prefix for any other namespace. */
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/** Whenever a new property is set ensure all others are unset.
 * This type is an XSD choice group: there can be only one selected
 * property and the last one set wins.
 */
private void clearAllSettingTrackers() {
localCreateVolumePermissionTracker = false;
}
/**
* field for CreateVolumePermission
*/
protected com.amazon.ec2.EmptyElementType localCreateVolumePermission ;
/* This tracker boolean will be used to detect whether the user called the set method
* for this attribute. It will be used to determine whether to include this field
* in the serialized XML
*/
protected boolean localCreateVolumePermissionTracker = false ;
/**
* Auto generated getter method
* @return com.amazon.ec2.EmptyElementType
*/
public com.amazon.ec2.EmptyElementType getCreateVolumePermission(){
return localCreateVolumePermission;
}
/**
* Auto generated setter method.
* Setting a non-null value selects this choice branch for serialization;
* setting null deselects it.
* @param param CreateVolumePermission
*/
public void setCreateVolumePermission(com.amazon.ec2.EmptyElementType param){
clearAllSettingTrackers();
if (param != null){
//update the setting tracker
localCreateVolumePermissionTracker = true;
} else {
localCreateVolumePermissionTracker = false;
}
this.localCreateVolumePermission=param;
}
/**
* isReaderMTOMAware
* @return true if the reader supports MTOM
*/
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
// Reader does not recognise the property; treat as not MTOM-aware.
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
/**
* Wraps this bean in a lazily-serialized OM element.
* @param parentQName
* @param factory
* @return org.apache.axiom.om.OMElement
*/
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
ResetSnapshotAttributesGroup.this.serialize(parentQName,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
parentQName,factory,dataSource);
}
/** Serializes this bean without emitting an xsi:type attribute. */
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
/** Serializes this bean; when serializeType is true an xsi:type attribute is
 * written first. Only the currently selected choice property (tracked by its
 * tracker flag) is emitted. */
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
if (serializeType){
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":ResetSnapshotAttributesGroup",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"ResetSnapshotAttributesGroup",
xmlWriter);
}
}
if (localCreateVolumePermissionTracker){
if (localCreateVolumePermission==null){
throw new org.apache.axis2.databinding.ADBException("createVolumePermission cannot be null!!");
}
localCreateVolumePermission.serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","createVolumePermission"),
factory,xmlWriter);
}
}
/**
* Util method to write an attribute with the ns prefix
*/
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
* Util method to write a QName-valued attribute, registering a prefix for
* the QName's namespace as needed
*/
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
* method to handle Qnames
*/
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
/** Writes a space-separated list of QNames as character data. */
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until last moment since it is not possible to write any
// namespace data after writing the character data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
* Register a namespace prefix, generating one (and retrying on collisions)
* when the namespace is not yet bound on the writer
*/
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
* databinding method to get an XML representation of this object.
* Returns a pull parser over the (QName, value) pairs of the selected
* choice property; throws if the tracker is set but the value is null.
*/
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
java.util.ArrayList elementList = new java.util.ArrayList();
java.util.ArrayList attribList = new java.util.ArrayList();
if (localCreateVolumePermissionTracker){
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
"createVolumePermission"));
if (localCreateVolumePermission==null){
throw new org.apache.axis2.databinding.ADBException("createVolumePermission cannot be null!!");
}
elementList.add(localCreateVolumePermission);
}
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
* Factory class that keeps the parse method
*/
public static class Factory{
/**
* static method to create the object
* Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
* If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
* Postcondition: If this object is an element, the reader is positioned at its end element
* If this object is a complex type, the reader is positioned at the end element of its outer element
*/
public static ResetSnapshotAttributesGroup parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
ResetSnapshotAttributesGroup object =
new ResetSnapshotAttributesGroup();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
// Skip ignorable events until positioned on a start or end element.
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","createVolumePermission").equals(reader.getName())){
object.setCreateVolumePermission(com.amazon.ec2.EmptyElementType.Factory.parse(reader));
reader.next();
} // End of if for expected property start element
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.aggregation;
import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.Event;
import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.event.stream.StreamEventFactory;
import io.siddhi.core.query.StoreQueryRuntime;
import io.siddhi.core.table.Table;
import io.siddhi.core.util.IncrementalTimeConverterUtil;
import io.siddhi.core.util.parser.StoreQueryParser;
import io.siddhi.core.window.Window;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.execution.query.StoreQuery;
import io.siddhi.query.api.execution.query.input.store.InputStore;
import io.siddhi.query.api.execution.query.selection.OrderByAttribute;
import io.siddhi.query.api.execution.query.selection.Selector;
import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.condition.Compare;
import java.util.List;
import java.util.Map;
import static io.siddhi.core.util.SiddhiConstants.AGG_SHARD_ID_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_START_TIMESTAMP_COL;
/**
* This class is used to recreate in-memory data from the tables (Such as RDBMS) in incremental aggregation.
* This ensures that the aggregation calculations are done correctly in case of server restart
*/
public class IncrementalExecutorsInitialiser {
private final List<TimePeriod.Duration> incrementalDurations;
private final Map<TimePeriod.Duration, Table> aggregationTables;
private final Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap;
private final boolean isDistributed;
private final String shardId;
private final StreamEventFactory streamEventFactory;
private final SiddhiAppContext siddhiAppContext;
private final Map<String, Table> tableMap;
private final Map<String, Window> windowMap;
private final Map<String, AggregationRuntime> aggregationMap;
private boolean isInitialised;
/**
 * Creates an initialiser that can replay persisted aggregation rows back into
 * the in-memory incremental executors after a server restart.
 *
 * @param incrementalDurations   granularities in ascending order; index 0 is the
 *                               root (finest) duration, the last entry the coarsest
 * @param aggregationTables      backing table per duration
 * @param incrementalExecutorMap executor per duration to replay events into
 * @param isDistributed          whether this node runs in a distributed deployment
 * @param shardId                shard identifier (used with isDistributed)
 * @param siddhiAppContext       app context passed to store-query parsing
 * @param metaStreamEvent        meta event used to build the stream event factory
 * @param tableMap               tables visible to the recreation store queries
 * @param windowMap              windows visible to the recreation store queries
 * @param aggregationMap         aggregations visible to the recreation store queries
 */
public IncrementalExecutorsInitialiser(List<TimePeriod.Duration> incrementalDurations,
Map<TimePeriod.Duration, Table> aggregationTables,
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
boolean isDistributed, String shardId, SiddhiAppContext siddhiAppContext,
MetaStreamEvent metaStreamEvent, Map<String, Table> tableMap,
Map<String, Window> windowMap,
Map<String, AggregationRuntime> aggregationMap) {
this.incrementalDurations = incrementalDurations;
this.aggregationTables = aggregationTables;
this.incrementalExecutorMap = incrementalExecutorMap;
this.isDistributed = isDistributed;
this.shardId = shardId;
this.streamEventFactory = new StreamEventFactory(metaStreamEvent);
this.siddhiAppContext = siddhiAppContext;
this.tableMap = tableMap;
this.windowMap = windowMap;
this.aggregationMap = aggregationMap;
// Replay has not run yet; initialiseExecutors() flips this once complete.
this.isInitialised = false;
}
/**
 * Replays persisted aggregation data into the in-memory executors, walking the
 * durations from coarsest to finest so each executor resumes from where the
 * tables left off. Idempotent: runs at most once until the flag is reset.
 */
public synchronized void initialiseExecutors() {
if (this.isInitialised) {
// Only cleared when executors change from reading to processing state in one node deployment
return;
}
Event[] events;
Long endOFLatestEventTimestamp = null;
// Get max(AGG_TIMESTAMP) from table corresponding to max duration
Table tableForMaxDuration = aggregationTables.get(incrementalDurations.get(incrementalDurations.size() - 1));
StoreQuery storeQuery = getStoreQuery(tableForMaxDuration, true, endOFLatestEventTimestamp);
storeQuery.setType(StoreQuery.StoreQueryType.FIND);
StoreQueryRuntime storeQueryRuntime = StoreQueryParser.parse(storeQuery, siddhiAppContext, tableMap, windowMap,
aggregationMap);
// Get latest event timestamp in tableForMaxDuration and get the end time of the aggregation record
events = storeQueryRuntime.execute();
if (events != null) {
// NOTE(review): assumes a non-null result array is non-empty; an empty
// array would throw here -- TODO confirm StoreQueryRuntime.execute() contract.
Long lastData = (Long) events[events.length - 1].getData(0);
endOFLatestEventTimestamp = IncrementalTimeConverterUtil
.getNextEmitTime(lastData, incrementalDurations.get(incrementalDurations.size() - 1), null);
}
// Walk from the coarsest duration down to (but excluding) the root duration,
// replaying each finer table's rows into the corresponding executor.
for (int i = incrementalDurations.size() - 1; i > 0; i--) {
TimePeriod.Duration recreateForDuration = incrementalDurations.get(i);
IncrementalExecutor incrementalExecutor = incrementalExecutorMap.get(recreateForDuration);
// Get the table previous to the duration for which we need to recreate (e.g. if we want to recreate
// for minute duration, take the second table [provided that aggregation is done for seconds])
// This lookup is filtered by endOFLatestEventTimestamp
Table recreateFromTable = aggregationTables.get(incrementalDurations.get(i - 1));
storeQuery = getStoreQuery(recreateFromTable, false, endOFLatestEventTimestamp);
storeQuery.setType(StoreQuery.StoreQueryType.FIND);
storeQueryRuntime = StoreQueryParser.parse(storeQuery, siddhiAppContext, tableMap, windowMap,
aggregationMap);
events = storeQueryRuntime.execute();
if (events != null) {
// Tighten the timestamp cutoff for the next (finer) iteration.
long referenceToNextLatestEvent = (Long) events[events.length - 1].getData(0);
endOFLatestEventTimestamp = IncrementalTimeConverterUtil
.getNextEmitTime(referenceToNextLatestEvent, incrementalDurations.get(i - 1), null);
// Wrap the stored rows as stream events and push them through the executor.
ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<>(false);
for (Event event : events) {
StreamEvent streamEvent = streamEventFactory.newInstance();
streamEvent.setOutputData(event.getData());
complexEventChunk.add(streamEvent);
}
incrementalExecutor.execute(complexEventChunk);
if (i == 1) {
// Last iteration: also align the root executor's emit time with the
// latest event recovered from the finest table.
TimePeriod.Duration rootDuration = incrementalDurations.get(0);
IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(rootDuration);
long emitTimeOfLatestEventInTable = IncrementalTimeConverterUtil.getNextEmitTime(
referenceToNextLatestEvent, rootDuration, null);
rootIncrementalExecutor.setEmitTime(emitTimeOfLatestEventInTable);
}
}
}
this.isInitialised = true;
}
/**
 * Builds the on-demand store query used during state recreation.
 *
 * <p>For the largest granularity only the single newest record is wanted
 * (descending order, limit 1); otherwise all records are read oldest-first.
 * The store is additionally filtered by shard id when running distributed,
 * and by {@code endOFLatestEventTimestamp} when a time bound is known.
 */
private StoreQuery getStoreQuery(Table table, boolean isLargestGranularity, Long endOFLatestEventTimestamp) {
    String tableId = table.getTableDefinition().getId();

    Selector selector;
    if (isLargestGranularity) {
        // Newest record only.
        selector = Selector.selector()
                .orderBy(Expression.variable(AGG_START_TIMESTAMP_COL), OrderByAttribute.Order.DESC)
                .limit(Expression.value(1));
    } else {
        // All records, oldest first.
        selector = Selector.selector().orderBy(Expression.variable(AGG_START_TIMESTAMP_COL));
    }

    // Assemble the optional filter conditions, then combine whichever apply.
    Expression shardCondition = null;
    if (this.isDistributed) {
        shardCondition = Expression.compare(
                Expression.variable(AGG_SHARD_ID_COL),
                Compare.Operator.EQUAL,
                Expression.value(shardId));
    }
    Expression timestampCondition = null;
    if (endOFLatestEventTimestamp != null) {
        timestampCondition = Expression.compare(
                Expression.variable(AGG_START_TIMESTAMP_COL),
                Compare.Operator.GREATER_THAN_EQUAL,
                Expression.value(endOFLatestEventTimestamp));
    }

    Expression condition;
    if (shardCondition != null && timestampCondition != null) {
        condition = Expression.and(shardCondition, timestampCondition);
    } else if (shardCondition != null) {
        condition = shardCondition;
    } else {
        condition = timestampCondition;
    }

    InputStore inputStore;
    if (condition == null) {
        inputStore = InputStore.store(tableId);
    } else {
        inputStore = InputStore.store(tableId).on(condition);
    }
    return StoreQuery.query().from(inputStore).select(selector);
}
}
| |
/*
* This file is part of Mixin, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.asm.mixin.injection.modify;
import java.util.Collection;
import java.util.ListIterator;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.InsnList;
import org.objectweb.asm.tree.VarInsnNode;
import org.spongepowered.asm.mixin.injection.InjectionPoint.AtCode;
import org.spongepowered.asm.mixin.injection.ModifyVariable;
import org.spongepowered.asm.mixin.injection.modify.LocalVariableDiscriminator.Context;
import org.spongepowered.asm.mixin.injection.modify.ModifyVariableInjector.LocalVariableInjectionPoint;
import org.spongepowered.asm.mixin.injection.struct.InjectionInfo;
import org.spongepowered.asm.mixin.injection.struct.InjectionPointData;
import org.spongepowered.asm.mixin.injection.struct.Target;
/**
* <p>This injection point is a companion for the {@link ModifyVariable}
* injector which searches for LOAD operations which match the local variables
* described by the injector's defined discriminators.</p>
*
 * <p>This allows consumers to specify an injection immediately before a
* local variable is accessed in a method. Specify an <tt>ordinal</tt> of <tt>n
* </tt> to match the <em>n + 1<sup>th</sup></em> access of the variable in
* question.</p>
*
* <dl>
* <dt>ordinal</dt>
* <dd>The ordinal position of the LOAD opcode for the matching local variable
* to search for, if not specified then the injection point returns <em>all
* </em> opcodes for which the parent annotation's discriminators match. The
 * default value is <b>-1</b> which suppresses ordinal checking.</dd>
* </dl>
*
* <p>Example:</p>
* <blockquote><pre>
* @ModifyVariable(
* method = "md",
* ordinal = 1,
* at = @At(
* value = "LOAD",
* ordinal = 0
* )
* )</pre>
* </blockquote>
* <p>Note that if <em>value</em> is the only parameter specified, it can be
* omitted:</p>
* <blockquote><pre>
* @At("LOAD")</pre>
* </blockquote>
*/
@AtCode("LOAD")
public class BeforeLoadLocal extends LocalVariableInjectionPoint {

    /**
     * Keeps track of state within {@link #find}: the current ordinal, whether
     * any opcode matched, and a check deferred to the following instruction.
     */
    class SearchState {

        /**
         * Sentinel meaning "no local resolved"; kept distinct from -1 so that
         * the permissive-print path in {@link #check} can be distinguished.
         */
        private static final int INVALID_IMPLICIT = -2;

        /**
         * Print LVT search, be permissive
         */
        private final boolean print;

        /**
         * The current ordinal
         */
        private int currentOrdinal = 0;

        /**
         * Flag to defer a {@link #check} to the next opcode, to honour the
         * "after" semantics of {@link AfterStoreLocal}.
         */
        private boolean pendingCheck = false;

        /**
         * True if one or more opcodes was matched
         */
        private boolean found = false;

        /**
         * Var node, captured for when deferring processing to the next opcode
         */
        private VarInsnNode varNode;

        SearchState() {
            this.print = BeforeLoadLocal.this.discriminator.printLVT();
        }

        /** Returns true if at least one matching opcode was collected. */
        boolean success() {
            return this.found;
        }

        /** Returns true if a check was deferred to the next instruction. */
        boolean isPendingCheck() {
            return this.pendingCheck;
        }

        /** Defers the check to the next instruction (AFTER semantics). */
        void setPendingCheck() {
            this.pendingCheck = true;
        }

        /** Captures the var node so a deferred check can inspect it later. */
        void register(VarInsnNode node) {
            this.varNode = node;
        }

        /**
         * Resolves the discriminated local for the captured var node and, if it
         * matches and the ordinal constraint is satisfied, records {@code insn}
         * as an injection candidate.
         *
         * @param info owning injection info (for the discriminator context)
         * @param target method being injected into
         * @param nodes collection that matching instructions are added to
         * @param insn instruction to record on a successful match
         */
        void check(InjectionInfo info, Target target, Collection<AbstractInsnNode> nodes, AbstractInsnNode insn) {
            Context context = new Context(info, BeforeLoadLocal.this.returnType, BeforeLoadLocal.this.discriminator.isArgsOnly(), target, insn);
            int local = SearchState.INVALID_IMPLICIT;
            try {
                local = BeforeLoadLocal.this.discriminator.findLocal(context);
            } catch (InvalidImplicitDiscriminatorException ex) {
                // Report but carry on; local stays INVALID_IMPLICIT below
                BeforeLoadLocal.this.addMessage("%s has invalid IMPLICIT discriminator for opcode %d in %s: %s",
                        BeforeLoadLocal.this.toString(context), target.indexOf(insn), target, ex.getMessage());
            }
            this.pendingCheck = false;
            // Not the discriminated local: bail out unless we're in permissive
            // print mode and the discriminator failed to resolve at all
            if (local != this.varNode.var && (local > SearchState.INVALID_IMPLICIT || !this.print)) {
                this.varNode = null;
                return;
            }
            // Ordinal of -1 matches every occurrence
            if (BeforeLoadLocal.this.ordinal == -1 || BeforeLoadLocal.this.ordinal == this.currentOrdinal) {
                nodes.add(insn);
                this.found = true;
            }
            this.currentOrdinal++;
            this.varNode = null;
        }
    }

    /**
     * Return type of the handler, also the type of the local variable we're
     * interested in
     */
    protected final Type returnType;

    /**
     * Discriminator, parsed from parent annotation
     */
    protected final LocalVariableDiscriminator discriminator;

    /**
     * Target opcode, inflected from return type
     */
    protected final int opcode;

    /**
     * Target ordinal
     */
    protected final int ordinal;

    /**
     * True if this injection point should capture the opcode after a matching
     * opcode, used by {@link AfterStoreLocal}.
     */
    private boolean opcodeAfter;

    protected BeforeLoadLocal(InjectionPointData data) {
        this(data, Opcodes.ILOAD, false);
    }

    protected BeforeLoadLocal(InjectionPointData data, int opcode, boolean opcodeAfter) {
        super(data);
        this.returnType = data.getMethodReturnType();
        this.discriminator = data.getLocalVariableDiscriminator();
        // Type.getOpcode adapts the base opcode to the handler's return type
        // (e.g. ILOAD -> FLOAD for a float-typed handler)
        this.opcode = data.getOpcode(this.returnType.getOpcode(opcode));
        this.ordinal = data.getOrdinal();
        this.opcodeAfter = opcodeAfter;
    }

    /**
     * Scans the target method for LOAD (or STORE, see subclass) instructions
     * of the inflected opcode, checking each candidate against the
     * discriminator either immediately or (for AFTER semantics) at the next
     * instruction.
     *
     * @return true if at least one injection point was found
     */
    @Override
    boolean find(InjectionInfo info, InsnList insns, Collection<AbstractInsnNode> nodes, Target target) {
        SearchState state = new SearchState();
        ListIterator<AbstractInsnNode> iter = insns.iterator();
        while (iter.hasNext()) {
            AbstractInsnNode insn = iter.next();
            if (state.isPendingCheck()) {
                // Deferred from the previous (matching) instruction
                state.check(info, target, nodes, insn);
            } else if (insn instanceof VarInsnNode && insn.getOpcode() == this.opcode && (this.ordinal == -1 || !state.success())) {
                state.register((VarInsnNode)insn);
                if (this.opcodeAfter) {
                    state.setPendingCheck();
                } else {
                    state.check(info, target, nodes, insn);
                }
            }
        }
        return state.success();
    }

    // No synthetic — override presumably exists so the inner class can call
    // this without a synthetic accessor method; NOTE(review): confirm intent
    @Override
    protected void addMessage(String format, Object... args) {
        super.addMessage(format, args);
    }

    @Override
    public String toString() {
        return String.format("@At(\"%s\" %s)", this.getAtCode(), this.discriminator.toString());
    }

    /** Context-aware variant of {@link #toString()} used in error messages. */
    public String toString(Context context) {
        return String.format("@At(\"%s\" %s)", this.getAtCode(), this.discriminator.toString(context));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws2.msk;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import software.amazon.awssdk.core.Protocol;
import software.amazon.awssdk.services.kafka.KafkaClient;
@UriParams
public class MSK2Configuration implements Cloneable {

    @UriPath(description = "Logical name")
    @Metadata(required = true)
    private String label;
    @UriParam
    @Metadata(autowired = true)
    private KafkaClient mskClient;
    @UriParam(label = "security", secret = true)
    private String accessKey;
    @UriParam(label = "security", secret = true)
    private String secretKey;
    @UriParam
    @Metadata(required = true)
    private MSK2Operations operation;
    @UriParam(enums = "HTTP,HTTPS", defaultValue = "HTTPS")
    private Protocol proxyProtocol = Protocol.HTTPS;
    @UriParam
    private String proxyHost;
    @UriParam
    private Integer proxyPort;
    @UriParam
    private String region;
    @UriParam(defaultValue = "false")
    private boolean pojoRequest;
    @UriParam(defaultValue = "false")
    private boolean trustAllCertificates;
    @UriParam(defaultValue = "false")
    private boolean overrideEndpoint;
    @UriParam
    private String uriEndpointOverride;
    @UriParam(defaultValue = "false")
    private boolean useDefaultCredentialsProvider;

    public KafkaClient getMskClient() {
        return this.mskClient;
    }

    /**
     * To use an existing configured AWS MSK client
     */
    public void setMskClient(KafkaClient mskClient) {
        this.mskClient = mskClient;
    }

    public String getAccessKey() {
        return this.accessKey;
    }

    /**
     * Amazon AWS Access Key
     */
    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public String getSecretKey() {
        return this.secretKey;
    }

    /**
     * Amazon AWS Secret Key
     */
    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public MSK2Operations getOperation() {
        return this.operation;
    }

    /**
     * The operation to perform
     */
    public void setOperation(MSK2Operations operation) {
        this.operation = operation;
    }

    public Protocol getProxyProtocol() {
        return this.proxyProtocol;
    }

    /**
     * To define a proxy protocol when instantiating the MSK client
     */
    public void setProxyProtocol(Protocol proxyProtocol) {
        this.proxyProtocol = proxyProtocol;
    }

    public String getProxyHost() {
        return this.proxyHost;
    }

    /**
     * To define a proxy host when instantiating the MSK client
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
    }

    public Integer getProxyPort() {
        return this.proxyPort;
    }

    /**
     * To define a proxy port when instantiating the MSK client
     */
    public void setProxyPort(Integer proxyPort) {
        this.proxyPort = proxyPort;
    }

    public String getRegion() {
        return this.region;
    }

    /**
     * The region in which the MSK client needs to work. When using this parameter, the configuration will expect the
     * lowercase name of the region (for example ap-east-1). You'll need to use the name Region.EU_WEST_1.id()
     */
    public void setRegion(String region) {
        this.region = region;
    }

    public boolean isPojoRequest() {
        return this.pojoRequest;
    }

    /**
     * If we want to use a POJO request as body or not
     */
    public void setPojoRequest(boolean pojoRequest) {
        this.pojoRequest = pojoRequest;
    }

    public boolean isTrustAllCertificates() {
        return this.trustAllCertificates;
    }

    /**
     * If we want to trust all certificates in case of overriding the endpoint
     */
    public void setTrustAllCertificates(boolean trustAllCertificates) {
        this.trustAllCertificates = trustAllCertificates;
    }

    public boolean isOverrideEndpoint() {
        return this.overrideEndpoint;
    }

    /**
     * Set the need for overriding the endpoint. This option needs to be used in combination with the
     * uriEndpointOverride option
     */
    public void setOverrideEndpoint(boolean overrideEndpoint) {
        this.overrideEndpoint = overrideEndpoint;
    }

    public String getUriEndpointOverride() {
        return this.uriEndpointOverride;
    }

    /**
     * Set the overriding uri endpoint. This option needs to be used in combination with the overrideEndpoint option
     */
    public void setUriEndpointOverride(String uriEndpointOverride) {
        this.uriEndpointOverride = uriEndpointOverride;
    }

    /**
     * Set whether the Kafka client should expect to load credentials through a default credentials provider or to
     * expect static credentials to be passed in.
     */
    public void setUseDefaultCredentialsProvider(Boolean useDefaultCredentialsProvider) {
        this.useDefaultCredentialsProvider = useDefaultCredentialsProvider;
    }

    public Boolean isUseDefaultCredentialsProvider() {
        return this.useDefaultCredentialsProvider;
    }

    // *************************************************
    // Copy support
    // *************************************************

    /**
     * Returns a shallow copy of this configuration (field-by-field, via
     * {@link Object#clone()}).
     */
    public MSK2Configuration copy() {
        try {
            return (MSK2Configuration) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable
            throw new RuntimeCamelException(e);
        }
    }
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.gallery3d.glrenderer;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.graphics.RectF;
import javax.microedition.khronos.opengles.GL11;
/**
 * GLCanvas gives a convenient interface to draw using OpenGL.
 *
 * <p>When a rectangle is specified in this interface, it means the region
 * [x, x+width) * [y, y+height).
 */
public interface GLCanvas {

    // Flags for save(int); interface fields are implicitly public static final.
    int SAVE_FLAG_ALL = 0xFFFFFFFF;
    int SAVE_FLAG_ALPHA = 0x01;
    int SAVE_FLAG_MATRIX = 0x02;

    GLId getGLId();

    /**
     * Tells GLCanvas the size of the underlying GL surface. This should be
     * called before first drawing and when the size of GL surface is changed.
     * This is called by GLRoot and should not be called by the clients who
     * only want to draw on the GLCanvas. Both width and height must be
     * nonnegative.
     */
    void setSize(int width, int height);

    /** Clears the drawing buffers. This should only be used by GLRoot. */
    void clearBuffer();

    /** Clears the drawing buffers to the given ARGB color. */
    void clearBuffer(float[] argb);

    /** Sets the current alpha; alpha must be in [0, 1]. */
    void setAlpha(float alpha);

    /** Gets the current alpha. */
    float getAlpha();

    /** (current alpha) = (current alpha) * alpha */
    void multiplyAlpha(float alpha);

    // Methods below change the current transform matrix.

    void translate(float x, float y, float z);

    void translate(float x, float y);

    void scale(float sx, float sy, float sz);

    void rotate(float angle, float x, float y, float z);

    void multiplyMatrix(float[] mMatrix, int offset);

    /**
     * Pushes the configuration state (matrix, and alpha) onto a private stack.
     */
    void save();

    /** Same as save(), but only saves those specified in saveFlags. */
    void save(int saveFlags);

    /**
     * Pops from the top of the stack as current configuration state (matrix,
     * alpha, and clip). This call balances a previous call to save(), and is
     * used to remove all modifications to the configuration state since the
     * last save call.
     */
    void restore();

    /**
     * Draws a line using the specified paint from (x1, y1) to (x2, y2).
     * (Both end points are included.)
     */
    void drawLine(float x1, float y1, float x2, float y2, GLPaint paint);

    /**
     * Draws a rectangle using the specified paint from (x1, y1) to (x2, y2).
     * (Both end points are included.)
     */
    void drawRect(float x1, float y1, float x2, float y2, GLPaint paint);

    /** Fills the specified rectangle with the specified color. */
    void fillRect(float x, float y, float width, float height, int color);

    /** Draws a texture to the specified rectangle. */
    void drawTexture(BasicTexture texture, int x, int y, int width, int height);

    void drawMesh(BasicTexture tex, int x, int y, int xyBuffer, int uvBuffer,
            int indexBuffer, int indexCount);

    /** Draws the source rectangle part of the texture to the target rectangle. */
    void drawTexture(BasicTexture texture, RectF source, RectF target);

    /** Draws a texture with a specified texture transform. */
    void drawTexture(BasicTexture texture, float[] mTextureTransform, int x, int y, int w,
            int h);

    /**
     * Draws two textures to the specified rectangle. The actual texture used is
     * from * (1 - ratio) + to * ratio. The two textures must have the same
     * size.
     */
    void drawMixed(BasicTexture from, int toColor, float ratio, int x, int y, int w, int h);

    /**
     * Draws a region of a texture and a specified color to the specified
     * rectangle. The actual color used is from * (1 - ratio) + to * ratio.
     * The region of the texture is defined by parameter "src". The target
     * rectangle is specified by parameter "target".
     */
    void drawMixed(BasicTexture from, int toColor, float ratio, RectF src, RectF target);

    /**
     * Unloads the specified texture from the canvas. The resource allocated
     * to draw the texture will be released. The specified texture will return
     * to the unloaded state. This function should be called only from
     * BasicTexture or its descendant.
     */
    boolean unloadTexture(BasicTexture texture);

    /** Deletes the specified buffer object, similar to unloadTexture. */
    void deleteBuffer(int bufferId);

    /**
     * Deletes the textures and buffers in GL side. This function should only
     * be called in the GL thread.
     */
    void deleteRecycledResources();

    /** Dumps statistics information and clears the counters. For debug only. */
    void dumpStatisticsAndClear();

    void beginRenderTarget(RawTexture texture);

    void endRenderTarget();

    /**
     * Sets texture parameters to use GL_CLAMP_TO_EDGE for both
     * GL_TEXTURE_WRAP_S and GL_TEXTURE_WRAP_T. Sets texture parameters to be
     * GL_LINEAR for GL_TEXTURE_MIN_FILTER and GL_TEXTURE_MAG_FILTER.
     * bindTexture() must be called prior to this.
     *
     * @param texture
     *            The texture to set parameters on.
     */
    void setTextureParameters(BasicTexture texture);

    /**
     * Initializes the texture to a size by calling texImage2D on it.
     *
     * @param texture
     *            The texture to initialize the size.
     * @param format
     *            The texture format (e.g. GL_RGBA)
     * @param type
     *            The texture type (e.g. GL_UNSIGNED_BYTE)
     */
    void initializeTextureSize(BasicTexture texture, int format, int type);

    /**
     * Initializes the texture to a size by calling texImage2D on it.
     *
     * @param texture
     *            The texture to initialize the size.
     * @param bitmap
     *            The bitmap to initialize the bitmap with.
     */
    void initializeTexture(BasicTexture texture, Bitmap bitmap);

    /**
     * Calls glTexSubImage2D to upload a bitmap to the texture.
     *
     * @param texture
     *            The target texture to write to.
     * @param xOffset
     *            Specifies a texel offset in the x direction within the texture
     *            array.
     * @param yOffset
     *            Specifies a texel offset in the y direction within the texture
     *            array.
     * @param format
     *            The texture format (e.g. GL_RGBA)
     * @param type
     *            The texture type (e.g. GL_UNSIGNED_BYTE)
     */
    void texSubImage2D(BasicTexture texture, int xOffset, int yOffset, Bitmap bitmap,
            int format, int type);

    /**
     * Generates buffers and uploads the buffer data.
     *
     * @param buffer
     *            The buffer to upload
     * @return The buffer ID that was generated.
     */
    int uploadBuffer(java.nio.FloatBuffer buffer);

    /**
     * Generates buffers and uploads the element array buffer data.
     *
     * @param buffer
     *            The buffer to upload
     * @return The buffer ID that was generated.
     */
    int uploadBuffer(java.nio.ByteBuffer buffer);

    /**
     * After LightCycle makes GL calls, this method is called to restore the GL
     * configuration to the one expected by GLCanvas.
     */
    void recoverFromLightCycle();

    /**
     * Gets the bounds given by x, y, width, and height as well as the internal
     * matrix state. There is no special handling for non-90-degree rotations.
     * It only considers the lower-left and upper-right corners as the bounds.
     *
     * @param bounds
     *            The output bounds to write to.
     * @param x
     *            The left side of the input rectangle.
     * @param y
     *            The bottom of the input rectangle.
     * @param width
     *            The width of the input rectangle.
     * @param height
     *            The height of the input rectangle.
     */
    void getBounds(Rect bounds, int x, int y, int width, int height);
}
| |
package io.dropwizard.hibernate;
import com.codahale.metrics.MetricRegistry;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.jackson.Jackson;
import io.dropwizard.jersey.DropwizardResourceConfig;
import io.dropwizard.jersey.errors.ErrorMessage;
import io.dropwizard.jersey.jackson.JacksonFeature;
import io.dropwizard.jersey.optional.EmptyOptionalExceptionMapper;
import io.dropwizard.lifecycle.setup.LifecycleEnvironment;
import io.dropwizard.logging.BootstrapLogging;
import io.dropwizard.setup.Environment;
import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.test.JerseyTest;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import javax.annotation.Nullable;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Collections;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Integration test exercising {@code @UnitOfWork} session handling through a
 * real Jersey container backed by an in-memory HSQLDB database seeded with a
 * single row.
 */
public class JerseyIntegrationTest extends JerseyTest {
    static {
        // JerseyTest runs outside a full Dropwizard application, so logging
        // must be bootstrapped manually.
        BootstrapLogging.bootstrap();
    }

    /** Minimal DAO for the Person entity, keyed by name. */
    public static class PersonDAO extends AbstractDAO<Person> {
        public PersonDAO(SessionFactory sessionFactory) {
            super(sessionFactory);
        }

        // get(name) loads by primary key (the name column of the people table)
        public Optional<Person> findByName(String name) {
            return Optional.ofNullable(get(name));
        }

        // Overridden only to widen visibility from protected to public
        @Override
        public Person persist(Person entity) {
            return super.persist(entity);
        }
    }

    /** JAX-RS resource whose methods are wrapped in units of work. */
    @Path("/people/{name}")
    @Produces(MediaType.APPLICATION_JSON)
    public static class PersonResource {
        private final PersonDAO dao;

        public PersonResource(PersonDAO dao) {
            this.dao = dao;
        }

        @GET
        @UnitOfWork(readOnly = true)
        public Optional<Person> find(@PathParam("name") String name) {
            return dao.findByName(name);
        }

        @PUT
        @UnitOfWork
        public void save(Person person) {
            dao.persist(person);
        }
    }

    // Built in configure(); may still be null if setup fails, hence the
    // null check in tearDown().
    @Nullable
    private SessionFactory sessionFactory;

    // setUp/tearDown are re-declared so JUnit 5 annotations drive JerseyTest's
    // framework-agnostic lifecycle methods.
    @Override
    @BeforeEach
    public void setUp() throws Exception {
        super.setUp();
    }

    @Override
    @AfterEach
    public void tearDown() throws Exception {
        super.tearDown();
        if (sessionFactory != null) {
            sessionFactory.close();
        }
    }

    /**
     * Builds the test application: a mocked Dropwizard environment, a real
     * Hibernate SessionFactory over an in-memory HSQLDB (seeded with one
     * person), and a Jersey config with the unit-of-work listener, resource,
     * and exception mappers registered.
     */
    @Override
    protected Application configure() {
        final MetricRegistry metricRegistry = new MetricRegistry();
        final SessionFactoryFactory factory = new SessionFactoryFactory();
        final DataSourceFactory dbConfig = new DataSourceFactory();
        dbConfig.setProperties(Collections.singletonMap("hibernate.jdbc.time_zone", "UTC"));
        final HibernateBundle<?> bundle = mock(HibernateBundle.class);
        final Environment environment = mock(Environment.class);
        final LifecycleEnvironment lifecycleEnvironment = mock(LifecycleEnvironment.class);
        when(environment.lifecycle()).thenReturn(lifecycleEnvironment);
        when(environment.metrics()).thenReturn(metricRegistry);
        // Unique URL per run so tests never share database state
        dbConfig.setUrl("jdbc:hsqldb:mem:DbTest-" + System.nanoTime() + "?hsqldb.translate_dti_types=false");
        dbConfig.setUser("sa");
        dbConfig.setDriverClass("org.hsqldb.jdbcDriver");
        dbConfig.setValidationQuery("SELECT 1 FROM INFORMATION_SCHEMA.SYSTEM_USERS");
        this.sessionFactory = factory.build(bundle,
                                            environment,
                                            dbConfig,
                                            Collections.singletonList(Person.class));
        // Create the schema and seed one known row ("Coda") for the read tests.
        // Note: email is varchar(16), which testSqlExceptionIsHandled relies on
        // to trigger a data-truncation error.
        try (Session session = sessionFactory.openSession()) {
            Transaction transaction = session.beginTransaction();
            session.createNativeQuery("DROP TABLE people IF EXISTS").executeUpdate();
            session.createNativeQuery(
                    "CREATE TABLE people (name varchar(100) primary key, email varchar(16), birthday timestamp with time zone)")
                    .executeUpdate();
            session.createNativeQuery(
                    "INSERT INTO people VALUES ('Coda', 'coda@example.com', '1979-01-02 00:22:00+0:00')")
                    .executeUpdate();
            transaction.commit();
        }
        final DropwizardResourceConfig config = DropwizardResourceConfig.forTesting();
        config.register(new UnitOfWorkApplicationListener("hr-db", sessionFactory));
        config.register(new PersonResource(new PersonDAO(sessionFactory)));
        config.register(new PersistenceExceptionMapper());
        config.register(new JacksonFeature(Jackson.newObjectMapper()));
        config.register(new DataExceptionMapper());
        config.register(new EmptyOptionalExceptionMapper());
        return config;
    }

    @Override
    protected void configureClient(ClientConfig config) {
        // The client needs the same Jackson setup to deserialize Person
        config.register(new JacksonFeature(Jackson.newObjectMapper()));
    }

    // Reads the seeded row back through the resource.
    @Test
    public void findsExistingData() throws Exception {
        final Person coda = target("/people/Coda").request(MediaType.APPLICATION_JSON).get(Person.class);
        assertThat(coda.getName())
                .isEqualTo("Coda");
        assertThat(coda.getEmail())
                .isEqualTo("coda@example.com");
        assertThat(coda.getBirthday())
                .isEqualTo(new DateTime(1979, 1, 2, 0, 22, DateTimeZone.UTC));
    }

    // An empty Optional from the resource maps to a 404 response.
    @Test
    public void doesNotFindMissingData() throws Exception {
        try {
            target("/people/Poof").request(MediaType.APPLICATION_JSON)
                    .get(Person.class);
            failBecauseExceptionWasNotThrown(WebApplicationException.class);
        } catch (WebApplicationException e) {
            assertThat(e.getResponse().getStatus())
                    .isEqualTo(404);
        }
    }

    // PUT then GET round-trips a new row through the unit of work.
    @Test
    public void createsNewData() throws Exception {
        final Person person = new Person();
        person.setName("Hank");
        person.setEmail("hank@example.com");
        person.setBirthday(new DateTime(1971, 3, 14, 19, 12, DateTimeZone.UTC));
        target("/people/Hank").request().put(Entity.entity(person, MediaType.APPLICATION_JSON));
        final Person hank = target("/people/Hank")
                .request(MediaType.APPLICATION_JSON)
                .get(Person.class);
        assertThat(hank.getName())
                .isEqualTo("Hank");
        assertThat(hank.getEmail())
                .isEqualTo("hank@example.com");
        assertThat(hank.getBirthday())
                .isEqualTo(person.getBirthday());
    }

    // The over-long email overflows varchar(16); the DataExceptionMapper
    // should translate the SQL error into a 400 with a JSON error body.
    @Test
    public void testSqlExceptionIsHandled() throws Exception {
        final Person person = new Person();
        person.setName("Jeff");
        person.setEmail("jeff.hammersmith@targetprocessinc.com");
        person.setBirthday(new DateTime(1984, 2, 11, 0, 0, DateTimeZone.UTC));
        final Response response = target("/people/Jeff").request().
                put(Entity.entity(person, MediaType.APPLICATION_JSON));
        assertThat(response.getStatusInfo()).isEqualTo(Response.Status.BAD_REQUEST);
        assertThat(response.getHeaderString(HttpHeaders.CONTENT_TYPE)).isEqualTo(MediaType.APPLICATION_JSON);
        assertThat(response.readEntity(ErrorMessage.class).getMessage()).isEqualTo("Wrong email");
    }
}
| |
/**
* (The MIT License)
*
* Copyright (c) 2008 - 2011:
*
* * {Aaron Patterson}[http://tenderlovemaking.com]
* * {Mike Dalessio}[http://mike.daless.io]
* * {Charles Nutter}[http://blog.headius.com]
* * {Sergio Arbeo}[http://www.serabe.com]
* * {Patrick Mahoney}[http://polycrystal.org]
* * {Yoko Harada}[http://yokolet.blogspot.com]
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* 'Software'), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package nokogiri;
import static nokogiri.internals.NokogiriHelpers.adjustSystemIdIfNecessary;
import static nokogiri.internals.NokogiriHelpers.getNokogiriClass;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import javax.xml.XMLConstants;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import nokogiri.internals.IgnoreSchemaErrorsErrorHandler;
import nokogiri.internals.SchemaErrorHandler;
import nokogiri.internals.XmlDomParserContext;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyClass;
import org.jruby.RubyFixnum;
import org.jruby.RubyObject;
import org.jruby.anno.JRubyClass;
import org.jruby.anno.JRubyMethod;
import org.jruby.exceptions.RaiseException;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.Visibility;
import org.jruby.runtime.builtin.IRubyObject;
import org.w3c.dom.Document;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSResourceResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
/**
 * Class for Nokogiri::XML::Schema.
 *
 * Java-side implementation of Nokogiri's XML Schema support: compiles a
 * W3C XML Schema from a DOM document, a file, or an in-memory string, and
 * validates XML documents against it. Validation problems are accumulated
 * into the Ruby-visible {@code @errors} array rather than raised one by one.
 *
 * @author sergio
 * @author Yoko Harada <yokolet@gmail.com>
 */
@JRubyClass(name="Nokogiri::XML::Schema")
public class XmlSchema extends RubyObject {

    // Compiled schema validator. Assigned in createSchemaInstance() before any
    // validate_* method can be invoked on this instance.
    private Validator validator;

    public XmlSchema(Ruby ruby, RubyClass klazz) {
        super(ruby, klazz);
    }

    /**
     * Create and return a copy of this object.
     *
     * @return a clone of this object
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        return super.clone();
    }

    /**
     * Compiles {@code source} into a W3C XML Schema.
     *
     * Relative references inside the schema are resolved by
     * SchemaResourceResolver against the Ruby runtime's current directory and
     * script file name, and schema-compilation errors are deliberately
     * swallowed via IgnoreSchemaErrorsErrorHandler so a best-effort schema is
     * still produced.
     *
     * @throws SAXException if the schema cannot be compiled at all
     */
    private Schema getSchema(Source source, String currentDir, String scriptFileName) throws SAXException {
        SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        SchemaResourceResolver resourceResolver = new SchemaResourceResolver(currentDir, scriptFileName, null);
        schemaFactory.setResourceResolver(resourceResolver);
        schemaFactory.setErrorHandler(new IgnoreSchemaErrorsErrorHandler());
        return schemaFactory.newSchema(source);
    }

    private void setValidator(Validator validator) {
        this.validator = validator;
    }

    /**
     * Allocates a new XmlSchema Ruby object for {@code klazz}, compiles the
     * schema from {@code source}, and installs a fresh Validator plus an empty
     * {@code @errors} array on it.
     *
     * A SAXException during compilation is surfaced to Ruby as a RuntimeError
     * ("Could not parse document: ...").
     */
    static XmlSchema createSchemaInstance(ThreadContext context, RubyClass klazz, Source source) {
        Ruby runtime = context.getRuntime();
        XmlSchema xmlSchema = (XmlSchema) NokogiriService.XML_SCHEMA_ALLOCATOR.allocate(runtime, klazz);
        xmlSchema.setInstanceVariable("@errors", runtime.newEmptyArray());
        try {
            Schema schema = xmlSchema.getSchema(source, context.getRuntime().getCurrentDirectory(), context.getRuntime().getInstanceConfig().getScriptFileName());
            xmlSchema.setValidator(schema.newValidator());
            return xmlSchema;
        } catch (SAXException ex) {
            throw context.getRuntime().newRuntimeError("Could not parse document: " + ex.getMessage());
        }
    }

    /*
     * call-seq:
     *  from_document(doc)
     *
     * Create a new Schema from the Nokogiri::XML::Document +doc+.
     *
     * If +doc+ already carries parse errors in its @errors array, the first
     * recorded syntax error is raised instead of attempting compilation.
     */
    @JRubyMethod(meta=true)
    public static IRubyObject from_document(ThreadContext context, IRubyObject klazz, IRubyObject document) {
        XmlDocument doc = ((XmlDocument) ((XmlNode) document).document(context));
        RubyArray errors = (RubyArray) doc.getInstanceVariable("@errors");
        if (!errors.isEmpty()) {
            throw new RaiseException((XmlSyntaxError) errors.first());
        }
        DOMSource source = new DOMSource(doc.getDocument());
        IRubyObject uri = doc.url(context);
        if (!uri.isNil()) {
            // Preserve the document URL so relative schema references resolve
            // against it.
            source.setSystemId(uri.convertToString().asJavaString());
        }
        return getSchema(context, (RubyClass)klazz, source);
    }

    /**
     * Dispatches schema construction on the Ruby class being instantiated:
     * Nokogiri::XML::Schema builds an XmlSchema, Nokogiri::XML::RelaxNG builds
     * an XmlRelaxng. Any other class yields Ruby nil.
     */
    private static IRubyObject getSchema(ThreadContext context, RubyClass klazz, Source source) {
        String moduleName = klazz.getName();
        if ("Nokogiri::XML::Schema".equals(moduleName)) {
            return XmlSchema.createSchemaInstance(context, klazz, source);
        } else if ("Nokogiri::XML::RelaxNG".equals(moduleName)) {
            return XmlRelaxng.createSchemaInstance(context, klazz, source);
        }
        return context.getRuntime().getNil();
    }

    /**
     * Ruby-callable: build a schema from an in-memory schema document string.
     */
    @JRubyMethod(meta=true)
    public static IRubyObject read_memory(ThreadContext context, IRubyObject klazz, IRubyObject content) {
        String data = content.convertToString().asJavaString();
        return getSchema(context, (RubyClass) klazz, new StreamSource(new StringReader(data)));
    }

    /**
     * Ruby-callable (private): validate an already-parsed document.
     */
    @JRubyMethod(visibility=Visibility.PRIVATE)
    public IRubyObject validate_document(ThreadContext context, IRubyObject document) {
        return validate_document_or_file(context, (XmlDocument)document);
    }

    /**
     * Ruby-callable (private): parse the given file into a DOM document and
     * validate it against this schema.
     */
    @JRubyMethod(visibility=Visibility.PRIVATE)
    public IRubyObject validate_file(ThreadContext context, IRubyObject file) {
        Ruby ruby = context.getRuntime();
        XmlDomParserContext ctx = new XmlDomParserContext(ruby, RubyFixnum.newFixnum(ruby, 1L));
        ctx.setInputSourceFile(context, file);
        XmlDocument xmlDocument = ctx.parse(context, getNokogiriClass(ruby, "Nokogiri::XML::Document"), ruby.getNil());
        return validate_document_or_file(context, xmlDocument);
    }

    /**
     * Runs validation and returns this schema's {@code @errors} array with
     * any problems appended.
     *
     * Recoverable validation problems are collected by SchemaErrorHandler; a
     * SAXException escaping the validator is wrapped into a
     * Nokogiri::XML::SyntaxError and appended as well, while an IOException
     * is raised to Ruby as an IOError.
     */
    IRubyObject validate_document_or_file(ThreadContext context, XmlDocument xmlDocument) {
        RubyArray errors = (RubyArray) this.getInstanceVariable("@errors");
        ErrorHandler errorHandler = new SchemaErrorHandler(context.getRuntime(), errors);
        setErrorHandler(errorHandler);
        try {
            validate(xmlDocument.getDocument());
        } catch(SAXException ex) {
            XmlSyntaxError xmlSyntaxError = (XmlSyntaxError) NokogiriService.XML_SYNTAXERROR_ALLOCATOR.allocate(context.getRuntime(), getNokogiriClass(context.getRuntime(), "Nokogiri::XML::SyntaxError"));
            xmlSyntaxError.setException(ex);
            errors.append(xmlSyntaxError);
        } catch (IOException ex) {
            throw context.getRuntime().newIOError(ex.getMessage());
        }
        return errors;
    }

    // Overridable hooks so subclasses (e.g. RelaxNG) can redirect error
    // handling / validation to their own engine.
    protected void setErrorHandler(ErrorHandler errorHandler) {
        validator.setErrorHandler(errorHandler);
    }

    protected void validate(Document document) throws SAXException, IOException {
        DOMSource docSource = new DOMSource(document);
        validator.validate(docSource);
    }

    /**
     * Resolves external resources referenced during schema compilation,
     * adjusting relative system ids against the Ruby runtime's current
     * directory / script location.
     *
     * NOTE(review): a single SchemaLSInput instance is reused and mutated for
     * every resolveResource() call — fine for sequential compilation, but not
     * thread-safe; confirm callers never compile concurrently.
     */
    private class SchemaResourceResolver implements LSResourceResolver {
        SchemaLSInput lsInput = new SchemaLSInput();
        String currentDir;
        String scriptFileName;
        //String defaultURI;

        SchemaResourceResolver(String currentDir, String scriptFileName, Object input) {
            this.currentDir = currentDir;
            this.scriptFileName = scriptFileName;
            if (input == null) return;
            // Seed the shared LSInput from whichever input form was supplied.
            if (input instanceof String) {
                lsInput.setStringData((String)input);
            } else if (input instanceof Reader) {
                lsInput.setCharacterStream((Reader)input);
            } else if (input instanceof InputStream) {
                lsInput.setByteStream((InputStream)input);
            }
        }

        @Override
        public LSInput resolveResource(String type, String namespaceURI, String publicId, String systemId, String baseURI) {
            // Fall back to the original systemId when no adjustment applies.
            String adjusted = adjustSystemIdIfNecessary(currentDir, scriptFileName, baseURI, systemId);
            lsInput.setPublicId(publicId);
            lsInput.setSystemId(adjusted != null? adjusted : systemId);
            lsInput.setBaseURI(baseURI);
            return lsInput;
        }
    }

    /**
     * Plain bean implementation of org.w3c.dom.ls.LSInput; simply stores and
     * returns whatever the resolver sets on it.
     */
    private class SchemaLSInput implements LSInput {
        protected String fPublicId;
        protected String fSystemId;
        protected String fBaseSystemId;
        protected InputStream fByteStream;
        protected Reader fCharStream;
        protected String fData;
        protected String fEncoding;
        protected boolean fCertifiedText = false;

        @Override
        public String getBaseURI() {
            return fBaseSystemId;
        }

        @Override
        public InputStream getByteStream() {
            return fByteStream;
        }

        @Override
        public boolean getCertifiedText() {
            return fCertifiedText;
        }

        @Override
        public Reader getCharacterStream() {
            return fCharStream;
        }

        @Override
        public String getEncoding() {
            return fEncoding;
        }

        @Override
        public String getPublicId() {
            return fPublicId;
        }

        @Override
        public String getStringData() {
            return fData;
        }

        @Override
        public String getSystemId() {
            return fSystemId;
        }

        @Override
        public void setBaseURI(String baseURI) {
            fBaseSystemId = baseURI;
        }

        @Override
        public void setByteStream(InputStream byteStream) {
            fByteStream = byteStream;
        }

        @Override
        public void setCertifiedText(boolean certified) {
            fCertifiedText = certified;
        }

        @Override
        public void setCharacterStream(Reader charStream) {
            fCharStream = charStream;
        }

        @Override
        public void setEncoding(String encoding) {
            fEncoding = encoding;
        }

        @Override
        public void setPublicId(String pubId) {
            fPublicId = pubId;
        }

        @Override
        public void setStringData(String stringData) {
            fData = stringData;
        }

        @Override
        public void setSystemId(String sysId) {
            fSystemId = sysId;
        }
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.analysis;
import com.intellij.codeInsight.daemon.ProblemHighlightFilter;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectCoreUtil;
import com.intellij.openapi.project.ProjectUtilCore;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.LibraryUtil;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileFilter;
import com.intellij.openapi.vfs.VirtualFileVisitor;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScopesCore;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Processor;
import gnu.trove.THashSet;
import org.intellij.lang.annotations.MagicConstant;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
/**
 * Describes the set of files a code analysis (inspection run, dependency
 * analysis, etc.) operates on: the whole project, a module or set of modules,
 * a directory, a single file, an explicit collection of virtual files, or a
 * custom {@link SearchScope}.
 *
 * The concrete file set is computed lazily into {@link #myFilesSet} and can be
 * restricted further by an optional {@link GlobalSearchScope} filter and by
 * excluding test sources.
 *
 * @author max
 */
public class AnalysisScope {
  private static final Logger LOG = Logger.getInstance("#com.intellij.analysis.AnalysisScope");

  // Scope type constants; see the @Type magic-constant annotation below.
  public static final int PROJECT = 1;
  public static final int DIRECTORY = 2;
  public static final int FILE = 3;
  public static final int MODULE = 4;
  public static final int INVALID = 6;
  public static final int MODULES = 7;
  public static final int CUSTOM = 8;
  public static final int VIRTUAL_FILES = 9;
  public static final int UNCOMMITTED_FILES = 10;

  @MagicConstant(intValues = {PROJECT, DIRECTORY, FILE, MODULE, INVALID, MODULES, CUSTOM, VIRTUAL_FILES, UNCOMMITTED_FILES})
  public @interface Type { }

  @NotNull
  private final Project myProject;
  // Exactly one of myModules/myModule/myElement/myScope/myVFiles is non-null,
  // matching myType; the constructors below enforce this invariant.
  protected List<Module> myModules;
  protected Module myModule;
  protected PsiElement myElement;
  private final SearchScope myScope;
  private boolean mySearchInLibraries;
  // Optional additional restriction applied on top of the scope (see isFiltered()).
  private GlobalSearchScope myFilter;
  @Type protected int myType;

  private final Set<VirtualFile> myVFiles; // initial files and directories the scope is configured on
  Set<VirtualFile> myFilesSet; // set of files (not directories) this scope consists of. calculated in initFilesSet()

  private boolean myIncludeTestSource = true;

  /** Scope covering the whole project. */
  public AnalysisScope(@NotNull Project project) {
    myProject = project;
    myElement = null;
    myModules = null;
    myModule = null;
    myScope = null;
    myType = PROJECT;
    myVFiles = null;
  }

  /** Scope covering a single module. */
  public AnalysisScope(@NotNull Module module) {
    myProject = module.getProject();
    myElement = null;
    myModules = null;
    myScope = null;
    myModule = module;
    myType = MODULE;
    myVFiles = null;
  }

  /** Scope covering several modules; the array must be non-empty (project is taken from the first). */
  public AnalysisScope(@NotNull Module[] modules) {
    myModules = Arrays.asList(modules);
    myModule = null;
    myProject = modules[0].getProject();
    myElement = null;
    myScope = null;
    myType = MODULES;
    myVFiles = null;
  }

  /** Scope covering a directory (recursively). */
  public AnalysisScope(@NotNull PsiDirectory psiDirectory) {
    myProject = psiDirectory.getProject();
    myModules = null;
    myModule = null;
    myScope = null;
    myElement = psiDirectory;
    myType = DIRECTORY;
    myVFiles = null;
  }

  /** Scope covering a single file. */
  public AnalysisScope(@NotNull PsiFile psiFile) {
    myProject = psiFile.getProject();
    myElement = psiFile;
    myModule = null;
    myModules = null;
    myScope = null;
    myType = FILE;
    myVFiles = null;
  }

  /** Custom scope backed by an arbitrary SearchScope; library search is inherited from it. */
  public AnalysisScope(@NotNull SearchScope scope, @NotNull Project project) {
    myProject = project;
    myElement = null;
    myModule = null;
    myModules = null;
    myScope = scope;
    myType = CUSTOM;
    mySearchInLibraries = scope instanceof GlobalSearchScope && ((GlobalSearchScope)scope).isSearchInLibraries();
    myVFiles = null;
  }

  /** Scope covering an explicit set of files and/or directories (directories are expanded lazily). */
  public AnalysisScope(@NotNull Project project, @NotNull Collection<VirtualFile> virtualFiles) {
    myProject = project;
    myElement = null;
    myModule = null;
    myModules = null;
    myScope = null;
    myVFiles = new HashSet<>(virtualFiles);
    myType = VIRTUAL_FILES;
  }

  public void setSearchInLibraries(final boolean searchInLibraries) {
    mySearchInLibraries = searchInLibraries;
  }

  public void setIncludeTestSource(final boolean includeTestSource) {
    myIncludeTestSource = includeTestSource;
  }

  /**
   * Visitor that collects into {@link #myFilesSet} the virtual files of the
   * PSI files it is shown, skipping filtered files and files excluded from
   * batch highlighting. Compiled (library) elements are only accepted when
   * library search is enabled.
   */
  @NotNull
  PsiElementVisitor createFileSearcher() {
    final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    if (indicator != null) {
      indicator.setText(AnalysisScopeBundle.message("scanning.scope.progress.title"));
    }

    return new PsiElementVisitor() {
      @Override
      public void visitFile(@NotNull PsiFile file) {
        if (mySearchInLibraries || !(file instanceof PsiCompiledElement)) {
          final VirtualFile virtualFile = file.getVirtualFile();
          if (virtualFile == null) return;
          if (isFiltered(virtualFile)) {
            return;
          }
          if (!shouldHighlightFile(file)) return;
          myFilesSet.add(virtualFile);
        }
      }
    };
  }

  /**
   * True when the file is rejected by the custom filter, or is a test source
   * while test sources are excluded from this scope.
   */
  private boolean isFiltered(VirtualFile virtualFile) {
    if (myFilter != null && !myFilter.contains(virtualFile)) {
      return true;
    }
    return !myIncludeTestSource && TestSourcesFilter.isTestSources(virtualFile, myProject);
  }

  /** Module-level file index for MODULE scopes, project-level otherwise. */
  @NotNull
  private FileIndex getFileIndex() {
    final FileIndex fileIndex;
    if (myModule != null) {
      fileIndex = ModuleRootManager.getInstance(myModule).getFileIndex();
    }
    else {
      fileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
    }
    return fileIndex;
  }

  private static String displayProjectRelativePath(@NotNull PsiFileSystemItem item) {
    VirtualFile virtualFile = item.getVirtualFile();
    LOG.assertTrue(virtualFile != null, item);
    return ProjectUtilCore.displayUrlRelativeToProject(virtualFile, virtualFile.getPresentableUrl(), item.getProject(), true, false);
  }

  public boolean contains(@NotNull PsiElement psiElement) {
    VirtualFile file = psiElement.getContainingFile().getVirtualFile();
    return file != null && contains(file);
  }

  /**
   * Whether the file belongs to this scope. CUSTOM and PROJECT scopes are
   * answered without materializing the file set; other types build
   * {@link #myFilesSet} on first use.
   */
  public boolean contains(@NotNull VirtualFile file) {
    if (myFilesSet == null) {
      if (myType == CUSTOM) {
        // optimization
        if (myScope != null) return myScope.contains(file);
      }
      if (myType == PROJECT) {  //optimization
        final ProjectFileIndex index = ProjectRootManager.getInstance(myProject).getFileIndex();
        return index.isInContent(file) && !isFiltered(file);
      }

      initFilesSet();
    }

    return myFilesSet.contains(file);
  }

  /**
   * Lazily computes {@link #myFilesSet} according to the scope type.
   * For VIRTUAL_FILES, directories are expanded recursively (excluded
   * subtrees are skipped) and then removed from {@link #myVFiles} so that a
   * later invalidate() only revalidates plain files.
   */
  protected void initFilesSet() {
    if (myType == FILE) {
      myFilesSet = new HashSet<>(1);
      myFilesSet.add(((PsiFileSystemItem)myElement).getVirtualFile());
    }
    else if (myType == DIRECTORY || myType == PROJECT || myType == MODULES || myType == MODULE || myType == CUSTOM) {
      myFilesSet = new THashSet<>();
      accept(createFileSearcher(), false);
    }
    else if (myType == VIRTUAL_FILES) {
      myFilesSet = new THashSet<>();
      final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
      for (Iterator<VirtualFile> iterator = myVFiles.iterator(); iterator.hasNext(); ) {
        final VirtualFile vFile = iterator.next();
        VfsUtilCore.visitChildrenRecursively(vFile, new VirtualFileVisitor() {
          @NotNull
          @Override
          public Result visitFileEx(@NotNull VirtualFile file) {
            boolean ignored = fileIndex.isExcluded(file);
            if (!ignored && !file.isDirectory()) {
              myFilesSet.add(file);
            }
            return ignored ? SKIP_CHILDREN : CONTINUE;
          }
        });

        if (vFile.isDirectory()) {
          iterator.remove();
        }
      }
    }
  }

  public void accept(@NotNull final PsiElementVisitor visitor) {
    accept(visitor, true);
  }

  /**
   * Runs the visitor over every content file of this scope, skipping
   * directories, project/workspace metadata files, filtered files, and
   * generated sources.
   */
  private void accept(@NotNull final PsiElementVisitor visitor, final boolean clearResolveCache) {
    final boolean needReadAction = !ApplicationManager.getApplication().isReadAccessAllowed();
    final PsiManager psiManager = PsiManager.getInstance(myProject);
    final FileIndex fileIndex = getFileIndex();
    accept(file -> {
      if (file.isDirectory()) return true;
      if (ProjectCoreUtil.isProjectOrWorkspaceFile(file, file.getFileType())) return true;
      if (fileIndex.isInContent(file) && !isFiltered(file)
          && !GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(file, myProject)) {
        return processFile(file, visitor, psiManager, needReadAction, clearResolveCache);
      }
      return true;
    });
  }

  /**
   * Feeds every virtual file in this scope to {@code processor}; dispatches
   * on the scope kind. Returns false as soon as the processor returns false.
   */
  public boolean accept(@NotNull final Processor<VirtualFile> processor) {
    if (myType == VIRTUAL_FILES) {
      if (myFilesSet == null) initFilesSet();
      for (final VirtualFile file : myFilesSet) {
        if (isFiltered(file)) continue;
        if (!processor.process(file)) return false;
      }
      return true;
    }
    final FileIndex projectFileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();
    if (myScope instanceof GlobalSearchScope) {
      final ContentIterator contentIterator = createScopeIterator(processor, myScope);
      if (!projectFileIndex.iterateContent(contentIterator)) return false;
      if (mySearchInLibraries) {
        // Library roots are outside project content, so walk them explicitly.
        final VirtualFile[] libraryRoots = LibraryUtil.getLibraryRoots(myProject, false, false);
        for (VirtualFile libraryRoot : libraryRoots) {
          if (!VfsUtilCore.iterateChildrenRecursively(libraryRoot, VirtualFileFilter.ALL, contentIterator)) return false;
        }
      }
      return true;
    }
    if (myScope instanceof LocalSearchScope) {
      final PsiElement[] psiElements = ((LocalSearchScope)myScope).getScope();
      final Set<VirtualFile> files = new THashSet<>();
      for (final PsiElement element : psiElements) {
        VirtualFile file = ReadAction.compute(() -> PsiUtilCore.getVirtualFile(element));
        if (file != null && files.add(file)) {  // de-duplicate: several elements may share a file
          if (!processor.process(file)) return false;
        }
      }
      return true;
    }
    List<Module> modules = myModule != null ? Collections.singletonList(myModule) : myModules;
    if (modules != null) {
      for (final Module module : modules) {
        final FileIndex moduleFileIndex = ModuleRootManager.getInstance(module).getFileIndex();
        if (!moduleFileIndex.iterateContent(createScopeIterator(processor, null))) {
          return false;
        }
      }
      return true;
    }

    if (myElement instanceof PsiDirectory) {
      return accept((PsiDirectory)myElement, processor);
    }
    if (myElement != null) {
      VirtualFile file = ReadAction.compute(() -> PsiUtilCore.getVirtualFile(myElement));
      return file == null || processor.process(file);
    }
    // PROJECT scope: iterate the whole project content.
    return projectFileIndex.iterateContent(createScopeIterator(processor, null));
  }

  /**
   * ContentIterator adapter: applies filtering and (optionally) a search-scope
   * check under a read action before delegating to {@code processor}.
   */
  @NotNull
  private ContentIterator createScopeIterator(@NotNull final Processor<VirtualFile> processor,
                                              @Nullable final SearchScope searchScope) {
    return fileOrDir -> {
      final boolean isInScope = ReadAction.compute(() -> {
        if (isFiltered(fileOrDir)) return false;
        if (GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(fileOrDir, myProject)) return false;
        return searchScope == null || searchScope.contains(fileOrDir);
      });
      return !isInScope || processor.process(fileOrDir);
    };
  }

  /**
   * Runs the visitor over one file, wrapping the call in a committed read
   * action when invoked off the dispatch thread without read access.
   * Returns false when the current progress indicator was cancelled.
   */
  private static boolean processFile(@NotNull final VirtualFile vFile,
                                     @NotNull final PsiElementVisitor visitor,
                                     @NotNull final PsiManager psiManager,
                                     final boolean needReadAction,
                                     final boolean clearResolveCache) {
    final Runnable runnable = () -> doProcessFile(visitor, psiManager, vFile, clearResolveCache);
    if (needReadAction && !ApplicationManager.getApplication().isDispatchThread()) {
      commitAndRunInSmartMode(runnable, psiManager.getProject());
    }
    else {
      runnable.run();
    }
    final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    return indicator == null || !indicator.isCanceled();
  }

  /**
   * Commits documents and runs {@code runnable} in a read action, retrying
   * until the IDE is out of dumb (indexing) mode — the read action is
   * abandoned and re-attempted whenever dumb mode starts again.
   */
  private static void commitAndRunInSmartMode(final Runnable runnable, final Project project) {
    while (true) {
      final DumbService dumbService = DumbService.getInstance(project);
      dumbService.waitForSmartMode();
      boolean passed = PsiDocumentManager.getInstance(project).commitAndRunReadAction(() -> {
        if (dumbService.isDumb()) return false;
        runnable.run();
        return true;
      });
      if (passed) {
        break;
      }
    }
  }

  private static boolean shouldHighlightFile(@NotNull PsiFile file) {
    return ProblemHighlightFilter.shouldProcessFileInBatch(file);
  }

  /** True when the module is part of this scope (only meaningful for PROJECT/MODULE/MODULES). */
  public boolean containsModule(@NotNull Module module) {
    switch (myType) {
      case PROJECT:
        return true;
      case MODULE:
        return myModule == module;
      case MODULES:
        return myModules.contains(module);
      default:
        return false;
    }
  }

  private static void doProcessFile(@NotNull PsiElementVisitor visitor, @NotNull PsiManager psiManager, @NotNull VirtualFile vFile,
                                    boolean clearResolveCache) {
    if (!vFile.isValid()) return;

    PsiFile psiFile = psiManager.findFile(vFile);
    if (psiFile == null || !shouldHighlightFile(psiFile)) return;

    psiFile.accept(visitor);
    if (clearResolveCache) {
      // Keep memory in check during long batch runs.
      psiManager.dropResolveCaches();
      InjectedLanguageManager.getInstance(psiManager.getProject()).dropFileCaches(psiFile);
    }
  }

  /** Recursively feeds the directory's non-directory children to the processor. */
  protected boolean accept(@NotNull final PsiDirectory dir, @NotNull final Processor<VirtualFile> processor) {
    final Project project = dir.getProject();
    //we should analyze generated source files only if the action is explicitly invoked for a directory located under generated roots
    final boolean processGeneratedFiles = GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(dir.getVirtualFile(), project);
    return VfsUtilCore.iterateChildrenRecursively(dir.getVirtualFile(), VirtualFileFilter.ALL, fileOrDir -> {
      if (isFiltered(fileOrDir)) return true;
      if (!processGeneratedFiles && GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(fileOrDir, project)) return true;
      if (!fileOrDir.isDirectory()) {
        return processor.process(fileOrDir);
      }
      return true;
    });
  }

  /** Whether the underlying modules/element are still valid (not disposed/deleted). */
  public boolean isValid() {
    if (myModules != null){
      for (Module module : myModules) {
        if (module.isDisposed()) return false;
      }
      return true;
    }
    if (myModule != null) return !myModule.isDisposed();
    if (myElement != null) {
      return myElement.isValid();
    }
    return myType == VIRTUAL_FILES || myType == CUSTOM || myType == PROJECT;
  }

  @Type
  public int getScopeType() {
    return myType;
  }

  /** Human-readable name of the scope, using full module paths for MODULE/MODULES. */
  @NotNull
  public String getDisplayName() {
    switch (myType) {
      case CUSTOM:
        return myScope.getDisplayName();

      case MODULE:
        return AnalysisScopeBundle.message("scope.option.module", pathToName(myModule.getModuleFilePath()));

      case MODULES:
        String modules = StringUtil.join(myModules, module -> pathToName(module.getModuleFilePath()), ", ");

        return AnalysisScopeBundle.message("scope.module.list", modules, myModules.size());

      case PROJECT:
        return AnalysisScopeBundle.message("scope.project", myProject.getName());

      case FILE:
        return AnalysisScopeBundle.message("scope.file", displayProjectRelativePath((PsiFileSystemItem)myElement));

      case DIRECTORY:
        return AnalysisScopeBundle.message("scope.directory", displayProjectRelativePath((PsiFileSystemItem)myElement));

      case VIRTUAL_FILES:
        return AnalysisScopeBundle.message("scope.virtual.files");
    }

    return "";
  }

  /** Like {@link #getDisplayName()} but with shorter module names and truncated paths. */
  @NotNull
  public String getShortenName(){
    switch (myType) {
      case CUSTOM:
        return myScope.getDisplayName();

      case MODULE:
        return AnalysisScopeBundle.message("scope.option.module", myModule.getName());

      case MODULES:
        String modules = StringUtil.join(myModules, Module::getName, ", ");
        return AnalysisScopeBundle.message("scope.module.list", modules, myModules.size());

      case PROJECT:
        return AnalysisScopeBundle.message("scope.project", myProject.getName());

      case FILE:
        final String relativePath = getRelativePath();
        return AnalysisScopeBundle.message("scope.file", relativePath);

      case DIRECTORY:
        final String relativeDirPath = getRelativePath();
        return AnalysisScopeBundle.message("scope.directory", relativeDirPath);

      case VIRTUAL_FILES:
        return AnalysisScopeBundle.message("scope.selected.files");
    }

    return "";
  }

  // NOTE(review): annotated @Nullable but both branches return non-null;
  // falls back to the bare element name when the relative path is too long.
  @Nullable
  private String getRelativePath() {
    final String relativePath = displayProjectRelativePath((PsiFileSystemItem)myElement);
    if (relativePath.length() > 100) {
      return ((PsiFileSystemItem)myElement).getName();
    }
    return relativePath;
  }

  @NotNull
  private static String pathToName(@NotNull String path) {
    File file = new File(path);
    return FileUtil.getNameWithoutExtension(file);
  }

  /** Number of files in the scope; forces the file set to be built. */
  public int getFileCount() {
    if (myFilesSet == null) initFilesSet();
    final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    if (indicator != null) { //clear text after building analysis scope set
      indicator.setText("");
      indicator.setText2("");
    }
    return myFilesSet.size();
  }

  /**
   * Drops cached state: prunes invalid files for VIRTUAL_FILES scopes,
   * otherwise discards the computed file set so it is rebuilt on next use.
   */
  public void invalidate(){
    if (myType == VIRTUAL_FILES) {
      myVFiles.removeIf(virtualFile -> virtualFile == null || !virtualFile.isValid());
    }
    else {
      myFilesSet = null;
    }
  }

  /**
   * For FILE/DIRECTORY scopes: whether the scope contains test sources
   * (isTest == true) or production sources (isTest == false). Returns true
   * for all other scope kinds.
   */
  public boolean containsSources(boolean isTest) {
    if (myElement != null) {
      final Project project = myElement.getProject();
      final ProjectFileIndex index = ProjectRootManager.getInstance(project).getFileIndex();
      if (myElement instanceof PsiDirectory) {
        final VirtualFile directory = ((PsiFileSystemItem)myElement).getVirtualFile();
        if (index.isInSourceContent(directory)) {
          return isTest == TestSourcesFilter.isTestSources(directory, myProject);
        }
      } else if (myElement instanceof PsiFile) {
        final VirtualFile file = ((PsiFileSystemItem)myElement).getVirtualFile();
        if (file != null) {
          return isTest == TestSourcesFilter.isTestSources(file, myProject);
        }
      }
    }
    return true;
  }

  /**
   * Builds the scope of modules that could be affected by changes in this
   * scope: the owning modules plus their direct and exported backward
   * dependencies. Falls back to the whole project when nothing narrower
   * can be determined.
   */
  @NotNull
  public AnalysisScope getNarrowedComplementaryScope(@NotNull Project defaultProject) {
    final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(defaultProject).getFileIndex();
    final HashSet<Module> modules = new HashSet<>();
    if (myType == FILE || myType == DIRECTORY) {
      final VirtualFile vFile = ((PsiFileSystemItem)myElement).getVirtualFile();
      modules.addAll(getAllInterestingModules(fileIndex, vFile));
    }
    else if (myType == MODULE) {
      modules.add(myModule);
    }
    else if (myType == MODULES) {
      modules.addAll(myModules);
    }
    return collectScopes(defaultProject, modules);
  }

  /** Expands the module set with backward dependencies and wraps it in a scope. */
  @NotNull
  static AnalysisScope collectScopes(@NotNull final Project defaultProject, @NotNull final HashSet<Module> modules) {
    if (modules.isEmpty()) {
      return new AnalysisScope(defaultProject);
    }
    final Module[] allModules = ModuleManager.getInstance(defaultProject).getModules();
    Set<Module> modulesToAnalyze = new HashSet<>();
    for (final Module module : modules) {
      modulesToAnalyze.addAll(getDirectBackwardDependencies(module, allModules));
      modulesToAnalyze.addAll(getExportBackwardDependencies(module, allModules));
      modulesToAnalyze.add(module);
    }
    return new AnalysisScope(modulesToAnalyze.toArray(new Module[modulesToAnalyze.size()]));
  }

  /**
   * Modules that see {@code fromModule} transitively through an exported
   * module dependency (their own backward dependents are pulled in).
   */
  @NotNull
  private static Set<Module> getExportBackwardDependencies(@NotNull Module fromModule, @NotNull Module[] allModules) {
    Set<Module> result = new HashSet<>();
    for (Module module : allModules) {
      final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
      final OrderEntry[] orderEntries = moduleRootManager.getOrderEntries();
      for (OrderEntry orderEntry : orderEntries) {
        if (orderEntry instanceof ModuleOrderEntry && ((ExportableOrderEntry)orderEntry).isExported() &&
            fromModule == ((ModuleOrderEntry)orderEntry).getModule()) {
          result.addAll(getDirectBackwardDependencies(module, allModules));
        }
      }
    }
    return result;
  }

  /** Modules that declare a direct dependency on {@code module}. */
  @NotNull
  private static Set<Module> getDirectBackwardDependencies(@NotNull Module module, @NotNull Module[] allModules) {
    Set<Module> result = new HashSet<>();
    for (Module dependency : allModules) {
      if (ArrayUtil.find(ModuleRootManager.getInstance(dependency).getDependencies(), module) > -1) {
        result.add(dependency);
      }
    }
    return result;
  }

  /**
   * Modules "interested" in the file: for library files, every module whose
   * order entries include the file; for content files, the owning module.
   */
  @NotNull
  static HashSet<Module> getAllInterestingModules(@NotNull final ProjectFileIndex fileIndex, @NotNull final VirtualFile vFile) {
    final HashSet<Module> modules = new HashSet<>();
    if (fileIndex.isInLibrarySource(vFile) || fileIndex.isInLibraryClasses(vFile)) {
      for (OrderEntry orderEntry : fileIndex.getOrderEntriesForFile(vFile)) {
        modules.add(orderEntry.getOwnerModule());
      }
    }
    else {
      modules.add(fileIndex.getModuleForFile(vFile));
    }
    return modules;
  }

  /**
   * Converts this analysis scope into an equivalent {@link SearchScope}.
   * Must be called with read access; test-source exclusion is honored for
   * MODULE and PROJECT scopes.
   */
  @NotNull
  public SearchScope toSearchScope() {
    ApplicationManager.getApplication().assertReadAccessAllowed();
    switch (myType) {
      case CUSTOM:
        return myScope;
      case DIRECTORY:
        return GlobalSearchScopesCore.directoryScope((PsiDirectory)myElement, true);
      case FILE:
        return new LocalSearchScope(myElement);
      case INVALID:
        return LocalSearchScope.EMPTY;
      case MODULE:
        GlobalSearchScope moduleScope = GlobalSearchScope.moduleScope(myModule);
        return myIncludeTestSource ? moduleScope : GlobalSearchScope.notScope(GlobalSearchScopesCore.projectTestScope(myModule.getProject())).intersectWith(moduleScope);
      case MODULES:
        SearchScope scope = GlobalSearchScope.EMPTY_SCOPE;
        for (Module module : myModules) {
          scope = scope.union(GlobalSearchScope.moduleScope(module));
        }
        return scope;
      case PROJECT:
        return myIncludeTestSource ? GlobalSearchScope.projectScope(myProject) : GlobalSearchScopesCore.projectProductionScope(myProject);
      case VIRTUAL_FILES:
        // Ad-hoc scope backed directly by the computed file set.
        return new GlobalSearchScope() {
          @Override
          public boolean contains(@NotNull VirtualFile file) {
            return myFilesSet.contains(file);
          }

          @Override
          public int compare(@NotNull VirtualFile file1, @NotNull VirtualFile file2) {
            return 0;
          }

          @Override
          public boolean isSearchInModuleContent(@NotNull Module aModule) {
            return false;
          }

          @Override
          public boolean isSearchInLibraries() {
            return false;
          }
        };
      default:
        LOG.error("invalid type " + myType);
        return GlobalSearchScope.EMPTY_SCOPE;
    }
  }

  /**
   * Whether the "analyze test sources" option should default to enabled:
   * true when the scope itself is located in (or consists solely of) test
   * sources.
   */
  boolean isAnalyzeTestsByDefault() {
    switch (myType) {
      case DIRECTORY:
        return TestSourcesFilter.isTestSources(((PsiDirectory)myElement).getVirtualFile(), myElement.getProject());
      case FILE:
        final PsiFile containingFile = myElement.getContainingFile();
        return TestSourcesFilter.isTestSources(containingFile.getVirtualFile(), containingFile.getProject());
      case MODULE:
        return isTestOnly(myModule);
      case MODULES:
        for (Module module : myModules) {
          if (!isTestOnly(module)) return false;
        }
        return true;

    }
    return false;
  }

  // A module with no production source roots is considered test-only.
  private static boolean isTestOnly(@NotNull Module module) {
    return ModuleRootManager.getInstance(module).getSourceRootUrls(false).length == 0;
  }

  public boolean isIncludeTestSource() {
    return myIncludeTestSource;
  }

  /** Installs an additional filter; files outside it are excluded from the scope. */
  public void setFilter(GlobalSearchScope filter) {
    myFilter = filter;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.IllegalConfigurationException;
import org.apache.flink.runtime.concurrent.FutureUtils;
import org.apache.flink.runtime.io.network.netty.SSLHandlerFactory;
import org.apache.flink.runtime.net.RedirectingSslHandler;
import org.apache.flink.runtime.rest.handler.PipelineErrorHandler;
import org.apache.flink.runtime.rest.handler.RestHandlerSpecification;
import org.apache.flink.runtime.rest.handler.router.Router;
import org.apache.flink.runtime.rest.handler.router.RouterHandler;
import org.apache.flink.runtime.rest.versioning.RestAPIVersion;
import org.apache.flink.runtime.util.ExecutorThreadFactory;
import org.apache.flink.util.AutoCloseableAsync;
import org.apache.flink.util.FlinkRuntimeException;
import org.apache.flink.util.NetUtils;
import org.apache.flink.util.Preconditions;
import org.apache.flink.shaded.netty4.io.netty.bootstrap.ServerBootstrap;
import org.apache.flink.shaded.netty4.io.netty.bootstrap.ServerBootstrapConfig;
import org.apache.flink.shaded.netty4.io.netty.channel.Channel;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFuture;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelInboundHandler;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelInitializer;
import org.apache.flink.shaded.netty4.io.netty.channel.EventLoopGroup;
import org.apache.flink.shaded.netty4.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.flink.shaded.netty4.io.netty.channel.socket.SocketChannel;
import org.apache.flink.shaded.netty4.io.netty.channel.socket.nio.NioServerSocketChannel;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpServerCodec;
import org.apache.flink.shaded.netty4.io.netty.handler.stream.ChunkedWriteHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.Serializable;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/** An abstract class for netty-based REST server endpoints. */
/**
 * An abstract class for netty-based REST server endpoints.
 *
 * <p>Concrete endpoints contribute their handlers via {@link
 * #initializeHandlers(CompletableFuture)} and start any additional services in {@link
 * #startInternal()}. The lifecycle is CREATED -> RUNNING -> SHUTDOWN and cannot be restarted.
 */
public abstract class RestServerEndpoint implements AutoCloseableAsync {

    protected final Logger log = LoggerFactory.getLogger(getClass());

    /** Guards the lifecycle {@link #state} and the netty bootstrap/channel fields. */
    private final Object lock = new Object();

    private final String restAddress;
    private final String restBindAddress;
    private final String restBindPortRange;
    @Nullable private final SSLHandlerFactory sslHandlerFactory;
    private final int maxContentLength;

    protected final Path uploadDir;
    protected final Map<String, String> responseHeaders;

    /** Completed once the endpoint, its handlers and its event loop groups have shut down. */
    private final CompletableFuture<Void> terminationFuture;

    private List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>> handlers;
    private ServerBootstrap bootstrap;
    private Channel serverChannel;
    private String restBaseUrl;

    private State state = State.CREATED;

    public RestServerEndpoint(RestServerEndpointConfiguration configuration) throws IOException {
        Preconditions.checkNotNull(configuration);

        this.restAddress = configuration.getRestAddress();
        this.restBindAddress = configuration.getRestBindAddress();
        this.restBindPortRange = configuration.getRestBindPortRange();
        this.sslHandlerFactory = configuration.getSslHandlerFactory();

        this.uploadDir = configuration.getUploadDir();
        createUploadDir(uploadDir, log, true);

        this.maxContentLength = configuration.getMaxContentLength();
        this.responseHeaders = configuration.getResponseHeaders();

        terminationFuture = new CompletableFuture<>();
    }

    /**
     * This method is called at the beginning of {@link #start()} to setup all handlers that the
     * REST server endpoint implementation requires.
     *
     * @param localAddressFuture future rest address of the RestServerEndpoint
     * @return Collection of AbstractRestHandler which are added to the server endpoint
     */
    protected abstract List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>>
            initializeHandlers(final CompletableFuture<String> localAddressFuture);

    /**
     * Starts this REST server endpoint.
     *
     * @throws Exception if we cannot start the RestServerEndpoint
     */
    public final void start() throws Exception {
        synchronized (lock) {
            Preconditions.checkState(
                    state == State.CREATED, "The RestServerEndpoint cannot be restarted.");

            log.info("Starting rest endpoint.");

            final Router router = new Router();
            final CompletableFuture<String> restAddressFuture = new CompletableFuture<>();

            handlers = initializeHandlers(restAddressFuture);

            /* sort the handlers such that they are ordered the following:
             * /jobs
             * /jobs/overview
             * /jobs/:jobid
             * /jobs/:jobid/config
             * /:*
             */
            Collections.sort(handlers, RestHandlerUrlComparator.INSTANCE);

            checkAllEndpointsAndHandlersAreUnique(handlers);
            handlers.forEach(handler -> registerHandler(router, handler, log));

            ChannelInitializer<SocketChannel> initializer =
                    new ChannelInitializer<SocketChannel>() {

                        @Override
                        protected void initChannel(SocketChannel ch) {
                            RouterHandler handler = new RouterHandler(router, responseHeaders);

                            // SSL should be the first handler in the pipeline
                            if (isHttpsEnabled()) {
                                ch.pipeline()
                                        .addLast(
                                                "ssl",
                                                new RedirectingSslHandler(
                                                        restAddress,
                                                        restAddressFuture,
                                                        sslHandlerFactory));
                            }

                            ch.pipeline()
                                    .addLast(new HttpServerCodec())
                                    .addLast(new FileUploadHandler(uploadDir))
                                    .addLast(
                                            new FlinkHttpObjectAggregator(
                                                    maxContentLength, responseHeaders))
                                    .addLast(new ChunkedWriteHandler())
                                    .addLast(handler.getName(), handler)
                                    .addLast(new PipelineErrorHandler(log, responseHeaders));
                        }
                    };

            NioEventLoopGroup bossGroup =
                    new NioEventLoopGroup(
                            1, new ExecutorThreadFactory("flink-rest-server-netty-boss"));
            NioEventLoopGroup workerGroup =
                    new NioEventLoopGroup(
                            0, new ExecutorThreadFactory("flink-rest-server-netty-worker"));

            bootstrap = new ServerBootstrap();
            bootstrap
                    .group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(initializer);

            Iterator<Integer> portsIterator;
            try {
                portsIterator = NetUtils.getPortRangeFromString(restBindPortRange);
            } catch (IllegalConfigurationException e) {
                throw e;
            } catch (Exception e) {
                // preserve the parse failure as the cause so it is not lost
                throw new IllegalArgumentException(
                        "Invalid port range definition: " + restBindPortRange, e);
            }

            int chosenPort = 0;
            while (portsIterator.hasNext()) {
                try {
                    chosenPort = portsIterator.next();
                    final ChannelFuture channel;
                    if (restBindAddress == null) {
                        channel = bootstrap.bind(chosenPort);
                    } else {
                        channel = bootstrap.bind(restBindAddress, chosenPort);
                    }
                    serverChannel = channel.syncUninterruptibly().channel();
                    break;
                } catch (final Exception e) {
                    // continue if the exception is due to the port being in use, fail early
                    // otherwise. The bootstrap is built on the shaded netty4 classes, so a
                    // bind failure surfaces as the shaded ChannelException or as a plain
                    // java.net.BindException — never as the legacy netty3 org.jboss.netty type.
                    if (!(e
                                    instanceof
                                    org.apache.flink.shaded.netty4.io.netty.channel
                                            .ChannelException
                            || e instanceof java.net.BindException)) {
                        throw e;
                    }
                }
            }

            if (serverChannel == null) {
                throw new BindException(
                        "Could not start rest endpoint on any port in port range "
                                + restBindPortRange);
            }

            log.debug("Binding rest endpoint to {}:{}.", restBindAddress, chosenPort);

            final InetSocketAddress bindAddress = (InetSocketAddress) serverChannel.localAddress();
            final String advertisedAddress;
            if (bindAddress.getAddress().isAnyLocalAddress()) {
                // bound to a wildcard address; advertise the configured rest address instead
                advertisedAddress = this.restAddress;
            } else {
                advertisedAddress = bindAddress.getAddress().getHostAddress();
            }
            final int port = bindAddress.getPort();

            log.info("Rest endpoint listening at {}:{}", advertisedAddress, port);

            restBaseUrl = new URL(determineProtocol(), advertisedAddress, port, "").toString();

            restAddressFuture.complete(restBaseUrl);

            state = State.RUNNING;

            startInternal();
        }
    }

    /**
     * Hook to start sub class specific services.
     *
     * @throws Exception if an error occurred
     */
    protected abstract void startInternal() throws Exception;

    /**
     * Returns the address on which this endpoint is accepting requests.
     *
     * @return address on which this endpoint is accepting requests or null if none
     */
    @Nullable
    public InetSocketAddress getServerAddress() {
        synchronized (lock) {
            Preconditions.checkState(
                    state != State.CREATED, "The RestServerEndpoint has not been started yet.");
            Channel server = this.serverChannel;

            if (server != null) {
                try {
                    return ((InetSocketAddress) server.localAddress());
                } catch (Exception e) {
                    log.error("Cannot access local server address", e);
                }
            }

            return null;
        }
    }

    /**
     * Returns the base URL of the REST server endpoint.
     *
     * @return REST base URL of this endpoint
     */
    public String getRestBaseUrl() {
        synchronized (lock) {
            Preconditions.checkState(
                    state != State.CREATED, "The RestServerEndpoint has not been started yet.");
            return restBaseUrl;
        }
    }

    @Override
    public CompletableFuture<Void> closeAsync() {
        synchronized (lock) {
            log.info("Shutting down rest endpoint.");

            if (state == State.RUNNING) {
                // close all handlers first, then tear down the netty machinery
                final CompletableFuture<Void> shutDownFuture =
                        FutureUtils.composeAfterwards(closeHandlersAsync(), this::shutDownInternal);

                shutDownFuture.whenComplete(
                        (Void ignored, Throwable throwable) -> {
                            log.info("Shut down complete.");
                            if (throwable != null) {
                                terminationFuture.completeExceptionally(throwable);
                            } else {
                                terminationFuture.complete(null);
                            }
                        });
                state = State.SHUTDOWN;
            } else if (state == State.CREATED) {
                // never started: nothing to stop
                terminationFuture.complete(null);
                state = State.SHUTDOWN;
            }

            return terminationFuture;
        }
    }

    /** Asynchronously closes every registered handler that supports async shutdown. */
    private FutureUtils.ConjunctFuture<Void> closeHandlersAsync() {
        return FutureUtils.waitForAll(
                handlers.stream()
                        .map(tuple -> tuple.f1)
                        .filter(handler -> handler instanceof AutoCloseableAsync)
                        .map(handler -> ((AutoCloseableAsync) handler).closeAsync())
                        .collect(Collectors.toList()));
    }

    /**
     * Stops this REST server endpoint.
     *
     * @return Future which is completed once the shut down has been finished.
     */
    protected CompletableFuture<Void> shutDownInternal() {

        synchronized (lock) {

            CompletableFuture<?> channelFuture = new CompletableFuture<>();
            if (serverChannel != null) {
                serverChannel
                        .close()
                        .addListener(
                                finished -> {
                                    if (finished.isSuccess()) {
                                        channelFuture.complete(null);
                                    } else {
                                        channelFuture.completeExceptionally(finished.cause());
                                    }
                                });
                serverChannel = null;
            }

            final CompletableFuture<Void> channelTerminationFuture = new CompletableFuture<>();

            // only start shutting down the event loop groups once the server channel is closed
            channelFuture.thenRun(
                    () -> {
                        CompletableFuture<?> groupFuture = new CompletableFuture<>();
                        CompletableFuture<?> childGroupFuture = new CompletableFuture<>();
                        final Time gracePeriod = Time.seconds(10L);

                        if (bootstrap != null) {
                            final ServerBootstrapConfig config = bootstrap.config();
                            final EventLoopGroup group = config.group();
                            if (group != null) {
                                group.shutdownGracefully(
                                                0L,
                                                gracePeriod.toMilliseconds(),
                                                TimeUnit.MILLISECONDS)
                                        .addListener(
                                                finished -> {
                                                    if (finished.isSuccess()) {
                                                        groupFuture.complete(null);
                                                    } else {
                                                        groupFuture.completeExceptionally(
                                                                finished.cause());
                                                    }
                                                });
                            } else {
                                groupFuture.complete(null);
                            }

                            final EventLoopGroup childGroup = config.childGroup();
                            if (childGroup != null) {
                                childGroup
                                        .shutdownGracefully(
                                                0L,
                                                gracePeriod.toMilliseconds(),
                                                TimeUnit.MILLISECONDS)
                                        .addListener(
                                                finished -> {
                                                    if (finished.isSuccess()) {
                                                        childGroupFuture.complete(null);
                                                    } else {
                                                        childGroupFuture.completeExceptionally(
                                                                finished.cause());
                                                    }
                                                });
                            } else {
                                childGroupFuture.complete(null);
                            }

                            bootstrap = null;
                        } else {
                            // complete the group futures since there is nothing to stop
                            groupFuture.complete(null);
                            childGroupFuture.complete(null);
                        }

                        CompletableFuture<Void> combinedFuture =
                                FutureUtils.completeAll(
                                        Arrays.asList(groupFuture, childGroupFuture));

                        combinedFuture.whenComplete(
                                (Void ignored, Throwable throwable) -> {
                                    if (throwable != null) {
                                        channelTerminationFuture.completeExceptionally(throwable);
                                    } else {
                                        channelTerminationFuture.complete(null);
                                    }
                                });
                    });

            return channelTerminationFuture;
        }
    }

    private boolean isHttpsEnabled() {
        return sslHandlerFactory != null;
    }

    private String determineProtocol() {
        return isHttpsEnabled() ? "https" : "http";
    }

    /**
     * Registers one handler under every API version it supports, plus an unversioned URL for the
     * default version (convenience and backwards compatibility).
     */
    private static void registerHandler(
            Router router,
            Tuple2<RestHandlerSpecification, ChannelInboundHandler> specificationHandler,
            Logger log) {
        final String handlerURL = specificationHandler.f0.getTargetRestEndpointURL();
        // setup versioned urls
        for (final RestAPIVersion supportedVersion :
                specificationHandler.f0.getSupportedAPIVersions()) {
            final String versionedHandlerURL =
                    '/' + supportedVersion.getURLVersionPrefix() + handlerURL;
            log.debug(
                    "Register handler {} under {}@{}.",
                    specificationHandler.f1,
                    specificationHandler.f0.getHttpMethod(),
                    versionedHandlerURL);
            registerHandler(
                    router,
                    versionedHandlerURL,
                    specificationHandler.f0.getHttpMethod(),
                    specificationHandler.f1);
            if (supportedVersion.isDefaultVersion()) {
                // setup unversioned url for convenience and backwards compatibility
                log.debug(
                        "Register handler {} under {}@{}.",
                        specificationHandler.f1,
                        specificationHandler.f0.getHttpMethod(),
                        handlerURL);
                registerHandler(
                        router,
                        handlerURL,
                        specificationHandler.f0.getHttpMethod(),
                        specificationHandler.f1);
            }
        }
    }

    /** Routes a single (method, URL) pair to the given handler. */
    private static void registerHandler(
            Router router,
            String handlerURL,
            HttpMethodWrapper httpMethod,
            ChannelInboundHandler handler) {
        switch (httpMethod) {
            case GET:
                router.addGet(handlerURL, handler);
                break;
            case POST:
                router.addPost(handlerURL, handler);
                break;
            case DELETE:
                router.addDelete(handlerURL, handler);
                break;
            case PATCH:
                router.addPatch(handlerURL, handler);
                break;
            default:
                throw new RuntimeException("Unsupported http method: " + httpMethod + '.');
        }
    }

    /** Creates the upload dir if needed. */
    @VisibleForTesting
    static void createUploadDir(
            final Path uploadDir, final Logger log, final boolean initialCreation)
            throws IOException {
        if (!Files.exists(uploadDir)) {
            if (initialCreation) {
                log.info("Upload directory {} does not exist. ", uploadDir);
            } else {
                log.warn(
                        "Upload directory {} has been deleted externally. "
                                + "Previously uploaded files are no longer available.",
                        uploadDir);
            }
            checkAndCreateUploadDir(uploadDir, log);
        }
    }

    /**
     * Checks whether the given directory exists and is writable. If it doesn't exist, this method
     * will attempt to create it.
     *
     * @param uploadDir directory to check
     * @param log logger used for logging output
     * @throws IOException if the directory does not exist and cannot be created, or if the
     *     directory isn't writable
     */
    private static synchronized void checkAndCreateUploadDir(final Path uploadDir, final Logger log)
            throws IOException {
        if (Files.exists(uploadDir) && Files.isWritable(uploadDir)) {
            log.info("Using directory {} for file uploads.", uploadDir);
        } else if (Files.isWritable(Files.createDirectories(uploadDir))) {
            log.info("Created directory {} for file uploads.", uploadDir);
        } else {
            log.warn("Upload directory {} cannot be created or is not writable.", uploadDir);
            throw new IOException(
                    String.format(
                            "Upload directory %s cannot be created or is not writable.",
                            uploadDir));
        }
    }

    private static void checkAllEndpointsAndHandlersAreUnique(
            final List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>> handlers) {
        // check for all handlers that
        // 1) the instance is only registered once
        // 2) only 1 handler is registered for each endpoint (defined by (version, method, url))
        // technically the first check is redundant since a duplicate instance also returns the same
        // headers which
        // should fail the second check, but we get a better error message
        final Set<String> uniqueEndpoints = new HashSet<>();
        final Set<ChannelInboundHandler> distinctHandlers =
                Collections.newSetFromMap(new IdentityHashMap<>());
        for (Tuple2<RestHandlerSpecification, ChannelInboundHandler> handler : handlers) {
            boolean isNewHandler = distinctHandlers.add(handler.f1);
            if (!isNewHandler) {
                throw new FlinkRuntimeException(
                        "Duplicate REST handler instance found."
                                + " Please ensure each instance is registered only once.");
            }

            final RestHandlerSpecification headers = handler.f0;
            for (RestAPIVersion supportedAPIVersion : headers.getSupportedAPIVersions()) {
                final String parameterizedEndpoint =
                        supportedAPIVersion.toString()
                                + headers.getHttpMethod()
                                + headers.getTargetRestEndpointURL();
                // normalize path parameters; distinct path parameters still clash at runtime
                final String normalizedEndpoint =
                        parameterizedEndpoint.replaceAll(":[\\w-]+", ":param");
                boolean isNewEndpoint = uniqueEndpoints.add(normalizedEndpoint);
                if (!isNewEndpoint) {
                    throw new FlinkRuntimeException(
                            String.format(
                                    "REST handler registration overlaps with another registration for: version=%s, method=%s, url=%s.",
                                    supportedAPIVersion,
                                    headers.getHttpMethod(),
                                    headers.getTargetRestEndpointURL()));
                }
            }
        }
    }

    /**
     * Comparator for Rest URLs.
     *
     * <p>The comparator orders the Rest URLs such that URLs with path parameters are ordered behind
     * those without parameters. E.g.: /jobs /jobs/overview /jobs/:jobid /jobs/:jobid/config /:*
     *
     * <p>IMPORTANT: This comparator is highly specific to how Netty path parameters are encoded.
     * Namely with a preceding ':' character.
     */
    public static final class RestHandlerUrlComparator
            implements Comparator<Tuple2<RestHandlerSpecification, ChannelInboundHandler>>,
                    Serializable {

        private static final long serialVersionUID = 2388466767835547926L;

        private static final Comparator<String> CASE_INSENSITIVE_ORDER =
                new CaseInsensitiveOrderComparator();

        private static final Comparator<RestAPIVersion> API_VERSION_ORDER =
                new RestAPIVersion.RestAPIVersionComparator();

        static final RestHandlerUrlComparator INSTANCE = new RestHandlerUrlComparator();

        @Override
        public int compare(
                Tuple2<RestHandlerSpecification, ChannelInboundHandler> o1,
                Tuple2<RestHandlerSpecification, ChannelInboundHandler> o2) {
            // order primarily by URL, breaking ties by the lowest supported API version
            final int urlComparisonResult =
                    CASE_INSENSITIVE_ORDER.compare(
                            o1.f0.getTargetRestEndpointURL(), o2.f0.getTargetRestEndpointURL());
            if (urlComparisonResult != 0) {
                return urlComparisonResult;
            } else {
                return API_VERSION_ORDER.compare(
                        Collections.min(o1.f0.getSupportedAPIVersions()),
                        Collections.min(o2.f0.getSupportedAPIVersions()));
            }
        }

        /**
         * Comparator for Rest URLs.
         *
         * <p>The comparator orders the Rest URLs such that URLs with path parameters are ordered
         * behind those without parameters. E.g.: /jobs /jobs/overview /jobs/:jobid
         * /jobs/:jobid/config /:*
         *
         * <p>IMPORTANT: This comparator is highly specific to how Netty path parameters are
         * encoded. Namely with a preceding ':' character.
         */
        public static final class CaseInsensitiveOrderComparator
                implements Comparator<String>, Serializable {
            private static final long serialVersionUID = 8550835445193437027L;

            @Override
            public int compare(String s1, String s2) {
                // case-insensitive ordering, except that ':' (a path parameter) sorts
                // after every other character so parameterized routes come last
                int n1 = s1.length();
                int n2 = s2.length();
                int min = Math.min(n1, n2);
                for (int i = 0; i < min; i++) {
                    char c1 = s1.charAt(i);
                    char c2 = s2.charAt(i);
                    if (c1 != c2) {
                        c1 = Character.toUpperCase(c1);
                        c2 = Character.toUpperCase(c2);
                        if (c1 != c2) {
                            c1 = Character.toLowerCase(c1);
                            c2 = Character.toLowerCase(c2);
                            if (c1 != c2) {
                                if (c1 == ':') {
                                    // c2 is less than c1 because it is also different
                                    return 1;
                                } else if (c2 == ':') {
                                    // c1 is less than c2
                                    return -1;
                                } else {
                                    return c1 - c2;
                                }
                            }
                        }
                    }
                }
                return n1 - n2;
            }
        }
    }

    /** Lifecycle states of the endpoint; transitions are one-way. */
    private enum State {
        CREATED,
        RUNNING,
        SHUTDOWN
    }
}
| |
package com.zenplanner.sql;
import com.google.common.base.Joiner;
import java.sql.*;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
public class Table extends TreeMap<String, Column> {
private final String name;
private List<Column> pk;
private static final int maxKeys = 2000; // jtds driver limit
public Table(String name) {
this.name = name;
}
public String getName() {
return name;
}
public int hashCode() {
return name.hashCode();
}
public boolean equals(Object obj) {
if (!(obj instanceof Table)) {
return false;
}
return name.equalsIgnoreCase(((Table) obj).getName());
}
/**
* @return A list of the columns that constitute the primary key
*/
public List<Column> getPk() {
if(pk != null) {
return pk;
}
synchronized (this) {
List<Column> pk = new ArrayList<>();
for(Column col : values()) {
if(col.isPrimaryKey()) {
pk.add(col);
}
}
this.pk = pk;
return pk;
}
}
public boolean hasColumn(String name) {
return containsKey(name);
}
/**
* @return the insert SQL for this table
*/
public String writeInsertQuery() {
List<String> colNames = new ArrayList<>();
List<String> valueNames = new ArrayList<>();
for(Column col : values()) {
String colName = col.getColumnName();
colNames.add("\n\t[" + colName + "]");
valueNames.add("?");
}
String nameClause = Joiner.on(", ").join(colNames);
String valueClause = Joiner.on(", ").join(valueNames);
String sql = String.format("INSERT INTO [%s] (%s\n) VALUES (%s)", getName(), nameClause, valueClause);
return sql;
}
public String writeUpdateQuery() {
List<String> updateCols = new ArrayList<>();
List<Column> pk = getPk();
for(Column col : values()) {
if(pk.contains(col)) {
continue; // TODO: Cache non-update columns for speed
}
String colName = col.getColumnName();
updateCols.add(String.format("\t[%s]=?", colName));
}
List<String> whereCols = new ArrayList<>();
for(Column col : pk) {
String colName = col.getColumnName();
whereCols.add(String.format("[%s]=?", colName));
}
String updateClause = Joiner.on(",\n").join(updateCols);
String whereClause = Joiner.on("\n\tand ").join(whereCols);
String sql = String.format("UPDATE [%s] SET\n%s\nWHERE %s", getName(), updateClause, whereClause);
return sql;
}
/**
* @return A magical query that returns the primary key and a hash of the row
*/
public String writeHashedQuery(Map<String,List<Object>> filters) {
List<String> colNames = new ArrayList<>();
List<String> pk = new ArrayList<>();
for(Column col : values()) {
colNames.add(col.getSelect());
}
for (Column col : getPk()) {
pk.add(String.format("[%s]", col.getColumnName()));
}
String hashNames = Joiner.on("+\n\t\t").join(colNames);
String orderClause = Joiner.on(",").join(pk);
String selectClause = orderClause + ",\n\tHASHBYTES('md5',\n\t\t" + hashNames + "\n\t) AS [Hash]";
String sql = String.format("SELECT\n\t%s\nFROM [%s]\n", selectClause, getName());
// Filter
sql = buildWhereClause(filters, sql);
sql += String.format("\nORDER BY %s", orderClause);
return sql;
}
private String buildWhereClause(Map<String, List<Object>> filters, String sql) {
if(hasAllColumns(filters.keySet())) {
StringBuilder sb = new StringBuilder();
for(String key : filters.keySet()) {
if(sb.length() > 0) {
sb.append("\n\t AND ");
}
List<Object> vals = filters.get(key);
List<String> terms = new ArrayList<>();
for(Object val : vals) {
terms.add("?");
}
sb.append("[" + key + "] IN (" + Joiner.on(",").join(terms) + ")");
}
sql += "WHERE " + sb.toString();
}
return sql;
}
public String writeCountQuery(Map<String,List<Object>> filters) {
String sql = String.format("SELECT\n\tCOUNT(*)\nFROM [%s]\n", getName());
sql = buildWhereClause(filters, sql);
return sql;
}
public boolean hasAllColumns(Set<String> colNames) {
Set<String> filterCols = new HashSet<>();
filterCols.addAll(keySet());
filterCols.retainAll(colNames);
return filterCols.size() == colNames.size();
}
/**
* Pulls an array of objects that represents the PK from a row
*
* @param rs A ResultSet to check
* @return A List representing the PK
* @throws Exception
*/
public Key getPk(ResultSet rs) throws Exception {
Key key = new Key();
if (rs.isClosed() || rs.isBeforeFirst() || rs.isAfterLast() || rs.getRow() == 0) {
return null;
}
for (Column col : values()) {
if (col.isPrimaryKey()) {
Comparable<?> val = col.getValue(rs);
key.add(val);
}
}
return key;
}
public void setIdentityInsert(Connection con, boolean enabled) {
try (Statement stmt = con.createStatement()) {
String state = enabled ? "ON" : "OFF";
stmt.executeUpdate(String.format("SET IDENTITY_INSERT [%s] %s;", getName(), state));
} catch (Exception ex) {
// TODO: Nicer solution for tables that don't have an identity
}
}
public PreparedStatement createSelectQuery(Connection con, Set<Key> keys, int count) {
StringBuilder sb = new StringBuilder();
for(Column col : values()) {
if(sb.length() > 0) {
sb.append(", ");
}
sb.append("[" + col.getColumnName() + "]");
}
String sql = "SELECT " + sb.toString();
return createQuery(sql, con, keys, count);
}
public PreparedStatement createDeleteQuery(Connection con, Set<Key> keys, int count) {
return createQuery("DELETE", con, keys, count);
}
// TODO: Break this monster out into separate methods for SQL and values
private PreparedStatement createQuery(String prefix, Connection con, Set<Key> keys, int count) {
List<Object> parms = new ArrayList<>();
List<Column> pk = getPk();
StringBuilder sb = new StringBuilder();
int rowIndex = 0;
for (Key key : new HashSet<>(keys)) {
keys.remove(key); // Remove as we go
if (sb.length() > 0) {
sb.append("\tOR ");
}
sb.append("(");
for (int pkIdx = 0; pkIdx < pk.size(); pkIdx++) {
if (pkIdx > 0) {
sb.append(" AND ");
}
Column col = pk.get(pkIdx);
sb.append("[");
sb.append(col.getColumnName());
sb.append("]=?");
// Grab the value of the parameter
Object val = key.get(pkIdx);
parms.add(val);
}
sb.append(")\n");
if (++rowIndex >= count) {
break;
}
}
String sql = String.format("%s\nFROM [%s]\nWHERE %s", prefix, getName(), sb.toString());
try {
PreparedStatement stmt = con.prepareStatement(sql);
for (int i = 0; i < parms.size(); i++) {
Object javaVal = parms.get(i);
Object sqlVal = javaToSql(javaVal);
stmt.setObject(i + 1, sqlVal);
}
return stmt;
} catch (Exception ex) {
throw new RuntimeException("Error creating select query!", ex);
}
}
public static Object javaToSql(Object val) {
if(val == null) {
return null;
}
if(val instanceof UUID) {
return UuidUtil.uuidToByteArray(((UUID)val));
}
if(val instanceof String) {
return val;
}
if(val instanceof Long) {
return val;
}
if(val instanceof java.sql.Date)
{
return val;
}
throw new RuntimeException("Unknown type: " + val.getClass().getName());
}
public void deleteRows(Connection dcon, Set<Key> keys, AtomicInteger currentMod) throws Exception {
List<Column> pk = getPk();
int rowLimit = (int) Math.floor(maxKeys / pk.size());
for (int rowIndex = 0; rowIndex < keys.size(); ) {
int count = Math.min(keys.size() - rowIndex, rowLimit);
System.out.println("Deleting " + count + " rows from " + getName());
try (PreparedStatement deleteStmt = createDeleteQuery(dcon, keys, count)) {
deleteStmt.execute();
}
rowIndex += count;
currentMod.addAndGet(count);
}
}
/**
* Queries the source database for row information on each row who's PK is in the keys array, and inserts those
* rows into the destination connection.
*
* @param scon The source connection
* @param dcon The destination connection
* @param keys The keys of the rows for which to query
* @throws Exception
*/
public void insertRows(Connection scon, Connection dcon, Set<Key> keys, AtomicInteger currentMod) throws Exception {
if (keys.size() <= 0) {
return;
}
int colCount = size();
String sql = writeInsertQuery();
//setIdentityInsert(dcon, true);
setIdentityInsert(dcon, false);
List<Column> pk = getPk();
int rowLimit = (int) Math.floor(maxKeys / pk.size());
int size = keys.size();
List<String> pkValuesList = new ArrayList<String>();
while (keys.size() > 0) {
int count = Math.min(keys.size(), rowLimit);
try (PreparedStatement selectStmt = createSelectQuery(scon, keys, count)) {
try (ResultSet rs = selectStmt.executeQuery()) {
ResultSetMetaData resultSetMetaData = rs.getMetaData();
try (PreparedStatement insertStmt = dcon.prepareStatement(sql)) {
long queryStart = System.currentTimeMillis();
pkValuesList.clear();
while (rs.next()) {
StringBuffer pkValues = new StringBuffer();
for (int i = 1; i <= colCount; i++) {
for (Column pkColumn : pk) {
if ((resultSetMetaData.getColumnName(i)).equalsIgnoreCase(pkColumn.getColumnName())) {
if (pkValues.length() > 0) {
pkValues.append(", ");
}
pkValues.append(rs.getString(i));
}
}
}
pkValuesList.add(pkValues.toString());
insertStmt.clearParameters();
for(int i = 1; i <= colCount; i++) {
insertStmt.setObject(i, rs.getObject(i));
}
currentMod.incrementAndGet();
insertStmt.addBatch();
}
long batchStart = System.currentTimeMillis();
System.out.println("Read " + count + " rows from " + getName() + " in " + (batchStart - queryStart) + "ms");
try {
insertStmt.executeBatch();
} catch (BatchUpdateException e) {
System.err.println("Batch Update Failed: " + e.getMessage());
int[] updateCounts = e.getUpdateCounts();
for (int i = 0; i < updateCounts.length; i++) {
String pkValuesForIndex = (pkValuesList != null ? pkValuesList.get(i) : "UNKNOWN");
if (updateCounts[i] >= 0) {
System.err.println(i + ": INSERT succeeded, rows = " + updateCounts[i] + " (" + pkValuesForIndex + ")");
} else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
System.err.println(i + ": INSERT succeeded, rows unknown (" + pkValuesForIndex + ")");
} else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
System.err.println(i + ": INSERT failed (" + pkValuesForIndex + ")");
}
}
} catch (Exception ex) {
throw new RuntimeException("Error inserting rows: " + sql, ex);
}
long end = System.currentTimeMillis();
System.out.println("Wrote " + count + " rows to " + getName() + " in " + (end - batchStart) + "ms");
}
}
} catch (Exception ex) {
throw new RuntimeException("Error inserting rows: " + sql, ex);
}
}
System.out.println("Batch inserted " + size + " rows into " + getName());
}
public void updateRows(Connection scon, Connection dcon, Set<Key> keys, AtomicInteger currentMod) throws Exception {
if (keys.size() <= 0) {
return;
}
List<Column> pk = getPk();
int rowLimit = (int) Math.floor(maxKeys / pk.size());
int colCount = size();
List<String> pkValuesList = new ArrayList<String>();
for (int rowIndex = 0; rowIndex < keys.size(); ) {
int count = Math.min(keys.size() - rowIndex, rowLimit);
System.out.println("Updating " + count + " rows in " + getName());
try (PreparedStatement selectStmt = createSelectQuery(scon, keys, count)) {
try (ResultSet rs = selectStmt.executeQuery()) {
ResultSetMetaData resultSetMetaData = rs.getMetaData();
String sql = writeUpdateQuery();
try (PreparedStatement updateStmt = dcon.prepareStatement(sql)) {
pkValuesList.clear();
while (rs.next()) {
StringBuffer pkValues = new StringBuffer();
for (int i = 1; i <= colCount; i++) {
for (Column pkColumn : pk) {
if ((resultSetMetaData.getColumnName(i)).equalsIgnoreCase(pkColumn.getColumnName())) {
if (pkValues.length() > 0) {
pkValues.append(", ");
}
pkValues.append(rs.getString(i));
}
}
}
pkValuesList.add(pkValues.toString());
updateRow(updateStmt, rs);
currentMod.incrementAndGet();
}
try {
updateStmt.executeBatch();
} catch (BatchUpdateException e) {
System.err.println("Batch Update Failed: " + e.getMessage());
int[] updateCounts = e.getUpdateCounts();
for (int i = 0; i < updateCounts.length; i++) {
String pkValuesForIndex = (pkValuesList != null ? pkValuesList.get(i) : "UNKNOWN");
if (updateCounts[i] >= 0) {
System.err.println(i + ": UPDATE succeeded, rows = " + updateCounts[i] + " (" + pkValuesForIndex + ")");
} else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
System.err.println(i + ": UPDATE succeeded, rows unknown (" + pkValuesForIndex + ")");
} else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
System.err.println(i + ": UPDATE failed (" + pkValuesForIndex + ")");
}
}
} catch (Exception ex) {
throw new RuntimeException("Error updating rows!", ex);
}
}
}
}
rowIndex += count;
}
}
private void updateRow(PreparedStatement stmt, ResultSet rs) throws Exception {
stmt.clearParameters();
int i = 0;
List<Column> pk = getPk();
for (Column col : values()) {
if(pk.contains(col)) {
continue; // TODO: Cache non-update columns for speed
}
String colName = col.getColumnName();
Object val = rs.getObject(colName);
stmt.setObject(++i, val);
}
for(Column col : pk) {
String colName = col.getColumnName();
Object val = rs.getObject(colName);
stmt.setObject(++i, val);
}
stmt.addBatch();
}
/**
* Gets the primary key from whichever row exists
*
* @param srs The source RecordSet
* @param drs The destination RecordSet
* @return The primary key of the row
* @throws Exception
*/
public Key getPk(ResultSet srs, ResultSet drs) throws Exception {
DbComparator.ChangeType change = detectChange(srs, drs);
if (change == DbComparator.ChangeType.DELETE) {
return getPk(drs);
}
return getPk(srs);
}
/**
 * The join logic of the comparator: inspects the rows currently under the
 * two cursors and decides what action would bring the destination in line
 * with the source. Rows present only on the left are INSERTs, rows present
 * only on the right are DELETEs, and matching keys with differing hashes
 * are UPDATEs. As with any merge-join, this assumes both ResultSets are
 * ordered and that {@code Key.compare} agrees with the database ordering.
 *
 * @param srs The source RecordSet
 * @param drs The destination RecordSet
 * @return the ChangeType required to sync the two databases
 * @throws Exception if either ResultSet cannot be read
 */
public DbComparator.ChangeType detectChange(ResultSet srs, ResultSet drs) throws Exception {
    /*
     Left  Right
     ACD   BDE
     A     B     Left < right, insert A into right
     C     B     Left > right, delete B from right
     D     D     Left = right, update D in right
     null  E     Left > right, delete E from right
    */
    int order = Key.compare(getPk(srs), getPk(drs));
    if (order < 0) {
        return DbComparator.ChangeType.INSERT;   // source-only row
    } else if (order > 0) {
        return DbComparator.ChangeType.DELETE;   // destination-only row
    }
    // Keys match; fall through to a hash comparison. A null hash means that
    // side's cursor is exhausted (see getHash).
    byte[] sourceHash = getHash(srs);
    byte[] destHash = getHash(drs);
    if (sourceHash == null && destHash == null) {
        throw new RuntimeException("Both rows are null!");
    } else if (sourceHash == null) {
        return DbComparator.ChangeType.DELETE;   // source exhausted
    } else if (destHash == null) {
        return DbComparator.ChangeType.INSERT;   // destination exhausted
    }
    return Arrays.equals(sourceHash, destHash)
            ? DbComparator.ChangeType.NONE
            : DbComparator.ChangeType.UPDATE;
}
/**
 * Reads the "Hash" column of the row currently under the cursor, or
 * returns null when the ResultSet is absent, not positioned on a row,
 * or exhausted.
 *
 * @param rs The ResultSet
 * @return the hash bytes, or null
 * @throws Exception if the cursor state or column cannot be read
 */
private static byte[] getHash(ResultSet rs) throws Exception {
    boolean noCurrentRow = rs == null
            || rs.isBeforeFirst()
            || rs.isAfterLast()
            || rs.getRow() == 0;
    return noCurrentRow ? null : rs.getBytes("Hash");
}
}
| |
package mobi.acpm.inspeckage.util;
import android.content.SharedPreferences;
import android.util.Log;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import dalvik.system.DexFile;
import de.robv.android.xposed.callbacks.XC_LoadPackage;
import static android.text.TextUtils.isDigitsOnly;
/**
* Created by acpm on 19/09/16.
*/
public class DexUtil {
public static Map<String, ArrayList<String>> getClassesWithMethods(XC_LoadPackage.LoadPackageParam loadPackageParam, String packageName) throws Throwable {
Map<String, ArrayList<String>> classes = new HashMap<String, ArrayList<String>>();
if (!packageName.trim().equals("") && loadPackageParam.appInfo.sourceDir.contains(packageName)) {
DexFile dexFile = new DexFile(loadPackageParam.appInfo.sourceDir);
Enumeration<String> classNames = dexFile.entries();
while (classNames.hasMoreElements()) {
final String className = classNames.nextElement();
//if (!packageName.trim().equals("") && className.contains(packageName)) {
boolean subMethod = false;
if (className.contains("$")) {
String v = className.split("\\$")[1];
if (isDigitsOnly(v)) {
subMethod = true;
}
}
if (!subMethod && !className.contains(".R$")) {
try {
final Class cls = Class.forName(className, false, loadPackageParam.classLoader);
if (cls != null && cls.getDeclaredMethods().length > 0) {
ArrayList<String> methods = new ArrayList<>();
for (final Method method : cls.getDeclaredMethods()) {
if (!Modifier.isAbstract(method.getModifiers()) && !methods.contains(method.getName())) {
methods.add(method.getName());
}
}
classes.put(className, methods);
}
} catch (NoClassDefFoundError ex) {
Log.e("Error", ex.getMessage());
} catch (ClassNotFoundException ex) {
Log.e("Error", ex.getMessage());
}
}
//}
}
}
return classes;
}
public static void saveClassesWithMethodsJson(XC_LoadPackage.LoadPackageParam loadPackageParam, SharedPreferences prefs) throws Throwable {
String packageName = prefs.getString("package", "");
Map<String, ArrayList<String>> classes = DexUtil.getClassesWithMethods(loadPackageParam, packageName);
//RAIZ
ClassMethod root = new ClassMethod();
root.setID("p_" + packageName);
root.setName(packageName);
int c_id = 0;
for (String classNameComplete : classes.keySet()) {
if (classNameComplete.contains(packageName)) {
c_id++;
String pack_name = classNameComplete.substring(0, classNameComplete.lastIndexOf("."));
String class_name = classNameComplete.substring(classNameComplete.lastIndexOf(".") + 1);
//pacote
ClassMethod package_class = new ClassMethod();
package_class.setID(pack_name);
package_class.setName(pack_name);
if (!root.contains(package_class)) {
root.getClassMethods().add(package_class);
}
//classe
ClassMethod class_leaf = new ClassMethod();
class_leaf.setID(classNameComplete);
class_leaf.setName(class_name);
//adiciona metodos nas folhas(ultimas classes)
ArrayList<String> methods = classes.get(classNameComplete);
int m_id = 0;
for (String method : methods) {
m_id++;
ClassMethod m = new ClassMethod();
m.setID("m_" + c_id+"_"+m_id);//
m.setName(method);
m.setIcon("jstree-file");
if (!class_leaf.contains(m)) {
class_leaf.getClassMethods().add(m);
}
}
package_class.getClassMethods().add(class_leaf);
root.update(package_class);
} else {
/**String name = classNameComplete.substring(0,classNameComplete.lastIndexOf("."));
ClassMethod cx = new ClassMethod();
cx.setID(name);
cx.setName(name);
if(!array.contains(cx)){
//c.getClassMethods().add(cx);
}**/
}
}
Gson gson = new GsonBuilder().create();
JsonElement jsonElement = gson.toJsonTree(root);
JsonObject jsonObject = jsonElement.getAsJsonObject();
FileUtil.writeToFile(prefs, jsonObject.toString(), FileType.APP_STRUCT, "");
}
public static class ClassMethod {
private String id;
private String text;
private String icon;
private List<ClassMethod> children = new ArrayList<ClassMethod>();
public String getID() {
return id;
}
public void setID(String id) {
this.id = id;
}
public String getName() {
return text;
}
public void setName(String name) {
this.text = name;
}
public String getIcon() {
return icon;
}
public void setIcon(String icon) {
this.icon = icon;
}
public List<ClassMethod> getClassMethods() {
return children;
}
public void setClassMethods(List<ClassMethod> children) {
this.children = children;
}
public boolean contains(ClassMethod cm) {
boolean x = false;
for (ClassMethod c : getClassMethods()) {
if (c.getID().equals(cm.getID())) {
x = true;
}
}
return x;
}
//se ja existir a classe entao pega os metodos da nova e atualiza a que ja existe
public boolean update(ClassMethod cm) {
boolean x = false;
for (ClassMethod c : getClassMethods()) {
if (c.getID().equals(cm.getID())) {
for (ClassMethod cm2 : cm.getClassMethods()) {
if (!c.contains(cm2)) {
c.getClassMethods().add(cm2);
}
}
x = true;
}
}
return x;
}
}
}
| |
package com.rey.material.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.database.DataSetObserver;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.v4.view.GravityCompat;
import android.support.v7.internal.widget.TintManager;
import android.support.v7.internal.widget.TintTypedArray;
import android.support.v7.internal.widget.ViewUtils;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
import android.widget.AdapterView;
import android.widget.FrameLayout;
import android.widget.ListAdapter;
import android.widget.SpinnerAdapter;
import com.rey.material.R;
import com.rey.material.drawable.ArrowDrawable;
import com.rey.material.drawable.DividerDrawable;
import com.rey.material.drawable.RippleDrawable;
import com.rey.material.util.LocaleUtil;
import com.rey.material.util.ThemeUtil;
public class Spinner extends FrameLayout {
private static final int MAX_ITEMS_MEASURED = 15;
private static final int INVALID_POSITION = -1;
/**
 * Callback fired when a dropdown item is clicked.
 * Return true to have the spinner also select the clicked position.
 */
public interface OnItemClickListener {
    boolean onItemClick(Spinner parent, View view, int position, long id);
}
/** Callback fired whenever the selected position actually changes. */
public interface OnItemSelectedListener {
    void onItemSelected(Spinner parent, View view, int position, long id);
}
private boolean mLabelEnable;          // whether a floating label is rendered above the content
private LabelView mLabelView;          // label view, non-null only when mLabelEnable
private Context mcontext;              // saved in init(); NOTE(review): getContext() appears equivalent
private SpinnerAdapter mAdapter;       // adapter backing both the selected view and the popup
private OnItemClickListener mOnItemClickListener;
private OnItemSelectedListener mOnItemSelectedListener;
private int mMinWidth;                 // shadow copies of the minimum size (super has no getters pre-16)
private int mMinHeight;
private DropdownPopup mPopup;          // dropdown list popup, created in applyStyle()
private int mDropDownWidth;            // WRAP_CONTENT/MATCH_PARENT or explicit pixels
private ArrowDrawable mArrowDrawable;  // animated dropdown arrow drawn on top of the content
private int mArrowSize;
private int mArrowPadding;
private boolean mArrowAnimSwitchMode;  // if true the arrow flips to MODE_UP while the popup shows
private DividerDrawable mDividerDrawable; // optional underline divider
private int mDividerHeight;
private int mDividerPadding;
private int mGravity;                  // gravity of the selected item view inside the spinner
private boolean mDisableChildrenWhenDisabled;
private int mSelectedPosition = INVALID_POSITION;
private RecycleBin mRecycler = new RecycleBin(); // recycles the displayed item view per view type
private Rect mTempRect = new Rect();   // scratch rect for drawable padding queries
private DropDownAdapter mTempAdapter;  // parked adapter when set before the popup exists
private SpinnerDataSetObserver mDataSetObserver = new SpinnerDataSetObserver();
private TintManager mTintManager;      // support-lib tint manager captured from the TypedArray
private RippleManager mRippleManager = new RippleManager(); // routes clicks through the ripple effect
/** Creates a spinner styled with the theme's listPopupWindowStyle. */
public Spinner(Context context) {
    super(context);
    init(context, null, R.attr.listPopupWindowStyle, 0);
}
public Spinner(Context context, AttributeSet attrs) {
    super(context, attrs);
    init(context, attrs, R.attr.listPopupWindowStyle, 0);
}
public Spinner(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    init(context, attrs, defStyleAttr, 0);
}
public Spinner(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    // NOTE(review): defStyleRes is not forwarded to super (the 4-arg FrameLayout
    // constructor requires API 21); it is only consumed by init()/applyStyle().
    super(context, attrs, defStyleAttr);
    init(context, attrs, defStyleAttr, defStyleRes);
}
/**
 * One-time setup shared by all constructors: applies the style attributes,
 * installs the click handler that opens the popup, and — in the layout
 * editor only — adds a placeholder item so the preview is not empty.
 */
public void init(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    mcontext = context;
    // The arrow/divider are drawn by this view itself, so drawing must be enabled.
    setWillNotDraw(false);
    applyStyle(context, attrs, defStyleAttr, defStyleRes);
    if (isInEditMode()) {
        // Design-time preview placeholder; never added at runtime.
        TextView tv = new TextView(context, attrs, defStyleAttr);
        tv.setText("Item 1");
        super.addView(tv);
    }
    setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            showPopup();
        }
    });
}
/** Re-applies the visual style from a style resource (e.g. on theme switch). */
public void applyStyle(int resId) {
    applyStyle(getContext(), null, 0, resId);
}
/**
 * Reads all Spinner styleable attributes and (re)builds the label view, the
 * dropdown popup, the arrow drawable and the optional divider. The method is
 * re-entrant: it removes all children first, preserves the current label text,
 * and re-attaches any adapter that was set before/between calls. Statement
 * order matters — the popup must exist before a parked mTempAdapter is applied.
 */
private void applyStyle(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    // Remember the current label text so a restyle does not wipe it.
    CharSequence memoLabel = mLabelView == null ? null : mLabelView.getText();
    removeAllViews();
    mRippleManager.onCreate(this, context, attrs, defStyleAttr, defStyleRes);
    TintTypedArray a = TintTypedArray.obtainStyledAttributes(context, attrs, R.styleable.Spinner, defStyleAttr, defStyleRes);
    mLabelEnable = a.getBoolean(R.styleable.Spinner_spn_labelEnable, false);
    if (mLabelEnable) {
        mLabelView = new LabelView(context);
        mLabelView.setGravity(GravityCompat.START);
        mLabelView.setSingleLine(true);
        int labelPadding = a.getDimensionPixelOffset(R.styleable.Spinner_spn_labelPadding, 0);
        int labelTextSize = a.getDimensionPixelSize(R.styleable.Spinner_spn_labelTextSize, 0);
        ColorStateList labelTextColor = a.getColorStateList(R.styleable.Spinner_spn_labelTextColor);
        int labelTextAppearance = a.getResourceId(R.styleable.Spinner_spn_labelTextAppearance, 0);
        int labelEllipsize = a.getInteger(R.styleable.Spinner_spn_labelEllipsize, 0);
        CharSequence label = ThemeUtil.getString(a, R.styleable.Spinner_spn_label, memoLabel);
        mLabelView.setText(label);
        mLabelView.setPadding(0, 0, 0, labelPadding);
        if (labelTextAppearance > 0)
            mLabelView.setTextAppearance(context, labelTextAppearance);
        if (labelTextSize > 0)
            mLabelView.setTextSize(TypedValue.COMPLEX_UNIT_PX, labelTextSize);
        if (labelTextColor != null)
            mLabelView.setTextColor(labelTextColor);
        // Map the spn_labelEllipsize enum (1..4) onto TruncateAt; END is the default.
        switch (labelEllipsize) {
            case 1:
                mLabelView.setEllipsize(TextUtils.TruncateAt.START);
                break;
            case 2:
                mLabelView.setEllipsize(TextUtils.TruncateAt.MIDDLE);
                break;
            case 3:
                mLabelView.setEllipsize(TextUtils.TruncateAt.END);
                break;
            case 4:
                mLabelView.setEllipsize(TextUtils.TruncateAt.MARQUEE);
                break;
            default:
                mLabelView.setEllipsize(TextUtils.TruncateAt.END);
                break;
        }
        // Label is always child 0; getSelectedView() relies on the item being last.
        addView(mLabelView, 0, new ViewGroup.LayoutParams(LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT));
    }
    mGravity = a.getInt(R.styleable.Spinner_android_gravity, Gravity.CENTER);
    setMinimumWidth(a.getDimensionPixelOffset(R.styleable.Spinner_android_minWidth, 0));
    setMinimumHeight(a.getDimensionPixelOffset(R.styleable.Spinner_android_minHeight, 0));
    mPopup = new DropdownPopup(context, attrs, defStyleAttr, defStyleRes);
    mPopup.setModal(true);
    mDropDownWidth = a.getLayoutDimension(R.styleable.Spinner_android_dropDownWidth, LayoutParams.WRAP_CONTENT);
    mPopup.setBackgroundDrawable(a.getDrawable(R.styleable.Spinner_android_popupBackground));
    mPopup.setPromptText(a.getString(R.styleable.Spinner_prompt));
    mPopup.setItemAnimation(a.getResourceId(R.styleable.Spinner_spn_popupItemAnimation, 0));
    mPopup.setItemAnimationOffset(a.getInteger(R.styleable.Spinner_spn_popupItemAnimOffset, 50));
    mDisableChildrenWhenDisabled = a.getBoolean(R.styleable.Spinner_disableChildrenWhenDisabled, false);
    mArrowAnimSwitchMode = a.getBoolean(R.styleable.Spinner_spn_arrowSwitchMode, false);
    int arrowAnimDuration = a.getInteger(R.styleable.Spinner_spn_arrowAnimDuration, 0);
    mArrowSize = a.getDimensionPixelSize(R.styleable.Spinner_spn_arrowSize, ThemeUtil.dpToPx(getContext(), 4));
    mArrowPadding = a.getDimensionPixelSize(R.styleable.Spinner_spn_arrowPadding, ThemeUtil.dpToPx(getContext(), 4));
    ColorStateList arrowColor = a.getColorStateList(R.styleable.Spinner_spn_arrowColor);
    if (arrowColor == null)
        arrowColor = ColorStateList.valueOf(ThemeUtil.colorControlNormal(context, 0xFF000000));
    int resId = a.getResourceId(R.styleable.Spinner_spn_arrowInterpolator, 0);
    Interpolator arrowInterpolator = resId != 0 ? AnimationUtils.loadInterpolator(context, resId) : null;
    boolean arrowClockwise = a.getBoolean(R.styleable.Spinner_spn_arrowAnimClockwise, true);
    mArrowDrawable = new ArrowDrawable(ArrowDrawable.MODE_DOWN, mArrowSize, arrowColor, arrowAnimDuration, arrowInterpolator, arrowClockwise);
    mArrowDrawable.setCallback(this);
    mDividerHeight = a.getDimensionPixelOffset(R.styleable.Spinner_spn_dividerHeight, 0);
    mDividerPadding = a.getDimensionPixelOffset(R.styleable.Spinner_spn_dividerPadding, 0);
    int dividerAnimDuration = a.getInteger(R.styleable.Spinner_spn_dividerAnimDuration, 0);
    ColorStateList dividerColor = a.getColorStateList(R.styleable.Spinner_spn_dividerColor);
    if (dividerColor == null) {
        // Default divider colors: normal when not pressed, activated when pressed+enabled.
        int[][] states = new int[][]{
                new int[]{-android.R.attr.state_pressed},
                new int[]{android.R.attr.state_pressed, android.R.attr.state_enabled},
        };
        int[] colors = new int[]{
                ThemeUtil.colorControlNormal(context, 0xFF000000),
                ThemeUtil.colorControlActivated(context, 0xFF000000),
        };
        dividerColor = new ColorStateList(states, colors);
    }
    if (mDividerHeight > 0) {
        mDividerDrawable = new DividerDrawable(mDividerHeight, dividerColor, dividerAnimDuration);
        mDividerDrawable.setCallback(this);
    }
    mTintManager = a.getTintManager();
    a.recycle();
    // An adapter set before the popup existed was parked in mTempAdapter; attach it now.
    if (mTempAdapter != null) {
        mPopup.setAdapter(mTempAdapter);
        mTempAdapter = null;
    }
    // Re-run setAdapter so the (re)created children reflect the current adapter.
    if (mAdapter != null)
        setAdapter(mAdapter);
}
/**
 * Returns the view currently displaying the selected item, or null when only
 * the label (or nothing) is attached. The item view is always the last child.
 */
public View getSelectedView() {
    View lastChild = getChildAt(getChildCount() - 1);
    if (lastChild == mLabelView)
        return null;
    return lastChild;
}
/**
 * Selects the given position, clamped to the adapter's last index. Does
 * nothing when the position is unchanged; otherwise notifies the selection
 * listener and rebuilds the displayed item view.
 */
public void setSelection(int position) {
    if (mAdapter != null)
        position = Math.min(position, mAdapter.getCount() - 1);
    if (position == mSelectedPosition)
        return;
    mSelectedPosition = position;
    if (mOnItemSelectedListener != null) {
        long itemId = mAdapter == null ? -1 : mAdapter.getItemId(position);
        mOnItemSelectedListener.onItemSelected(this, getSelectedView(), position, itemId);
    }
    onDataInvalidated();
}
/** Currently selected adapter position, or INVALID_POSITION (-1). */
public int getSelectedItemPosition() {
    return mSelectedPosition;
}
/** The adapter backing this spinner, or null if none has been set. */
public SpinnerAdapter getAdapter() {
    return mAdapter;
}
/**
 * Replaces the adapter backing this spinner. The previous adapter (if any)
 * is unregistered, the view recycler is cleared, and the popup's wrapped
 * adapter is updated — or parked in mTempAdapter when the popup has not been
 * built yet (applyStyle attaches it later).
 *
 * @param adapter the new adapter; may be null to clear the spinner
 */
public void setAdapter(SpinnerAdapter adapter) {
    if (mAdapter != null)
        mAdapter.unregisterDataSetObserver(mDataSetObserver);
    mRecycler.clear();
    mAdapter = adapter;
    if (mAdapter != null) {
        // The original registered unconditionally and threw a
        // NullPointerException for a null adapter; framework spinners accept null.
        mAdapter.registerDataSetObserver(mDataSetObserver);
        onDataChanged();
    }
    // DropDownAdapter is null-safe, so the popup can always be updated.
    if (mPopup != null)
        mPopup.setAdapter(new DropDownAdapter(adapter));
    else
        mTempAdapter = new DropDownAdapter(adapter);
}
/** Sets the dropdown popup's background drawable. */
public void setPopupBackgroundDrawable(Drawable background) {
    mPopup.setBackgroundDrawable(background);
}
/** Sets the popup background from a drawable resource, resolved through the tint manager. */
public void setPopupBackgroundResource(int resId) {
    setPopupBackgroundDrawable(mTintManager.getDrawable(resId));
}
public Drawable getPopupBackground() {
    return mPopup.getBackground();
}
/** Vertical offset of the popup relative to its anchor, in pixels. */
public void setDropDownVerticalOffset(int pixels) {
    mPopup.setVerticalOffset(pixels);
}
public int getDropDownVerticalOffset() {
    return mPopup.getVerticalOffset();
}
/** Horizontal offset of the popup relative to its anchor, in pixels. */
public void setDropDownHorizontalOffset(int pixels) {
    mPopup.setHorizontalOffset(pixels);
}
public int getDropDownHorizontalOffset() {
    return mPopup.getHorizontalOffset();
}
/** Popup width: explicit pixels, or WRAP_CONTENT/MATCH_PARENT (consumed in computeContentWidth). */
public void setDropDownWidth(int pixels) {
    mDropDownWidth = pixels;
}
public int getDropDownWidth() {
    return mDropDownWidth;
}
/**
 * Enables/disables the spinner; when spn_disableChildrenWhenDisabled is set,
 * the state is propagated to every child view as well.
 */
@Override
public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    if (!mDisableChildrenWhenDisabled)
        return;
    for (int i = 0, count = getChildCount(); i < count; i++)
        getChildAt(i).setEnabled(enabled);
}
@Override
public void setMinimumHeight(int minHeight) {
    // Shadow the value: View has no getter for it before API 16, and
    // onMeasure needs it.
    mMinHeight = minHeight;
    super.setMinimumHeight(minHeight);
}
@Override
public void setMinimumWidth(int minWidth) {
    // Shadow copy for onMeasure; see setMinimumHeight.
    mMinWidth = minWidth;
    super.setMinimumWidth(minWidth);
}
/** Sets the gravity of the selected item view; defaults to START horizontally. */
public void setGravity(int gravity) {
    if (mGravity != gravity) {
        if ((gravity & Gravity.HORIZONTAL_GRAVITY_MASK) == 0)
            gravity |= Gravity.START;
        mGravity = gravity;
        requestLayout();
    }
}
/**
 * Reports the selected item view's text baseline (offset by its top),
 * or -1 when there is no item view or it exposes no baseline.
 */
@Override
public int getBaseline() {
    View selected = getSelectedView();
    if (selected == null)
        return -1;
    final int baseline = selected.getBaseline();
    return baseline < 0 ? -1 : selected.getTop() + baseline;
}
@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    // Dismiss the popup so it does not leak its window when the spinner leaves the hierarchy.
    if (mPopup != null && mPopup.isShowing())
        mPopup.dismiss();
}
@Override
public void setBackgroundDrawable(Drawable drawable) {
    // If a RippleDrawable is installed, keep it and only swap the drawable
    // underneath it, so the ripple effect survives background changes.
    Drawable background = getBackground();
    if (background instanceof RippleDrawable && !(drawable instanceof RippleDrawable))
        ((RippleDrawable) background).setBackgroundDrawable(drawable);
    else
        super.setBackgroundDrawable(drawable);
}
@Override
public void setOnClickListener(OnClickListener l) {
    // All clicks are routed through mRippleManager so the ripple animation can
    // play before the real listener fires; the manager itself is installed as
    // the actual View listener.
    if (l == mRippleManager)
        super.setOnClickListener(l);
    else {
        mRippleManager.setOnClickListener(l);
        setOnClickListener(mRippleManager);
    }
}
/** Registers the listener invoked when a dropdown item is clicked. */
public void setOnItemClickListener(OnItemClickListener l) {
    mOnItemClickListener = l;
}
/** Registers the listener invoked when the selection changes. */
public void setOnItemSelectedListener(OnItemSelectedListener l) {
    mOnItemSelectedListener = l;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent event) {
    // Always intercept: children (the item view) never handle touches; the
    // spinner itself consumes the click to open the popup.
    return true;
}
@Override
public boolean onTouchEvent(@NonNull MotionEvent event) {
    // Let the ripple manager see every touch so the ripple animates.
    boolean result = super.onTouchEvent(event);
    return mRippleManager.onTouchEvent(event) || result;
}
@Override
protected boolean verifyDrawable(Drawable who) {
    // The arrow and divider are drawn by this view, so their invalidations
    // must be accepted here.
    return super.verifyDrawable(who) || mArrowDrawable == who || mDividerDrawable == who;
}
/** Horizontal space reserved for the dropdown arrow (size plus padding on both sides). */
private int getArrowDrawableWidth() {
    return 2 * mArrowPadding + mArrowSize;
}
/** Vertical space reserved for the divider, or 0 when no divider is configured. */
private int getDividerDrawableHeight() {
    if (mDividerHeight <= 0)
        return 0;
    return mDividerHeight + mDividerPadding;
}
/**
 * Measures the label and the selected item view, reserving horizontal room
 * for the arrow and vertical room for the divider, then clamps to the spec
 * and minimum sizes. Finally re-measures the item view EXACTLY to the space
 * that remains inside the final measured dimension.
 */
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    int widthSize = MeasureSpec.getSize(widthMeasureSpec);
    int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    int heightSize = MeasureSpec.getSize(heightMeasureSpec);
    // Space consumed by padding plus the arrow (horizontally) / divider (vertically).
    int paddingHorizontal = getPaddingLeft() + getPaddingRight() + getArrowDrawableWidth();
    int paddingVertical = getPaddingTop() + getPaddingBottom() + getDividerDrawableHeight();
    int labelWidth = 0;
    int labelHeight = 0;
    if (mLabelView != null) {
        mLabelView.measure(MeasureSpec.makeMeasureSpec(widthSize - paddingHorizontal, widthMode), MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
        labelWidth = mLabelView.getMeasuredWidth();
        labelHeight = mLabelView.getMeasuredHeight();
    }
    int width = 0;
    int height = 0;
    View v = getSelectedView();
    if (v != null) {
        int ws;
        int hs;
        // Translate the item's LayoutParams into measure specs against the
        // space left after padding/arrow/divider/label.
        ViewGroup.LayoutParams params = v.getLayoutParams();
        switch (params.width) {
            case ViewGroup.LayoutParams.WRAP_CONTENT:
                ws = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
                break;
            case ViewGroup.LayoutParams.MATCH_PARENT:
                ws = MeasureSpec.makeMeasureSpec(widthSize - paddingHorizontal, widthMode);
                break;
            default:
                ws = MeasureSpec.makeMeasureSpec(params.width, MeasureSpec.EXACTLY);
                break;
        }
        switch (params.height) {
            case ViewGroup.LayoutParams.WRAP_CONTENT:
                hs = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
                break;
            case ViewGroup.LayoutParams.MATCH_PARENT:
                hs = MeasureSpec.makeMeasureSpec(heightSize - paddingVertical - labelHeight, heightMode);
                break;
            default:
                hs = MeasureSpec.makeMeasureSpec(params.height, MeasureSpec.EXACTLY);
                break;
        }
        v.measure(ws, hs);
        width = v.getMeasuredWidth();
        height = v.getMeasuredHeight();
    }
    width = Math.max(mMinWidth, Math.max(labelWidth, width) + paddingHorizontal);
    height = Math.max(mMinHeight, height + labelHeight + paddingVertical);
    switch (widthMode) {
        case MeasureSpec.AT_MOST:
            width = Math.min(widthSize, width);
            break;
        case MeasureSpec.EXACTLY:
            width = widthSize;
            break;
    }
    switch (heightMode) {
        case MeasureSpec.AT_MOST:
            height = Math.min(heightSize, height);
            break;
        case MeasureSpec.EXACTLY:
            height = heightSize;
            break;
    }
    setMeasuredDimension(width, height);
    // Re-measure the item view EXACTLY to the remaining interior space.
    // NOTE(review): adding paddingHorizontal in the RTL branch (instead of
    // subtracting) looks asymmetric and may over-size the child under RTL —
    // worth confirming against an RTL layout.
    if (LocaleUtil.IsRTL()) {
        width += paddingHorizontal;
    } else {
        width -= paddingHorizontal;
    }
    height -= labelHeight + paddingVertical;
    if (v != null && (v.getMeasuredWidth() != width || v.getMeasuredHeight() != height))
        v.measure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
}
/**
 * Lays out the label at the top-left of the content area, then positions the
 * selected item view inside the remaining space according to mGravity.
 */
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    int childLeft = getPaddingLeft();
    // Arrow occupies the trailing horizontal strip.
    int childRight = r - l - getPaddingRight() - getArrowDrawableWidth();
    int childTop = getPaddingTop();
    int childBottom = b - t - getPaddingBottom();
    if (mLabelView != null) {
        mLabelView.layout(childLeft, childTop, childLeft + mLabelView.getMeasuredWidth(), childTop + mLabelView.getMeasuredHeight());
        childTop += mLabelView.getMeasuredHeight();
    }
    View v = getSelectedView();
    if (v != null) {
        int x, y;
        // Horizontal placement: LEFT / CENTER / RIGHT, centered by default.
        int horizontalGravity = mGravity & Gravity.HORIZONTAL_GRAVITY_MASK;
        switch (horizontalGravity) {
            case Gravity.LEFT:
                x = childLeft;
                break;
            case Gravity.CENTER_HORIZONTAL:
                x = (childRight - childLeft - v.getMeasuredWidth()) / 2 + childLeft;
                break;
            case Gravity.RIGHT:
                x = childRight - v.getMeasuredWidth();
                break;
            default:
                x = (childRight - childLeft - v.getMeasuredWidth()) / 2 + childLeft;
                break;
        }
        // Vertical placement: TOP / CENTER / BOTTOM, centered by default.
        int verticalGravity = mGravity & Gravity.VERTICAL_GRAVITY_MASK;
        switch (verticalGravity) {
            case Gravity.TOP:
                y = childTop;
                break;
            case Gravity.CENTER_VERTICAL:
                y = (childBottom - childTop - v.getMeasuredHeight()) / 2 + childTop;
                break;
            case Gravity.BOTTOM:
                y = childBottom - v.getMeasuredHeight();
                break;
            default:
                y = (childBottom - childTop - v.getMeasuredHeight()) / 2 + childTop;
                break;
        }
        v.layout(x, y, x + v.getMeasuredWidth(), y + v.getMeasuredHeight());
    }
}
/**
 * Re-anchors the arrow (leading edge in RTL, trailing edge in LTR, below the
 * label) and stretches the divider along the bottom edge inside the padding.
 */
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
    if (LocaleUtil.IsRTL()) {
        mArrowDrawable.setBounds(getPaddingRight(), getPaddingTop() + (mLabelView == null ? 0 : mLabelView.getMeasuredHeight()), getArrowDrawableWidth() + getPaddingRight(), h - getDividerDrawableHeight() - getPaddingBottom());
    } else {
        mArrowDrawable.setBounds(w - getArrowDrawableWidth() - getPaddingRight(), getPaddingTop() + (mLabelView == null ? 0 : mLabelView.getMeasuredHeight()), w - getPaddingRight(), h - getDividerDrawableHeight() - getPaddingBottom());
    }
    if (mDividerDrawable != null)
        mDividerDrawable.setBounds(getPaddingLeft(), h - mDividerHeight - getPaddingBottom(), w - getPaddingRight(), h - getPaddingBottom());
}
@Override
public void draw(@NonNull Canvas canvas) {
    super.draw(canvas);
    // Arrow and divider are painted on top of children; see setWillNotDraw(false) in init().
    mArrowDrawable.draw(canvas);
    if (mDividerDrawable != null)
        mDividerDrawable.draw(canvas);
}
@Override
protected void drawableStateChanged() {
    super.drawableStateChanged();
    // Keep the divider's pressed/enabled color in sync with the view state.
    if (mDividerDrawable != null)
        mDividerDrawable.setState(getDrawableState());
}
/**
 * Dispatches a dropdown item click. With no click listener installed the
 * position is simply selected and false is returned. With a listener, the
 * position is selected only when the listener returns true, and this method
 * returns true to signal the click was consumed.
 */
public boolean performItemClick(View view, int position, long id) {
    if (mOnItemClickListener == null) {
        setSelection(position);
        return false;
    }
    if (mOnItemClickListener.onItemClick(this, view, position, id))
        setSelection(position);
    return true;
}
/**
 * Reacts to an adapter data change: selects position 0 when nothing was
 * selected yet, rebuilds the item view when the selection is still valid,
 * or clamps to the new last item when the data shrank past the selection.
 */
private void onDataChanged() {
    if (mSelectedPosition == INVALID_POSITION)
        setSelection(0);
    else if (mSelectedPosition < mAdapter.getCount())
        onDataInvalidated();
    else
        setSelection(mAdapter.getCount() - 1);
}
/**
 * Rebuilds the displayed item view for the current selection: removes every
 * child except the label, asks the adapter for a fresh view (reusing a scrap
 * of the same view type), and attaches it non-interactive — the spinner
 * itself handles clicks.
 */
private void onDataInvalidated() {
    if (mAdapter == null)
        return;
    if (mLabelView == null)
        removeAllViews();
    else
        // Keep child 0 (the label); strip everything after it.
        for (int i = getChildCount() - 1; i > 0; i--)
            removeViewAt(i);
    int type = mAdapter.getItemViewType(mSelectedPosition);
    View v = mAdapter.getView(mSelectedPosition, mRecycler.get(type), this);
    v.setFocusable(false);
    v.setClickable(false);
    // super.addView bypasses any addView override; the item stays the last child.
    super.addView(v);
    // NOTE(review): the view is cached while still attached; RecycleBin.get
    // removes it again before reuse, so each type holds at most one scrap.
    mRecycler.put(type, v);
}
/**
 * Opens the dropdown popup (if not already showing), scrolls its list to the
 * current selection, and flips the arrow to MODE_UP when switch mode is on.
 */
private void showPopup() {
    if (!mPopup.isShowing()) {
        mPopup.show();
        final ListView lv = mPopup.getListView();
        if (lv != null) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
                lv.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
            lv.setSelection(getSelectedItemPosition());
        }
        if (mArrowAnimSwitchMode)
            mArrowDrawable.setMode(ArrowDrawable.MODE_UP, true);
    }
}
/** Called from the popup's dismiss listener; animates the arrow back down. */
private void onPopupDismissed() {
    mArrowDrawable.setMode(ArrowDrawable.MODE_DOWN, true);
}
/**
 * Measures up to MAX_ITEMS_MEASURED adapter views (a window anchored at the
 * current selection, shifted back when fewer items remain) at UNSPECIFIED
 * size and returns the widest result plus the popup background's horizontal
 * padding. Returns 0 when there is no adapter.
 */
private int measureContentWidth(SpinnerAdapter adapter, Drawable background) {
    if (adapter == null)
        return 0;
    final int ws = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
    final int hs = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
    // Cap how many items we measure. If it's a huge data set with wildly
    // varying sizes, oh well.
    int first = Math.max(0, getSelectedItemPosition());
    final int last = Math.min(adapter.getCount(), first + MAX_ITEMS_MEASURED);
    first = Math.max(0, first - (MAX_ITEMS_MEASURED - (last - first)));
    int maxWidth = 0;
    View scrap = null;
    int scrapType = 0;
    for (int pos = first; pos < last; pos++) {
        final int viewType = adapter.getItemViewType(pos);
        if (viewType != scrapType) {
            scrapType = viewType;
            scrap = null; // convert views are only reusable within one view type
        }
        scrap = adapter.getView(pos, scrap, null);
        if (scrap.getLayoutParams() == null)
            scrap.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
        scrap.measure(ws, hs);
        maxWidth = Math.max(maxWidth, scrap.getMeasuredWidth());
    }
    // Account for the popup background's horizontal padding.
    if (background != null) {
        background.getPadding(mTempRect);
        maxWidth += mTempRect.left + mTempRect.right;
    }
    return maxWidth;
}
/**
 * Parcelable state: the selected position and whether the dropdown was open,
 * so both survive configuration changes. Read/write order of the two fields
 * must stay in sync between the Parcel constructor and writeToParcel.
 */
static class SavedState extends BaseSavedState {
    int position;        // selected adapter position
    boolean showDropdown; // true when the popup was showing at save time
    SavedState(Parcelable superState) {
        super(superState);
    }
    /**
     * Constructor called from {@link #CREATOR}
     */
    SavedState(Parcel in) {
        super(in);
        position = in.readInt();
        showDropdown = in.readByte() != 0;
    }
    @Override
    public void writeToParcel(@NonNull Parcel out, int flags) {
        super.writeToParcel(out, flags);
        out.writeInt(position);
        out.writeByte((byte) (showDropdown ? 1 : 0));
    }
    @Override
    public String toString() {
        return "AbsSpinner.SavedState{"
                + Integer.toHexString(System.identityHashCode(this))
                + " position=" + position
                + " showDropdown=" + showDropdown + "}";
    }
    public static final Creator<SavedState> CREATOR
            = new Creator<SavedState>() {
        public SavedState createFromParcel(Parcel in) {
            return new SavedState(in);
        }
        public SavedState[] newArray(int size) {
            return new SavedState[size];
        }
    };
}
/**
 * Captures the selected position and the popup's open state on top of the
 * superclass state, so both can be restored after a configuration change.
 */
@Override
public Parcelable onSaveInstanceState() {
    SavedState state = new SavedState(super.onSaveInstanceState());
    state.position = getSelectedItemPosition();
    state.showDropdown = mPopup != null && mPopup.isShowing();
    return state;
}
/**
 * Restores the selection and, if the popup was open, re-shows it after the
 * first layout pass (the popup needs a laid-out anchor), removing the
 * one-shot layout listener afterwards.
 */
@Override
public void onRestoreInstanceState(Parcelable state) {
    // NOTE(review): unconditional cast assumes the state is always our
    // SavedState — holds as long as onSaveInstanceState is not overridden.
    SavedState ss = (SavedState) state;
    super.onRestoreInstanceState(ss.getSuperState());
    setSelection(ss.position);
    if (ss.showDropdown) {
        ViewTreeObserver vto = getViewTreeObserver();
        if (vto != null) {
            final ViewTreeObserver.OnGlobalLayoutListener listener = new ViewTreeObserver.OnGlobalLayoutListener() {
                @Override
                public void onGlobalLayout() {
                    showPopup();
                    final ViewTreeObserver vto = getViewTreeObserver();
                    if (vto != null)
                        // Deprecated spelling kept for pre-Jelly Bean devices.
                        vto.removeGlobalOnLayoutListener(this);
                }
            };
            vto.addOnGlobalLayoutListener(listener);
        }
    }
}
/** Forwards adapter notifications to the spinner's data-change handlers. */
private class SpinnerDataSetObserver extends DataSetObserver {
    @Override
    public void onChanged() {
        onDataChanged();
    }
    @Override
    public void onInvalidated() {
        onDataInvalidated();
    }
}
/**
 * One-scrap-per-view-type cache used by onDataInvalidated() to recycle the
 * displayed item view instead of inflating a new one on each selection.
 */
private class RecycleBin {
    private final SparseArray<View> mScrapHeap = new SparseArray<>();

    /** Stores {@code v} as the scrap view for the given adapter view type. */
    public void put(int type, View v) {
        mScrapHeap.put(type, v);
    }

    /** Removes and returns the scrap for the view type, or null when absent. */
    View get(int type) {
        View scrap = mScrapHeap.get(type);
        if (scrap == null)
            return null;
        mScrapHeap.delete(type);
        return scrap;
    }

    /** Drops every cached scrap view. */
    void clear() {
        mScrapHeap.clear();
    }
}
/**
 * Wraps the spinner's SpinnerAdapter as a ListAdapter for the popup list.
 * Dropdown views are used as the list rows; each row is tagged with its
 * position and clicks are rerouted through an item-click listener. All
 * methods are null-safe against a missing wrapped adapter.
 */
private static class DropDownAdapter implements ListAdapter, SpinnerAdapter, OnClickListener {
    private SpinnerAdapter mAdapter;
    private ListAdapter mListAdapter; // non-null only when the wrapped adapter is also a ListAdapter
    private AdapterView.OnItemClickListener mOnItemClickListener;
    /**
     * <p>Creates a new ListAdapter wrapper for the specified adapter.</p>
     *
     * @param adapter the Adapter to transform into a ListAdapter
     */
    public DropDownAdapter(SpinnerAdapter adapter) {
        this.mAdapter = adapter;
        if (adapter instanceof ListAdapter)
            this.mListAdapter = (ListAdapter) adapter;
    }
    public void setOnItemClickListener(AdapterView.OnItemClickListener listener) {
        mOnItemClickListener = listener;
    }
    @Override
    public void onClick(View v) {
        // Position was stashed as the row's tag in getView().
        int position = (Integer) v.getTag();
        if (mOnItemClickListener != null)
            mOnItemClickListener.onItemClick(null, v, position, 0);
    }
    public int getCount() {
        return mAdapter == null ? 0 : mAdapter.getCount();
    }
    public Object getItem(int position) {
        return mAdapter == null ? null : mAdapter.getItem(position);
    }
    public long getItemId(int position) {
        return mAdapter == null ? -1 : mAdapter.getItemId(position);
    }
    public View getView(int position, View convertView, ViewGroup parent) {
        // Popup rows are the adapter's dropdown views, wired for click dispatch.
        View v = getDropDownView(position, convertView, parent);
        v.setOnClickListener(this);
        v.setTag(position);
        return v;
    }
    public View getDropDownView(int position, View convertView, ViewGroup parent) {
        return (mAdapter == null) ? null : mAdapter.getDropDownView(position, convertView, parent);
    }
    public boolean hasStableIds() {
        return mAdapter != null && mAdapter.hasStableIds();
    }
    /**
     * If the wrapped SpinnerAdapter is also a ListAdapter, delegate this call. Otherwise,
     * return true.
     */
    public boolean areAllItemsEnabled() {
        final ListAdapter adapter = mListAdapter;
        return adapter == null || adapter.areAllItemsEnabled();
    }
    /**
     * If the wrapped SpinnerAdapter is also a ListAdapter, delegate this call. Otherwise,
     * return true.
     */
    public boolean isEnabled(int position) {
        final ListAdapter adapter = mListAdapter;
        return adapter == null || adapter.isEnabled(position);
    }
    public int getItemViewType(int position) {
        final ListAdapter adapter = mListAdapter;
        if (adapter != null)
            return adapter.getItemViewType(position);
        else
            return 0;
    }
    public int getViewTypeCount() {
        final ListAdapter adapter = mListAdapter;
        if (adapter != null)
            return adapter.getViewTypeCount();
        else
            return 1;
    }
    public boolean isEmpty() {
        return getCount() == 0;
    }
    @Override
    public void registerDataSetObserver(DataSetObserver observer) {
        if (mAdapter != null)
            mAdapter.registerDataSetObserver(observer);
    }
    @Override
    public void unregisterDataSetObserver(DataSetObserver observer) {
        if (mAdapter != null)
            mAdapter.unregisterDataSetObserver(observer);
    }
}
/**
 * TextView for the floating label that mirrors the spinner's drawable state
 * (pressed/enabled/etc.), so the label re-colors together with its spinner.
 */
private class LabelView extends android.widget.TextView {
    public LabelView(Context context) {
        super(context);
    }
    @Override
    protected int[] onCreateDrawableState(int extraSpace) {
        // Share the parent's state instead of maintaining an own one.
        return Spinner.this.getDrawableState();
    }
}
private class DropdownPopup extends ListPopupWindow {
private CharSequence mHintText;    // prompt text shown above the popup list
private DropDownAdapter mAdapter;  // wrapper adapter installed via setAdapter()
// Re-computes the popup width and refreshes it on every global layout while
// the popup is visible; removed again in the dismiss listener below.
private ViewTreeObserver.OnGlobalLayoutListener layoutListener = new ViewTreeObserver.OnGlobalLayoutListener() {
    @Override
    public void onGlobalLayout() {
        computeContentWidth();
        // Use super.show here to update; we don't want to move the selected
        // position or adjust other things that would be reset otherwise.
        DropdownPopup.super.show();
    }
};
public DropdownPopup(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    super(context, attrs, defStyleAttr, defStyleRes);
    setAnchorView(Spinner.this);
    setModal(true);
    setPromptPosition(POSITION_PROMPT_ABOVE);
    setOnDismissListener(new PopupWindow.OnDismissListener() {
        @SuppressWarnings("deprecation")
        @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
        @Override
        public void onDismiss() {
            // Detach the layout listener (API-appropriate removal) and let the
            // spinner animate its arrow back down.
            final ViewTreeObserver vto = getViewTreeObserver();
            if (vto != null) {
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN)
                    vto.removeOnGlobalLayoutListener(layoutListener);
                else
                    vto.removeGlobalOnLayoutListener(layoutListener);
            }
            onPopupDismissed();
        }
    });
}
@Override
public void setAdapter(ListAdapter adapter) {
    super.setAdapter(adapter);
    // The popup is always fed a DropDownAdapter (see Spinner.setAdapter),
    // so this cast is safe; item clicks are routed back to the spinner.
    mAdapter = (DropDownAdapter) adapter;
    mAdapter.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View v, int position, long id) {
            Spinner.this.performItemClick(v, position, mAdapter.getItemId(position));
            dismiss();
        }
    });
}
/** The prompt text shown above the list, or null when none was set. */
public CharSequence getHintText() {
    return mHintText;
}
public void setPromptText(CharSequence hintText) {
    mHintText = hintText;
}
void computeContentWidth() {
final Drawable background = getBackground();
int hOffset = 0;
if (background != null) {
background.getPadding(mTempRect);
hOffset = ViewUtils.isLayoutRtl(Spinner.this) ? mTempRect.right : -mTempRect.left;
} else
mTempRect.left = mTempRect.right = 0;
final int spinnerPaddingLeft = Spinner.this.getPaddingLeft();
final int spinnerPaddingRight = Spinner.this.getPaddingRight();
final int spinnerWidth = Spinner.this.getWidth();
if (mDropDownWidth == WRAP_CONTENT) {
int contentWidth = measureContentWidth((SpinnerAdapter) mAdapter, getBackground());
final int contentWidthLimit = getContext().getResources().getDisplayMetrics().widthPixels - mTempRect.left - mTempRect.right;
if (contentWidth > contentWidthLimit)
contentWidth = contentWidthLimit;
setContentWidth(Math.max(contentWidth, spinnerWidth - spinnerPaddingLeft - spinnerPaddingRight));
} else if (mDropDownWidth == MATCH_PARENT)
setContentWidth(spinnerWidth - spinnerPaddingLeft - spinnerPaddingRight);
else
setContentWidth(mDropDownWidth);
if (ViewUtils.isLayoutRtl(Spinner.this)) {
hOffset += spinnerWidth - spinnerPaddingRight - getWidth();
} else if (LocaleUtil.IsRTL()) {
hOffset = -(spinnerPaddingLeft - (spinnerWidth - spinnerPaddingRight - getWidth())) + (hOffset * -1);
} else {
hOffset += spinnerPaddingLeft;
}
setHorizontalOffset(hOffset);
}
public void show() {
final boolean wasShowing = isShowing();
computeContentWidth();
setInputMethodMode(ListPopupWindow.INPUT_METHOD_NOT_NEEDED);
super.show();
if (wasShowing) {
// Skip setting up the layout/dismiss listener below. If we were previously
// showing it will still stick around.
return;
}
// Make sure we hide if our anchor goes away.
// TODO: This might be appropriate to push all the way down to PopupWindow,
// but it may have other side effects to investigate first. (Text editing handles, etc.)
final ViewTreeObserver vto = getViewTreeObserver();
if (vto != null)
vto.addOnGlobalLayoutListener(layoutListener);
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2015
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.graphics.ui;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import go.graphics.GLDrawContext;
import jsettlers.common.images.ImageLink;
import jsettlers.common.position.FloatRectangle;
import jsettlers.common.action.Action;
import jsettlers.graphics.image.Image;
import jsettlers.graphics.map.draw.ImageProvider;
/**
* This is a panel that holds UI elements and can have a background.
* <p>
* All elements are positioned relatively.
*
* @author michael
*/
/**
 * This is a panel that holds UI elements and can have a background.
 * <p>
 * All elements are positioned relatively: each child is placed with borders given as
 * fractions (0..1) of this panel's current width and height.
 *
 * @author michael
 */
public class UIPanel implements UIElement {
	/** Children together with their relative placement, in add order. */
	private final LinkedList<ChildLink> children = new LinkedList<>();
	/** The absolute position of this panel; set by the parent before drawing. */
	private FloatRectangle position = new FloatRectangle(0, 0, 1, 1);
	/** The background image, or {@code null} if the panel has no background. */
	private ImageLink background;
	/** Whether this panel is currently attached (between onAttach and onDetach). */
	private boolean attached = false;

	/**
	 * Sets the background image. {@code null} means no background.
	 *
	 * @param imageLink
	 *            the image to draw as background, or {@code null}
	 */
	public void setBackground(ImageLink imageLink) {
		this.background = imageLink;
	}

	/**
	 * Adds a child to the panel.
	 *
	 * @param child
	 *            The child to add. Must not be {@code null}.
	 * @param left
	 *            relative left border (0..1).
	 * @param bottom
	 *            relative bottom border (0..1).
	 * @param right
	 *            relative right border (0..1).
	 * @param top
	 *            relative top border (0..1).
	 * @throws NullPointerException
	 *             if {@code child} is {@code null}
	 */
	public void addChild(UIElement child, float left, float bottom,
			float right, float top) {
		if (child == null) {
			throw new NullPointerException("child must not be null");
		}
		this.children.add(new ChildLink(child, left, bottom, right, top));
		if (attached) {
			// Keep the attach state consistent for children added after onAttach().
			child.onAttach();
		}
	}

	/**
	 * Removes the first occurrence of the given child, detaching it if this panel is
	 * currently attached. Does nothing if the child is not present.
	 *
	 * @param child
	 *            the child to remove
	 */
	public void removeChild(UIElement child) {
		for (Iterator<ChildLink> iterator = children.iterator(); iterator.hasNext();) {
			ChildLink l = iterator.next();
			if (l.child.equals(child)) {
				if (attached) {
					l.child.onDetach();
				}
				iterator.remove();
				break;
			}
		}
	}

	/**
	 * Gets a snapshot of the children of this panel.
	 *
	 * @return a new list containing the child elements in add order
	 */
	public List<UIElement> getChildren() {
		ArrayList<UIElement> list = new ArrayList<>(children.size());
		for (ChildLink c : children) {
			list.add(c.child);
		}
		return list;
	}

	@Override
	public void drawAt(GLDrawContext gl) {
		drawBackground(gl);
		drawChildren(gl);
	}

	/**
	 * Draws all children, translated to this panel's origin.
	 *
	 * @param gl
	 *            the GL context to draw with
	 */
	protected void drawChildren(GLDrawContext gl) {
		if (!children.isEmpty()) {
			gl.glPushMatrix();
			gl.glTranslatef(position.getMinX(), position.getMinY(), 0);
			for (ChildLink link : children) {
				link.drawAt(gl, position.getWidth(), position.getHeight());
			}
			gl.glPopMatrix();
		}
	}

	/**
	 * Draws the background image (if any) filling this panel's rectangle.
	 *
	 * @param gl
	 *            the GL context to draw with
	 */
	protected void drawBackground(GLDrawContext gl) {
		ImageLink link = getBackgroundImage();
		if (link != null) {
			FloatRectangle position = getPosition();
			Image image = ImageProvider.getInstance().getImage(link, position.getWidth(), position.getHeight());
			drawAtRect(gl, image, position);
		}
	}

	/**
	 * Draws an image at a given rect
	 *
	 * @param gl
	 *            The context to use
	 * @param image
	 *            The image to draw
	 * @param position
	 *            The position to draw the image at
	 */
	protected void drawAtRect(GLDrawContext gl, Image image, FloatRectangle position) {
		gl.color(1, 1, 1, 1);
		float minX = position.getMinX();
		float minY = position.getMinY();
		float maxX = position.getMaxX();
		float maxY = position.getMaxY();
		image.drawImageAtRect(gl, minX, minY, maxX, maxY);
	}

	/**
	 * @return the background image link, or {@code null} if none was set
	 */
	protected ImageLink getBackgroundImage() {
		return background;
	}

	/**
	 * Associates a child with its relative borders and converts between parent-relative
	 * and child-relative coordinates.
	 */
	private class ChildLink {
		private final UIElement child;
		private final float left;
		private final float right;
		private final float top;
		private final float bottom;

		public ChildLink(UIElement child, float left, float bottom, float right, float top) {
			this.child = child;
			this.left = left;
			this.right = right;
			this.top = top;
			this.bottom = bottom;
		}

		/**
		 * Positions the child inside a parent of the given size and draws it.
		 */
		public void drawAt(GLDrawContext gl, float width, float height) {
			child.setPosition(new FloatRectangle((left * width), (bottom * height), (right * width), (top * height)));
			child.drawAt(gl);
		}

		/**
		 * Forwards an action query to the child if the point lies inside its borders.
		 *
		 * @return the child's action, or {@code null} if the point misses this child
		 */
		public Action getActionRelative(float parentx, float parenty) {
			if (left <= parentx && parentx <= right && bottom <= parenty && parenty <= top) {
				float relativex = (parentx - left) / (right - left);
				float relativey = (parenty - bottom) / (top - bottom);
				return child.getAction(relativex, relativey);
			} else {
				return null;
			}
		}

		/**
		 * Forwards a description query to the child if the point lies inside its borders.
		 *
		 * @return the child's description, or {@code null} if the point misses this child
		 */
		public String getDescriptionRelative(float parentx, float parenty) {
			if (left <= parentx && parentx <= right && bottom <= parenty && parenty <= top) {
				float relativex = (parentx - left) / (right - left);
				float relativey = (parenty - bottom) / (top - bottom);
				return child.getDescription(relativex, relativey);
			} else {
				return null;
			}
		}
	}

	@Override
	public void setPosition(FloatRectangle position) {
		this.position = position;
	}

	/**
	 * @return the current absolute position of this panel
	 */
	public FloatRectangle getPosition() {
		return position;
	}

	/**
	 * Removes all children, detaching them first if this panel is attached.
	 */
	public void removeAll() {
		if (attached) {
			for (ChildLink link : children) {
				link.child.onDetach();
			}
		}
		this.children.clear();
	}

	@Override
	public Action getAction(float relativex, float relativey) {
		// The first child (in add order) that claims the point wins.
		for (ChildLink link : children) {
			Action action = link.getActionRelative(relativex, relativey);
			if (action != null) {
				return action;
			}
		}
		return null;
	}

	@Override
	public String getDescription(float relativex, float relativey) {
		// The first child (in add order) that provides a description wins.
		for (ChildLink link : children) {
			String description = link.getDescriptionRelative(relativex, relativey);
			if (description != null) {
				return description;
			}
		}
		return null;
	}

	@Override
	public void onAttach() {
		if (!attached) {
			for (ChildLink link : children) {
				link.child.onAttach();
			}
		}
		attached = true;
	}

	@Override
	public void onDetach() {
		if (attached) {
			for (ChildLink link : children) {
				link.child.onDetach();
			}
		}
		attached = false;
	}
}
| |
/*
* Javassist, a Java-bytecode translator toolkit.
* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. Alternatively, the contents of this file may be used under
* the terms of the GNU Lesser General Public License Version 2.1 or later,
* or the Apache License Version 2.0.
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*/
package scouter.javassist.bytecode.analysis;
import scouter.javassist.ClassPool;
import scouter.javassist.CtClass;
import scouter.javassist.NotFoundException;
import java.util.*;
/**
* Represents a JVM type in data-flow analysis. This abstraction is necessary since
* a JVM type not only includes all normal Java types, but also a few special types
* that are used by the JVM internally. See the static field types on this class for
* more info on these special types.
*
* All primitive and special types reuse the same instance, so identity comparison can
* be used when examining them. Normal java types must use {@link #equals(Object)} to
* compare type instances.
*
* In most cases, applications which consume this API, only need to call {@link #getCtClass()}
* to obtain the needed type information.
*
* @author Jason T. Greene
*/
public class Type {
    /** The represented class; {@code null} for UNINIT and the special types. */
    private final CtClass clazz;
    /** True for internal bookkeeping types (RETURN_ADDRESS, TOP, BOGUS). */
    private final boolean special;

    // Maps each primitive CtClass to its unique Type instance. Identity keys are
    // used because the primitive CtClass objects are singletons.
    private static final Map prims = new IdentityHashMap();

    /** Represents the double primitive type */
    public static final Type DOUBLE = new Type(CtClass.doubleType);
    /** Represents the boolean primitive type */
    public static final Type BOOLEAN = new Type(CtClass.booleanType);
    /** Represents the long primitive type */
    public static final Type LONG = new Type(CtClass.longType);
    /** Represents the char primitive type */
    public static final Type CHAR = new Type(CtClass.charType);
    /** Represents the byte primitive type */
    public static final Type BYTE = new Type(CtClass.byteType);
    /** Represents the short primitive type */
    public static final Type SHORT = new Type(CtClass.shortType);
    /** Represents the integer primitive type */
    public static final Type INTEGER = new Type(CtClass.intType);
    /** Represents the float primitive type */
    public static final Type FLOAT = new Type(CtClass.floatType);
    /** Represents the void primitive type */
    public static final Type VOID = new Type(CtClass.voidType);

    /**
     * Represents an unknown, or null type. This occurs when aconst_null is used.
     * It is important not to treat this type as java.lang.Object, since a null can
     * be assigned to any reference type. The analyzer will replace these with
     * an actual known type if it can be determined by a merged path with known type
     * information. If this type is encountered on a frame then it is guaranteed to
     * be null, and the type information is simply not available. Any attempts to
     * infer the type, without further information from the compiler would be a guess.
     */
    public static final Type UNINIT = new Type(null);

    /**
     * Represents an internal JVM return address, which is used by the RET
     * instruction to return to a JSR that invoked the subroutine.
     */
    public static final Type RETURN_ADDRESS = new Type(null, true);

    /** A placeholder used by the analyzer for the second word position of a double-word type */
    public static final Type TOP = new Type(null, true);

    /**
     * Represents a non-accessible value. Code cannot access the value this type
     * represents. It occurs when bytecode reuses a local variable table
     * position with non-mergable types. An example would be compiled code which
     * uses the same position for a primitive type in one branch, and a reference type
     * in another branch.
     */
    public static final Type BOGUS = new Type(null, true);

    /** Represents the java.lang.Object reference type */
    public static final Type OBJECT = lookupType("java.lang.Object");
    /** Represents the java.io.Serializable reference type */
    public static final Type SERIALIZABLE = lookupType("java.io.Serializable");
    /** Represents the java.lang.Cloneable reference type */
    public static final Type CLONEABLE = lookupType("java.lang.Cloneable");
    /** Represents the java.lang.Throwable reference type */
    public static final Type THROWABLE = lookupType("java.lang.Throwable");

    static {
        // Register the canonical instance of every primitive so get() can reuse them.
        prims.put(CtClass.doubleType, DOUBLE);
        prims.put(CtClass.longType, LONG);
        prims.put(CtClass.charType, CHAR);
        prims.put(CtClass.shortType, SHORT);
        prims.put(CtClass.intType, INTEGER);
        prims.put(CtClass.floatType, FLOAT);
        prims.put(CtClass.byteType, BYTE);
        prims.put(CtClass.booleanType, BOOLEAN);
        prims.put(CtClass.voidType, VOID);
    }

    /**
     * Obtain the Type for a given class. If the class is a primitive,
     * the unique type instance for the primitive will be returned.
     * Otherwise a new Type instance representing the class is returned.
     *
     * @param clazz The java class
     * @return a type instance for this class
     */
    public static Type get(CtClass clazz) {
        Type type = (Type)prims.get(clazz);
        return type != null ? type : new Type(clazz);
    }

    // Resolves a well-known class name via the default ClassPool; used only for the
    // static reference-type constants above.
    private static Type lookupType(String name) {
        try {
            return new Type(ClassPool.getDefault().get(name));
        } catch (NotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    Type(CtClass clazz) {
        this(clazz, false);
    }

    private Type(CtClass clazz, boolean special) {
        this.clazz = clazz;
        this.special = special;
    }

    // Used to indicate a merge internally triggered a change
    boolean popChanged() {
        return false;
    }

    /**
     * Gets the word size of this type. Double-word types, such as long and double
     * will occupy two positions on the local variable table or stack.
     *
     * @return the number of words needed to hold this type
     */
    public int getSize() {
        return clazz == CtClass.doubleType || clazz == CtClass.longType || this == TOP ? 2 : 1;
    }

    /**
     * Returns the class this type represents. If the type is special, null will be returned.
     *
     * @return the class for this type, or null if special
     */
    public CtClass getCtClass() {
        return clazz;
    }

    /**
     * Returns whether or not this type is a normal java reference, i.e. it is or extends java.lang.Object.
     *
     * @return true if a java reference, false if a primitive or special
     */
    public boolean isReference() {
        // UNINIT (clazz == null, special == false) counts as a reference here.
        return !special && (clazz == null || !clazz.isPrimitive());
    }

    /**
     * Returns whether or not the type is special. A special type is one that is either used
     * for internal tracking, or is only used internally by the JVM.
     *
     * @return true if special, false if not
     */
    public boolean isSpecial() {
        return special;
    }

    /**
     * Returns whether or not this type is an array.
     *
     * @return true if an array, false if not
     */
    public boolean isArray() {
        return clazz != null && clazz.isArray();
    }

    /**
     * Returns the number of dimensions of this array. If the type is not an
     * array zero is returned.
     *
     * @return zero if not an array, otherwise the number of array dimensions.
     */
    public int getDimensions() {
        if (!isArray()) return 0;
        // Count the trailing "[]" pairs in the class name, e.g. "int[][]" -> 2.
        String name = clazz.getName();
        int pos = name.length() - 1;
        int count = 0;
        while (name.charAt(pos) == ']' ) {
            pos -= 2;
            count++;
        }
        return count;
    }

    /**
     * Returns the array component if this type is an array. If the type
     * is not an array null is returned.
     *
     * @return the array component if an array, otherwise null
     */
    public Type getComponent() {
        if (this.clazz == null || !this.clazz.isArray())
            return null;
        CtClass component;
        try {
            component = this.clazz.getComponentType();
        } catch (NotFoundException e) {
            throw new RuntimeException(e);
        }
        // Reuse the canonical instance when the component is a primitive.
        Type type = (Type)prims.get(component);
        return (type != null) ? type : new Type(component);
    }

    /**
     * Determines whether this type is assignable, to the passed type.
     * A type is assignable to another if it is either the same type, or
     * a sub-type.
     *
     * @param type the type to test assignability to
     * @return true if this is assignable to type, otherwise false
     */
    public boolean isAssignableFrom(Type type) {
        if (this == type)
            return true;
        // A null (UNINIT) value is assignable to any reference type and vice versa.
        if ((type == UNINIT && isReference()) || this == UNINIT && type.isReference())
            return true;
        if (type instanceof MultiType)
            return ((MultiType)type).isAssignableTo(this);
        if (type instanceof MultiArrayType)
            return ((MultiArrayType)type).isAssignableTo(this);
        // Primitives and Special types must be identical
        if (clazz == null || clazz.isPrimitive())
            return false;
        try {
            // NOTE(review): assumes type.clazz is non-null at this point (i.e. type is
            // not a special type) — confirm callers never pass TOP/BOGUS here.
            return type.clazz.subtypeOf(clazz);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Finds the common base type, or interface which both this and the specified
     * type can be assigned. If there is more than one possible answer, then a {@link MultiType},
     * or a {@link MultiArrayType} is returned. Multi-types have special rules,
     * and successive merges and assignment tests on them will alter their internal state,
     * as well as other multi-types they have been merged with. This method is used by
     * the data-flow analyzer to merge the type state from multiple branches.
     *
     * @param type the type to merge with
     * @return the merged type
     */
    public Type merge(Type type) {
        if (type == this)
            return this;
        if (type == null)
            return this;
        // UNINIT (null) merges to whichever side carries real type information.
        if (type == Type.UNINIT)
            return this;
        if (this == Type.UNINIT)
            return type;
        // Unequal primitives and special types can not be merged
        if (! type.isReference() || ! this.isReference())
            return BOGUS;
        // Centralize merging of multi-interface types
        if (type instanceof MultiType)
            return type.merge(this);
        if (type.isArray() && this.isArray())
            return mergeArray(type);
        try {
            return mergeClasses(type);
        } catch (NotFoundException e) {
            throw new RuntimeException(e);
        }
    }

    // Strips all array dimensions, e.g. String[][] -> String.
    Type getRootComponent(Type type) {
        while (type.isArray())
            type = type.getComponent();
        return type;
    }

    // Builds an array type of the given dimensions around a root component type.
    private Type createArray(Type rootComponent, int dims) {
        if (rootComponent instanceof MultiType)
            return new MultiArrayType((MultiType) rootComponent, dims);
        String name = arrayName(rootComponent.clazz.getName(), dims);
        Type type;
        try {
            type = Type.get(getClassPool(rootComponent).get(name));
        } catch (NotFoundException e) {
            throw new RuntimeException(e);
        }
        return type;
    }

    // Appends dims pairs of "[]" to a component class name.
    String arrayName(String component, int dims) {
        // Using char[] since we have no StringBuilder in JDK4, and StringBuffer is slow.
        // Although, this is more efficient even if we did have one.
        int i = component.length();
        int size = i + dims * 2;
        char[] string = new char[size];
        component.getChars(0, i, string, 0);
        while (i < size) {
            string[i++] = '[';
            string[i++] = ']';
        }
        component = new String(string);
        return component;
    }

    // Prefers the pool the component class came from; falls back to the default pool.
    private ClassPool getClassPool(Type rootComponent) {
        ClassPool pool = rootComponent.clazz.getClassPool();
        return pool != null ? pool : ClassPool.getDefault();
    }

    private Type mergeArray(Type type) {
        Type typeRoot = getRootComponent(type);
        Type thisRoot = getRootComponent(this);
        int typeDims = type.getDimensions();
        int thisDims = this.getDimensions();
        // Array components can be merged when the dimensions are equal
        if (typeDims == thisDims) {
            Type mergedComponent = thisRoot.merge(typeRoot);
            // If the components can not be merged (a primitive component mixed with a different type)
            // then Object is the common type.
            if (mergedComponent == Type.BOGUS)
                return Type.OBJECT;
            return createArray(mergedComponent, thisDims);
        }
        // Dimensions differ: take the shallower side as the merge target.
        Type targetRoot;
        int targetDims;
        if (typeDims < thisDims) {
            targetRoot = typeRoot;
            targetDims = typeDims;
        } else {
            targetRoot = thisRoot;
            targetDims = thisDims;
        }
        // Special case, arrays are cloneable and serializable, so prefer them when dimensions differ
        if (eq(CLONEABLE.clazz, targetRoot.clazz) || eq(SERIALIZABLE.clazz, targetRoot.clazz))
            return createArray(targetRoot, targetDims);
        return createArray(OBJECT, targetDims);
    }

    // Walks both superclass chains to find the nearest class both hierarchies share.
    private static CtClass findCommonSuperClass(CtClass one, CtClass two) throws NotFoundException {
        CtClass deep = one;
        CtClass shallow = two;
        CtClass backupShallow = shallow;
        CtClass backupDeep = deep;
        // Phase 1 - Find the deepest hierarchy, set deep and shallow correctly
        for (;;) {
            // In case we get lucky, and find a match early
            if (eq(deep, shallow) && deep.getSuperclass() != null)
                return deep;
            CtClass deepSuper = deep.getSuperclass();
            CtClass shallowSuper = shallow.getSuperclass();
            if (shallowSuper == null) {
                // right, now reset shallow
                shallow = backupShallow;
                break;
            }
            if (deepSuper == null) {
                // wrong, swap them, since deep is now useless, its our tmp before we swap it
                deep = backupDeep;
                backupDeep = backupShallow;
                backupShallow = deep;
                deep = shallow;
                shallow = backupShallow;
                break;
            }
            deep = deepSuper;
            shallow = shallowSuper;
        }
        // Phase 2 - Move deepBackup up by (deep end - deep)
        for (;;) {
            deep = deep.getSuperclass();
            if (deep == null)
                break;
            backupDeep = backupDeep.getSuperclass();
        }
        deep = backupDeep;
        // Phase 3 - The hierarchy positions are now aligned
        // The common super class is easy to find now
        while (!eq(deep, shallow)) {
            deep = deep.getSuperclass();
            shallow = shallow.getSuperclass();
        }
        return deep;
    }

    private Type mergeClasses(Type type) throws NotFoundException {
        CtClass superClass = findCommonSuperClass(this.clazz, type.clazz);
        // If its Object, then try and find a common interface(s)
        if (superClass.getSuperclass() == null) {
            Map interfaces = findCommonInterfaces(type);
            if (interfaces.size() == 1)
                return new Type((CtClass) interfaces.values().iterator().next());
            if (interfaces.size() > 1)
                return new MultiType(interfaces);
            // Only Object is in common
            return new Type(superClass);
        }
        // Check for a common interface that is not on the found supertype
        Map commonDeclared = findExclusiveDeclaredInterfaces(type, superClass);
        if (commonDeclared.size() > 0) {
            return new MultiType(commonDeclared, new Type(superClass));
        }
        return new Type(superClass);
    }

    // Interfaces (name -> CtClass) implemented by both this type and the other.
    private Map findCommonInterfaces(Type type) {
        Map typeMap = getAllInterfaces(type.clazz, null);
        Map thisMap = getAllInterfaces(this.clazz, null);
        return findCommonInterfaces(typeMap, thisMap);
    }

    // Common declared interfaces, minus everything already present on 'exclude'.
    private Map findExclusiveDeclaredInterfaces(Type type, CtClass exclude) {
        Map typeMap = getDeclaredInterfaces(type.clazz, null);
        Map thisMap = getDeclaredInterfaces(this.clazz, null);
        Map excludeMap = getAllInterfaces(exclude, null);
        Iterator i = excludeMap.keySet().iterator();
        while (i.hasNext()) {
            Object intf = i.next();
            typeMap.remove(intf);
            thisMap.remove(intf);
        }
        return findCommonInterfaces(typeMap, thisMap);
    }

    // Intersects the two maps in place (alterMap is mutated and returned).
    Map findCommonInterfaces(Map typeMap, Map alterMap) {
        Iterator i = alterMap.keySet().iterator();
        while (i.hasNext()) {
            if (! typeMap.containsKey(i.next()))
                i.remove();
        }
        // Reduce to subinterfaces
        // This does not need to be recursive since we make a copy,
        // and that copy contains all super types for the whole hierarchy
        i = new ArrayList(alterMap.values()).iterator();
        while (i.hasNext()) {
            CtClass intf = (CtClass) i.next();
            CtClass[] interfaces;
            try {
                interfaces = intf.getInterfaces();
            } catch (NotFoundException e) {
                throw new RuntimeException(e);
            }
            for (int c = 0; c < interfaces.length; c++)
                alterMap.remove(interfaces[c].getName());
        }
        return alterMap;
    }

    // Collects every interface (name -> CtClass) reachable from clazz, including
    // those inherited through superclasses and super-interfaces.
    Map getAllInterfaces(CtClass clazz, Map map) {
        if (map == null)
            map = new HashMap();
        if (clazz.isInterface())
            map.put(clazz.getName(), clazz);
        do {
            try {
                CtClass[] interfaces = clazz.getInterfaces();
                for (int i = 0; i < interfaces.length; i++) {
                    CtClass intf = interfaces[i];
                    map.put(intf.getName(), intf);
                    getAllInterfaces(intf, map);
                }
                clazz = clazz.getSuperclass();
            } catch (NotFoundException e) {
                throw new RuntimeException(e);
            }
        } while (clazz != null);
        return map;
    }

    // Collects the directly declared interfaces of clazz plus their super-interfaces
    // (does not walk the superclass chain, unlike getAllInterfaces).
    Map getDeclaredInterfaces(CtClass clazz, Map map) {
        if (map == null)
            map = new HashMap();
        if (clazz.isInterface())
            map.put(clazz.getName(), clazz);
        CtClass[] interfaces;
        try {
            interfaces = clazz.getInterfaces();
        } catch (NotFoundException e) {
            throw new RuntimeException(e);
        }
        for (int i = 0; i < interfaces.length; i++) {
            CtClass intf = interfaces[i];
            map.put(intf.getName(), intf);
            getDeclaredInterfaces(intf, map);
        }
        return map;
    }

    /**
     * Two Types are equal when they are of the same concrete class and represent
     * classes with the same name. Special and primitive types rely on identity,
     * as documented in the class javadoc.
     */
    public boolean equals(Object o) {
        if (! (o instanceof Type))
            return false;
        return o.getClass() == getClass() && eq(clazz, ((Type)o).clazz);
    }

    // Name-based CtClass equality; tolerates nulls (both null -> equal).
    static boolean eq(CtClass one, CtClass two) {
        return one == two || (one != null && two != null && one.getName().equals(two.getName()));
    }

    public String toString() {
        if (this == BOGUS)
            return "BOGUS";
        if (this == UNINIT)
            return "UNINIT";
        if (this == RETURN_ADDRESS)
            return "RETURN ADDRESS";
        if (this == TOP)
            return "TOP";
        return clazz == null ? "null" : clazz.getName();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.InvalidOffsetException;
import org.apache.kafka.clients.consumer.MockConsumer;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.errors.TaskMigratedException;
import org.apache.kafka.streams.processor.StateRestoreListener;
import org.apache.kafka.test.MockRestoreCallback;
import org.apache.kafka.test.MockStateRestoreListener;
import org.easymock.EasyMock;
import org.easymock.EasyMockRunner;
import org.easymock.Mock;
import org.easymock.MockType;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_BATCH;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_END;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_START;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(EasyMockRunner.class)
public class StoreChangelogReaderTest {
@Mock(type = MockType.NICE)
private RestoringTasks active;
@Mock(type = MockType.NICE)
private StreamTask task;
private final MockStateRestoreListener callback = new MockStateRestoreListener();
private final CompositeRestoreListener restoreListener = new CompositeRestoreListener(callback);
private final MockConsumer<byte[], byte[]> consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
private final StateRestoreListener stateRestoreListener = new MockStateRestoreListener();
private final TopicPartition topicPartition = new TopicPartition("topic", 0);
private final LogContext logContext = new LogContext("test-reader ");
private final StoreChangelogReader changelogReader = new StoreChangelogReader(consumer, stateRestoreListener, logContext);
@Before
public void setUp() {
    // Route user-visible restore notifications through the composite listener so the
    // MockStateRestoreListener observes start/batch/end callbacks in every test.
    restoreListener.setUserRestoreListener(stateRestoreListener);
}
@Test
public void shouldRequestTopicsAndHandleTimeoutException() {
    // A TimeoutException from listTopics() must not escape restore(); the call below
    // would fail the test if it propagated. The flag proves listTopics() was reached.
    final AtomicBoolean functionCalled = new AtomicBoolean(false);
    final MockConsumer<byte[], byte[]> consumer = new MockConsumer<byte[], byte[]>(OffsetResetStrategy.EARLIEST) {
        @Override
        public Map<String, List<PartitionInfo>> listTopics() {
            functionCalled.set(true);
            throw new TimeoutException("KABOOM!");
        }
    };
    final StoreChangelogReader changelogReader = new StoreChangelogReader(consumer, stateRestoreListener, logContext);
    changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
    changelogReader.restore(active);
    assertTrue(functionCalled.get());
}
/**
 * The changelog reader needs exclusive control of the restore consumer; attempting to
 * restore while the consumer already has an active subscription must fail with a
 * {@link StreamsException}.
 */
@Test
public void shouldThrowExceptionIfConsumerHasCurrentSubscription() {
    final StateRestorer mockRestorer = EasyMock.mock(StateRestorer.class);
    mockRestorer.setUserRestoreListener(stateRestoreListener);
    expect(mockRestorer.partition()).andReturn(new TopicPartition("sometopic", 0)).andReturn(new TopicPartition("sometopic", 0));
    EasyMock.replay(mockRestorer);
    changelogReader.register(mockRestorer);
    consumer.subscribe(Collections.singleton("sometopic"));
    try {
        changelogReader.restore(active);
        // Fixed: the old message named IllegalStateException although StreamsException
        // is the type this test catches and expects.
        fail("Should have thrown StreamsException");
    } catch (final StreamsException expected) {
        // ok: the reader refuses to restore over an existing subscription
    }
}
/**
 * With a {@code null} checkpoint, restoration replays the changelog from the very
 * beginning, so every produced record must reach the restore callback.
 */
@Test
public void shouldRestoreAllMessagesFromBeginningWhenCheckpointNull() {
    final int messageCount = 10;
    setupConsumer(messageCount, topicPartition);
    final StateRestorer restorer =
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName");
    changelogReader.register(restorer);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(messageCount));
}
@Test
public void shouldRecoverFromInvalidOffsetExceptionAndFinishRestore() {
    final int messages = 10;
    setupConsumer(messages, topicPartition);
    // The first poll raises InvalidOffsetException for our partition; the reader is
    // expected to reset and retry rather than fail.
    consumer.setException(new InvalidOffsetException("Try Again!") {
        @Override
        public Set<TopicPartition> partitions() {
            return Collections.singleton(topicPartition);
        }
    });
    changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true,
        "storeName"));
    // The reader looks up the owning task when handling the invalid offset.
    EasyMock.expect(active.restoringTaskFor(topicPartition)).andReturn(task);
    EasyMock.replay(active);
    // first restore call "fails" but we should not die with an exception
    assertEquals(0, changelogReader.restore(active).size());
    // retry restore should succeed
    assertEquals(1, changelogReader.restore(active).size());
    assertThat(callback.restored.size(), equalTo(messages));
}
/**
 * With a checkpoint of 5 in a 10-record changelog, only the remaining 5 records are
 * replayed into the restore callback.
 */
@Test
public void shouldRestoreMessagesFromCheckpoint() {
    final int totalMessages = 10;
    setupConsumer(totalMessages, topicPartition);
    final StateRestorer restorer =
        new StateRestorer(topicPartition, restoreListener, 5L, Long.MAX_VALUE, true, "storeName");
    changelogReader.register(restorer);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(5));
}
@Test
public void shouldClearAssignmentAtEndOfRestore() {
    // Once restoration completes, the restore consumer must not keep any
    // partitions assigned.
    setupConsumer(1, topicPartition);
    final StateRestorer restorer =
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName");
    changelogReader.register(restorer);
    changelogReader.restore(active);
    assertThat(consumer.assignment(), equalTo(Collections.<TopicPartition>emptySet()));
}
@Test
public void shouldRestoreToLimitWhenSupplied() {
    // An offset limit of 3 caps restoration at offsets 0..2 even though
    // 10 records are available in the changelog.
    setupConsumer(10, topicPartition);
    final StateRestorer limitedRestorer =
        new StateRestorer(topicPartition, restoreListener, null, 3, true, "storeName");
    changelogReader.register(limitedRestorer);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(3));
    assertThat(limitedRestorer.restoredOffset(), equalTo(3L));
}
@Test
public void shouldRestoreMultipleStores() {
// Three partitions with different record counts, each with its own restore
// callback; all must be fully restored in a single restore pass.
final TopicPartition one = new TopicPartition("one", 0);
final TopicPartition two = new TopicPartition("two", 0);
final MockRestoreCallback callbackOne = new MockRestoreCallback();
final MockRestoreCallback callbackTwo = new MockRestoreCallback();
final CompositeRestoreListener restoreListener1 = new CompositeRestoreListener(callbackOne);
final CompositeRestoreListener restoreListener2 = new CompositeRestoreListener(callbackTwo);
setupConsumer(10, topicPartition);
setupConsumer(5, one);
setupConsumer(3, two);
changelogReader
.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName1"));
changelogReader.register(new StateRestorer(one, restoreListener1, null, Long.MAX_VALUE, true, "storeName2"));
changelogReader.register(new StateRestorer(two, restoreListener2, null, Long.MAX_VALUE, true, "storeName3"));
// expectations must be recorded before replay(); restore() looks up the
// owning task for the two extra partitions
expect(active.restoringTaskFor(one)).andReturn(null);
expect(active.restoringTaskFor(two)).andReturn(null);
replay(active);
changelogReader.restore(active);
// each callback saw exactly the records of its own partition
assertThat(callback.restored.size(), equalTo(10));
assertThat(callbackOne.restored.size(), equalTo(5));
assertThat(callbackTwo.restored.size(), equalTo(3));
}
@Test
public void shouldRestoreAndNotifyMultipleStores() {
    // Restores three stores in one pass and verifies that each store's state
    // restore listener observed the start/batch/end callbacks with the correct
    // offsets. (The unnecessary 'throws Exception' clause was removed: nothing
    // in the body declares a checked exception.)
    final TopicPartition one = new TopicPartition("one", 0);
    final TopicPartition two = new TopicPartition("two", 0);
    final MockStateRestoreListener callbackOne = new MockStateRestoreListener();
    final MockStateRestoreListener callbackTwo = new MockStateRestoreListener();
    final CompositeRestoreListener restoreListener1 = new CompositeRestoreListener(callbackOne);
    final CompositeRestoreListener restoreListener2 = new CompositeRestoreListener(callbackTwo);
    setupConsumer(10, topicPartition);
    setupConsumer(5, one);
    setupConsumer(3, two);
    changelogReader
        .register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName1"));
    changelogReader.register(new StateRestorer(one, restoreListener1, null, Long.MAX_VALUE, true, "storeName2"));
    changelogReader.register(new StateRestorer(two, restoreListener2, null, Long.MAX_VALUE, true, "storeName3"));
    // restore() looks up the owning task for the two extra partitions
    expect(active.restoringTaskFor(one)).andReturn(null);
    expect(active.restoringTaskFor(two)).andReturn(null);
    replay(active);
    changelogReader.restore(active);
    // each callback saw exactly the records of its own partition ...
    assertThat(callback.restored.size(), equalTo(10));
    assertThat(callbackOne.restored.size(), equalTo(5));
    assertThat(callbackTwo.restored.size(), equalTo(3));
    // ... and each listener was driven through start/batch/end with the
    // start offset, last restored offset, and total count of its partition
    assertAllCallbackStatesExecuted(callback, "storeName1");
    assertCorrectOffsetsReportedByListener(callback, 0L, 9L, 10L);
    assertAllCallbackStatesExecuted(callbackOne, "storeName2");
    assertCorrectOffsetsReportedByListener(callbackOne, 0L, 4L, 5L);
    assertAllCallbackStatesExecuted(callbackTwo, "storeName3");
    assertCorrectOffsetsReportedByListener(callbackTwo, 0L, 2L, 3L);
}
@Test
public void shouldOnlyReportTheLastRestoredOffset() {
    // With an offset limit of 5 the listener's final reported batch offset must
    // be the last restored offset (4), not the changelog's end offset.
    setupConsumer(10, topicPartition);
    final StateRestorer restorer =
        new StateRestorer(topicPartition, restoreListener, null, 5, true, "storeName1");
    changelogReader.register(restorer);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(5));
    assertAllCallbackStatesExecuted(callback, "storeName1");
    assertCorrectOffsetsReportedByListener(callback, 0L, 4L, 5L);
}
/**
 * Asserts that the given listener was invoked for all three restore phases
 * (start, batch, end) and that each invocation reported the expected store name.
 */
private void assertAllCallbackStatesExecuted(final MockStateRestoreListener restoreListener,
final String storeName) {
assertThat(restoreListener.storeNameCalledStates.get(RESTORE_START), equalTo(storeName));
assertThat(restoreListener.storeNameCalledStates.get(RESTORE_BATCH), equalTo(storeName));
assertThat(restoreListener.storeNameCalledStates.get(RESTORE_END), equalTo(storeName));
}
/**
 * Asserts the offsets the listener recorded: the offset restoration started at,
 * the offset of the last restored batch, and the total number of restored records.
 */
private void assertCorrectOffsetsReportedByListener(final MockStateRestoreListener restoreListener,
final long startOffset,
final long batchOffset,
final long totalRestored) {
assertThat(restoreListener.restoreStartOffset, equalTo(startOffset));
assertThat(restoreListener.restoredBatchOffset, equalTo(batchOffset));
assertThat(restoreListener.totalNumRestored, equalTo(totalRestored));
}
@Test
public void shouldNotRestoreAnythingWhenPartitionIsEmpty() {
    // An empty changelog partition yields no records and a restored offset of 0.
    setupConsumer(0, topicPartition);
    final StateRestorer restorer =
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName");
    changelogReader.register(restorer);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(0));
    assertThat(restorer.restoredOffset(), equalTo(0L));
}
@Test
public void shouldNotRestoreAnythingWhenCheckpointAtEndOffset() {
    // If the checkpoint already equals the end offset there is nothing to replay.
    final Long checkpoint = 10L;
    setupConsumer(checkpoint, topicPartition);
    final StateRestorer restorer =
        new StateRestorer(topicPartition, restoreListener, checkpoint, Long.MAX_VALUE, true, "storeName");
    changelogReader.register(restorer);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(0));
    assertThat(restorer.restoredOffset(), equalTo(checkpoint));
}
@Test
public void shouldReturnRestoredOffsetsForPersistentStores() {
    // Persistent stores expose their restored offsets so they can be checkpointed.
    setupConsumer(10, topicPartition);
    changelogReader.register(
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
    changelogReader.restore(active);
    final Map<TopicPartition, Long> offsets = changelogReader.restoredOffsets();
    assertThat(offsets, equalTo(Collections.singletonMap(topicPartition, 10L)));
}
@Test
public void shouldNotReturnRestoredOffsetsForNonPersistentStore() {
    // Non-persistent stores (persistent flag = false) have no checkpointable
    // offsets, so restoredOffsets() must stay empty.
    setupConsumer(10, topicPartition);
    changelogReader.register(
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, false, "storeName"));
    changelogReader.restore(active);
    final Map<TopicPartition, Long> offsets = changelogReader.restoredOffsets();
    assertThat(offsets, equalTo(Collections.<TopicPartition, Long>emptyMap()));
}
@Test
public void shouldIgnoreNullKeysWhenRestoring() {
assignPartition(3, topicPartition);
final byte[] bytes = new byte[0];
// three records; the middle one has a null key and must be skipped
consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), 0, bytes, bytes));
consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), 1, (byte[]) null, bytes));
consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), 2, bytes, bytes));
consumer.assign(Collections.singletonList(topicPartition));
changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, false,
"storeName"));
changelogReader.restore(active);
// only the two non-null-key records reach the callback
assertThat(callback.restored, CoreMatchers.equalTo(Utils.mkList(KeyValue.pair(bytes, bytes), KeyValue.pair(bytes, bytes))));
}
@Test
public void shouldCompleteImmediatelyWhenEndOffsetIs0() {
    // An end offset of 0 means the partition is already fully restored, so
    // restore() reports it complete on the first call.
    final Collection<TopicPartition> fullyRestored = Collections.singleton(topicPartition);
    setupConsumer(0, topicPartition);
    changelogReader.register(
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "store"));
    assertThat(changelogReader.restore(active), equalTo(fullyRestored));
}
@Test
public void shouldRestorePartitionsRegisteredPostInitialization() {
final MockRestoreCallback callbackTwo = new MockRestoreCallback();
final CompositeRestoreListener restoreListener2 = new CompositeRestoreListener(callbackTwo);
// first partition starts with 1 record but an end offset of 10, so the
// initial restore pass cannot complete it
setupConsumer(1, topicPartition);
consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 10L));
changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, false, "storeName"));
final TopicPartition postInitialization = new TopicPartition("other", 0);
// task lookups happen across both restore passes; order of these
// expectations matters for EasyMock
expect(active.restoringTaskFor(topicPartition)).andReturn(null);
expect(active.restoringTaskFor(topicPartition)).andReturn(null);
expect(active.restoringTaskFor(postInitialization)).andReturn(null);
replay(active);
// nothing is complete after the first pass
assertTrue(changelogReader.restore(active).isEmpty());
// make the remaining records available and register a second partition
// after the reader has already been initialized
addRecords(9, topicPartition, 1);
setupConsumer(3, postInitialization);
consumer.updateBeginningOffsets(Collections.singletonMap(postInitialization, 0L));
consumer.updateEndOffsets(Collections.singletonMap(postInitialization, 3L));
changelogReader.register(new StateRestorer(postInitialization, restoreListener2, null, Long.MAX_VALUE, false, "otherStore"));
final Collection<TopicPartition> expected = Utils.mkSet(topicPartition, postInitialization);
consumer.assign(expected);
// second pass completes both the pre- and post-initialization partitions
assertThat(changelogReader.restore(active), equalTo(expected));
assertThat(callback.restored.size(), equalTo(10));
assertThat(callbackTwo.restored.size(), equalTo(3));
}
@Test
public void shouldThrowTaskMigratedExceptionIfEndOffsetGetsExceededDuringRestoreForChangelogTopic() {
final int messages = 10;
setupConsumer(messages, topicPartition);
// shrink the end offset below the number of available records: consuming
// past it indicates another instance took over the task
consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 5L));
changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
expect(active.restoringTaskFor(topicPartition)).andReturn(task);
replay(active);
try {
changelogReader.restore(active);
fail("Should have thrown TaskMigratedException");
} catch (final TaskMigratedException expected) { /* ignore */ }
}
@Test
public void shouldThrowTaskMigratedExceptionIfEndOffsetGetsExceededDuringRestoreForChangelogTopicEOSEnabled() {
final int totalMessages = 10;
assignPartition(totalMessages, topicPartition);
// records 0..4
addRecords(5, topicPartition, 0);
//EOS enabled commit marker at offset 5 so rest of records 6..10
addRecords(5, topicPartition, 6);
consumer.assign(Collections.<TopicPartition>emptyList());
// end offsets should start after commit marker of 5 from above
consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 6L));
changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
expect(active.restoringTaskFor(topicPartition)).andReturn(task);
replay(active);
// consuming records beyond the advertised end offset (6) means another
// instance must have taken over the task
try {
changelogReader.restore(active);
fail("Should have thrown task migrated exception");
} catch (final TaskMigratedException expected) {
/* ignore */
}
}
@Test
public void shouldNotThrowTaskMigratedExceptionDuringRestoreForChangelogTopicWhenEndOffsetNotExceededEOSEnabled() {
    // 10 records (offsets 0..9) plus an EOS commit marker at 10 give an end
    // offset of 11; restoring up to it must not be mistaken for a migrated task.
    setupConsumer(10, topicPartition);
    consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 11L));
    changelogReader.register(
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
    expect(active.restoringTaskFor(topicPartition)).andReturn(task);
    replay(active);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(10));
}
@Test
public void shouldNotThrowTaskMigratedExceptionDuringRestoreForChangelogTopicWhenEndOffsetNotExceededEOSDisabled() {
    // Without EOS the end offset equals the record count; a full restore up to
    // it must complete without a TaskMigratedException.
    setupConsumer(10, topicPartition);
    changelogReader.register(
        new StateRestorer(topicPartition, restoreListener, null, Long.MAX_VALUE, true, "storeName"));
    expect(active.restoringTaskFor(topicPartition)).andReturn(task);
    replay(active);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(10));
}
@Test
public void shouldNotThrowTaskMigratedExceptionIfEndOffsetGetsExceededDuringRestoreForSourceTopic() {
    // For source-topic changelogs, restoring only up to the supplied limit (5)
    // while more records exist must not be treated as a migrated task.
    setupConsumer(10, topicPartition);
    changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, 5, true, "storeName"));
    expect(active.restoringTaskFor(topicPartition)).andReturn(task);
    replay(active);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(5));
}
@Test
public void shouldNotThrowTaskMigratedExceptionIfEndOffsetNotExceededDuringRestoreForSourceTopic() {
    // The limit equals the record count, so every record is restored and no
    // TaskMigratedException is raised.
    setupConsumer(10, topicPartition);
    changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, 10, true, "storeName"));
    expect(active.restoringTaskFor(topicPartition)).andReturn(task);
    replay(active);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(10));
}
@Test
public void shouldNotThrowTaskMigratedExceptionIfEndOffsetGetsExceededDuringRestoreForSourceTopicEOSEnabled() {
final int totalMessages = 10;
assignPartition(totalMessages, topicPartition);
// records 0..4 last offset before commit is 4
addRecords(5, topicPartition, 0);
//EOS enabled so commit marker at offset 5 so records start at 6
addRecords(5, topicPartition, 6);
consumer.assign(Collections.<TopicPartition>emptyList());
// commit marker is 5 so ending offset is 12
consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 12L));
// limit of 6 stops restoration after the first batch (offsets 0..4, with
// the marker at 5 skipped); exceeding records beyond the limit must not
// raise a TaskMigratedException for a source topic
changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, 6, true, "storeName"));
expect(active.restoringTaskFor(topicPartition)).andReturn(task);
replay(active);
changelogReader.restore(active);
assertThat(callback.restored.size(), equalTo(5));
}
@Test
public void shouldNotThrowTaskMigratedExceptionIfEndOffsetNotExceededDuringRestoreForSourceTopicEOSEnabled() {
    // Offsets 0..9 are records, 10 is the EOS commit marker, so the end offset
    // is 11; a limit of 11 restores all 10 records without error.
    setupConsumer(10, topicPartition);
    consumer.updateEndOffsets(Collections.singletonMap(topicPartition, 11L));
    changelogReader.register(new StateRestorer(topicPartition, restoreListener, null, 11, true, "storeName"));
    expect(active.restoringTaskFor(topicPartition)).andReturn(task);
    replay(active);
    changelogReader.restore(active);
    assertThat(callback.restored.size(), equalTo(10));
}
/**
 * Prepares the mock consumer for one partition: publishes partition metadata
 * and begin/end offsets, appends {@code messages} records starting at offset 0,
 * and then clears the assignment so the reader performs its own assignment.
 */
private void setupConsumer(final long messages,
final TopicPartition topicPartition) {
assignPartition(messages, topicPartition);
addRecords(messages, topicPartition, 0);
consumer.assign(Collections.<TopicPartition>emptyList());
}
/**
 * Appends {@code messages} records with consecutive offsets (starting at
 * {@code startingOffset}) and empty key/value payloads to the mock consumer.
 */
private void addRecords(final long messages,
                        final TopicPartition topicPartition,
                        final int startingOffset) {
    // use a long loop counter so the comparison against the long 'messages'
    // bound cannot overflow (an int counter would wrap for huge counts)
    for (long i = 0; i < messages; i++) {
        consumer.addRecord(new ConsumerRecord<>(topicPartition.topic(), topicPartition.partition(), startingOffset + i, new byte[0], new byte[0]));
    }
}
/**
 * Registers partition metadata with the mock consumer, sets the beginning
 * offset to 0 and the end offset to {@code messages}, and assigns the
 * partition to the consumer.
 */
private void assignPartition(final long messages,
final TopicPartition topicPartition) {
consumer.updatePartitions(topicPartition.topic(),
Collections.singletonList(
new PartitionInfo(topicPartition.topic(),
topicPartition.partition(),
null,
null,
null)));
consumer.updateBeginningOffsets(Collections.singletonMap(topicPartition, 0L));
// Math.max guards against a negative message count producing a negative end offset
consumer.updateEndOffsets(Collections.singletonMap(topicPartition, Math.max(0, messages)));
consumer.assign(Collections.singletonList(topicPartition));
}
}
| |
/*
* Copyright 2004-2014 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.schema;
import java.math.BigInteger;
import org.h2.api.ErrorCode;
import org.h2.engine.DbObject;
import org.h2.engine.Session;
import org.h2.message.DbException;
import org.h2.message.Trace;
import org.h2.table.Table;
/**
* A sequence is created using the statement
* CREATE SEQUENCE
*/
public class Sequence extends SchemaObjectBase {
/**
* The default cache size for sequences.
*/
public static final int DEFAULT_CACHE_SIZE = 32;
// next value to hand out; guarded by 'this' (see getNext())
private long value;
// furthest value (in increment direction) that has been persisted; values up
// to this margin can be handed out without another metadata flush
private long valueWithMargin;
// step between consecutive values; may be negative, never 0
private long increment;
// number of values pre-allocated per flush (always >= 1)
private long cacheSize;
private long minValue;
private long maxValue;
// whether to wrap around to min/max when the range is exhausted
private boolean cycle;
// true for sequences implicitly created for auto-increment columns
private boolean belongsToTable;
/**
* Creates a new sequence for an auto-increment column.
*
* @param schema the schema
* @param id the object id
* @param name the sequence name
* @param startValue the first value to return
* @param increment the increment count
*/
public Sequence(Schema schema, int id, String name, long startValue,
long increment) {
this(schema, id, name, startValue, increment, null, null, null, false,
true);
}
/**
* Creates a new sequence.
*
* @param schema the schema
* @param id the object id
* @param name the sequence name
* @param startValue the first value to return
* @param increment the increment count
* @param cacheSize the number of entries to pre-fetch
* @param minValue the minimum value
* @param maxValue the maximum value
* @param cycle whether to jump back to the min value if needed
* @param belongsToTable whether this sequence belongs to a table (for
* auto-increment columns)
*/
public Sequence(Schema schema, int id, String name, Long startValue,
Long increment, Long cacheSize, Long minValue, Long maxValue,
boolean cycle, boolean belongsToTable) {
initSchemaObjectBase(schema, id, name, Trace.SEQUENCE);
// each null attribute falls back to a default derived from the others;
// order matters: increment first, then min/max, then the start value
this.increment = increment != null ?
increment : 1;
this.minValue = minValue != null ?
minValue : getDefaultMinValue(startValue, this.increment);
this.maxValue = maxValue != null ?
maxValue : getDefaultMaxValue(startValue, this.increment);
this.value = startValue != null ?
startValue : getDefaultStartValue(this.increment);
this.valueWithMargin = value;
this.cacheSize = cacheSize != null ?
Math.max(1, cacheSize) : DEFAULT_CACHE_SIZE;
this.cycle = cycle;
this.belongsToTable = belongsToTable;
// validate the resolved attributes as a whole (see isValid)
if (!isValid(this.value, this.minValue, this.maxValue, this.increment)) {
throw DbException.get(ErrorCode.SEQUENCE_ATTRIBUTES_INVALID, name,
String.valueOf(this.value), String.valueOf(this.minValue),
String.valueOf(this.maxValue),
String.valueOf(this.increment));
}
}
/**
* Allows the start value, increment, min value and max value to be updated
* atomically, including atomic validation. Useful because setting these
* attributes one after the other could otherwise result in an invalid
* sequence state (e.g. min value > max value, start value < min value,
* etc).
*
* @param startValue the new start value (<code>null</code> if no change)
* @param minValue the new min value (<code>null</code> if no change)
* @param maxValue the new max value (<code>null</code> if no change)
* @param increment the new increment (<code>null</code> if no change)
*/
public synchronized void modify(Long startValue, Long minValue,
Long maxValue, Long increment) {
// substitute current values for nulls so validation sees the final state
if (startValue == null) {
startValue = this.value;
}
if (minValue == null) {
minValue = this.minValue;
}
if (maxValue == null) {
maxValue = this.maxValue;
}
if (increment == null) {
increment = this.increment;
}
if (!isValid(startValue, minValue, maxValue, increment)) {
throw DbException.get(ErrorCode.SEQUENCE_ATTRIBUTES_INVALID,
getName(), String.valueOf(startValue),
String.valueOf(minValue),
String.valueOf(maxValue),
String.valueOf(increment));
}
this.value = startValue;
// resetting the margin forces a flush on the next getNext()
this.valueWithMargin = startValue;
this.minValue = minValue;
this.maxValue = maxValue;
this.increment = increment;
}
/**
* Validates the specified prospective start value, min value, max value and
* increment relative to each other, since each of their respective
* validities are contingent on the values of the other parameters.
*
* @param value the prospective start value
* @param minValue the prospective min value
* @param maxValue the prospective max value
* @param increment the prospective increment
* @return whether the combination of attributes is valid
*/
private static boolean isValid(long value, long minValue, long maxValue,
long increment) {
return minValue <= value &&
maxValue >= value &&
maxValue > minValue &&
increment != 0 &&
// Math.abs(increment) < maxValue - minValue
// use BigInteger to avoid overflows when maxValue and minValue
// are really big
BigInteger.valueOf(increment).abs().compareTo(
BigInteger.valueOf(maxValue).subtract(BigInteger.valueOf(minValue))) < 0;
}
// Default min value: 1 for ascending sequences (lowered to the start value
// if that is smaller), Long.MIN_VALUE for descending ones.
private static long getDefaultMinValue(Long startValue, long increment) {
long v = increment >= 0 ? 1 : Long.MIN_VALUE;
if (startValue != null && increment >= 0 && startValue < v) {
v = startValue;
}
return v;
}
// Default max value: Long.MAX_VALUE for ascending sequences, -1 for
// descending ones (raised to the start value if that is larger).
private static long getDefaultMaxValue(Long startValue, long increment) {
long v = increment >= 0 ? Long.MAX_VALUE : -1;
if (startValue != null && increment < 0 && startValue > v) {
v = startValue;
}
return v;
}
// Ascending sequences start at the min value, descending ones at the max.
private long getDefaultStartValue(long increment) {
return increment >= 0 ? minValue : maxValue;
}
public boolean getBelongsToTable() {
return belongsToTable;
}
public long getIncrement() {
return increment;
}
public long getMinValue() {
return minValue;
}
public long getMaxValue() {
return maxValue;
}
public boolean getCycle() {
return cycle;
}
public void setCycle(boolean cycle) {
this.cycle = cycle;
}
@Override
public String getDropSQL() {
// sequences owned by a table are dropped together with the table,
// so they have no stand-alone DROP statement
if (getBelongsToTable()) {
return null;
}
return "DROP SEQUENCE IF EXISTS " + getSQL();
}
@Override
public String getCreateSQLForCopy(Table table, String quotedName) {
// sequences are never copied via this path
throw DbException.throwInternalError();
}
@Override
public synchronized String getCreateSQL() {
// emit only the clauses that differ from the defaults, so the generated
// script stays minimal
StringBuilder buff = new StringBuilder("CREATE SEQUENCE ");
buff.append(getSQL()).append(" START WITH ").append(value);
if (increment != 1) {
buff.append(" INCREMENT BY ").append(increment);
}
if (minValue != getDefaultMinValue(value, increment)) {
buff.append(" MINVALUE ").append(minValue);
}
if (maxValue != getDefaultMaxValue(value, increment)) {
buff.append(" MAXVALUE ").append(maxValue);
}
if (cycle) {
buff.append(" CYCLE");
}
if (cacheSize != DEFAULT_CACHE_SIZE) {
buff.append(" CACHE ").append(cacheSize);
}
if (belongsToTable) {
buff.append(" BELONGS_TO_TABLE");
}
return buff.toString();
}
/**
* Get the next value for this sequence.
*
* @param session the session
* @return the next value
*/
public synchronized long getNext(Session session) {
boolean needsFlush = false;
// the persisted margin is exhausted: pre-allocate another cacheSize
// values and remember to persist the new margin
if ((increment > 0 && value >= valueWithMargin) ||
(increment < 0 && value <= valueWithMargin)) {
valueWithMargin += increment * cacheSize;
needsFlush = true;
}
// range exhausted: wrap around if CYCLE is set, otherwise fail
if ((increment > 0 && value > maxValue) ||
(increment < 0 && value < minValue)) {
if (cycle) {
value = increment > 0 ? minValue : maxValue;
valueWithMargin = value + (increment * cacheSize);
needsFlush = true;
} else {
throw DbException.get(ErrorCode.SEQUENCE_EXHAUSTED, getName());
}
}
if (needsFlush) {
flush(session);
}
long v = value;
value += increment;
return v;
}
/**
* Flush the current value to disk.
*/
public void flushWithoutMargin() {
// NOTE(review): unlike flush()/getNext() this method is not
// synchronized; it looks like callers serialize access — confirm
if (valueWithMargin != value) {
valueWithMargin = value;
flush(null);
}
}
/**
* Flush the current value, including the margin, to disk.
*
* @param session the session
*/
public synchronized void flush(Session session) {
if (session == null || !database.isSysTableLocked()) {
// This session may not lock the sys table (except if it already has
// locked it) because it must be committed immediately, otherwise
// other threads can not access the sys table.
Session sysSession = database.getSystemSession();
synchronized (sysSession) {
flushInternal(sysSession);
sysSession.commit(false);
}
} else {
synchronized (session) {
flushInternal(session);
}
}
}
private void flushInternal(Session session) {
// just for this case, use the value with the margin for the script
long realValue = value;
try {
value = valueWithMargin;
if (!isTemporary()) {
database.updateMeta(session, this);
}
} finally {
value = realValue;
}
}
/**
* Flush the current value to disk and close this object.
*/
public void close() {
flushWithoutMargin();
}
@Override
public int getType() {
return DbObject.SEQUENCE;
}
@Override
public void removeChildrenAndResources(Session session) {
database.removeMeta(session, getId());
invalidate();
}
@Override
public void checkRename() {
// nothing to do
}
// Value handed out by the most recent getNext() call. NOTE(review): before
// any getNext() call this returns the start value minus the increment —
// presumably callers only use it after NEXTVAL; confirm.
public synchronized long getCurrentValue() {
return value - increment;
}
public void setBelongsToTable(boolean b) {
this.belongsToTable = b;
}
public void setCacheSize(long cacheSize) {
// a cache size below 1 would stall getNext(); clamp like the constructor does
this.cacheSize = Math.max(1, cacheSize);
}
public long getCacheSize() {
return cacheSize;
}
}
| |
package org.repetti.srs.ui;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.repetti.srs.core.ParametrizedCoder;
import org.repetti.utils.ExceptionHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
/**
* Created on 21/05/15.
*
* @author repetti
*/
public class SimpleFrame extends JFrame {
private static final Logger log = LoggerFactory.getLogger(SimpleFrame.class);
private final ParametrizedCoder coder = new ParametrizedCoder();
private final JButton buttonChoose;
// private static final int keyLength = 256;
// private static final int iterationCount = 100;
// private static final String algorithm = "AES/CTR/PKCS7Padding";
private final JButton buttonLoad;
private final JButton buttonSave;
private final JTextField fieldPath;
private final JScrollPane scroll;
private final JTextArea text;
private volatile FileInformation fileInformation;
public SimpleFrame() throws HeadlessException {
setTitle("SRS Simple UI");
this.setLayout(null);
buttonChoose = new JButton("Choose...");
buttonSave = new JButton("Save...");
buttonLoad = new JButton("Load...");
fieldPath = new JTextField();
text = new JTextArea();
text.setEditable(true);
scroll = new JScrollPane(text, ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_ALWAYS);
buttonChoose.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
choose();
}
});
buttonLoad.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
load();
}
});
buttonSave.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
save();
}
});
final int w = 640;
final int h = 480;
final int b = 5;
final int bw = 120;
final int eh = 25;
final int fw = w - b * 5 - bw * 3;
fieldPath.setBounds(b, b, fw, eh);
buttonChoose.setBounds(b * 2 + fw, b, bw, eh);
buttonLoad.setBounds(b * 3 + fw + bw, b, bw, eh);
buttonSave.setBounds(b * 4 + fw + bw * 2, b, bw, eh);
scroll.setBounds(b, b * 2 + eh, w - b * 2, h - eh - b * 2);
Container pane = this.getContentPane();
pane.add(fieldPath);
pane.add(buttonLoad);
pane.add(buttonSave);
pane.add(buttonChoose);
pane.add(scroll);
this.pack(); //to get insets of the window
Insets i = this.getInsets();
log.debug("{}", i);
this.setSize(w + i.left + i.right, h + i.top + i.bottom);
this.setResizable(false);
this.setVisible(true);
this.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
}
private void choose() {
//https://docs.oracle.com/javase/tutorial/uiswing/components/dialog.html
String path = fieldPath.getText();
JFileChooser d = new JFileChooser(path);
int res = d.showDialog(this, "Choose");
if (res == JFileChooser.APPROVE_OPTION) {
fieldPath.setText(d.getSelectedFile().getAbsolutePath());
}
}
private void load() {
File file = new File(fieldPath.getText());
if (!file.exists()) {
JOptionPane.showMessageDialog(this,
"File not found.",
"Error",
JOptionPane.WARNING_MESSAGE);
return;
}
if (!file.isFile()) {
JOptionPane.showMessageDialog(this,
"Not a regular file.",
"Error",
JOptionPane.WARNING_MESSAGE);
return;
}
if (!file.canRead()) {
JOptionPane.showMessageDialog(this,
"Unable to read from file.",
"Error",
JOptionPane.WARNING_MESSAGE);
return;
}
byte[] res;
try {
Path path = file.toPath();
res = Files.readAllBytes(path);
} catch (IOException e) {
JOptionPane.showMessageDialog(this,
"Unable to read from File: " + ExceptionHelper.stackTraceToString(e),
"Error",
JOptionPane.WARNING_MESSAGE);
return;
}
String pass = getPassword("Load");
if (pass != null) {
try {
byte[] ret = coder.decode(res, pass.toCharArray());
String retText = new String(ret);
// System.out.println(retText + " " + new String(textOriginal));
this.text.setText(retText);
updateFileInformation(file, pass);
JOptionPane.showMessageDialog(this,
"Successfully read.",
"Success",
JOptionPane.INFORMATION_MESSAGE);
} catch (Exception e) {
JOptionPane.showMessageDialog(this,
ExceptionHelper.stackTraceToString(e),
"Exception",
JOptionPane.ERROR_MESSAGE);
}
} else {
JOptionPane.showMessageDialog(this,
"Cannot be empty. Aborting.",
"Invalid password",
JOptionPane.WARNING_MESSAGE);
}
}
private void save() {
File file = new File(fieldPath.getText());
if (file.exists() && !file.canWrite()) {
JOptionPane.showMessageDialog(this,
"Unable to write to file.",
"Error",
JOptionPane.WARNING_MESSAGE);
return;
}
boolean checkPassword = checkName(file);
FileInformation oldFileInformation = fileInformation;
String pass;
while (true) {
pass = getPassword(checkPassword ? "Save: reenter password" : "Save: define password");
if (checkPassword) {
if (pass == null) {
int res = JOptionPane.showConfirmDialog(this, "Define new password?", "Question", JOptionPane.YES_NO_OPTION);
if (res == JOptionPane.YES_OPTION) {
checkPassword = false;
removeFileInformation();
} else {
return;
}
} else {
if (checkPassword(pass)) {
break;
} else {
JOptionPane.showMessageDialog(this,
"Passwords don't match. Please define new password.",
"Invalid password",
JOptionPane.WARNING_MESSAGE);
checkPassword = false;
fileInformation = oldFileInformation;
}
}
} else {
if (pass == null) {
JOptionPane.showMessageDialog(this,
"Cannot be empty. Aborting.",
"Invalid password",
JOptionPane.WARNING_MESSAGE);
return;
} else {
checkPassword = true;
updateFileInformation(file, pass);
}
}
}
try {
String text = this.text.getText();
byte[] textOriginal = text.getBytes();
byte[] res = coder.encode(textOriginal, pass.toCharArray());
if (file.createNewFile()) {
log.debug("File created");
} else {
log.debug("File already existed");
}
BufferedOutputStream bos = null;
try {
try {
bos = new BufferedOutputStream(new FileOutputStream(file));
bos.write(res);
// bos.clo
} catch (IOException e) {
JOptionPane.showMessageDialog(this,
"Unable to write to File: " + ExceptionHelper.stackTraceToString(e),
"Error",
JOptionPane.WARNING_MESSAGE);
return;
}
} finally {
if (bos != null) {
try {
bos.close();
} catch (IOException e) {
log.warn("Exception while closing file {}", file, e);
}
}
}
JOptionPane.showMessageDialog(this,
res.length + " bytes successfully wrote.",
"Success",
JOptionPane.INFORMATION_MESSAGE);
} catch (Exception e) {
JOptionPane.showMessageDialog(this,
ExceptionHelper.stackTraceToString(e),
"Exception",
JOptionPane.ERROR_MESSAGE);
}
}
@Nullable
private String getPassword(String title) {
// as described in http://blogger.ziesemer.com/2007/03/java-password-dialog.html
JLabel label = new JLabel("Please enter your password:");
JPasswordField passwordField = new JPasswordField();
int status = JOptionPane.showConfirmDialog(null,
new Object[]{label, passwordField}, title,
JOptionPane.OK_CANCEL_OPTION);
if (status == JOptionPane.OK_OPTION) {
return String.valueOf(passwordField.getPassword());
}
return null;
}
/**
* If the file was read or written before, we should check that the password is the same.
* If this is the new file, we should ask to enter password twice.
*
* @param file current file
* @return true if this is the last accessed file
*/
private boolean checkName(@NotNull File file) {
return fileInformation != null && fileInformation.checkName(file.getAbsolutePath());
}
/**
* Checks if the password was reentered correctly
*
* @param password password to check
* @return true if password is same
*/
private boolean checkPassword(@NotNull String password) {
assert fileInformation != null;
return fileInformation.checkPassword(password);
}
// Forgets the remembered file/password pair, e.g. when the current file is no
// longer the one the stored credentials belong to.
private void removeFileInformation() {
this.fileInformation = null;
}
// Remembers the given file path and password so subsequent reads/writes of the
// same file can be verified against them (see checkName / checkPassword).
private void updateFileInformation(@NotNull File file, @NotNull String password) {
this.fileInformation = new FileInformation(file.getAbsolutePath(), password);
}
}
| |
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.isochrone.algorithm;
import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.procedures.IntObjectProcedure;
import com.graphhopper.coll.GHIntObjectHashMap;
import com.graphhopper.routing.AbstractRoutingAlgorithm;
import com.graphhopper.routing.Path;
import com.graphhopper.routing.PathExtractor;
import com.graphhopper.routing.util.EdgeFilter;
import com.graphhopper.routing.util.TraversalMode;
import com.graphhopper.routing.weighting.Weighting;
import com.graphhopper.storage.Graph;
import com.graphhopper.storage.NodeAccess;
import com.graphhopper.storage.SPTEntry;
import com.graphhopper.util.EdgeIterator;
import com.graphhopper.util.GHUtility;
import com.graphhopper.util.shapes.GHPoint;
import org.locationtech.jts.geom.Coordinate;
import java.util.*;
import static com.graphhopper.isochrone.algorithm.Isochrone.ExploreType.DISTANCE;
import static com.graphhopper.isochrone.algorithm.Isochrone.ExploreType.TIME;
/**
 * Dijkstra-like single-source exploration used to build isochrones: settles
 * nodes outward from a start node, recording per-node travel time and
 * distance, until the configured time or distance limit (plus a safety
 * margin, see finishLimit) is exceeded.
 *
 * @author Peter Karich
 */
public class Isochrone extends AbstractRoutingAlgorithm {
// Quantity the limit and the result buckets are measured in.
enum ExploreType {TIME, DISTANCE}
// TODO use same class as used in GTFS module?
class IsoLabel extends SPTEntry {
IsoLabel(int edgeId, int adjNode, double weight, long time, double distance) {
super(edgeId, adjNode, weight);
this.time = time;
this.distance = distance;
}
// Accumulated travel time (milliseconds) and distance (meters) from the start node.
public long time;
public double distance;
@Override
public String toString() {
return super.toString() + ", time:" + time + ", distance:" + distance;
}
}
// Best label found so far per reached node id.
private IntObjectHashMap<IsoLabel> fromMap;
private PriorityQueue<IsoLabel> fromHeap;
// Label most recently polled from the heap; finished() is evaluated on it.
private IsoLabel currEdge;
private int visitedNodes;
// Limit in ms (TIME) or meters (DISTANCE); -1 until one of the setters is called.
private double limit = -1;
// limit plus a margin; the search only stops beyond this value (see setTimeLimit).
private double finishLimit = -1;
private ExploreType exploreType = TIME;
// true: explore incoming edges (arrival isochrone) instead of outgoing ones.
private final boolean reverseFlow;
public Isochrone(Graph g, Weighting weighting, boolean reverseFlow) {
super(g, weighting, TraversalMode.NODE_BASED);
fromHeap = new PriorityQueue<>(1000);
fromMap = new GHIntObjectHashMap<>(1000);
this.reverseFlow = reverseFlow;
}
// Point-to-point routing is deliberately unsupported by this algorithm.
@Override
public Path calcPath(int from, int to) {
throw new IllegalStateException("call search instead");
}
/**
 * Time limit in seconds
 */
public void setTimeLimit(double limit) {
exploreType = TIME;
this.limit = limit * 1000;
// we explore until all spt-entries are '>timeLimitInSeconds'
// and add some more into this bucket for car we need a bit more as
// we otherwise get artifacts for motorway endings
this.finishLimit = this.limit + Math.max(this.limit * 0.14, 200_000);
}
/**
 * Distance limit in meter
 */
public void setDistanceLimit(double limit) {
exploreType = DISTANCE;
this.limit = limit;
this.finishLimit = limit + Math.max(limit * 0.14, 2_000);
}
// Flat, coordinate-resolved view of an IsoLabel and its parent label.
public static class IsoLabelWithCoordinates {
public final int nodeId;
public int edgeId, prevEdgeId, prevNodeId;
public int timeMillis, prevTimeMillis;
public int distance, prevDistance;
public GHPoint coordinate, prevCoordinate;
public IsoLabelWithCoordinates(int nodeId) {
this.nodeId = nodeId;
}
}
// Receiver for the per-node results of search(int, Callback).
public interface Callback {
void add(IsoLabelWithCoordinates label);
}
/**
 * Runs the exploration from 'from' and hands every reached label -- with
 * coordinates and parent information resolved -- to the callback.
 */
public void search(int from, final Callback callback) {
searchInternal(from);
final NodeAccess na = graph.getNodeAccess();
fromMap.forEach(new IntObjectProcedure<IsoLabel>() {
@Override
public void apply(int nodeId, IsoLabel label) {
double lat = na.getLatitude(nodeId);
double lon = na.getLongitude(nodeId);
IsoLabelWithCoordinates isoLabelWC = new IsoLabelWithCoordinates(nodeId);
isoLabelWC.coordinate = new GHPoint(lat, lon);
// NOTE(review): label.time is long, so this call resolves to Math.round(float)
// via the implicit long->float widening and loses precision above ~2^24 ms --
// confirm this is intended (the field is int, so some narrowing is unavoidable).
isoLabelWC.timeMillis = Math.round(label.time);
isoLabelWC.distance = (int) Math.round(label.distance);
isoLabelWC.edgeId = label.edge;
if (label.parent != null) {
IsoLabel prevLabel = (IsoLabel) label.parent;
// nodeId is reused for the parent's node from here on
nodeId = prevLabel.adjNode;
double prevLat = na.getLatitude(nodeId);
double prevLon = na.getLongitude(nodeId);
isoLabelWC.prevNodeId = nodeId;
isoLabelWC.prevEdgeId = prevLabel.edge;
isoLabelWC.prevCoordinate = new GHPoint(prevLat, prevLon);
isoLabelWC.prevDistance = (int) Math.round(prevLabel.distance);
// NOTE(review): same long->float widening as above.
isoLabelWC.prevTimeMillis = Math.round(prevLabel.time);
}
callback.add(isoLabelWC);
}
});
}
/**
 * Runs the exploration and partitions the reached coordinates into
 * bucketCount equal slices of the limit; one extra trailing bucket collects
 * labels beyond the limit (up to finishLimit).
 */
public List<List<Coordinate>> searchGPS(int from, final int bucketCount) {
searchInternal(from);
final double bucketSize = limit / bucketCount;
final List<List<Coordinate>> buckets = new ArrayList<>(bucketCount);
// bucketCount + 1: the last bucket holds the overflow beyond 'limit'
for (int i = 0; i < bucketCount + 1; i++) {
buckets.add(new ArrayList<Coordinate>());
}
final NodeAccess na = graph.getNodeAccess();
fromMap.forEach(new IntObjectProcedure<IsoLabel>() {
@Override
public void apply(int nodeId, IsoLabel label) {
int bucketIndex = (int) (getExploreValue(label) / bucketSize);
if (bucketIndex < 0) {
throw new IllegalArgumentException("edge cannot have negative explore value " + nodeId + ", " + label);
} else if (bucketIndex > bucketCount) {
return;
}
double lat = na.getLatitude(nodeId);
double lon = na.getLongitude(nodeId);
buckets.get(bucketIndex).add(new Coordinate(lon, lat));
// guess center of road to increase precision a bit for longer roads
if (label.parent != null) {
nodeId = label.parent.adjNode;
double lat2 = na.getLatitude(nodeId);
double lon2 = na.getLongitude(nodeId);
buckets.get(bucketIndex).add(new Coordinate((lon + lon2) / 2, (lat + lat2) / 2));
}
}
});
return buckets;
}
/**
 * Runs the exploration and partitions the reached node ids into bucketCount
 * sets by explore value; values hitting exactly the limit land in the last
 * bucket.
 */
public List<Set<Integer>> search(int from, final int bucketCount) {
searchInternal(from);
final double bucketSize = limit / bucketCount;
final List<Set<Integer>> list = new ArrayList<>(bucketCount);
for (int i = 0; i < bucketCount; i++) {
list.add(new HashSet<Integer>());
}
fromMap.forEach(new IntObjectProcedure<IsoLabel>() {
@Override
public void apply(int nodeId, IsoLabel label) {
// NOTE(review): finished() depends only on the final currEdge, so this
// guard is constant for the whole iteration (either every label is
// skipped or none is) -- looks suspicious, confirm the intent; a
// per-label check on getExploreValue(label) may have been meant.
if (finished()) {
return;
}
int bucketIndex = (int) (getExploreValue(label) / bucketSize);
if (bucketIndex < 0) {
throw new IllegalArgumentException("edge cannot have negative explore value " + nodeId + ", " + label);
} else if (bucketIndex == bucketCount) {
bucketIndex = bucketCount - 1;
} else if (bucketIndex > bucketCount) {
return;
}
list.get(bucketIndex).add(nodeId);
}
});
return list;
}
/**
 * Node-based Dijkstra from 'from': fills fromMap with the cheapest label per
 * reached node, expanding until finished() or the heap runs dry.
 */
private void searchInternal(int from) {
checkAlreadyRun();
// seed the search with a zero-cost label at the start node
currEdge = new IsoLabel(-1, from, 0, 0, 0);
fromMap.put(from, currEdge);
EdgeFilter filter = reverseFlow ? inEdgeFilter : outEdgeFilter;
while (true) {
visitedNodes++;
if (finished()) {
break;
}
int neighborNode = currEdge.adjNode;
EdgeIterator iter = edgeExplorer.setBaseNode(neighborNode);
while (iter.next()) {
if (!accept(iter, currEdge.edge)) {
continue;
}
// todo: for #1776/#1835 move the access check into weighting
double tmpWeight = !filter.accept(iter)
? Double.POSITIVE_INFINITY
: (GHUtility.calcWeightWithTurnWeight(weighting, iter, reverseFlow, currEdge.edge) + currEdge.weight);
if (Double.isInfinite(tmpWeight))
continue;
double tmpDistance = iter.getDistance() + currEdge.distance;
long tmpTime = GHUtility.calcMillisWithTurnMillis(weighting, iter, reverseFlow, currEdge.edge) + currEdge.time;
int tmpNode = iter.getAdjNode();
IsoLabel nEdge = fromMap.get(tmpNode);
if (nEdge == null) {
nEdge = new IsoLabel(iter.getEdge(), tmpNode, tmpWeight, tmpTime, tmpDistance);
nEdge.parent = currEdge;
fromMap.put(tmpNode, nEdge);
fromHeap.add(nEdge);
} else if (nEdge.weight > tmpWeight) {
// cheaper path to tmpNode found: decrease-key emulated via remove + re-add
fromHeap.remove(nEdge);
nEdge.edge = iter.getEdge();
nEdge.weight = tmpWeight;
nEdge.distance = tmpDistance;
nEdge.time = tmpTime;
nEdge.parent = currEdge;
fromHeap.add(nEdge);
}
}
if (fromHeap.isEmpty()) {
break;
}
currEdge = fromHeap.poll();
if (currEdge == null) {
throw new AssertionError("Empty edge cannot happen");
}
}
}
// Value the limit applies to: milliseconds for TIME, meters for DISTANCE.
private double getExploreValue(IsoLabel label) {
if (exploreType == TIME)
return label.time;
// if(exploreType == DISTANCE)
return label.distance;
}
@Override
protected boolean finished() {
return getExploreValue(currEdge) >= finishLimit;
}
// Only meaningful when the search actually reached the finish limit.
@Override
protected Path extractPath() {
if (currEdge == null || !finished()) {
return createEmptyPath();
}
return PathExtractor.extractPath(graph, weighting, currEdge);
}
@Override
public String getName() {
return "reachability";
}
@Override
public int getVisitedNodes() {
return visitedNodes;
}
}
| |
package com.venky.core.date;
import com.venky.cache.UnboundedCache;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
public class DateUtils { // NOPMD by VMahadevan on 1/26/09 11:16 PM

    /**
     * Per-thread cache of {@link SimpleDateFormat} instances keyed by pattern.
     * SimpleDateFormat is not thread-safe, hence the ThreadLocal. The cache is
     * unbounded, which is acceptable because only the handful of patterns
     * declared below is ever used.
     * NOTE(review): entries are never removed, so pooled threads keep their
     * formatters for the thread's lifetime -- confirm this is acceptable in
     * the deployment environment.
     */
    private static final ThreadLocal<Map<String, SimpleDateFormat>> dateFormatFactory = new ThreadLocal<>();

    /** @return millisecond difference {@code d1 - d2}. */
    public static long compareToMillis(final Date d1, final Date d2) {
        return compareToMillis(d1.getTime(), d2.getTime());
    }

    /** @return minute difference {@code d1 - d2}, rounded to the nearest whole minute. */
    public static int compareToMinutes(final Date d1, final Date d2) {
        return compareToMinutes(d1.getTime(), d2.getTime());
    }

    /** @return fractional minute difference {@code d1 - d2}. */
    public static double compareToDMinutes(final Date d1, final Date d2) {
        return compareToDMinutes(d1.getTime(), d2.getTime());
    }

    /** @return millisecond difference {@code millis1 - millis2}. */
    public static long compareToMillis(final long millis1, final long millis2) {
        return (millis1 - millis2);
    }

    /** @return minute difference, rounded to the nearest whole minute. */
    public static int compareToMinutes(final long millis1, final long millis2) {
        return (int) Math.round(compareToDMinutes(millis1, millis2));
    }

    /** @return fractional minute difference {@code millis1 - millis2}. */
    public static double compareToDMinutes(final long millis1, final long millis2) {
        final long millisecondDiff = compareToMillis(millis1, millis2);
        final double millisecondsInMinute = 60 * 1000.0;
        final double diff = (millisecondDiff) / millisecondsInMinute;
        return diff;
    }

    /** @return the later of the two dates. */
    public static Date max(final Date d1, final Date d2) {
        return (d1.compareTo(d2) < 0 ? d2 : d1);
    }

    /** @return the earlier of the two dates. */
    public static Date min(final Date d1, final Date d2) {
        return (d1.compareTo(d2) < 0 ? d1 : d2);
    }

    /** @return a new Date {@code hours} hours after {@code to}. */
    public static Date addHours(final Date to, final int hours) {
        return new Date(addHours(to.getTime(), hours));
    }

    /** @return a new Date {@code minutes} minutes after {@code to}. */
    public static Date addMinutes(final Date to, final int minutes) {
        return new Date(addMinutes(to.getTime(), minutes));
    }

    public static long addHours(final long to, final int hours) {
        return (to + hours * 3600000L);
    }

    public static long addMinutes(final long to, final int minutes) {
        return (to + minutes * 60000L);
    }

    public static long addSeconds(final long to, final int seconds) {
        return (to + seconds * 1000L);
    }

    public static long addMillis(final long to, final long millis) {
        return (to + millis);
    }

    /**
     * Adds {@code amount} to the given {@link Calendar} field (calendar-aware,
     * e.g. {@code Calendar.MONTH} handles varying month lengths).
     */
    public static Date addToDate(final Date to, final int field, final int amount) {
        return new Date(addToMillis(to.getTime(), field, amount));
    }

    public static long addToMillis(final long to, final int field, final int amount) {
        final Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(to);
        cal.add(field, amount);
        return cal.getTimeInMillis();
    }

    /* Supported date/time pattern strings; getDate/getTime try these in order. */
    public static final String ISO_TIME_FORMAT_STR = "HH:mm:ss";
    public static final String ISO_DATE_FORMAT_STR = "yyyy-MM-dd";
    public static final String ISO_DATE_TIME_FORMAT_STR = "yyyy-MM-dd HH:mm:ss";
    public static final String ISO_DATE_TIME_FORMAT_WITH_MILLIS_STR = "yyyy-MM-dd HH:mm:ss.SSS";
    public static final String ISO_8601_24H_FULL_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX";
    public static final String APP_TIME_FORMAT_STR = "HH:mm";
    public static final String APP_TIME_FORMAT_WITH_TZ_STR = "HH:mm Z";
    public static final String APP_DATE_TIME_FORMAT_STR = "dd/MM/yyyy HH:mm:ss";
    public static final String APP_DATE_TIME_FORMAT_WITH_TZ_STR = "dd/MM/yyyy HH:mm:ss Z";
    public static final String APP_DATE_FORMAT_STR = "dd/MM/yyyy";

    /** Sentinel "far future" date (31/12/2999); must stay declared after the patterns above. */
    public static final Date HIGH_DATE = getHighDate();

    /**
     * Returns a cached, per-thread formatter for the given pattern. The
     * returned instance is shared within the thread: do not cache it across
     * calls, and restore any time zone you set on it (as
     * {@link #getTimestampStr(Date, TimeZone, String)} does).
     */
    public static DateFormat getFormat(String fmt) {
        Map<String, SimpleDateFormat> cache = dateFormatFactory.get();
        if (cache == null) {
            cache = new HashMap<>();
            dateFormatFactory.set(cache);
        }
        SimpleDateFormat sdf = cache.get(fmt);
        if (sdf == null) {
            // Lazily created once per thread and pattern; SimpleDateFormat
            // construction is relatively expensive.
            sdf = new SimpleDateFormat(fmt, Locale.getDefault());
            cache.put(fmt, sdf);
        }
        return sdf;
    }

    private static Date getHighDate() {
        try {
            return getFormat(APP_DATE_TIME_FORMAT_STR).parse("31/12/2999 12:59:59"); // NOPMD by VMahadevan on 1/26/09 11:12 PM
        } catch (Exception e) {
            // Cannot happen with a constant input; surface as unchecked if it does.
            throw new RuntimeException(e);
        }
    }

    /** @return midnight (00:00:00.000) of the given date in the default time zone. */
    public static Date getStartOfDay(final Date date) {
        return new Date(getStartOfDay(date.getTime()));
    }

    public static long getStartOfDay(final long date) {
        final Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(date);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        return calendar.getTimeInMillis();
    }

    /** @return the last millisecond (23:59:59.999) of the given day in the default time zone. */
    public static long getEndOfDay(final long date) {
        final Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(date);
        calendar.set(Calendar.HOUR_OF_DAY, 23);
        calendar.set(Calendar.MINUTE, 59);
        calendar.set(Calendar.SECOND, 59);
        calendar.set(Calendar.MILLISECOND, 999);
        return calendar.getTimeInMillis();
    }

    public static Date getEndOfDay(final Date date) {
        return new Date(getEndOfDay(date.getTime()));
    }

    public static Date getTimeOfDay(final Date date, final String atTimeofDay) {
        return getTimeOfDay(date, new Time(atTimeofDay));
    }

    public static Date getTimeOfDay(final Date date, final Time atTimeofDay) {
        return new Date(getTimeOfDay(date.getTime(), atTimeofDay));
    }

    /** @return the instant on {@code date}'s day at the given wall-clock time (milliseconds kept from {@code date}). */
    public static long getTimeOfDay(final long date, final Time atTimeofDay) {
        try {
            final Calendar calendar = Calendar.getInstance();
            calendar.setTimeInMillis(date);
            calendar.set(Calendar.HOUR_OF_DAY, atTimeofDay.getHours());
            calendar.set(Calendar.MINUTE, atTimeofDay.getMinutes());
            calendar.set(Calendar.SECOND, atTimeofDay.getSeconds());
            return calendar.getTimeInMillis();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Parses {@code dateStr} by trying the known app/ISO formats from most to
     * least specific, then falls back to interpreting the string as epoch
     * milliseconds.
     *
     * @throws RuntimeException if no format matches
     */
    public static Date getDate(final String dateStr) {
        String[] trialFormats = new String[] { APP_DATE_TIME_FORMAT_WITH_TZ_STR , APP_DATE_TIME_FORMAT_STR, ISO_DATE_TIME_FORMAT_WITH_MILLIS_STR, ISO_8601_24H_FULL_FORMAT, ISO_DATE_TIME_FORMAT_STR, APP_DATE_FORMAT_STR, ISO_DATE_FORMAT_STR };
        for (String trialFormat : trialFormats) {
            try {
                return getFormat(trialFormat).parse(dateStr);
            } catch (ParseException ignored) {
                // expected for non-matching candidates: try the next format
            }
        }
        try {
            return new Date(Long.parseLong(dateStr));
        } catch (NumberFormatException ignored) {
            // not epoch millis either: report failure below
        }
        throw new RuntimeException("Unknown Date Format : " + dateStr);
    }

    /**
     * Parses a wall-clock time string, trying the known time formats.
     *
     * @throws RuntimeException if no format matches
     */
    public static Date getTime(String time) {
        String[] trialFormats = new String[] { APP_TIME_FORMAT_WITH_TZ_STR, APP_TIME_FORMAT_STR , ISO_TIME_FORMAT_STR };
        for (String trialFormat : trialFormats) {
            try {
                return getFormat(trialFormat).parse(time);
            } catch (ParseException ignored) {
                // try the next candidate format
            }
        }
        throw new RuntimeException("Unknown Time Format");
    }

    public static String getTimeStr(final Date time) {
        return getTimeStr(time, TimeZone.getDefault());
    }

    public static String getTimeStr(final Date time, TimeZone zone) {
        return getTimestampStr(time, zone, APP_TIME_FORMAT_WITH_TZ_STR);
    }

    public static String getDateStr(final Date date) {
        return getTimestampStr(date, TimeZone.getDefault(), APP_DATE_FORMAT_STR);
    }

    public static String getTimestampStr(final Date date) {
        return getTimestampStr(date, TimeZone.getDefault(), APP_DATE_TIME_FORMAT_WITH_TZ_STR);
    }

    public static String getTimestampStr(final Date date, final String tz) {
        TimeZone zone = SimpleTimeZone.getTimeZone(tz);
        return getTimestampStr(date, zone, APP_DATE_TIME_FORMAT_WITH_TZ_STR);
    }

    /** Formats {@code inOneTimeZone} with the given pattern in the given zone. */
    public static String getTimestampStr(final Date inOneTimeZone, final TimeZone zone, String datefmt) {
        DateFormat fmt = getFormat(datefmt);
        // The formatter is cached and shared within this thread: restore its
        // original time zone afterwards, otherwise later parse/format calls
        // (e.g. getDate) silently run in 'zone'.
        final TimeZone original = fmt.getTimeZone();
        try {
            fmt.setTimeZone(zone);
            return fmt.format(inOneTimeZone);
        } finally {
            fmt.setTimeZone(original);
        }
    }

    /**
     * Shifts {@code inOneTimeZone} by the offset difference between the
     * default zone and {@code tz}.
     * NOTE(review): a Date denotes an absolute instant, so this changes the
     * instant in order to transpose the wall-clock reading between zones --
     * confirm callers rely on exactly that behavior.
     */
    public static Date getDate(final Date inOneTimeZone, final String tz) {
        final TimeZone zone = SimpleTimeZone.getTimeZone(tz);
        final Calendar target = Calendar.getInstance(zone);
        final Calendar cal = Calendar.getInstance();
        cal.setTime(inOneTimeZone);
        final int fromOffset = cal.get(Calendar.ZONE_OFFSET) + cal.get(Calendar.DST_OFFSET);
        final int toOffset = target.get(Calendar.ZONE_OFFSET) + target.get(Calendar.DST_OFFSET);
        final int diffOffset = toOffset - fromOffset;
        cal.add(Calendar.MILLISECOND, diffOffset);
        target.setTimeInMillis(cal.getTime().getTime());
        return target.getTime();
    }

    /** Three-letter day-of-week names indexed by {@code Calendar.DAY_OF_WEEK - 1}. */
    private static final String[] DOW = {"SUN", "MON", "TUE", "WED", "THU", "FRI", "SAT"};

    /** @return the three-letter day-of-week abbreviation for {@code date} in the default zone. */
    public static String getDOW(Date date) {
        Calendar c = Calendar.getInstance();
        c.setTime(date);
        return DOW[c.get(Calendar.DAY_OF_WEEK) - 1];
    }
}
| |
/*
* GeoServer-Manager - Simple Manager Library for GeoServer
*
* Copyright (C) 2007,2011 GeoSolutions S.A.S.
* http://www.geo-solutions.it
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package it.geosolutions.geoserver.rest.publisher;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.StoreType;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher.UploadMethod;
import it.geosolutions.geoserver.rest.GeoserverRESTTest;
import it.geosolutions.geoserver.rest.decoder.RESTLayer;
import it.geosolutions.geoserver.rest.encoder.GSResourceEncoder.ProjectionPolicy;
import it.geosolutions.geoserver.rest.encoder.coverage.GSCoverageEncoderTest;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.commons.httpclient.NameValuePair;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
/**
 * Testcase for publishing layers on geoserver.
 * We need a running GeoServer to properly run the tests.
 * If such geoserver instance cannot be contacted, tests will be skipped
 * (each test starts with an {@code enabled()} guard).
 *
 * @author etj
 * @author Carlo Cancellieri - carlo.cancellieri@geo-solutions.it
 */
public class GeoserverRESTShapeTest extends GeoserverRESTTest {
private final static Logger LOGGER = LoggerFactory.getLogger(GeoserverRESTShapeTest.class);
// NOTE(review): appears intentionally empty -- every test resets server state
// itself via deleteAllWorkspacesRecursively() at its start; confirm.
@After
public void cleanUp(){
}
/** Publishes a zipped shapefile and asserts the datastore can be reloaded via REST. */
@Test
public void testReloadDataStore() throws FileNotFoundException, IOException {
if (!enabled())
return;
deleteAllWorkspacesRecursively();
assertTrue(publisher.createWorkspace(DEFAULT_WS));
String storeName = "resttestshp";
String layerName = "cities";
File zipFile = new ClassPathResource("testdata/resttestshp.zip").getFile();
// test insert
boolean published = publisher.publishShp(DEFAULT_WS, storeName, layerName, zipFile);
assertTrue("publish() failed", published);
// test reload
assertTrue(publisher.reloadStore(DEFAULT_WS, storeName, StoreType.DATASTORES));
}
/**
 * Publishes a zipped shapefile, verifies layer/datastore/featuretype
 * existence, then unpublishes the featuretype and removes the datastore.
 */
@Test
public void testPublishDeleteShapeZip() throws FileNotFoundException, IOException {
if (!enabled()) {
return;
}
deleteAllWorkspacesRecursively();
assertTrue(publisher.createWorkspace(DEFAULT_WS));
String storeName = "resttestshp";
String layerName = "cities";
File zipFile = new ClassPathResource("testdata/resttestshp.zip").getFile();
// test insert
boolean published = publisher.publishShp(DEFAULT_WS, storeName, layerName, zipFile);
assertTrue("publish() failed", published);
assertTrue(existsLayer(layerName));
// Test exists datastore
assertTrue(reader.existsDatastore(DEFAULT_WS, storeName));
// Test exists featuretype
assertTrue(reader.existsFeatureType(DEFAULT_WS, storeName, layerName));
RESTLayer layer = reader.getLayer(layerName);
LOGGER.info("Layer style is " + layer.getDefaultStyle());
//test delete
boolean ok = publisher.unpublishFeatureType(DEFAULT_WS, storeName, layerName);
assertTrue("Unpublish() failed", ok);
assertFalse(existsLayer(layerName));
// Test not exists featuretype
assertFalse(reader.existsFeatureType(DEFAULT_WS, storeName, layerName));
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(DEFAULT_WS, storeName,false);
assertTrue("removeDatastore() failed", dsRemoved);
// Test not exists datastore
assertFalse(reader.existsDatastore(DEFAULT_WS, storeName));
}
/**
 * Publishes an un-zipped shapefile via the EXTERNAL upload method with
 * explicit charset, CRS, style and projection policy, then cleans up.
 */
@Test
public void testPublishDeleteExternalComplexShapeZip() throws FileNotFoundException, IOException {
if (!enabled()) {
return;
}
deleteAllWorkspacesRecursively();
// Assume.assumeTrue(enabled);
assertTrue(publisher.createWorkspace(DEFAULT_WS));
String storeName = "resttestshp_complex";
String datasetName = "cities";
File zipFile = new ClassPathResource("testdata/shapefile/cities.shp").getFile();
// test insert
boolean published = publisher.publishShp(DEFAULT_WS, storeName, new NameValuePair[]{new NameValuePair("charset", "UTF-8")},datasetName, UploadMethod.EXTERNAL, zipFile.toURI(), "EPSG:4326",GSCoverageEncoderTest.WGS84,ProjectionPolicy.REPROJECT_TO_DECLARED,"polygon");
assertTrue("publish() failed", published);
assertTrue(existsLayer(datasetName));
RESTLayer layer = reader.getLayer(datasetName);
LOGGER.info("Layer style is " + layer.getDefaultStyle());
//test delete
boolean ok = publisher.unpublishFeatureType(DEFAULT_WS, storeName, datasetName);
assertTrue("Unpublish() failed", ok);
assertFalse(existsLayer(datasetName));
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(DEFAULT_WS, storeName,false);
assertTrue("removeDatastore() failed", dsRemoved);
}
/**
 * Same as the EXTERNAL variant above, but uploads the zip through the FILE
 * upload method.
 */
@Test
public void testPublishDeleteComplexShapeZip() throws FileNotFoundException, IOException {
if (!enabled()) {
return;
}
deleteAllWorkspacesRecursively();
// Assume.assumeTrue(enabled);
assertTrue(publisher.createWorkspace(DEFAULT_WS));
String storeName = "resttestshp_complex";
String datasetName = "cities";
File zipFile = new ClassPathResource("testdata/resttestshp.zip").getFile();
// test insert
boolean published = publisher.publishShp(DEFAULT_WS, storeName, new NameValuePair[]{new NameValuePair("charset", "UTF-8")},datasetName, UploadMethod.FILE, zipFile.toURI(), "EPSG:4326",GSCoverageEncoderTest.WGS84,ProjectionPolicy.REPROJECT_TO_DECLARED,"polygon");
assertTrue("publish() failed", published);
assertTrue(existsLayer(datasetName));
RESTLayer layer = reader.getLayer(datasetName);
LOGGER.info("Layer style is " + layer.getDefaultStyle());
//test delete
boolean ok = publisher.unpublishFeatureType(DEFAULT_WS, storeName, datasetName);
assertTrue("Unpublish() failed", ok);
assertFalse(existsLayer(datasetName));
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(DEFAULT_WS, storeName,false);
assertTrue("removeDatastore() failed", dsRemoved);
}
/**
 * Publishes an SLD style, publishes a shapefile bound to that style,
 * verifies the default-style assignment, then removes both again.
 */
@Test
public void testPublishDeleteStyledShapeZip() throws FileNotFoundException, IOException {
if (!enabled()) {
return;
}
deleteAllWorkspacesRecursively();
// Assume.assumeTrue(enabled);
assertTrue(publisher.createWorkspace(DEFAULT_WS));
// NOTE(review): hard-coded namespace; publication below uses 'ns' while the
// cleanup above used DEFAULT_WS -- confirm both refer to the same workspace.
String ns = "geosolutions";
String storeName = "resttestshp";
String layerName = "cities";
final String styleName = "restteststyle";
File zipFile = new ClassPathResource("testdata/resttestshp.zip").getFile();
publisher.removeDatastore(DEFAULT_WS, storeName,true);
publisher.removeStyle(styleName);
File sldFile = new ClassPathResource("testdata/restteststyle.sld").getFile();
// insert style
boolean sldpublished = publisher.publishStyle(sldFile); // Will take the name from sld contents
assertTrue("style publish() failed", sldpublished);
assertTrue(reader.existsStyle(styleName));
// test insert
boolean published = publisher.publishShp(ns, storeName, layerName, zipFile, "EPSG:4326", styleName);
assertTrue("publish() failed", published);
assertTrue(existsLayer(layerName));
RESTLayer layer = reader.getLayer(layerName);
// RESTLayer layerDecoder = new RESTLayer(layer);
LOGGER.info("Layer style is " + layer.getDefaultStyle());
assertEquals("Style not assigned properly", styleName, layer.getDefaultStyle());
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(ns, storeName,true);
assertTrue("removeDatastore() failed", dsRemoved);
//test delete style
boolean oksld = publisher.removeStyle(styleName);
assertTrue("Unpublish() failed", oksld);
assertFalse(reader.existsStyle(styleName));
}
/**
 * Workspace-scoped variant of the styled publication test: the style lives
 * inside DEFAULT_WS and is referenced as "workspace:style".
 */
@Test
public void testPublishDeleteStyledInWorkspaceShapeZip() throws FileNotFoundException, IOException {
if (!enabled()) {
return;
}
deleteAllWorkspacesRecursively();
// Assume.assumeTrue(enabled);
assertTrue(publisher.createWorkspace(DEFAULT_WS));
String ns = "geosolutions";
String storeName = "resttestshp";
String layerName = "cities";
final String styleName = "restteststyle";
File zipFile = new ClassPathResource("testdata/resttestshp.zip").getFile();
publisher.removeDatastore(DEFAULT_WS, storeName,true);
publisher.removeStyleInWorkspace(DEFAULT_WS, styleName);
File sldFile = new ClassPathResource("testdata/restteststyle.sld").getFile();
// insert style
boolean sldpublished = publisher.publishStyleInWorkspace(DEFAULT_WS, sldFile); // Will take the name from sld contents
assertTrue("style publish() failed", sldpublished);
assertTrue(reader.existsStyle(DEFAULT_WS, styleName));
// test insert
boolean published = publisher.publishShp(ns, storeName, layerName, zipFile, "EPSG:4326", DEFAULT_WS + ":" + styleName);
assertTrue("publish() failed", published);
assertTrue(existsLayer(layerName));
RESTLayer layer = reader.getLayer(layerName);
// RESTLayer layerDecoder = new RESTLayer(layer);
LOGGER.info("Layer style is " + layer.getDefaultStyle());
assertEquals("Style not assigned properly", DEFAULT_WS + ":" + styleName, layer.getDefaultStyle());
assertEquals("Style not assigned properly", DEFAULT_WS, layer.getDefaultStyleWorkspace());
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(ns, storeName,true);
assertTrue("removeDatastore() failed", dsRemoved);
//test delete style
boolean oksld = publisher.removeStyleInWorkspace(DEFAULT_WS, styleName);
assertTrue("Unpublish() failed", oksld);
assertFalse(reader.existsStyle(styleName));
}
/** Publishes a zipped shapefile passing extra request parameters (charset), then cleans up. */
@Test
public void testPublishDeleteShapeZipWithParams() throws FileNotFoundException, IOException {
if (!enabled()) {
return;
}
deleteAllWorkspacesRecursively();
// Assume.assumeTrue(enabled);
assertTrue(publisher.createWorkspace(DEFAULT_WS));
String storeName = "resttestshp";
String layerName = "cities";
File zipFile = new ClassPathResource("testdata/resttestshp.zip").getFile();
// known state?
publisher.removeDatastore(DEFAULT_WS, storeName,true);
// test insert
boolean published = publisher.publishShp(DEFAULT_WS, storeName, layerName, zipFile,"EPSG:4326",new NameValuePair("charset","UTF-8"));
assertTrue("publish() failed", published);
assertTrue(existsLayer(layerName));
RESTLayer layer = reader.getLayer(layerName);
LOGGER.info("Layer style is " + layer.getDefaultStyle());
//test delete
boolean ok = publisher.unpublishFeatureType(DEFAULT_WS, storeName, layerName);
assertTrue("Unpublish() failed", ok);
assertFalse(existsLayer(layerName));
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(DEFAULT_WS, storeName);
assertTrue("removeDatastore() failed", dsRemoved);
}
/**
 * Test case to solve error described in:
 * https://github.com/geosolutions-it/geoserver-manager/issues/11
 * Publishes a shapefile without a .prj, relying on the declared native CRS.
 *
 * @throws IllegalArgumentException
 * @throws FileNotFoundException
 */
@Test
public void testPublishShpUsingDeclaredNativeCRS() throws Exception {
if (!enabled())
return;
deleteAllWorkspacesRecursively();
// layer publication params
String workspace = DEFAULT_WS;
String storename = "resttestshp";
String layerName = "cities";
File zipFile = new ClassPathResource("testdata/testshp_no_prj.zip")
.getFile();
String nativeCrs = "EPSG:4230";
String defaultStyle = null;
// Cleanup
// NOTE(review): workspaces were already deleted a few lines above; this
// second cleanup looks redundant.
deleteAllWorkspacesRecursively();
assertTrue(publisher.createWorkspace(workspace));
// Publish layer
assertTrue(publisher.publishShp(workspace, storename, layerName,
zipFile, nativeCrs, defaultStyle));
// Read CRS. Should be using the one indicated at publication time.
assertNotNull(reader.getLayer(layerName));
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(DEFAULT_WS, storename,true);
assertTrue("removeDatastore() failed", dsRemoved);
}
/**
 * Test case to solve error described in:
 * https://github.com/geosolutions-it/geoserver-manager/issues/11
 * Publishes a shapefile whose .prj carries WKT without an EPSG code.
 *
 * @throws IllegalArgumentException
 * @throws FileNotFoundException
 */
@Test
public void testPublishShpUsingWKTNativeCRS() throws Exception {
if (!enabled())
return;
deleteAllWorkspacesRecursively();
// layer publication params
String workspace = DEFAULT_WS;
String storename = "resttestshp";
String layerName = "10m_populated_places";
File zipFile = new ClassPathResource("testdata/test_noepsg.zip")
.getFile();
String nativeCrs = "EPSG:4326";
String defaultStyle = null;
// Cleanup
// NOTE(review): redundant second cleanup, same as in the test above.
deleteAllWorkspacesRecursively();
assertTrue(publisher.createWorkspace(workspace));
// Publish layer
assertTrue(publisher.publishShp(workspace, storename, layerName,
zipFile, nativeCrs, defaultStyle));
// Read CRS. Should be using the one indicated at publication time.
assertNotNull(reader.getLayer(layerName));
// remove also datastore
boolean dsRemoved = publisher.removeDatastore(DEFAULT_WS, storename,true);
assertTrue("removeDatastore() failed", dsRemoved);
}
}
| |
package org.elkoserver.util.trace;
import java.util.Calendar;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
/**
* This class manages dumping of messages to the semi-permanent on-disk log.
* Will queue messages until it's pointed at a log file or stdout. Messages
* will be redirected to stdout if a given logfile can't be opened.
*/
class TraceLog implements TraceMessageAcceptor {
/* Trace log file defaults */
private static final long STARTING_LOG_SIZE_THRESHOLD = 500000;
private static final long SMALLEST_LOG_SIZE_THRESHOLD = 1000;
/* Behaviors when opening files that already exist */
static final int VA_IRRELEVANT = -1; /* When opening stdout */
static final int VA_OVERWRITE = 0; /* Empty: overwrite existing */
static final int VA_ADD = 1; /* Rollover: add a new file */
private static final int STARTING_LOG_VERSION_ACTION = VA_ADD;
/** Default log name. */
static final String DEFAULT_NAME = "default";
/** What to do with full or existing log files: rollover or empty. */
private int myVersionAction = STARTING_LOG_VERSION_ACTION;
/** Flag controlling whether a log file should be written at all. */
private boolean amWriteEnabled = false;
/** Log file size above which the file will be rolled over, in chars. */
private long myMaxSize = STARTING_LOG_SIZE_THRESHOLD;
/** Flag that max size was explicitly set rather than defaulted. */
private boolean amMaxSizeSet = false;
/** Number of characters in the current log file. */
private long myCurrentSize;
/** Frequency with which log files are rolled over, in milliseconds. */
private long myRolloverFrequency = 0;
/** Time of next scheduled log file rollover, or 0 if rollover is off */
private long myNextRolloverTime = 0;
/** Log to which messages are currently flowing, or null if none yet. */
private TraceLogDescriptor myCurrent;
/**
 * The user can change the characteristics of this log descriptor, then
 * redirect the log to it. Characteristics are changed via properties like
 * "tracelog_tag". Redirection is done via "tracelog_reopen".
 */
private TraceLogDescriptor myPending = new TraceLogDescriptor();
/** True if all the initialization properties have been processed. */
private boolean mySetupComplete = false;
/** Buffer for building log message strings in. */
private StringBuilder myStringBuffer;
/** Queue for messages prior to log init and while switching log files. */
private List<TraceMessage> myQueuedMessages;
/** Length of the platform line separator, used when tracking file size. */
private static final int LINE_SEPARATOR_LENGTH =
System.getProperty("line.separator").length();
/**
* Constructor. Queue messages until setup is complete.
*/
TraceLog() {
/*
DANGER: This constructor must be called as part of static
initialization of TraceController. Until that initialization is
done, Trace should not be loaded. Therefore, nothing in this
constructor should directly or indirectly use a tracing function.
*/
myStringBuffer = new StringBuilder(200);
startQueuing();
}
/**
* Accept a message for the log. It will be discarded if both writing
* and the queue are turned off.
*/
public synchronized void accept(TraceMessage message) {
if (isAcceptingMessages()) {
if (isQueuing()) {
myQueuedMessages.add(message);
} else {
outputMessage(message);
}
}
}
/**
* Take a message and actually output it to the log. In particular, the
* queue of pending messages is bypassed, because this method is used in
* the process of draining that queue.
*/
private void outputMessage(TraceMessage message) {
message.stringify(myStringBuffer);
String output = myStringBuffer.toString();
myCurrent.stream.println(output);
/* Note: there's little point in checking for an output error. We
can't put the trace in the log, and there's little chance the user
would see it in the trace buffer. So we ignore it, with regret. */
myCurrentSize += output.length() + LINE_SEPARATOR_LENGTH;
if (myCurrentSize > myMaxSize) {
rolloverLogFile("This log is full.");
} else if (myNextRolloverTime != 0 &&
myNextRolloverTime < message.timestamp()) {
do {
myNextRolloverTime += myRolloverFrequency;
} while (myNextRolloverTime < message.timestamp());
rolloverLogFile("The time has come for a new log file.");
}
}
/**
* Call to initialize a log when logging is just beginning (or resuming
* after having been turned off). There is no current log, so nothing is
* written to it. If the pending log cannot be opened, standard output is
* used as the log. In any case, the queue is drained just before the
* method returns.
*/
private void beginLogging() {
try {
/* Rename any existing file */
myPending.startUsing(myVersionAction, null);
} catch (Exception e) {
/* Couldn't open the log file. Bail to stdout. */
Trace.trace.shred(e, "Exception has already been logged.");
myCurrent = TraceLogDescriptor.stdout;
try {
myCurrent.startUsing(VA_IRRELEVANT, null);
} catch (Exception ignore) {
assert false: "Exceptions shouldn't happen opening stdout.";
}
drainQueue();
return;
}
myCurrent = myPending;
Trace.trace.worldi("Logging begins on " + myCurrent.printName() + ".");
myPending = (TraceLogDescriptor) myCurrent.clone();
myCurrentSize = 0;
drainQueue();
}
/**
* Change how a full logfile handles its version files. "one" or "1" means
* that there will be at most one file, which will be overwritten if
* needed. "many" means a new versioned file with a new name should be
* created each time the base file fills up. Has effect when the next log
* file fills up.
*/
private void changeVersionFileHandling(String newBehavior) {
if (newBehavior.equalsIgnoreCase("one") || newBehavior.equals("1")) {
Trace.trace.eventi("Log files will be overwritten.");
myVersionAction = VA_OVERWRITE;
} else if (newBehavior.equalsIgnoreCase("many")) {
Trace.trace.eventi("New version files will always be created.");
myVersionAction = VA_ADD;
} else {
Trace.trace.errori(
"tracelog_versions property was given unknown value '" +
newBehavior + "'.");
}
}
/**
* Change the default directory in which logfiles live. Has effect only
* when a new logfile is opened.
*/
private void changeDir(String value) {
myPending.setDir(value);
}
/**
* Explicitly set the name of the next logfile to open. Overrides the
* effect of "tracelog_dir" only if the given name is absolute. Has effect
* only when a new logfile is opened.
*/
private void changeName(String value) {
myPending.setName(value);
}
/**
* Change the time-based log file rollover policy. By default, log files
* are only rolled over when they reach some size threshold, but by setting
* this you can also make them rollover based on the clock.
*
* @param value Rollover policy. Valid values are "weekly", "daily",
* "hourly", "none", or an integer number that expresses the rollover
* frequency in minutes.
*/
private void changeRollover(String value) {
int freq = 0;
Calendar startCal = Calendar.getInstance();
if (value.equalsIgnoreCase("weekly")) {
freq = 7 * 24 * 60;
startCal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
startCal.set(Calendar.HOUR_OF_DAY, 0);
startCal.set(Calendar.MINUTE, 0);
startCal.set(Calendar.SECOND, 0);
startCal.set(Calendar.MILLISECOND, 0);
} else if (value.equalsIgnoreCase("daily")) {
freq = 24 * 60;
startCal.set(Calendar.HOUR_OF_DAY, 0);
startCal.set(Calendar.MINUTE, 0);
startCal.set(Calendar.SECOND, 0);
startCal.set(Calendar.MILLISECOND, 0);
} else if (value.equalsIgnoreCase("hourly")) {
freq = 60;
startCal.set(Calendar.MINUTE, 0);
startCal.set(Calendar.SECOND, 0);
startCal.set(Calendar.MILLISECOND, 0);
} else if (value.equalsIgnoreCase("none")) {
freq = 0;
} else {
try {
freq = Integer.parseInt(value);
if (freq < 1) {
Trace.trace.errori("Log size rollover value " + value +
"too small (minimum is 1 minute)");
freq = 0;
} else {
int minute = startCal.get(Calendar.MINUTE);
startCal.set(Calendar.MINUTE, (minute / freq) * freq);
startCal.set(Calendar.SECOND, 0);
startCal.set(Calendar.MILLISECOND, 0);
}
} catch (NumberFormatException e) {
Trace.trace.errori("Invalid log size rollover value '" +
value + "'.");
}
}
if (freq != 0) {
myRolloverFrequency = freq * 60 * 1000;
if (!amMaxSizeSet) {
myMaxSize = Long.MAX_VALUE;
}
long startTime = startCal.getTimeInMillis();
myNextRolloverTime = startTime + myRolloverFrequency;
} else {
myNextRolloverTime = 0;
}
}
/**
* Change the new maximum allowable size for a logfile. Has effect on the
* current logfile. Note that the trace system does not prevent the log
* from exceeding this size; it only opens a new log file as soon as it
* does.
*/
private void changeSize(String value) {
long newSize;
if (value.equalsIgnoreCase(DEFAULT_NAME)) {
newSize = STARTING_LOG_SIZE_THRESHOLD;
} else if (value.equalsIgnoreCase("unlimited")) {
newSize = Long.MAX_VALUE;
} else try {
newSize = Long.parseLong(value);
} catch (NumberFormatException e) {
Trace.trace.errori(
"Log size cannot be changed to illegal value '" +
value + "'.");
newSize = myMaxSize; /* leave unchanged. */
}
if (newSize < SMALLEST_LOG_SIZE_THRESHOLD) {
Trace.trace.errori(
value + " is too small a threshold size for the log. "
+ "Try " + SMALLEST_LOG_SIZE_THRESHOLD + ".");
newSize = myMaxSize;
}
amMaxSizeSet = true;
myMaxSize = newSize;
if (myCurrentSize > myMaxSize) {
rolloverLogFile("This log is full.");
}
}
/**
* Change the 'tag' (base of filename) that logfiles have. Has effect only
* when a new logfile is opened.
*/
private void changeTag(String value) {
myPending.setTag(value);
}
/**
* The meaning of changeWrite is complicated. Here are the cases when it's
* used to turn writing ON.
*
* If setup is still in progress, the state variable 'amWriteEnabled' is
* used to note that logging should begin when setupIsComplete() is called.
*
* If setup is complete, logging should begin immediately. If logging has
* already begun, this is a no-op.
*
* Here are the cases for turning writing OFF.
*
* If setup is not complete, the state variable 'amWriteEnabled' informs
* setupIsComplete() that logging should not begin.
*
* If setup is complete, logging is stopped. However, if it was already
* stopped, the call is a no-op.
*
* There would be some merit in having a state machine implement all this.
*/
private void changeWrite(String value) {
if (value.equalsIgnoreCase("true")) {
if (amWriteEnabled) {
Trace.trace.warningi("Log writing enabled twice in a row.");
} else {
amWriteEnabled = true;
startQueuing(); /* it's ok if the queue already started. */
Trace.trace.eventi("Logging is enabled.");
if (mySetupComplete) {
beginLogging();
}
}
} else if (value.equalsIgnoreCase("false")) {
if (!amWriteEnabled) {
Trace.trace.warningi("Log writing disabled twice in a row.");
} else {
Trace.trace.eventi("Logging disabled.");
drainQueue(); /* either write messages or discard them */
amWriteEnabled = false;
if (mySetupComplete) {
myCurrent.stopUsing();
myCurrent = null;
} else {
assert myCurrent == null;
}
}
} else {
Trace.trace.errori("tracelog_write property was given value '" +
value + "'.");
}
}
/**
* Deal with messages accumulated in the queue. If the log is turned on
* (amWriteEnabled is true), they are written. Otherwise, they are
* discarded. It is safe to call this routine without knowing whether
* queuing is in progress.
*/
private void drainQueue() {
if (amWriteEnabled && isQueuing()) {
List<TraceMessage> queueToDrain = myQueuedMessages;
myQueuedMessages = null;
for (TraceMessage message : queueToDrain) {
outputMessage(message);
}
}
myQueuedMessages = null;
}
/**
* Call when the logfile fills up or reaches rollover time. Reopens the
* same log file.
*
* Standard output can never fill up, so this routine is a no-op when the
* current size of text sent to standard out exceeds the maximum, except
* that the current size is reset to zero.
*/
private void rolloverLogFile(String why) {
/* Preemptively set the log size back to zero. This allows log
messages about the fullness of the log to be placed into the log,
without getting into an infinite recursion.
*/
myCurrentSize = 0;
if (myCurrent.stream != System.out) {
Trace.trace.worldi(why);
shutdownAndSwap();
}
}
/**
* Call to switch to a log when another - with a different name - is
* currently being used. If the pending log cannot be opened, the current
* log continues to be used.
*
* Before the old log is closed, a WORLD message is logged, directing the
* reader to the new log. Trace messages may be queued while the swap is
* happening, but the queue is drained before the method returns.
*
* This routine is never called when the logfile fills - it's only used
* when explicitly reopening a log file. (tracelog_reopen=true).
*/
private void hotSwap() {
/* Finish the old log with a pointer to the new. */
Trace.trace.worldi("Logging ends.");
Trace.trace.worldi("Logging will continue on " +
myPending.printName() + ".");
startQueuing(); /* further messages should go to the new log. */
try {
/* rename an existing file, since it is not an earlier version of
the new name we're using. */
myPending.startUsing(myVersionAction, null);
} catch (Exception e) {
Trace.trace.shred(e, "Exception has already been logged.");
/* continue using current. */
drainQueue();
return;
}
/* Stash old log name to print in new log. */
String lastLog = myCurrent.printName();
myCurrent.stopUsing();
myCurrent = myPending;
Trace.trace.worldi("Logging begins on " + myCurrent.printName() + ".");
Trace.trace.worldi("Previous log was " + lastLog + ".");
myCurrentSize = 0;
myPending = (TraceLogDescriptor) myCurrent.clone();
drainQueue();
}
/**
* Test if this log is accepting messages.
*
* The log accepts messages if the "tracelog_write" property was set.
* Before setup is completed, it also accepts and queues up messages. When
* setup is complete, it either posts or discards those queued messages,
* depending on what the user wants.
*
* Queuing also happens transitorily while logs are being switched.
*
* @return true if this log is accepting messages, false if it is
* discarding them.
*/
private boolean isAcceptingMessages() {
return amWriteEnabled || isQueuing();
}
/**
* Test if this log is queueing messages.
*
* The log queues messages during its startup time and while switching
* between open log files.
*
* @return true if this log is currently queueing messages.
*/
private boolean isQueuing() {
return myQueuedMessages != null;
}
/**
* The gist of this routine is that it shuts down the current log and
* reopens a new one (possibly with the same name, possibly with a
* different name). There are some special cases, because this routine
* could be called before setup is complete (though using tracelog_reopen
* in the initial Properties is deprecated).
*
* It's called before setup is complete and writing is not enabled. The
* behavior is the same as tracelog_write [the preferred interface].
*
* It's called before setup is complete and writing is enabled. The effect
* is that of calling tracelog_write twice (a warning).
*
* It's called after setup is complete and writing is not enabled. The
* behavior is the same as calling tracelog_write [again, the preferred
* interface, because you're not "reopening" anything].
*
* It's called after setup is complete and writing is enabled. This is the
* way it's supposed to be used. The current log is closed and the pending
* log is opened.
*/
private void reopen(String ignored) {
if (!amWriteEnabled || !mySetupComplete) {
changeWrite("true");
} else if (myPending.equals(myCurrent)) {
shutdownAndSwap();
} else {
hotSwap();
}
}
/**
* Modify the acceptor configuration based on a property setting. Property
* names here are not true property names but property names with the
* "trace_" or "tracelog_" prefix stripped off.
*
* @param name Property name
* @param value Property value
*
* @return true if the property was recognized and handled, false if not
*/
public synchronized boolean setConfiguration(String name, String value) {
if (name.equalsIgnoreCase("write")) {
changeWrite(value);
} else if (name.equalsIgnoreCase("dir")) {
changeDir(value);
} else if (name.equalsIgnoreCase("tag")) {
changeTag(value);
} else if (name.equalsIgnoreCase("name")) {
changeName(value);
} else if (name.equalsIgnoreCase("size")) {
changeSize(value);
} else if (name.equalsIgnoreCase("rollover")) {
changeRollover(value);
} else if (name.equalsIgnoreCase("versions")) {
changeVersionFileHandling(value);
} else if (name.equalsIgnoreCase("reopen")) {
reopen(value);
} else {
return false;
}
return true;
}
/**
* Call this only after all properties have been processed. It begins
* logging, but only if tracelog_write or tracelog_reopen have been used,
* or if the default behavior is to write.
*/
public synchronized void setupIsComplete() {
Trace.trace.eventi("Logging is being started.");
if (amWriteEnabled) {
beginLogging();
}
drainQueue();
mySetupComplete = true;
}
/**
* Call to initialize a log when the same file is already open. If the
* pending log cannot be opened, standard output is used.
*
* Before the old log is closed, a WORLD message is logged, directing the
* reader to the new log. Trace messages may be queued while the swap is
* happening, but the queue is drained before the method returns.
*
* This routine can be called to version a full logfile, or to explicitly
* reopen the same logfile (via tracelog_reopen=true).
*/
private void shutdownAndSwap() {
/* In the old log, say what will happen. Can't log it while it's
happening, because that all goes to the new log.
*/
myCurrent.prepareToRollover(myVersionAction);
/* Stash old log name. This is used if reopening fails and further
logging is blurted to stdout.
*/
String lastLog = myCurrent.printName();
myCurrent.stopUsing();
startQueuing(); /* further messages should go to the new log. */
try {
myPending.startUsing(myVersionAction, myCurrent);
} catch (Exception e) {
Trace.trace.shred(e, "Exception has already been logged.");
myCurrent = TraceLogDescriptor.stdout;
myCurrentSize = 0;
try {
myCurrent.startUsing(VA_IRRELEVANT, null);
} catch (Exception ignore) {
assert false: "No exceptions when opening stdout.";
}
drainQueue();
Trace.trace.worldi("Previous log was " + lastLog + ".");
return;
}
myCurrent = myPending;
myCurrentSize = 0;
Trace.trace.worldi("Logging continues on " + myCurrent.printName() +
".");
Trace.trace.worldi("Previous log was " + lastLog + ".");
myPending = (TraceLogDescriptor) myCurrent.clone();
drainQueue();
}
/**
* Redirect trace messages to a queue. Used while switching to a new log
* file, or before setup is complete.
*
* It is harmless to call this routine twice.
*/
private void startQueuing() {
/* NOTE: trace messages must not be generated by this routine, because
it's called from the constructor.
*/
if (!isQueuing()) {
myQueuedMessages = new LinkedList<TraceMessage>();
}
}
}
| |
/*
* Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.core.publisher;
import java.util.Objects;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Scheduler.Worker;
import reactor.util.annotation.Nullable;
/**
* Subscribes to the source Publisher asynchronously through a scheduler function or
* ExecutorService.
*
* @param <T> the value type
* @see <a href="https://github.com/reactor/reactive-streams-commons">Reactive-Streams-Commons</a>
*/
final class FluxSubscribeOn<T> extends FluxOperator<T, T> {

	/** Scheduler from which a per-subscriber {@link Worker} is obtained. */
	final Scheduler scheduler;
	/** If true, upstream requests made from other threads are forwarded on the worker. */
	final boolean requestOnSeparateThread;

	FluxSubscribeOn(
			Flux<? extends T> source,
			Scheduler scheduler,
			boolean requestOnSeparateThread) {
		super(source);
		this.scheduler = Objects.requireNonNull(scheduler, "scheduler");
		this.requestOnSeparateThread = requestOnSeparateThread;
	}

	@Override
	@SuppressWarnings("unchecked")
	public void subscribe(CoreSubscriber<? super T> actual) {
		Worker worker;

		try {
			// createWorker() must not return null; treat null or a throw as an
			// immediate error signaled to the subscriber.
			worker = Objects.requireNonNull(scheduler.createWorker(),
					"The scheduler returned a null Function");
		} catch (Throwable e) {
			Operators.error(actual, Operators.onOperatorError(e, actual.currentContext()));
			return;
		}

		SubscribeOnSubscriber<T> parent = new SubscribeOnSubscriber<>(source,
				actual, worker, requestOnSeparateThread);
		// onSubscribe is delivered BEFORE the source is actually subscribed on
		// the worker; early request() calls accumulate in REQUESTED meanwhile.
		actual.onSubscribe(parent);

		try {
			worker.schedule(parent);
		}
		catch (RejectedExecutionException ree) {
			// Only signal the rejection if we haven't already been cancelled.
			if (parent.s != Operators.cancelledSubscription()) {
				actual.onError(Operators.onRejectedExecution(ree, parent, null, null,
						actual.currentContext()));
			}
		}
	}

	static final class SubscribeOnSubscriber<T>
			implements InnerOperator<T, T>, Runnable {

		final CoreSubscriber<? super T> actual;

		final Publisher<? extends T> source;

		final Worker  worker;
		final boolean requestOnSeparateThread;

		/** Upstream subscription; set at most once, replaced by the cancelled marker on cancel. */
		volatile Subscription s;
		static final AtomicReferenceFieldUpdater<SubscribeOnSubscriber, Subscription> S =
				AtomicReferenceFieldUpdater.newUpdater(SubscribeOnSubscriber.class,
						Subscription.class,
						"s");

		/** Demand accumulated before the upstream subscription arrives. */
		volatile long requested;

		@SuppressWarnings("rawtypes")
		static final AtomicLongFieldUpdater<SubscribeOnSubscriber> REQUESTED =
				AtomicLongFieldUpdater.newUpdater(SubscribeOnSubscriber.class,
						"requested");

		/** Thread on which run() subscribed to the source (the worker thread). */
		volatile Thread thread;

		@SuppressWarnings("rawtypes")
		static final AtomicReferenceFieldUpdater<SubscribeOnSubscriber, Thread> THREAD =
				AtomicReferenceFieldUpdater.newUpdater(SubscribeOnSubscriber.class,
						Thread.class,
						"thread");

		SubscribeOnSubscriber(Publisher<? extends T> source, CoreSubscriber<? super T> actual,
				Worker worker, boolean requestOnSeparateThread) {
			this.actual = actual;
			this.worker = worker;
			this.source = source;
			this.requestOnSeparateThread = requestOnSeparateThread;
		}

		@Override
		public void onSubscribe(Subscription s) {
			// Store the subscription exactly once, then replay any demand that
			// accumulated in REQUESTED before the subscription arrived.
			if (Operators.setOnce(S, this, s)) {
				long r = REQUESTED.getAndSet(this, 0L);
				if (r != 0L) {
					requestUpstream(r, s);
				}
			}
		}

		void requestUpstream(final long n, final Subscription s) {
			// Request directly when thread-hopping is disabled or we're already
			// on the worker thread; otherwise defer the request to the worker.
			if (!requestOnSeparateThread || Thread.currentThread() == THREAD.get(this)) {
				s.request(n);
			}
			else {
				try {
					worker.schedule(() -> s.request(n));
				}
				catch (RejectedExecutionException ree) {
					if(!worker.isDisposed()) {
						//FIXME should not throw but if we implement strict
						// serialization like in StrictSubscriber, onNext will carry an
						// extra cost
						throw Operators.onRejectedExecution(ree, this, null, null,
								actual.currentContext());
					}
				}
			}
		}

		@Override
		public void onNext(T t) {
			actual.onNext(t);
		}

		@Override
		public void onError(Throwable t) {
			// Release the worker even if the downstream onError throws.
			try {
				actual.onError(t);
			}
			finally {
				worker.dispose();
			}
		}

		@Override
		public void onComplete() {
			actual.onComplete();
			worker.dispose();
		}

		@Override
		public void request(long n) {
			if (Operators.validate(n)) {
				Subscription s = S.get(this);
				if (s != null) {
					requestUpstream(n, s);
				}
				else {
					// No subscription yet: bank the demand, then re-check in
					// case onSubscribe raced in between — if it did, drain the
					// banked demand ourselves so it isn't lost.
					Operators.addCap(REQUESTED, this, n);
					s = S.get(this);
					if (s != null) {
						long r = REQUESTED.getAndSet(this, 0L);
						if (r != 0L) {
							requestUpstream(r, s);
						}
					}
				}
			}
		}

		@Override
		public void run() {
			// Runs on the worker: remember its thread (so requestUpstream can
			// detect it) and perform the actual subscription to the source.
			THREAD.lazySet(this, Thread.currentThread());
			source.subscribe(this);
		}

		@Override
		public void cancel() {
			// Atomically swap in the cancelled marker; cancel the previous
			// subscription if there was a live one, and release the worker.
			Subscription a = s;
			if (a != Operators.cancelledSubscription()) {
				a = S.getAndSet(this, Operators.cancelledSubscription());
				if (a != null && a != Operators.cancelledSubscription()) {
					a.cancel();
				}
			}
			worker.dispose();
		}

		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.PARENT) return s;
			if (key == Attr.CANCELLED) return s == Operators.cancelledSubscription();
			if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;

			return InnerOperator.super.scanUnsafe(key);
		}

		@Override
		public CoreSubscriber<? super T> actual() {
			return actual;
		}
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing;
import com.carrotsearch.hppc.IntSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.IndexNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
/**
* Represents a global cluster-wide routing table for all indices including the
* version of the current routing state.
*
* @see IndexRoutingTable
*/
public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<RoutingTable> {
public static RoutingTable PROTO = builder().build();
public static final RoutingTable EMPTY_ROUTING_TABLE = builder().build();
private final long version;
// index to IndexRoutingTable map
private final ImmutableOpenMap<String, IndexRoutingTable> indicesRouting;
    // Package-private: instances are built via the Builder or deserialized
    // via readFrom/readDiffFrom.
    RoutingTable(long version, ImmutableOpenMap<String, IndexRoutingTable> indicesRouting) {
        this.version = version;
        this.indicesRouting = indicesRouting;
    }
/**
* Returns the version of the {@link RoutingTable}.
*
* @return version of the {@link RoutingTable}
*/
public long version() {
return this.version;
}
    @Override
    public Iterator<IndexRoutingTable> iterator() {
        // Iterates the per-index routing tables in the backing map's order.
        return indicesRouting.valuesIt();
    }
    /** Returns true if this routing table contains an entry for {@code index}. */
    public boolean hasIndex(String index) {
        return indicesRouting.containsKey(index);
    }
    /** Returns the routing table for {@code index}, or null if the index is unknown. */
    public IndexRoutingTable index(String index) {
        return indicesRouting.get(index);
    }
    /** Returns the full index-name to routing-table map. */
    public ImmutableOpenMap<String, IndexRoutingTable> indicesRouting() {
        return indicesRouting;
    }
    /** JavaBean-style alias for {@link #indicesRouting()}. */
    public ImmutableOpenMap<String, IndexRoutingTable> getIndicesRouting() {
        return indicesRouting();
    }
public RoutingTable validateRaiseException(MetaData metaData) throws RoutingValidationException {
RoutingTableValidation validation = validate(metaData);
if (!validation.valid()) {
throw new RoutingValidationException(validation);
}
return this;
}
public RoutingTableValidation validate(MetaData metaData) {
RoutingTableValidation validation = new RoutingTableValidation();
for (IndexRoutingTable indexRoutingTable : this) {
indexRoutingTable.validate(validation, metaData);
}
return validation;
}
public List<ShardRouting> shardsWithState(ShardRoutingState state) {
List<ShardRouting> shards = new ArrayList<>();
for (IndexRoutingTable indexRoutingTable : this) {
shards.addAll(indexRoutingTable.shardsWithState(state));
}
return shards;
}
/**
* All the shards (replicas) for all indices in this routing table.
*
* @return All the shards
*/
public List<ShardRouting> allShards() {
List<ShardRouting> shards = new ArrayList<>();
String[] indices = indicesRouting.keys().toArray(String.class);
for (String index : indices) {
List<ShardRouting> allShardsIndex = allShards(index);
shards.addAll(allShardsIndex);
}
return shards;
}
/**
* All the shards (replicas) for the provided index.
*
* @param index The index to return all the shards (replicas).
* @return All the shards matching the specific index
* @throws IndexNotFoundException If the index passed does not exists
*/
public List<ShardRouting> allShards(String index) {
List<ShardRouting> shards = new ArrayList<>();
IndexRoutingTable indexRoutingTable = index(index);
if (indexRoutingTable == null) {
throw new IndexNotFoundException(index);
}
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
for (ShardRouting shardRouting : indexShardRoutingTable) {
shards.add(shardRouting);
}
}
return shards;
}
    /** Convenience overload: no extra iterators for relocation targets. */
    public GroupShardsIterator allActiveShardsGrouped(String[] indices, boolean includeEmpty) {
        return allActiveShardsGrouped(indices, includeEmpty, false);
    }
    /**
     * Return GroupShardsIterator where each active shard routing has it's own shard iterator.
     *
     * @param indices the indices to look up; unknown indices are silently ignored
     * @param includeEmpty if true, a shard iterator will be added for non-assigned shards as well
     * @param includeRelocationTargets if true, an <b>extra</b> shard iterator will be added for relocating shards. The extra
     *                                 iterator contains a single ShardRouting pointing at the relocating target
     */
    public GroupShardsIterator allActiveShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) {
        return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ACTIVE_PREDICATE);
    }
    /** Convenience overload: no extra iterators for relocation targets. */
    public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty) {
        return allAssignedShardsGrouped(indices, includeEmpty, false);
    }
    /**
     * Return GroupShardsIterator where each assigned shard routing has it's own shard iterator.
     *
     * @param indices the indices to look up; unknown indices are silently ignored
     * @param includeEmpty if true, a shard iterator will be added for non-assigned shards as well
     * @param includeRelocationTargets if true, an <b>extra</b> shard iterator will be added for relocating shards. The extra
     *                                 iterator contains a single ShardRouting pointing at the relocating target
     */
    public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) {
        return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ASSIGNED_PREDICATE);
    }
private static Predicate<ShardRouting> ACTIVE_PREDICATE = shardRouting -> shardRouting.active();
private static Predicate<ShardRouting> ASSIGNED_PREDICATE = shardRouting -> shardRouting.assignedToNode();
    /**
     * Return a GroupShardsIterator with one iterator per shard routing that
     * satisfies {@code predicate}, optionally adding empty iterators for
     * non-matching shards and extra iterators for relocation targets.
     * Indices that don't exist in this routing table are silently skipped.
     *
     * @param indices the indices to look up
     * @param includeEmpty if true, add an empty iterator for each non-matching shard (keeps group counts correct)
     * @param includeRelocationTargets if true, add an extra single-element iterator for each relocating shard's target
     * @param predicate the filter applied to each shard routing
     */
    private GroupShardsIterator allSatisfyingPredicateShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets, Predicate<ShardRouting> predicate) {
        // use list here since we need to maintain identity across shards
        ArrayList<ShardIterator> set = new ArrayList<>();
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = index(index);
            if (indexRoutingTable == null) {
                // we simply ignore indices that don't exists (make sense for operations that use it currently)
                continue;
            }
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    if (predicate.test(shardRouting)) {
                        set.add(shardRouting.shardsIt());
                        if (includeRelocationTargets && shardRouting.relocating()) {
                            set.add(new PlainShardIterator(shardRouting.shardId(), Collections.singletonList(shardRouting.buildTargetRelocatingShard())));
                        }
                    } else if (includeEmpty) { // we need this for counting properly, just make it an empty one
                        set.add(new PlainShardIterator(shardRouting.shardId(), Collections.<ShardRouting>emptyList()));
                    }
                }
            }
        }
        return new GroupShardsIterator(set);
    }
    /** All shards of the given indices, without relocation-target duplicates. */
    public ShardsIterator allShards(String[] indices) {
        return allShardsSatisfyingPredicate(indices, shardRouting -> true, false);
    }
    /** All shards of the given indices, adding an entry per relocation target. */
    public ShardsIterator allShardsIncludingRelocationTargets(String[] indices) {
        return allShardsSatisfyingPredicate(indices, shardRouting -> true, true);
    }
    /**
     * Flat iterator over every shard routing of the given indices that
     * satisfies {@code predicate}. Indices that don't exist in this routing
     * table are silently skipped.
     *
     * @param indices the indices to look up
     * @param predicate the filter applied to each shard routing
     * @param includeRelocationTargets if true, also add the target shard of each relocating shard
     */
    private ShardsIterator allShardsSatisfyingPredicate(String[] indices, Predicate<ShardRouting> predicate, boolean includeRelocationTargets) {
        // use list here since we need to maintain identity across shards
        List<ShardRouting> shards = new ArrayList<>();
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = index(index);
            if (indexRoutingTable == null) {
                // we simply ignore indices that don't exists (make sense for operations that use it currently)
                continue;
            }
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    if (predicate.test(shardRouting)) {
                        shards.add(shardRouting);
                        if (includeRelocationTargets && shardRouting.relocating()) {
                            shards.add(shardRouting.buildTargetRelocatingShard());
                        }
                    }
                }
            }
        }
        return new PlainShardsIterator(shards);
    }
    /**
     * All the *active* primary shards for the provided indices grouped (each group is a single element, consisting
     * of the primary shard). This is handy for components that expect to get group iterators, but still want in some
     * cases to iterate over all primary shards (and not just one shard in replication group).
     *
     * @param indices The indices to return all the shards (replicas)
     * @param includeEmpty if true, an empty iterator is added for each inactive primary (keeps group counts correct)
     * @return All the primary shards grouped into a single shard element group each
     * @throws IndexNotFoundException If an index passed does not exists
     * @see IndexRoutingTable#groupByAllIt()
     */
    public GroupShardsIterator activePrimaryShardsGrouped(String[] indices, boolean includeEmpty) {
        // use list here since we need to maintain identity across shards
        ArrayList<ShardIterator> set = new ArrayList<>();
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = index(index);
            if (indexRoutingTable == null) {
                // unlike the filtered-shard helpers, a missing index here is an error
                throw new IndexNotFoundException(index);
            }
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                ShardRouting primary = indexShardRoutingTable.primaryShard();
                if (primary.active()) {
                    set.add(primary.shardsIt());
                } else if (includeEmpty) { // we need this for counting properly, just make it an empty one
                    set.add(new PlainShardIterator(primary.shardId(), Collections.<ShardRouting>emptyList()));
                }
            }
        }
        return new GroupShardsIterator(set);
    }
@Override
public Diff<RoutingTable> diff(RoutingTable previousState) {
    // Serializable delta that turns previousState into this routing table.
    RoutingTableDiff delta = new RoutingTableDiff(previousState, this);
    return delta;
}
@Override
public Diff<RoutingTable> readDiffFrom(StreamInput in) throws IOException {
    // Deserialize a diff previously written by RoutingTableDiff.writeTo.
    RoutingTableDiff delta = new RoutingTableDiff(in);
    return delta;
}
@Override
public RoutingTable readFrom(StreamInput in) throws IOException {
    // Stream layout: version (long), index count (vint), then each index routing table.
    Builder routingTableBuilder = new Builder();
    routingTableBuilder.version = in.readLong();
    int indexCount = in.readVInt();
    while (indexCount-- > 0) {
        routingTableBuilder.add(IndexRoutingTable.Builder.readFrom(in));
    }
    return routingTableBuilder.build();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    // Keep the field order in sync with readFrom: version, count, tables.
    out.writeLong(version);
    out.writeVInt(indicesRouting.size());
    for (ObjectCursor<IndexRoutingTable> cursor : indicesRouting.values()) {
        cursor.value.writeTo(out);
    }
}
/**
 * Delta between two {@link RoutingTable} instances. Carries the version of the
 * "after" table plus a map diff of the per-index routing tables, so only the
 * changed indices need to be transmitted.
 */
private static class RoutingTableDiff implements Diff<RoutingTable> {
    // Version of the "after" table; restored verbatim by apply().
    private final long version;
    // Diff of the per-index routing tables between "before" and "after".
    private final Diff<ImmutableOpenMap<String, IndexRoutingTable>> indicesRouting;
    /** Computes the diff that transforms {@code before} into {@code after}. */
    public RoutingTableDiff(RoutingTable before, RoutingTable after) {
        version = after.version;
        indicesRouting = DiffableUtils.diff(before.indicesRouting, after.indicesRouting);
    }
    /** Reads a serialized diff; field order must mirror {@link #writeTo}. */
    public RoutingTableDiff(StreamInput in) throws IOException {
        version = in.readLong();
        indicesRouting = DiffableUtils.readImmutableOpenMapDiff(in, IndexRoutingTable.PROTO);
    }
    @Override
    public RoutingTable apply(RoutingTable part) {
        return new RoutingTable(version, indicesRouting.apply(part.indicesRouting));
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Keep in sync with the StreamInput constructor above.
        out.writeLong(version);
        indicesRouting.writeTo(out);
    }
}
/** Creates an empty routing-table builder. */
public static Builder builder() {
    Builder fresh = new Builder();
    return fresh;
}
/** Creates a builder pre-populated with the contents of {@code routingTable}. */
public static Builder builder(RoutingTable routingTable) {
    Builder seeded = new Builder(routingTable);
    return seeded;
}
/**
 * Builder for the routing table. Note that build can only be called one time:
 * {@link #build()} nulls out the internal map, and every subsequent mutator
 * fails with an {@link IllegalStateException}.
 */
public static class Builder {
    private long version;
    // Set to null by build() so that any reuse of the builder fails fast.
    private ImmutableOpenMap.Builder<String, IndexRoutingTable> indicesRouting = ImmutableOpenMap.builder();

    /** Creates an empty builder (version 0, no indices). */
    public Builder() {
    }

    /** Creates a builder seeded with the version and index tables of {@code routingTable}. */
    public Builder(RoutingTable routingTable) {
        version = routingTable.version;
        for (IndexRoutingTable indexRoutingTable : routingTable) {
            indicesRouting.put(indexRoutingTable.index(), indexRoutingTable);
        }
    }

    /**
     * Rebuilds the per-index routing tables from the shard assignments held by
     * {@code routingNodes}.
     * This is being called without pre-initializing the routing table, so the
     * version must be copied over as well.
     */
    public Builder updateNodes(RoutingNodes routingNodes) {
        this.version = routingNodes.routingTable().version();
        Map<String, IndexRoutingTable.Builder> indexRoutingTableBuilders = new HashMap<>();
        for (RoutingNode routingNode : routingNodes) {
            for (ShardRouting shardRoutingEntry : routingNode) {
                // every relocating shard has a double entry, ignore the target one.
                if (shardRoutingEntry.initializing() && shardRoutingEntry.relocatingNodeId() != null) {
                    continue;
                }
                addToIndexBuilder(routingNodes, indexRoutingTableBuilders, shardRoutingEntry);
            }
        }
        // Unassigned and ignored-unassigned shards also belong in the table.
        Iterable<ShardRouting> shardRoutingEntries = Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored());
        for (ShardRouting shardRoutingEntry : shardRoutingEntries) {
            addToIndexBuilder(routingNodes, indexRoutingTableBuilders, shardRoutingEntry);
        }
        for (IndexRoutingTable.Builder indexBuilder : indexRoutingTableBuilders.values()) {
            add(indexBuilder);
        }
        return this;
    }

    /**
     * Registers {@code shardRoutingEntry} with the builder of its index,
     * creating that builder on first use. Extracted to remove the duplicated
     * lookup-or-create logic that previously appeared twice in updateNodes().
     */
    private static void addToIndexBuilder(RoutingNodes routingNodes,
                                          Map<String, IndexRoutingTable.Builder> builders,
                                          ShardRouting shardRoutingEntry) {
        String index = shardRoutingEntry.index();
        IndexRoutingTable.Builder indexBuilder = builders.computeIfAbsent(index, IndexRoutingTable.Builder::new);
        IndexShardRoutingTable refData = routingNodes.routingTable().index(index).shard(shardRoutingEntry.id());
        indexBuilder.addShard(refData, shardRoutingEntry);
    }

    /**
     * Adjusts the replica count of the given indices (all indices when none are
     * passed), adding empty replicas or removing excess ones as needed.
     * Indices missing from the builder (e.g. closed) are ignored.
     */
    public Builder updateNumberOfReplicas(int numberOfReplicas, String... indices) {
        if (indicesRouting == null) {
            throw new IllegalStateException("once build is called the builder cannot be reused");
        }
        if (indices == null || indices.length == 0) {
            indices = indicesRouting.keys().toArray(String.class);
        }
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = indicesRouting.get(index);
            if (indexRoutingTable == null) {
                // ignore index missing failure, its closed...
                continue;
            }
            int currentNumberOfReplicas = indexRoutingTable.shards().get(0).size() - 1; // remove the required primary
            IndexRoutingTable.Builder builder = new IndexRoutingTable.Builder(index);
            // re-add all the shards
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                builder.addIndexShard(indexShardRoutingTable);
            }
            if (currentNumberOfReplicas < numberOfReplicas) {
                // now, add "empty" ones
                for (int i = 0; i < (numberOfReplicas - currentNumberOfReplicas); i++) {
                    builder.addReplica();
                }
            } else if (currentNumberOfReplicas > numberOfReplicas) {
                // delta is strictly positive in this branch, so just remove it.
                // (The old "delta <= 0" guard here was dead code.)
                int delta = currentNumberOfReplicas - numberOfReplicas;
                for (int i = 0; i < delta; i++) {
                    builder.removeReplica();
                }
            }
            indicesRouting.put(index, builder.build());
        }
        return this;
    }

    /** Adds the index as brand new, but only when its metadata state is OPEN. */
    public Builder addAsNew(IndexMetaData indexMetaData) {
        if (indexMetaData.state() == IndexMetaData.State.OPEN) {
            IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                .initializeAsNew(indexMetaData);
            add(indexRoutingBuilder);
        }
        return this;
    }

    /** Adds the index as recovering from local state, but only when OPEN. */
    public Builder addAsRecovery(IndexMetaData indexMetaData) {
        if (indexMetaData.state() == IndexMetaData.State.OPEN) {
            IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                .initializeAsRecovery(indexMetaData);
            add(indexRoutingBuilder);
        }
        return this;
    }

    /** Adds a dangling index (found on disk but absent from cluster state), but only when OPEN. */
    public Builder addAsFromDangling(IndexMetaData indexMetaData) {
        if (indexMetaData.state() == IndexMetaData.State.OPEN) {
            IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                .initializeAsFromDangling(indexMetaData);
            add(indexRoutingBuilder);
        }
        return this;
    }

    /** Adds an index that is transitioning from closed to open, but only when OPEN. */
    public Builder addAsFromCloseToOpen(IndexMetaData indexMetaData) {
        if (indexMetaData.state() == IndexMetaData.State.OPEN) {
            IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
                .initializeAsFromCloseToOpen(indexMetaData);
            add(indexRoutingBuilder);
        }
        return this;
    }

    /** Adds an existing index restored from the given snapshot restore source. */
    public Builder addAsRestore(IndexMetaData indexMetaData, RestoreSource restoreSource) {
        IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
            .initializeAsRestore(indexMetaData, restoreSource);
        add(indexRoutingBuilder);
        return this;
    }

    /** Adds a newly created index restored from a snapshot, skipping {@code ignoreShards}. */
    public Builder addAsNewRestore(IndexMetaData indexMetaData, RestoreSource restoreSource, IntSet ignoreShards) {
        IndexRoutingTable.Builder indexRoutingBuilder = new IndexRoutingTable.Builder(indexMetaData.index())
            .initializeAsNewRestore(indexMetaData, restoreSource, ignoreShards);
        add(indexRoutingBuilder);
        return this;
    }

    /**
     * Validates and registers {@code indexRoutingTable}, replacing any existing
     * table for the same index.
     *
     * @throws IllegalStateException if build() was already called
     */
    public Builder add(IndexRoutingTable indexRoutingTable) {
        if (indicesRouting == null) {
            throw new IllegalStateException("once build is called the builder cannot be reused");
        }
        indexRoutingTable.validate();
        indicesRouting.put(indexRoutingTable.index(), indexRoutingTable);
        return this;
    }

    /** Builds {@code indexRoutingTableBuilder} and registers the result. */
    public Builder add(IndexRoutingTable.Builder indexRoutingTableBuilder) {
        add(indexRoutingTableBuilder.build());
        return this;
    }

    /**
     * Registers all entries of the given map. Note: existing tables for the
     * same index names are replaced without validation.
     *
     * @throws IllegalStateException if build() was already called
     */
    public Builder indicesRouting(Map<String, IndexRoutingTable> indicesRouting) {
        if (indicesRouting == null) {
            throw new IllegalStateException("once build is called the builder cannot be reused");
        }
        this.indicesRouting.putAll(indicesRouting);
        return this;
    }

    /**
     * Removes the routing table of {@code index}, if present.
     *
     * @throws IllegalStateException if build() was already called
     */
    public Builder remove(String index) {
        if (indicesRouting == null) {
            throw new IllegalStateException("once build is called the builder cannot be reused");
        }
        indicesRouting.remove(index);
        return this;
    }

    /** Sets the version the built routing table will carry. */
    public Builder version(long version) {
        this.version = version;
        return this;
    }

    /**
     * Builds the routing table. Note that once this is called the builder
     * must be thrown away. If you need to build a new RoutingTable as a
     * copy of this one you'll need to build a new RoutingTable.Builder.
     */
    public RoutingTable build() {
        if (indicesRouting == null) {
            throw new IllegalStateException("once build is called the builder cannot be reused");
        }
        // normalize the versions right before we build it...
        for (ObjectCursor<IndexRoutingTable> indexRoutingTable : indicesRouting.values()) {
            indicesRouting.put(indexRoutingTable.value.index(), indexRoutingTable.value.normalizeVersions());
        }
        RoutingTable table = new RoutingTable(version, indicesRouting.build());
        indicesRouting = null;
        return table;
    }

    /** Deserializes a routing table via the shared prototype instance. */
    public static RoutingTable readFrom(StreamInput in) throws IOException {
        return PROTO.readFrom(in);
    }
}
/** Renders the routing table as a human-readable multi-line string. */
public String prettyPrint() {
    StringBuilder out = new StringBuilder();
    out.append("routing_table (version ").append(version).append("):\n");
    for (ObjectObjectCursor<String, IndexRoutingTable> cursor : indicesRouting) {
        out.append(cursor.value.prettyPrint()).append('\n');
    }
    return out.toString();
}
}
| |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.groups;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.naming.Name;
import org.jasig.portal.EntityIdentifier;
import org.jasig.portal.concurrency.CachingException;
import org.jasig.portal.concurrency.IEntityLock;
import org.jasig.portal.concurrency.LockingException;
import org.jasig.portal.services.EntityCachingService;
import org.jasig.portal.services.EntityLockService;
import org.jasig.portal.services.GroupService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Reference individual, or leaf, group service.
*
* @author Dan Ellentuck
* @version $Revision$
*/
public class ReferenceIndividualGroupService extends ReferenceCompositeGroupService
implements IIndividualGroupService, ILockableGroupService
{
private static final Logger log = LoggerFactory.getLogger(ReferenceIndividualGroupService.class);
// Describes the attributes of this service. See compositeGroupServices.xml.
protected ComponentGroupServiceDescriptor serviceDescriptor;
protected IEntityGroupStore groupFactory;
// Entity searcher
protected IEntitySearcher entitySearcher;
/**
* ReferenceGroupsService constructor.
*/
public ReferenceIndividualGroupService() throws GroupsException
{
this(new ComponentGroupServiceDescriptor());
}
/**
* ReferenceGroupsService constructor.
*/
public ReferenceIndividualGroupService(ComponentGroupServiceDescriptor svcDescriptor)
throws GroupsException
{
super();
serviceDescriptor = svcDescriptor;
initialize();
}
/**
* Answers if <code>IGroupMembers</code> are being cached.
*/
protected boolean cacheInUse()
{
return getServiceDescriptor().isCachingEnabled();
}
/**
* Removes the <code>IEntityGroup</code> from the cache and the store.
* @param group IEntityGroup
*/
public void deleteGroup(IEntityGroup group) throws GroupsException
{
throwExceptionIfNotInternallyManaged();
synchronizeGroupMembersOnDelete(group);
getGroupStore().delete(group);
if ( cacheInUse() )
{ cacheRemove(group); }
}
/**
* Removes the <code>ILockableEntityGroup</code> from its containing groups.
* The <code>finally</code> block tries to release any groups that are still
* locked, which can occur if an attempt to remove the group from one of
* its containing groups fails and throws a GroupsException. In this event,
* we do not try to roll back any successful removes, since that would probably
* fail anyway.
* @param group ILockableEntityGroup
*/
private void removeDeletedGroupFromContainingGroups(ILockableEntityGroup group)
throws GroupsException
{
Iterator itr;
IEntityGroup containingGroup = null;
ILockableEntityGroup lockableGroup = null;
IEntityLock lock = null;
List lockableGroups = new ArrayList();
try
{
String lockOwner = group.getLock().getLockOwner();
for ( itr=group.getContainingGroups(); itr.hasNext(); )
{
containingGroup = (IEntityGroup) itr.next();
lockableGroup=
GroupService.findLockableGroup(containingGroup.getKey(), lockOwner);
if ( lockableGroup != null )
{ lockableGroups.add(lockableGroup); }
}
for ( itr = lockableGroups.iterator(); itr.hasNext(); )
{
lockableGroup = (ILockableEntityGroup) itr.next();
lockableGroup.removeMember(group);
lockableGroup.updateMembers();
}
}
catch (GroupsException ge)
{ throw new GroupsException("Could not remove deleted group " + group.getKey() +
" from parent", ge); }
finally
{
for ( itr = lockableGroups.iterator(); itr.hasNext(); )
{
lock = ((ILockableEntityGroup) itr.next()).getLock();
try
{
if ( lock.isValid() )
{ lock.release(); }
}
catch (LockingException le)
{
log.error(
"ReferenceIndividualGroupService.removeDeletedGroupFromContainingGroups(): " +
"Problem unlocking parent group", le);
}
}
}
}
/**
* Removes the <code>ILockableEntityGroup</code> from the cache and the store,
* including both parent and child memberships.
* @param group ILockableEntityGroup
*/
public void deleteGroup(ILockableEntityGroup group) throws GroupsException
{
throwExceptionIfNotInternallyManaged();
try
{
if ( group.getLock().isValid() )
{
removeDeletedGroupFromContainingGroups(group);
deleteGroup( (IEntityGroup)group );
}
else
{ throw new GroupsException("Could not delete group " + group.getKey() +
" has invalid lock."); }
}
catch (LockingException le)
{ throw new GroupsException("Could not delete group " + group.getKey(),
le); }
finally
{
try { group.getLock().release(); }
catch ( LockingException le ) {}
}
}
private EntityIdentifier[] filterEntities(EntityIdentifier[] entities, IEntityGroup ancestor) throws GroupsException{
ArrayList ar = new ArrayList(entities.length);
for(int i=0; i< entities.length;i++){
IGroupMember gm = this.getGroupMember(entities[i]);
if (ancestor.deepContains(gm)){
ar.add(entities[i]);
}
}
return (EntityIdentifier[]) ar.toArray(new EntityIdentifier[0]);
}
/**
* Returns and caches the containing groups for the <code>IGroupMember</code>
* @param gm IGroupMember
*/
public Iterator findContainingGroups(IGroupMember gm) throws GroupsException
{
log.debug("Finding containing groups for member {}", gm.getKey());
Collection groups = new ArrayList(10);
IEntityGroup group = null;
for ( Iterator it = getGroupStore().findContainingGroups(gm); it.hasNext(); )
{
group = (IEntityGroup) it.next();
group.setLocalGroupService(this);
groups.add(group);
if (cacheInUse())
{
try
{
if ( getGroupFromCache(group.getEntityIdentifier().getKey()) == null )
{ cacheAdd(group); }
}
catch (CachingException ce)
{ throw new GroupsException("Problem finding containing groups", ce); }
}
}
return groups.iterator();
}
/**
* Returns a pre-existing <code>IEntityGroup</code> or null if it
* does not exist.
*/
public IEntityGroup findGroup(String key) throws GroupsException
{
return findGroup(newCompositeEntityIdentifier(key));
}
/**
* Returns a pre-existing <code>IEntityGroup</code> or null if it
* does not exist.
*/
public IEntityGroup findGroup(CompositeEntityIdentifier ent) throws GroupsException
{
return ( cacheInUse() )
? findGroupWithCache(ent)
: primFindGroup(ent.getLocalKey());
}
/**
* Returns a pre-existing <code>IEntityGroup</code> or null if it
* does not exist.
*/
protected IEntityGroup findGroupWithCache(String key) throws GroupsException
{
return findGroupWithCache(newCompositeEntityIdentifier(key));
}
/**
* Returns a pre-existing <code>IEntityGroup</code> or null if it
* does not exist.
*/
protected IEntityGroup findGroupWithCache(CompositeEntityIdentifier ent) throws GroupsException
{
try
{
IEntityGroup group = getGroupFromCache(ent.getKey());
if (group == null)
{
group = primFindGroup(ent.getLocalKey());
if (group != null)
{ cacheAdd(group); }
}
return group;
}
catch (CachingException ce)
{ throw new GroupsException("Problem retrieving group " + ent.getKey(), ce);}
}
/**
* Returns a pre-existing <code>ILockableEntityGroup</code> or null if the
* group is not found.
*/
public ILockableEntityGroup findGroupWithLock(String key, String owner)
throws GroupsException
{
return findGroupWithLock(key, owner, 0);
}
/**
* Returns a pre-existing <code>ILockableEntityGroup</code> or null if the
* group is not found.
*/
public ILockableEntityGroup findGroupWithLock(
String key,
String owner,
int secs)
throws GroupsException {
throwExceptionIfNotInternallyManaged();
Class groupType = org.jasig.portal.EntityTypes.GROUP_ENTITY_TYPE;
try {
IEntityLock lock =
(secs == 0)
? EntityLockService.instance().newWriteLock(groupType, key, owner)
: EntityLockService.instance().newWriteLock(groupType, key, owner, secs);
ILockableEntityGroup group = groupFactory.findLockable(key);
if (group == null) {
lock.release();
} else {
group.setLock(lock);
group.setLocalGroupService(this);
}
return group;
} catch (LockingException le) {
throw new GroupsException(
"Problem getting lock for group " + key, le);
}
}
/**
* Returns and caches the member groups for the <code>IEntityGroup</code>
* @param eg IEntityGroup
*/
protected Iterator findLocalMemberGroups(IEntityGroup eg) throws GroupsException
{
Collection groups = new ArrayList(10);
IEntityGroup group = null;
for ( Iterator it = getGroupStore().findMemberGroups(eg); it.hasNext(); )
{
group = (IEntityGroup) it.next();
if(group == null) {
log.warn("A null IEntityGroup object was part of a list groupStore.findMemberGroups");
continue;
}
group.setLocalGroupService(this);
groups.add(group);
if (cacheInUse())
{
try
{
if ( getGroupFromCache(group.getEntityIdentifier().getKey()) == null )
{ cacheAdd(group); }
}
catch (CachingException ce)
{ throw new GroupsException("Problem finding member groups", ce); }
}
}
return groups.iterator();
}
/**
* Finds the <code>IEntities</code> that are members of <code>group</code>.
*/
public Iterator findMemberEntities(IEntityGroup group) throws GroupsException
{
return getGroupStore().findEntitiesForGroup(group);
}
/**
* Returns member groups for the <code>IEntityGroup</code>. First get the
* member groups that are local to this service. Then retrieve the keys of
* all of the member groups and ask the GroupService to find the groups
* we do not yet have.
*
* @param eg IEntityGroup
*/
public Iterator findMemberGroups(IEntityGroup eg) throws GroupsException
{
Map groups = new HashMap();
IEntityGroup group = null;
for ( Iterator itr = findLocalMemberGroups(eg); itr.hasNext(); )
{
group = (IEntityGroup) itr.next();
groups.put(group.getKey(), group);
}
String[] memberGroupKeys = getGroupStore().findMemberGroupKeys(eg);
for (int i=0; i<memberGroupKeys.length; i++)
{
if ( ! groups.containsKey(memberGroupKeys[i]) )
{
group = GroupService.findGroup(memberGroupKeys[i]);
if ( group != null )
{ groups.put(group.getKey(), group); }
}
}
return groups.values().iterator();
}
/**
* Returns and members for the <code>IEntityGroup</code>.
* @param eg IEntityGroup
*/
public Iterator findMembers(IEntityGroup eg) throws GroupsException
{
Collection members = new ArrayList(10);
Iterator it = null;
for ( it = findMemberGroups(eg); it.hasNext(); )
{ members.add(it.next()); }
for ( it = findMemberEntities(eg); it.hasNext(); )
{ members.add(it.next()); }
return members.iterator();
}
/**
* Returns an <code>IEntity</code> representing a portal entity. This does
* not guarantee that the underlying entity actually exists.
*/
public IEntity getEntity(String key, Class type) throws GroupsException
{
IEntity ent = primGetEntity(key, type);
if ( cacheInUse() )
{
try
{
IEntity cachedEnt = getEntityFromCache(ent.getEntityIdentifier().getKey());
if ( cachedEnt == null )
{ cacheAdd(ent); }
else
{ ent = cachedEnt; }
}
catch (CachingException ce)
{ throw new GroupsException("Problem retrieving group member " + type + "(" + key + ")", ce);}
}
return ent;
}
/**
* Returns an <code>IEntity</code> representing a portal entity. This does
* not guarantee that the entity actually exists.
*/
public IEntityStore getEntityFactory()
{
return entityFactory;
}
/**
* Returns a cached <code>IEntityGroup</code> or null if it has not been cached.
*/
protected IEntityGroup getGroupFromCache(String key) throws CachingException
{
return (IEntityGroup) EntityCachingService.instance().get(org.jasig.portal.EntityTypes.GROUP_ENTITY_TYPE, key);
}
/**
* Returns an <code>IGroupMember</code> representing either a group or a
* portal entity. If the parm <code>type</code> is the group type,
* the <code>IGroupMember</code> is an <code>IEntityGroup</code> else it is
* an <code>IEntity</code>.
*/
public IGroupMember getGroupMember(String key, Class type) throws GroupsException
{
IGroupMember gm = null;
if ( type == org.jasig.portal.EntityTypes.GROUP_ENTITY_TYPE )
gm = findGroup(key);
else
gm = getEntity(key, type);
return gm;
}
/**
* Returns an <code>IGroupMember</code> representing either a group or a
* portal entity, based on the <code>EntityIdentifier</code>, which
* refers to the UNDERLYING entity for the <code>IGroupMember</code>.
*/
public IGroupMember getGroupMember(EntityIdentifier underlyingEntityIdentifier)
throws GroupsException
{
return getGroupMember(underlyingEntityIdentifier.getKey(),
underlyingEntityIdentifier.getType());
}
/**
* Returns the implementation of <code>IEntityGroupStore</code> whose class name
* was retrieved by the PropertiesManager (see initialize()).
*/
public IEntityGroupStore getGroupStore() throws GroupsException
{
return groupFactory;
}
/**
*
*/
protected ComponentGroupServiceDescriptor getServiceDescriptor()
{
return serviceDescriptor;
}
/**
* @exception org.jasig.portal.groups.GroupsException
*/
private void initialize() throws GroupsException
{
String eMsg = null;
String svcName = getServiceDescriptor().getName();
if (log.isDebugEnabled())
log.debug("Service descriptor attributes: " + svcName);
// print service descriptor attributes:
for (Iterator i=getServiceDescriptor().keySet().iterator(); i.hasNext();)
{
String descriptorKey = (String)i.next();
Object descriptorValue = getServiceDescriptor().get(descriptorKey);
if ( descriptorValue != null )
{ if (log.isDebugEnabled())
log.debug(" " + descriptorKey + " : " + descriptorValue); }
}
String groupStoreFactoryName = getServiceDescriptor().getGroupStoreFactoryName();
String entityStoreFactoryName = getServiceDescriptor().getEntityStoreFactoryName();
String entitySearcherFactoryName = getServiceDescriptor().getEntitySearcherFactoryName();
if ( groupStoreFactoryName == null )
{
if (log.isInfoEnabled()) {
log.info("ReferenceGroupService.initialize(): (" + svcName +
") No Group Store factory specified in service descriptor.");
}
}
else
{
try
{
IEntityGroupStoreFactory groupStoreFactory = (IEntityGroupStoreFactory)Class.forName(groupStoreFactoryName).newInstance();
groupFactory = groupStoreFactory.newGroupStore(getServiceDescriptor());
}
catch (Exception e)
{
eMsg = "ReferenceIndividualGroupService.initialize(): Failed to instantiate group store (" + svcName +"): " + e;
log.error( eMsg);
throw new GroupsException(eMsg, e);
}
}
if ( entityStoreFactoryName == null )
{
if (log.isInfoEnabled())
log.info("ReferenceIndividualGroupService.initialize(): " +
"No Entity Store Factory specified in service descriptor (" + svcName + ")");
}
else
{
try
{
IEntityStoreFactory entityStoreFactory = (IEntityStoreFactory)Class.forName(entityStoreFactoryName).newInstance();
entityFactory = entityStoreFactory.newEntityStore();
}
catch (Exception e)
{
eMsg = "ReferenceIndividualGroupService.initialize(): Failed to instantiate entity store " + e;
log.error( eMsg);
throw new GroupsException(eMsg, e);
}
}
if ( entitySearcherFactoryName == null )
{
if (log.isInfoEnabled())
log.info("ReferenceIndividualGroupService.initialize(): " +
"No Entity Searcher Factory specified in service descriptor.");
}
else
{
try
{
IEntitySearcherFactory entitySearcherFactory = (IEntitySearcherFactory)Class.forName(entitySearcherFactoryName).newInstance();
entitySearcher = entitySearcherFactory.newEntitySearcher();
}
catch (Exception e)
{
eMsg = "ReferenceIndividualGroupService.initialize(): Failed to instantiate entity searcher " + e;
log.error( eMsg);
throw new GroupsException(eMsg, e);
}
}
}
/**
* Answers if the group can be updated or deleted in the store.
*/
public boolean isEditable(IEntityGroup group) throws GroupsException
{
return isInternallyManaged();
}
/**
* Answers if this service is managed by the portal and is therefore
* updatable.
*/
protected boolean isInternallyManaged()
{
return getServiceDescriptor().isInternallyManaged();
}
/**
* Answers if this service is a leaf in the composite; a service that
* actually operates on groups.
*/
public boolean isLeafService() {
return true;
}
/**
* Answers if this service is updateable by the portal.
*/
public boolean isEditable()
{
return isInternallyManaged();
}
/**
* Returns a new <code>IEntityGroup</code> for the given Class with an unused
* key.
*/
public IEntityGroup newGroup(Class type) throws GroupsException
{
throwExceptionIfNotInternallyManaged();
IEntityGroup group = groupFactory.newInstance(type);
group.setLocalGroupService(this);
if ( cacheInUse() )
{ cacheAdd(group); }
return group;
}
/**
* Returns a pre-existing <code>IEntityGroup</code> or null if it
* does not exist.
*/
protected IEntityGroup primFindGroup(String localKey) throws GroupsException
{
IEntityGroup group = groupFactory.find(localKey);
if ( group != null )
{ group.setLocalGroupService(this); }
return group;
}
private EntityIdentifier[] removeDuplicates(EntityIdentifier[] entities){
ArrayList ar = new ArrayList(entities.length);
for(int i=0; i< entities.length;i++){
if (!ar.contains(entities[i])){
ar.add(entities[i]);
}
}
return (EntityIdentifier[]) ar.toArray(new EntityIdentifier[0]);
}
public EntityIdentifier[] searchForEntities(String query, int method, Class type) throws GroupsException {
return removeDuplicates(entitySearcher.searchForEntities(query,method,type));
}
public EntityIdentifier[] searchForEntities(String query, int method, Class type, IEntityGroup ancestor) throws GroupsException {
return filterEntities(searchForEntities(query,method,type),ancestor);
}
public EntityIdentifier[] searchForGroups(String query, int method, Class leaftype) throws GroupsException {
return removeDuplicates(groupFactory.searchForGroups(query,method,leaftype));
}
public EntityIdentifier[] searchForGroups(String query, int method, Class leaftype, IEntityGroup ancestor) throws GroupsException {
return filterEntities(searchForGroups(query,method,leaftype),ancestor);
}
/**
*
*/
protected void throwExceptionIfNotInternallyManaged()
throws GroupsException
{
if (! isInternallyManaged() )
{ throw new GroupsException("Group Service " + getServiceName() + " is not updatable."); }
}
/**
* Update the store and the updated members.
* @param group IEntityGroup
*/
public void updateGroup(IEntityGroup group) throws GroupsException
{
throwExceptionIfNotInternallyManaged();
getGroupStore().update(group);
if ( cacheInUse())
{ cacheUpdate(group); }
synchronizeGroupMembersOnUpdate(group);
}
/**
* Updates the <code>ILockableEntityGroup</code> in the cache and the store.
* @param group ILockableEntityGroup
*/
public void updateGroup(ILockableEntityGroup group) throws GroupsException
{
updateGroup(group, false);
}
/**
* Updates the <code>ILockableEntityGroup</code> in the store and removes
* it from the cache.
* @param group ILockableEntityGroup
*/
public void updateGroup(ILockableEntityGroup group, boolean renewLock)
throws GroupsException
{
throwExceptionIfNotInternallyManaged();
try
{
if ( ! group.getLock().isValid() )
{ throw new GroupsException("Could not update group " + group.getKey() +
" has invalid lock."); }
// updateGroup((IEntityGroup)group);
getGroupStore().update(group);
if ( cacheInUse())
{ cacheRemove(group); }
synchronizeGroupMembersOnUpdate(group);
if ( renewLock )
{ group.getLock().renew(); }
else
{ group.getLock().release(); }
}
catch (LockingException le)
{ throw new GroupsException("Problem updating group " + group.getKey(),
le); }
}
/**
* Update the store and the updated members.
* @param group IEntityGroup
*/
public void updateGroupMembers(IEntityGroup group) throws GroupsException {
throwExceptionIfNotInternallyManaged();
getGroupStore().updateMembers(group);
if ( cacheInUse())
{ cacheUpdate(group); }
synchronizeGroupMembersOnUpdate(group);
}
/**
* Updates the <code>ILockableEntityGroup</code> in the cache and the store.
* @param group ILockableEntityGroup
*/
public void updateGroupMembers(ILockableEntityGroup group) throws GroupsException
{
updateGroupMembers(group, false);
}
/**
* Updates the <code>ILockableEntityGroup</code> in the store and removes
* it from the cache.
* @param group ILockableEntityGroup
*/
public void updateGroupMembers(ILockableEntityGroup group, boolean renewLock)
throws GroupsException
{
throwExceptionIfNotInternallyManaged();
try
{
if ( ! group.getLock().isValid() )
{ throw new GroupsException("Could not update group " + group.getKey() +
" has invalid lock."); }
getGroupStore().updateMembers(group);
if ( cacheInUse())
{ cacheRemove(group); }
synchronizeGroupMembersOnUpdate(group);
if ( renewLock )
{ group.getLock().renew(); }
else
{ group.getLock().release(); }
}
catch (LockingException le)
{ throw new GroupsException("Problem updating group " + group.getKey(),
le); }
}
/**
* Returns an <code>IEntity</code> representing a portal entity. This does
* not guarantee that the underlying entity actually exists.
*/
protected IEntity primGetEntity(String key, Class type) throws GroupsException
{
return entityFactory.newInstance(key, type);
}
/**
* Remove the back pointers of the group members of the deleted group. Then
* update the cache to invalidate copies on peer servers.
*
* @param group ILockableEntityGroup
*/
protected void synchronizeGroupMembersOnDelete(IEntityGroup group)
throws GroupsException
{
GroupMemberImpl gmi = null;
for (Iterator it=group.getMembers(); it.hasNext();)
{
gmi = (GroupMemberImpl) it.next();
gmi.removeGroup(group);
if ( cacheInUse() )
{ cacheUpdate(gmi); }
}
}
/**
* Adjust the back pointers of the updated group members to either add or remove
* the parent group. Then update the cache to invalidate copies on peer servers.
*
* @param group ILockableEntityGroup
*/
protected void synchronizeGroupMembersOnUpdate(IEntityGroup group)
throws GroupsException
{
EntityGroupImpl egi = (EntityGroupImpl) group;
GroupMemberImpl gmi = null;
for (Iterator it=egi.getAddedMembers().values().iterator(); it.hasNext();)
{
gmi = (GroupMemberImpl) it.next();
gmi.addGroup(egi);
if ( cacheInUse() )
{ cacheUpdate(gmi); }
}
for (Iterator it=egi.getRemovedMembers().values().iterator(); it.hasNext();)
{
gmi = (GroupMemberImpl) it.next();
gmi.removeGroup(egi);
if ( cacheInUse() )
{ cacheUpdate(gmi); }
}
}
/**
* Answers if <code>group</code> contains <code>member</code>.
* If the group belongs to another service and the present service is
* not editable, simply return false.
* @return boolean
* @param group org.jasig.portal.groups.IEntityGroup
* @param member org.jasig.portal.groups.IGroupMember
*/
public boolean contains(IEntityGroup group, IGroupMember member)
throws GroupsException
{
    // A member belonging to another (non-editable) service cannot be
    // resolved here, so report non-containment rather than delegating.
    if (isForeign(member) && !isEditable())
    {
        return false;
    }
    return getGroupStore().contains(group, member);
}
/**
* A foreign member is a group from a different service.
* @param member IGroupMember
* @return boolean
*/
protected boolean isForeign(IGroupMember member)
{
    // Only groups carry a service name; plain entities are never foreign.
    if (member.isEntity())
    {
        return false;
    }
    Name memberSvcName = ((IEntityGroup) member).getServiceName();
    return !getServiceName().equals(memberSvcName);
}
}
| |
/**
* Portions Copyright 2001 Sun Microsystems, Inc.
* Portions Copyright 1999-2001 Language Technologies Institute,
* Carnegie Mellon University.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*/
package com.sun.speech.freetts.lexicon;
import com.sun.speech.freetts.util.Utilities;
import com.sun.speech.freetts.util.BulkTimer;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
/**
* Provides an implementation of a Lexicon.
*
* <p>This implementation will either read from a straight ASCII file
* or a binary file. When reading from an ASCII file, you can specify
* when the input line is tokenized: load, lookup, or never. If you
* specify 'load', the entire file will be parsed when it is loaded.
* If you specify 'lookup', the file will be loaded, but the parsing
* for each line will be delayed until it is referenced and the parsed
* form will be saved away. If you specify 'never', the lines will
 * form will be saved away.  If you specify 'never', the lines will be
 * parsed each time they are referenced.  The default is 'never'.  To
*
* <pre>
* -Dcom.sun.speech.freetts.lexicon.LexTokenize=load
* </pre>
*
* <p>If a binary file is used, you can also specify whether the new
* IO package is used. The new IO package is new for JDK1.4, and can
* greatly improve the speed of loading files. To enable new IO, use
* the following system property (it is enabled by default):
*
* <pre>
* -Dcom.sun.speech.freetts.useNewIO=true
* </pre>
*
* <p>The implementation also allows users to define their own addenda
* that will be used in addition to the system addenda. If the user
 * defines their own addenda, its values will be added to the system
* addenda, overriding any existing elements in the system addenda.
* To define a user addenda, the user needs to set the following
* property:
*
* <pre>
 * -Dcom.sun.speech.freetts.lexicon.userAddenda=&lt;URLToUserAddenda&gt;
* </pre>
*
* Where <URLToUserAddenda> is a URL pointing to an ASCII file
* containing addenda entries.
*
* <p>[[[TODO: support multiple homographs with the same part of speech.]]]
*/
abstract public class LexiconImpl implements Lexicon {
/**
* If true, the phone string is replaced with the phone array in
* the hashmap when the phone array is loaded. The side effects
* of this are quicker lookups, but more memory usage and a longer
* startup time. Set by the system property
* com.sun.speech.freetts.lexicon.LexTokenize=load.
*/
protected boolean tokenizeOnLoad = false;
/**
* If true, the phone string is replaced with the phone array in
* the hashmap when the phone array is first looked up. The side effects
* are faster repeat lookups at the cost of extra memory use.
* Set by cmufilelex.tokenize=lookup.
*/
protected boolean tokenizeOnLookup = false;
/**
* Magic number for binary Lexicon files.
*/
private final static int MAGIC = 0xBABB1E;
/**
* Current binary file version.
*/
private final static int VERSION = 1;
/**
* URL for the compiled form.
*/
private URL compiledURL;
/**
* URL for the addenda.
*/
private URL addendaURL;
/**
* URL for the letter to sound rules.
*/
private URL letterToSoundURL;
/**
* The addenda.
*/
private Map addenda;
/**
* The compiled lexicon.
*/
private Map compiled;
/**
* The LetterToSound rules.
*/
private LetterToSound letterToSound = null;
/**
* Parts of Speech.
*/
private ArrayList partsOfSpeech = new ArrayList();
/**
* A static directory of compiledURL URL objects and associated
* already-loaded compiled Map objects. This is used to share
* the immutable compiled lexicons between lexicon instances.
* As the addenda can be changed using <code>addAddendum()</code>
* and <code>removeAddendum</code>, each lexicon instance has its
* own addenda.
*/
private static Map loadedCompiledLexicons;
/**
* Loaded State of the lexicon
*/
private boolean loaded = false;
/**
* Type of lexicon to load
*/
private boolean binary = false;
/**
* No phones for this word.
*/
final static private String[] NO_PHONES = new String[0];
/**
* Temporary place holder. Shared scratch buffer used by the
* getString methods; sharing it is what makes those methods
* non-re-entrant.
*/
private char charBuffer[] = new char[128];
/**
* Use the new IO package?
*/
private boolean useNewIO =
Utilities.getProperty("com.sun.speech.freetts.useNewIO",
"true").equals("true");
/**
* Create a new LexiconImpl by reading from the given URLS.
*
* @param compiledURL a URL pointing to the compiled lexicon
* @param addendaURL a URL pointing to lexicon addenda
* @param letterToSoundURL a LetterToSound to use if a word cannot
* be found in the compiled form or the addenda
* @param binary if <code>true</code>, the input streams are binary;
* otherwise, they are text.
*/
public LexiconImpl(URL compiledURL, URL addendaURL,
URL letterToSoundURL,
boolean binary) {
this();
setLexiconParameters(compiledURL, addendaURL, letterToSoundURL, binary);
}
/**
* Class constructor for an empty Lexicon. Reads the LexTokenize
* system property to decide when phone strings are tokenized.
*/
public LexiconImpl() {
// Find out when to convert the phone string into an array.
//
String tokenize =
Utilities.getProperty("com.sun.speech.freetts.lexicon.LexTokenize",
"never");
tokenizeOnLoad = tokenize.equals("load");
tokenizeOnLookup = tokenize.equals("lookup");
}
/**
* Sets the lexicon parameters
* @param compiledURL a URL pointing to the compiled lexicon
* @param addendaURL a URL pointing to lexicon addenda
* @param letterToSoundURL a URL pointing to the LetterToSound to use
* @param binary if <code>true</code>, the input streams are binary;
* otherwise, they are text.
*/
protected void setLexiconParameters(URL compiledURL,
URL addendaURL,
URL letterToSoundURL,
boolean binary) {
this.compiledURL = compiledURL;
this.addendaURL = addendaURL;
this.letterToSoundURL = letterToSoundURL;
this.binary = binary;
}
/**
* Determines if this lexicon is loaded.
*
* @return <code>true</code> if the lexicon is loaded
*/
public boolean isLoaded() {
return loaded;
}
/**
* Loads the data for this lexicon. If the compiled form for
* compiledURL has already been loaded by another instance, the
* shared, immutable copy is reused; otherwise it is read and cached
* for future instances. The addenda (system plus optional
* user-defined) are always loaded per instance.
*
* @throws IOException if errors occur during loading
*/
public void load() throws IOException {
BulkTimer.LOAD.start("Lexicon");
if (compiledURL == null) {
throw new IOException("Can't load lexicon");
}
if (addendaURL == null) {
throw new IOException("Can't load lexicon addenda " );
}
if (loadedCompiledLexicons == null) {
loadedCompiledLexicons = new HashMap();
}
if (!loadedCompiledLexicons.containsKey(compiledURL)) {
InputStream compiledIS = Utilities.getInputStream(compiledURL);
if (compiledIS == null) {
throw new IOException("Can't load lexicon from " + compiledURL);
}
Map newCompiled = createLexicon(compiledIS, binary, 65000);
loadedCompiledLexicons.put(compiledURL, newCompiled);
compiledIS.close();
}
// Share the compiled map read-only across instances.
compiled = Collections.unmodifiableMap((Map)loadedCompiledLexicons.get(compiledURL));
InputStream addendaIS = Utilities.getInputStream(addendaURL);
if (addendaIS == null) {
throw new IOException("Can't load lexicon addenda from "
+ addendaURL);
}
// [[[TODO: what is the best way to derive the estimated sizes?]]]
//
addenda = createLexicon(addendaIS, binary, 50);
addendaIS.close();
/* Load the user-defined addenda and override any existing
* entries in the system addenda.
*/
String userAddenda = Utilities.getProperty(
"com.sun.speech.freetts.lexicon.userAddenda", null);
if (userAddenda != null) {
try {
URL userAddendaURL = new URL(userAddenda);
InputStream userAddendaIS = Utilities.getInputStream(
userAddendaURL);
if (userAddendaIS == null) {
throw new IOException("Can't load user addenda from "
+ userAddenda);
}
// User addenda are always text, never binary.
Map tmpAddenda = createLexicon(userAddendaIS, false, 50);
userAddendaIS.close();
for (Iterator keys = tmpAddenda.keySet().iterator();
keys.hasNext();) {
Object key = keys.next();
addenda.put(key, tmpAddenda.get(key));
}
} catch (MalformedURLException e) {
throw new IOException("User addenda URL is malformed: " +
userAddenda);
}
}
loaded = true;
BulkTimer.LOAD.stop("Lexicon");
letterToSound = new LetterToSoundImpl(letterToSoundURL, binary);
}
/**
* Reads the given input stream as lexicon data and returns the
* results in a <code>Map</code>.
*
* @param is the input stream
* @param binary if <code>true</code>, the data is binary
* @param estimatedSize the estimated size of the lexicon
*
* @return a <code>Map</code> containing the lexicon entries
*
* @throws IOException if errors are encountered while reading the data
*/
protected Map createLexicon(InputStream is,
boolean binary,
int estimatedSize)
throws IOException {
if (binary) {
// Memory-mapped IO is only possible for real file streams.
if (useNewIO && is instanceof FileInputStream) {
FileInputStream fis = (FileInputStream) is;
return loadMappedBinaryLexicon(fis, estimatedSize);
} else {
DataInputStream dis = new DataInputStream(
new BufferedInputStream(is));
return loadBinaryLexicon(dis, estimatedSize);
}
} else {
return loadTextLexicon(is, estimatedSize);
}
}
/**
* Reads the given input stream as text lexicon data and returns the
* results in a <code>Map</code>.
*
* @param is the input stream
* @param estimatedSize the estimated number of entries of the lexicon
*
* @return a <code>Map</code> containing the lexicon entries
*
* @throws IOException if errors are encountered while reading the data
*/
protected Map loadTextLexicon(InputStream is, int estimatedSize)
throws IOException {
Map lexicon = new LinkedHashMap(estimatedSize * 4 / 3);
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
String line;
line = reader.readLine();
while (line != null) {
// Lines starting with "***" are skipped.
if (!line.startsWith("***")) {
parseAndAdd(lexicon, line);
}
line = reader.readLine();
}
return lexicon;
}
/**
* Creates a word from the given input line and add it to the lexicon.
* The first tab-separated field is the word with its one-character
* part of speech appended; the optional second field is the phone
* string.
*
* @param lexicon the lexicon
* @param line the input text
*/
protected void parseAndAdd(Map lexicon, String line) {
StringTokenizer tokenizer = new StringTokenizer(line,"\t");
String phones = null;
String wordAndPos = tokenizer.nextToken();
String pos = wordAndPos.substring(wordAndPos.length() - 1);
if (!partsOfSpeech.contains(pos)) {
partsOfSpeech.add(pos);
}
if (tokenizer.hasMoreTokens()) {
phones = tokenizer.nextToken();
}
if ((phones != null) && (tokenizeOnLoad)) {
lexicon.put(wordAndPos, getPhones(phones));
} else if (phones == null) {
lexicon.put(wordAndPos, NO_PHONES);
} else {
lexicon.put(wordAndPos, phones);
}
}
/**
* Gets the phone list for a given word. If a phone list cannot
* be found, returns <code>null</code>. The format is lexicon
* dependent. If the part of speech does not matter, pass in
* <code>null</code>.
*
* @param word the word to find
* @param partOfSpeech the part of speech
*
* @return the list of phones for word or <code>null</code>
*/
public String[] getPhones(String word, String partOfSpeech) {
return getPhones(word, partOfSpeech, true);
}
/**
* Gets the phone list for a given word. If a phone list cannot
* be found, <code>null</code> is returned. The
* <code>partOfSpeech</code> is implementation dependent, but
* <code>null</code> always matches. Lookup order: addenda,
* compiled lexicon, then (optionally) letter-to-sound rules.
* A defensive copy of the phone array is returned.
*
* @param word the word to find
* @param partOfSpeech the part of speech or <code>null</code>
* @param useLTS whether to use the letter-to-sound rules when
* the word is not in the lexicon.
*
* @return the list of phones for word or null
*/
public String[] getPhones
(String word, String partOfSpeech, boolean useLTS){
String[] phones = null;
phones = getPhones(addenda, word, partOfSpeech);
if (phones == null) {
phones = getPhones(compiled, word, partOfSpeech);
}
if(useLTS){
if (phones == null && letterToSound != null) {
phones = letterToSound.getPhones(word, partOfSpeech);
}
}
if(phones != null){
// Copy so callers cannot mutate cached phone arrays.
String[] copy = new String[phones.length];
System.arraycopy(phones, 0, copy, 0, phones.length);
return copy;
}
else return null;
}
/**
* Gets a phone list for a word from a given lexicon. If a phone
* list cannot be found, returns <code>null</code>. The format is
* lexicon dependent. If the part of speech does not matter, pass
* in <code>null</code>. When the requested part of speech misses,
* the other known parts of speech are tried in order.
*
* @param lexicon the lexicon
* @param word the word to find
* @param partOfSpeech the part of speech
*
* @return the list of phones for word or <code>null</code>
*/
protected String[] getPhones(Map lexicon,
String word,
String partOfSpeech) {
String[] phones;
partOfSpeech = fixPartOfSpeech(partOfSpeech);
phones = getPhones(lexicon, word+partOfSpeech);
for (int i = 0;
(i < partsOfSpeech.size()) && (phones == null);
i++) {
if (!partOfSpeech.equals((String) partsOfSpeech.get(i))) {
phones = getPhones(lexicon,
word + (String) partsOfSpeech.get(i));
}
}
return phones;
}
/**
* Gets a phone list for a word from a given lexicon. If a phone
* list cannot be found, returns <code>null</code>. Entries may be
* stored as raw strings; those are tokenized on demand and, when
* tokenizeOnLookup is set, the parsed array replaces the string.
*
* @param lexicon the lexicon
* @param wordAndPartOfSpeech word and part of speech concatenated
* together
*
* @return the list of phones for word or <code>null</code>
*/
protected String[] getPhones(Map lexicon,
String wordAndPartOfSpeech) {
Object value = lexicon.get(wordAndPartOfSpeech);
if (value instanceof String[]) {
return (String[]) value;
} else if (value instanceof String) {
String[] phoneArray;
phoneArray = getPhones((String) value);
if (tokenizeOnLookup) {
lexicon.put(wordAndPartOfSpeech, phoneArray);
}
return phoneArray;
} else {
return null;
}
}
/**
* Turns the phone <code>String</code> into a <code>String[]</code>,
* using " " as the delimiter.
*
* @param phones the phones
*
* @return the phones split into an array
*/
protected String[] getPhones(String phones) {
ArrayList phoneList = new ArrayList();
StringTokenizer tokenizer = new StringTokenizer(phones, " ");
while (tokenizer.hasMoreTokens()) {
phoneList.add(tokenizer.nextToken());
}
return (String[]) phoneList.toArray(new String[0]);
}
/**
* Adds a word to the addenda.
*
* @param word the word to find
* @param partOfSpeech the part of speech
* @param phones the phones for the word
*
*/
public void addAddendum(String word,
String partOfSpeech,
String[] phones) {
String pos = fixPartOfSpeech(partOfSpeech);
if (!partsOfSpeech.contains(pos)) {
partsOfSpeech.add(pos);
}
addenda.put(word + pos, phones);
}
/**
* Removes a word from the addenda.
*
* @param word the word to remove
* @param partOfSpeech the part of speech
*/
public void removeAddendum(String word, String partOfSpeech) {
addenda.remove(word + fixPartOfSpeech(partOfSpeech));
}
/**
* Outputs a string to a data output stream. The length is written
* as a single byte followed by the characters, so strings longer
* than 127 characters will not round-trip correctly.
*
* @param dos the data output stream
* @param s the string to output
*
* @throws IOException if errors occur during writing
*/
private void outString(DataOutputStream dos, String s)
throws IOException {
dos.writeByte((byte) s.length());
for (int i = 0; i < s.length(); i++) {
dos.writeChar(s.charAt(i));
}
}
/**
* Inputs a string from a DataInputStream. This method is not re-entrant
* (it reads into the shared charBuffer).
*
* @param dis the data input stream
*
* @return the string
*
* @throws IOException if errors occur during reading
*/
private String getString(DataInputStream dis) throws IOException {
int size = dis.readByte();
for (int i = 0; i < size; i++) {
charBuffer[i] = dis.readChar();
}
return new String(charBuffer, 0, size);
}
/**
* Inputs a string from a ByteBuffer. This method is not re-entrant
* (it reads into the shared charBuffer).
*
* @param bb the input byte buffer
*
* @return the string
*
* @throws IOException if errors occur during reading
*/
private String getString(ByteBuffer bb) throws IOException {
int size = bb.get();
for (int i = 0; i < size; i++) {
charBuffer[i] = bb.getChar();
}
return new String(charBuffer, 0, size);
}
/**
* Dumps a binary form of the database. This method is not thread-safe.
*
* <p>Binary format is:
* <pre>
* MAGIC
* VERSION
* (int) numPhonemes
* (String) phoneme0
* (String) phoneme1
* (String) phonemeN
* (int) numEntries
* (String) nameWithPOS
* (byte) numPhonemes
* phoneme index 1
* phoneme index 2
* phoneme index n
* </pre>
*
* <p>Strings are formatted as: <code>(byte) len char0 char1 charN</code>
*
* <p>Limits: Strings: 128 chars
* <p>Limits: Strings: 128 phonemes per word
*
* @param lexicon the lexicon to dump
* @param path the path to dump the file to
*/
private void dumpBinaryLexicon(Map lexicon, String path) {
try {
FileOutputStream fos = new FileOutputStream(path);
DataOutputStream dos = new DataOutputStream(new
BufferedOutputStream(fos));
List phonemeList = findPhonemes(lexicon);
dos.writeInt(MAGIC);
dos.writeInt(VERSION);
dos.writeInt(phonemeList.size());
for (int i = 0; i < phonemeList.size(); i++) {
outString(dos, (String) phonemeList.get(i));
}
dos.writeInt(lexicon.keySet().size());
for (Iterator i = lexicon.keySet().iterator(); i.hasNext(); ) {
String key = (String) i.next();
outString(dos, key);
String[] phonemes = getPhones(lexicon, key);
// Phonemes are written as indices into the phoneme table.
dos.writeByte((byte) phonemes.length);
for (int index = 0; index < phonemes.length; index++) {
int phonemeIndex = phonemeList.indexOf(phonemes[index]);
if (phonemeIndex == -1) {
throw new Error("Can't find phoneme index");
}
dos.writeByte((byte) phonemeIndex);
}
}
dos.close();
} catch (FileNotFoundException fe) {
throw new Error("Can't dump binary database " +
fe.getMessage());
} catch (IOException ioe) {
throw new Error("Can't write binary database " +
ioe.getMessage());
}
}
/**
* Loads the binary lexicon from the given InputStream using
* memory-mapped IO. This method is not thread safe.
*
* @param is the InputStream to load the database from
* @param estimatedSize estimate of how large the database is
*
* @return a <code>Map</code> containing the lexicon
*
* @throws IOException if an IO error occurs
*/
private Map loadMappedBinaryLexicon(FileInputStream is, int estimatedSize)
throws IOException {
FileChannel fc = is.getChannel();
MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY,
0, (int) fc.size());
bb.load();
int size = 0;
int numEntries = 0;
List phonemeList = new ArrayList();
// we get better performance for some reason if we
// just ignore estimated size
//
// Map lexicon = new HashMap();
Map lexicon = new LinkedHashMap(estimatedSize * 4 / 3);
if (bb.getInt() != MAGIC) {
throw new Error("bad magic number in lexicon");
}
if (bb.getInt() != VERSION) {
throw new Error("bad version number in lexicon");
}
size = bb.getInt();
for (int i = 0; i < size; i++) {
String phoneme = getString(bb);
phonemeList.add(phoneme);
}
numEntries = bb.getInt();
for (int i = 0; i < numEntries; i++) {
String wordAndPos = getString(bb);
String pos = Character.toString(
wordAndPos.charAt(wordAndPos.length() - 1));
if (!partsOfSpeech.contains(pos)) {
partsOfSpeech.add(pos);
}
int numPhonemes = bb.get();
String[] phonemes = new String[numPhonemes];
for (int j = 0; j < numPhonemes; j++) {
phonemes[j] = (String) phonemeList.get(bb.get());
}
lexicon.put(wordAndPos, phonemes);
}
fc.close();
return lexicon;
}
/**
* Loads the binary lexicon from the given InputStream using
* stream-based IO. This method is not thread safe.
*
* @param is the InputStream to load the database from
* @param estimatedSize estimate of how large the database is
* (currently ignored; see comment below)
*
* @return a <code>Map</code> containing the lexicon
*
* @throws IOException if an IO error occurs
*/
private Map loadBinaryLexicon(InputStream is, int estimatedSize)
throws IOException {
DataInputStream dis = new DataInputStream(new
BufferedInputStream(is));
int size = 0;
int numEntries = 0;
List phonemeList = new ArrayList();
// we get better performance for some reason if we
// just ignore estimated size
//
Map lexicon = new LinkedHashMap();
if (dis.readInt() != MAGIC) {
throw new Error("bad magic number in lexicon");
}
if (dis.readInt() != VERSION) {
throw new Error("bad version number in lexicon");
}
size = dis.readInt();
for (int i = 0; i < size; i++) {
String phoneme = getString(dis);
phonemeList.add(phoneme);
}
numEntries = dis.readInt();
for (int i = 0; i < numEntries; i++) {
String wordAndPos = getString(dis);
String pos = Character.toString(
wordAndPos.charAt(wordAndPos.length() - 1));
if (!partsOfSpeech.contains(pos)) {
partsOfSpeech.add(pos);
}
int numPhonemes = dis.readByte();
String[] phonemes = new String[numPhonemes];
for (int j = 0; j < numPhonemes; j++) {
phonemes[j] = (String) phonemeList.get(dis.readByte());
}
lexicon.put(wordAndPos, phonemes);
}
dis.close();
return lexicon;
}
/**
* Dumps this lexicon (just the compiled form). Lexicon will be
* dumped to two binary files PATH_compiled.bin and
* PATH_addenda.bin
*
* @param path the root path to dump it to
*/
public void dumpBinary(String path) {
String compiledPath = path + "_compiled.bin";
String addendaPath = path + "_addenda.bin";
dumpBinaryLexicon(compiled, compiledPath);
dumpBinaryLexicon(addenda, addendaPath);
}
/**
* Returns a list of the unique phonemes in the lexicon.
*
* @param lexicon the lexicon of interest
*
* @return list the unique set of phonemes
*/
private List findPhonemes(Map lexicon) {
List phonemeList = new ArrayList();
for (Iterator i = lexicon.keySet().iterator(); i.hasNext(); ) {
String key = (String) i.next();
String[] phonemes = getPhones(lexicon, key);
for (int index = 0; index < phonemes.length; index++) {
if (!phonemeList.contains(phonemes[index])) {
phonemeList.add(phonemes[index]);
}
}
}
return phonemeList;
}
/**
* Tests to see if this lexicon is identical to the other for
* debugging purposes.
*
* @param other the other lexicon to compare to
*
* @return true if lexicons are identical
*/
public boolean compare(LexiconImpl other) {
return compare(addenda, other.addenda) &&
compare(compiled, other.compiled);
}
/**
* Determines if the two lexicons are identical for debugging purposes.
* Note: only keys present in <code>lex</code> are checked, so keys
* that exist only in <code>other</code> are not detected here.
*
* @param lex this lex
* @param other the other lexicon to check
*
* @return true if they are identical
*/
private boolean compare(Map lex, Map other) {
for (Iterator i = lex.keySet().iterator(); i.hasNext(); ) {
String key = (String) i.next();
String[] thisPhonemes = getPhones(lex, key);
String[] otherPhonemes = getPhones(other, key);
if (thisPhonemes == null) {
System.out.println(key + " not found in this.");
return false;
} else if (otherPhonemes == null) {
System.out.println(key + " not found in other.");
return false;
} else if (thisPhonemes.length == otherPhonemes.length) {
for (int j = 0; j < thisPhonemes.length; j++) {
if (!thisPhonemes[j].equals(otherPhonemes[j])) {
return false;
}
}
} else {
return false;
}
}
return true;
}
/**
* Fixes the part of speech if it is <code>null</code>. The
* default representation of a <code>null</code> part of speech
* is the number "0".
*/
static protected String fixPartOfSpeech(String partOfSpeech) {
return (partOfSpeech == null) ? "0" : partOfSpeech;
}
}
| |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.am.repository.management.api;
import io.gravitee.am.common.utils.RandomString;
import io.gravitee.am.model.ReferenceType;
import io.gravitee.am.model.alert.AlertTrigger;
import io.gravitee.am.model.alert.AlertTriggerType;
import io.gravitee.am.repository.management.AbstractManagementTest;
import io.gravitee.am.repository.management.api.search.AlertTriggerCriteria;
import io.reactivex.observers.TestObserver;
import io.reactivex.subscribers.TestSubscriber;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
/**
* @author Jeoffrey HAEYAERT (jeoffrey.haeyaert at graviteesource.com)
* @author GraviteeSource Team
*/
public class AlertTriggerRepositoryTest extends AbstractManagementTest {
private static final String DOMAIN_ID = "domain#1";
private static final String NOTIFIER_ID1 = "notifier#1";
private static final String NOTIFIER_ID2 = "notifier#2";
@Autowired
private AlertTriggerRepository alertTriggerRepository;
@Test
public void testFindById() {
// create an alert trigger
AlertTrigger alertTrigger = buildAlertTrigger();
AlertTrigger alertTriggerCreated = alertTriggerRepository.create(alertTrigger).blockingGet();
// fetch it back by id; both notifier ids must survive the round trip
TestObserver<AlertTrigger> testObserver = alertTriggerRepository.findById(alertTriggerCreated.getId()).test();
testObserver.awaitTerminalEvent();
testObserver.assertComplete();
testObserver.assertNoErrors();
testObserver.assertValue(found -> found.getId().equals(alertTrigger.getId()) && found.getAlertNotifiers().size() == 2);
}
@Test
public void testNotFoundById() {
// an unknown id completes empty rather than erroring
alertTriggerRepository.findById("UNKNOWN").test().assertEmpty();
}
@Test
public void testCreate() {
AlertTrigger alertTrigger = buildAlertTrigger();
TestObserver<AlertTrigger> testObserver = alertTriggerRepository.create(alertTrigger).test();
testObserver.awaitTerminalEvent();
testObserver.assertComplete();
testObserver.assertNoErrors();
testObserver.assertValue(idp -> idp.getId().equals(alertTrigger.getId()));
}
@Test
public void testUpdate() {
// create an alert trigger
AlertTrigger alertTrigger = buildAlertTrigger();
AlertTrigger alertTriggerCreated = alertTriggerRepository.create(alertTrigger).blockingGet();
// update it (same id, enabled flipped to false)
AlertTrigger updatedAlertTrigger = buildAlertTrigger();
updatedAlertTrigger.setId(alertTriggerCreated.getId());
updatedAlertTrigger.setEnabled(false);
TestObserver<AlertTrigger> testObserver = alertTriggerRepository.update(updatedAlertTrigger).test();
testObserver.awaitTerminalEvent();
testObserver.assertComplete();
testObserver.assertNoErrors();
testObserver.assertValue(updated -> updated.getId().equals(updatedAlertTrigger.getId())
&& !updated.isEnabled());
}
@Test
public void testDelete() {
// create an alert trigger
AlertTrigger alertTrigger = buildAlertTrigger();
AlertTrigger alertTriggerCreated = alertTriggerRepository.create(alertTrigger).blockingGet();
// delete it
TestObserver<Void> testObserver1 = alertTriggerRepository.delete(alertTriggerCreated.getId()).test();
testObserver1.awaitTerminalEvent();
// fetching it again must now complete empty
alertTriggerRepository.findById(alertTriggerCreated.getId()).test().assertEmpty();
}
@Test
public void findByCriteria_alertNotifiers() {
// one trigger without notifiers, one with the default two notifiers
AlertTrigger alertTriggerToCreate = buildAlertTrigger();
alertTriggerToCreate.setAlertNotifiers(Collections.emptyList());
alertTriggerRepository.create(alertTriggerToCreate).blockingGet();
alertTriggerToCreate = buildAlertTrigger();
AlertTrigger alertTriggerCreated = alertTriggerRepository.create(alertTriggerToCreate).blockingGet();
// enabled=false matches nothing yet: both triggers are enabled
AlertTriggerCriteria criteria = new AlertTriggerCriteria();
criteria.setEnabled(false);
criteria.setAlertNotifierIds(Collections.singletonList(NOTIFIER_ID1));
TestSubscriber<AlertTrigger> testObserver1 = alertTriggerRepository.findByCriteria(ReferenceType.DOMAIN, DOMAIN_ID, criteria).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertNoErrors();
testObserver1.assertNoValues();
// disable the trigger carrying NOTIFIER_ID1; now the criteria matches it
alertTriggerCreated.setEnabled(false);
final AlertTrigger alertTriggerUpdated = alertTriggerRepository.update(alertTriggerCreated).blockingGet();
testObserver1 = alertTriggerRepository.findByCriteria(ReferenceType.DOMAIN, DOMAIN_ID, criteria).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertNoErrors();
testObserver1.assertValue(alertTrigger -> alertTrigger.getId().equals(alertTriggerUpdated.getId()));
}
@Test
public void findByCriteria_type() {
AlertTrigger alertTriggerToCreate = buildAlertTrigger();
AlertTrigger alertTriggerCreated = alertTriggerRepository.create(alertTriggerToCreate).blockingGet();
// filter on enabled + the type buildAlertTrigger() assigns
AlertTriggerCriteria criteria = new AlertTriggerCriteria();
criteria.setEnabled(true);
criteria.setType(AlertTriggerType.TOO_MANY_LOGIN_FAILURES);
TestSubscriber<AlertTrigger> testObserver1 = alertTriggerRepository.findByCriteria(ReferenceType.DOMAIN, DOMAIN_ID, criteria).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertNoErrors();
testObserver1.assertValue(alertTrigger -> alertTrigger.getId().equals(alertTriggerCreated.getId()));
}
@Test
public void findAll() {
// repository starts empty for this domain
TestSubscriber<AlertTrigger> testObserver1 = alertTriggerRepository.findAll(ReferenceType.DOMAIN, DOMAIN_ID).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertNoErrors();
testObserver1.assertNoValues();
// create one trigger in DOMAIN_ID and one in another domain
AlertTrigger alertTriggerToCreate1 = buildAlertTrigger();
AlertTrigger alertTriggerToCreate2 = buildAlertTrigger();
alertTriggerToCreate2.setReferenceId("domain#2");
AlertTrigger alertTriggerCreated1 = alertTriggerRepository.create(alertTriggerToCreate1).blockingGet();
alertTriggerRepository.create(alertTriggerToCreate2).blockingGet();
// only the DOMAIN_ID trigger is returned
testObserver1 = alertTriggerRepository.findAll(ReferenceType.DOMAIN, DOMAIN_ID).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertValue(alertTrigger -> alertTrigger.getId().equals(alertTriggerCreated1.getId()));
}
@Test
public void findByCriteriaWithEmptyAlertNotifierIdList() {
// repository starts empty for this domain
TestSubscriber<AlertTrigger> testObserver1 = alertTriggerRepository.findAll(ReferenceType.DOMAIN, DOMAIN_ID).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertNoErrors();
testObserver1.assertNoValues();
AlertTrigger alertTriggerToCreate1 = buildAlertTrigger();
AlertTrigger alertTriggerToCreate2 = buildAlertTrigger();
alertTriggerToCreate2.setReferenceId("domain#2");
AlertTrigger alertTriggerCreated1 = alertTriggerRepository.create(alertTriggerToCreate1).blockingGet();
alertTriggerRepository.create(alertTriggerToCreate2).blockingGet();
// an empty notifier-id list must not filter anything out
final AlertTriggerCriteria criteria = new AlertTriggerCriteria();
criteria.setAlertNotifierIds(Collections.emptyList());
testObserver1 = alertTriggerRepository.findByCriteria(ReferenceType.DOMAIN, DOMAIN_ID, criteria).test();
testObserver1.awaitTerminalEvent();
testObserver1.assertComplete();
testObserver1.assertValue(alertTrigger -> alertTrigger.getId().equals(alertTriggerCreated1.getId()));
}
// Builds an enabled TOO_MANY_LOGIN_FAILURES trigger on DOMAIN_ID with
// both notifier ids and a random id.
private AlertTrigger buildAlertTrigger() {
AlertTrigger alertTrigger = new AlertTrigger();
alertTrigger.setId(RandomString.generate());
alertTrigger.setEnabled(true);
alertTrigger.setType(AlertTriggerType.TOO_MANY_LOGIN_FAILURES);
alertTrigger.setReferenceType(ReferenceType.DOMAIN);
alertTrigger.setReferenceId(DOMAIN_ID);
alertTrigger.setAlertNotifiers(Arrays.asList(NOTIFIER_ID1, NOTIFIER_ID2));
alertTrigger.setCreatedAt(new Date());
alertTrigger.setUpdatedAt(new Date());
return alertTrigger;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform.compute;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteCompute;
import org.apache.ignite.internal.IgniteComputeImpl;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.binary.BinaryObjectImpl;
import org.apache.ignite.internal.binary.BinaryRawReaderEx;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.processors.platform.PlatformAbstractTarget;
import org.apache.ignite.internal.processors.platform.PlatformContext;
import org.apache.ignite.internal.processors.platform.utils.*;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.binary.BinaryObject;
import static org.apache.ignite.internal.processors.task.GridTaskThreadContextKey.TC_SUBGRID;
/**
* Interop compute.
*/
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored", "UnusedDeclaration"})
/**
 * Interop compute target: receives compute requests serialized by the platform
 * side (.NET/C++), deserializes the closures/tasks and runs them through the
 * Java {@link IgniteCompute} facade, reporting completion back via native task
 * pointers and {@link PlatformListenable}s.
 */
@SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored", "UnusedDeclaration"})
public class PlatformCompute extends PlatformAbstractTarget {
    /** Operation: execute closure(s) collocated with an affinity key. */
    private static final int OP_AFFINITY = 1;

    /** Operation: broadcast closure(s) to every node of the cluster group. */
    private static final int OP_BROADCAST = 2;

    /** Operation: execute a named Java task synchronously. */
    private static final int OP_EXEC = 3;

    /** Operation: execute a named Java task asynchronously. */
    private static final int OP_EXEC_ASYNC = 4;

    /** Operation: execute closure(s) with default load balancing. */
    private static final int OP_UNICAST = 5;

    /** Compute instance. */
    private final IgniteComputeImpl compute;

    /** Future for previous asynchronous operation (tracked per calling thread). */
    protected ThreadLocal<IgniteFuture<?>> curFut = new ThreadLocal<>();

    /**
     * Constructor.
     *
     * @param platformCtx Context.
     * @param compute Compute instance.
     */
    public PlatformCompute(PlatformContext platformCtx, IgniteComputeImpl compute) {
        super(platformCtx);

        this.compute = compute;
    }

    /** {@inheritDoc} */
    @Override protected Object processInStreamOutObject(int type, BinaryRawReaderEx reader)
        throws IgniteCheckedException {
        switch (type) {
            case OP_UNICAST:
                return processClosures(reader.readLong(), reader, false, false);

            case OP_BROADCAST:
                return processClosures(reader.readLong(), reader, true, false);

            case OP_AFFINITY:
                return processClosures(reader.readLong(), reader, false, true);

            default:
                return super.processInStreamOutObject(type, reader);
        }
    }

    /**
     * Process closure execution request.
     *
     * @param taskPtr Pointer to the native-side task wrapper.
     * @param reader Reader positioned at the closure count.
     * @param broadcast Broadcast flag: send each job to all nodes of the group.
     * @param affinity Affinity flag: route a single job to the node owning the
     *      affinity key that follows in the stream (cache name, then key).
     * @return Listenable tied to the started task's future.
     */
    private PlatformListenable processClosures(long taskPtr, BinaryRawReaderEx reader, boolean broadcast,
        boolean affinity) {
        PlatformAbstractTask task;

        int size = reader.readInt();

        if (size == 1) {
            if (broadcast) {
                PlatformBroadcastingSingleClosureTask task0 =
                    new PlatformBroadcastingSingleClosureTask(platformCtx, taskPtr);

                task0.job(nextClosureJob(task0, reader));

                task = task0;
            }
            else if (affinity) {
                PlatformBalancingSingleClosureAffinityTask task0 =
                    new PlatformBalancingSingleClosureAffinityTask(platformCtx, taskPtr);

                task0.job(nextClosureJob(task0, reader));

                // Cache name + affinity key + kernal context determine the target node.
                task0.affinity(reader.readString(), reader.readObjectDetached(), platformCtx.kernalContext());

                task = task0;
            }
            else {
                PlatformBalancingSingleClosureTask task0 = new PlatformBalancingSingleClosureTask(platformCtx, taskPtr);

                task0.job(nextClosureJob(task0, reader));

                task = task0;
            }
        }
        else {
            if (broadcast)
                task = new PlatformBroadcastingMultiClosureTask(platformCtx, taskPtr);
            else
                task = new PlatformBalancingMultiClosureTask(platformCtx, taskPtr);

            Collection<PlatformJob> jobs = new ArrayList<>(size);

            for (int i = 0; i < size; i++)
                jobs.add(nextClosureJob(task, reader));

            if (broadcast)
                ((PlatformBroadcastingMultiClosureTask)task).jobs(jobs);
            else
                ((PlatformBalancingMultiClosureTask)task).jobs(jobs);
        }

        // Restrict execution to the nodes of this compute's cluster group.
        platformCtx.kernalContext().task().setThreadContext(TC_SUBGRID, compute.clusterGroup().nodes());

        return executeNative0(task);
    }

    /**
     * Read the next closure job from the reader.
     *
     * @param task Task.
     * @param reader Reader (consumes a job pointer, then the detached job object).
     * @return Closure job.
     */
    private PlatformJob nextClosureJob(PlatformAbstractTask task, BinaryRawReaderEx reader) {
        return platformCtx.createClosureJob(task, reader.readLong(), reader.readObjectDetached());
    }

    /** {@inheritDoc} */
    @Override protected void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer)
        throws IgniteCheckedException {
        switch (type) {
            case OP_EXEC:
                writer.writeObjectDetached(executeJavaTask(reader, false));

                break;

            case OP_EXEC_ASYNC:
                writer.writeObjectDetached(executeJavaTask(reader, true));

                break;

            default:
                super.processInStreamOutStream(type, reader, writer);
        }
    }

    /**
     * Execute native full-fledged task.
     *
     * @param taskPtr Pointer to the task.
     * @param topVer Topology version.
     * @return Listenable tied to the started task's future.
     */
    public PlatformListenable executeNative(long taskPtr, long topVer) {
        final PlatformFullTask task = new PlatformFullTask(platformCtx, compute, taskPtr, topVer);

        return executeNative0(task);
    }

    /**
     * Set "withTimeout" state.
     *
     * @param timeout Timeout (milliseconds).
     */
    public void withTimeout(long timeout) {
        compute.withTimeout(timeout);
    }

    /**
     * Set "withNoFailover" state.
     */
    public void withNoFailover() {
        compute.withNoFailover();
    }

    /** {@inheritDoc} */
    @Override protected IgniteFuture currentFuture() throws IgniteCheckedException {
        IgniteFuture<?> fut = curFut.get();

        if (fut == null)
            throw new IllegalStateException("Asynchronous operation not started.");

        return fut;
    }

    /**
     * Execute task, forwarding the internal future's outcome to the native task.
     *
     * @param task Task.
     * @return Listenable over the internal execution future.
     */
    private PlatformListenable executeNative0(final PlatformAbstractTask task) {
        IgniteInternalFuture fut = compute.executeAsync(task, null);

        fut.listen(new IgniteInClosure<IgniteInternalFuture>() {
            private static final long serialVersionUID = 0L;

            @Override public void apply(IgniteInternalFuture fut) {
                try {
                    fut.get();

                    task.onDone(null);
                }
                catch (IgniteCheckedException e) {
                    task.onDone(e);
                }
            }
        });

        return PlatformFutureUtils.getListenable(fut);
    }

    /**
     * Execute task taking arguments from the given reader.
     *
     * @param reader Reader positioned at: task name, keep-binary flag, argument,
     *      then an optional node id list (see {@link #readNodeIds}).
     * @param async Async flag; when set, the result future is stored in
     *      {@link #curFut} and {@code null} is returned immediately.
     * @return Task result converted to binary form, or {@code null} in async mode.
     */
    protected Object executeJavaTask(BinaryRawReaderEx reader, boolean async) {
        String taskName = reader.readString();
        boolean keepBinary = reader.readBoolean();
        Object arg = reader.readObjectDetached();

        Collection<UUID> nodeIds = readNodeIds(reader);

        IgniteCompute compute0 = computeForTask(nodeIds);

        if (async)
            compute0 = compute0.withAsync();

        if (!keepBinary && arg instanceof BinaryObjectImpl)
            arg = ((BinaryObject)arg).deserialize();

        // NOTE: with the legacy withAsync() API this call returns null in async
        // mode; the real result is then obtained from compute0.future() below.
        Object res = compute0.execute(taskName, arg);

        if (async) {
            curFut.set(compute0.future().chain(new C1<IgniteFuture, Object>() {
                private static final long serialVersionUID = 0L;

                @Override public Object apply(IgniteFuture fut) {
                    return toBinary(fut.get());
                }
            }));

            return null;
        }
        else
            return toBinary(res);
    }

    /**
     * Convert object to binary form.
     *
     * @param src Source object.
     * @return Result.
     */
    private Object toBinary(Object src) {
        return platformCtx.kernalContext().grid().binary().toBinary(src);
    }

    /**
     * Read node IDs.
     *
     * @param reader Reader; first boolean signals whether an id list is present.
     * @return Node IDs, or {@code null} when the whole cluster group is targeted.
     */
    protected Collection<UUID> readNodeIds(BinaryRawReaderEx reader) {
        if (reader.readBoolean()) {
            int len = reader.readInt();

            List<UUID> res = new ArrayList<>(len);

            for (int i = 0; i < len; i++)
                res.add(reader.readUuid());

            return res;
        }
        else
            return null;
    }

    /**
     * Get compute object for the given node IDs.
     *
     * @param nodeIds Node IDs ({@code null} means use the default compute instance).
     * @return Compute object narrowed to the given nodes.
     */
    protected IgniteCompute computeForTask(Collection<UUID> nodeIds) {
        return nodeIds == null ? compute :
            platformCtx.kernalContext().grid().compute(compute.clusterGroup().forNodeIds(nodeIds));
    }
}
| |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.project;
import static com.google.common.base.Preconditions.checkState;
import static com.google.gerrit.server.permissions.LabelPermission.ForUser.ON_BEHALF_OF;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.LabelFunction;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.PermissionRange;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.PatchSetUtil;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.permissions.ChangePermission;
import com.google.gerrit.server.permissions.ChangePermissionOrLabel;
import com.google.gerrit.server.permissions.LabelPermission;
import com.google.gerrit.server.permissions.PermissionBackend.ForChange;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.permissions.RefPermission;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
/** Access control management for a user accessing a single change. */
/** Access control management for a user accessing a single change. */
class ChangeControl {
  /** Injectable factory assembling a {@link ChangeControl} with its collaborators. */
  @Singleton
  static class Factory {
    private final ChangeData.Factory changeDataFactory;
    private final ChangeNotes.Factory notesFactory;
    private final ApprovalsUtil approvalsUtil;
    private final PatchSetUtil patchSetUtil;

    @Inject
    Factory(
        ChangeData.Factory changeDataFactory,
        ChangeNotes.Factory notesFactory,
        ApprovalsUtil approvalsUtil,
        PatchSetUtil patchSetUtil) {
      this.changeDataFactory = changeDataFactory;
      this.notesFactory = notesFactory;
      this.approvalsUtil = approvalsUtil;
      this.patchSetUtil = patchSetUtil;
    }

    /** Creates a control, loading the change's notes for the given project/change id. */
    ChangeControl create(
        RefControl refControl, ReviewDb db, Project.NameKey project, Change.Id changeId)
        throws OrmException {
      return create(refControl, notesFactory.create(db, project, changeId));
    }

    /** Creates a control around already-loaded change notes. */
    ChangeControl create(RefControl refControl, ChangeNotes notes) {
      return new ChangeControl(changeDataFactory, approvalsUtil, refControl, notes, patchSetUtil);
    }
  }

  private final ChangeData.Factory changeDataFactory;
  private final ApprovalsUtil approvalsUtil;
  private final RefControl refControl;
  private final ChangeNotes notes;
  // NOTE(review): patchSetUtil is carried but not referenced in the visible
  // methods; presumably used elsewhere or kept for wiring symmetry — verify.
  private final PatchSetUtil patchSetUtil;

  ChangeControl(
      ChangeData.Factory changeDataFactory,
      ApprovalsUtil approvalsUtil,
      RefControl refControl,
      ChangeNotes notes,
      PatchSetUtil patchSetUtil) {
    this.changeDataFactory = changeDataFactory;
    this.approvalsUtil = approvalsUtil;
    this.refControl = refControl;
    this.notes = notes;
    this.patchSetUtil = patchSetUtil;
  }

  /** Returns a control evaluated for {@code who}; {@code this} if already that user. */
  ChangeControl forUser(CurrentUser who) {
    if (getUser().equals(who)) {
      return this;
    }
    return new ChangeControl(
        changeDataFactory, approvalsUtil, getRefControl().forUser(who), notes, patchSetUtil);
  }

  private RefControl getRefControl() {
    return refControl;
  }

  private CurrentUser getUser() {
    return getRefControl().getUser();
  }

  private ProjectControl getProjectControl() {
    return getRefControl().getProjectControl();
  }

  private Change getChange() {
    return notes.getChange();
  }

  private ChangeNotes getNotes() {
    return notes;
  }

  /** Can this user see this change? Private changes add an extra visibility check. */
  private boolean isVisible(ReviewDb db, @Nullable ChangeData cd) throws OrmException {
    if (getChange().isPrivate() && !isPrivateVisible(db, cd)) {
      return false;
    }
    return isRefVisible();
  }

  /** Can the user see this change? Does not account for draft status */
  private boolean isRefVisible() {
    return getRefControl().isVisible();
  }

  /** Can this user abandon this change? Denied while the patch set is locked. */
  private boolean canAbandon(ReviewDb db) throws OrmException {
    return (isOwner() // owner (aka creator) of the change can abandon
            || getRefControl().isOwner() // branch owner can abandon
            || getProjectControl().isOwner() // project owner can abandon
            || getRefControl().canAbandon() // user can abandon a specific ref
            || getProjectControl().isAdmin())
        && !isPatchSetLocked(db);
  }

  /** Can this user delete this change? Only NEW/ABANDONED changes are deletable. */
  private boolean canDelete(Change.Status status) {
    switch (status) {
      case NEW:
      case ABANDONED:
        return (isOwner() && getRefControl().canDeleteOwnChanges())
            || getProjectControl().isAdmin();
      case MERGED:
      default:
        return false;
    }
  }

  /** Can this user rebase this change? Requires CREATE_CHANGE on the ref. */
  private boolean canRebase(ReviewDb db) throws OrmException {
    return (isOwner() || getRefControl().canSubmit(isOwner()) || getRefControl().canRebase())
        && refControl.asForRef().testOrFalse(RefPermission.CREATE_CHANGE)
        && !isPatchSetLocked(db);
  }

  /** Can this user restore this change? */
  private boolean canRestore(ReviewDb db) throws OrmException {
    // Anyone who can abandon the change can restore it, as long as they can create changes.
    return canAbandon(db) && refControl.asForRef().testOrFalse(RefPermission.CREATE_CHANGE);
  }

  /** The range of permitted values associated with a label permission. */
  private PermissionRange getRange(String permission) {
    return getRefControl().getRange(permission, isOwner());
  }

  /** Can this user add a patch set to this change? */
  private boolean canAddPatchSet(ReviewDb db) throws OrmException {
    if (!refControl.asForRef().testOrFalse(RefPermission.CREATE_CHANGE) || isPatchSetLocked(db)) {
      return false;
    }
    if (isOwner()) {
      return true;
    }
    return getRefControl().canAddPatchSet();
  }

  /**
   * Is the current patch set locked against state changes?
   *
   * <p>A merged change is never considered locked; otherwise any approval of
   * value 1 on a label whose function is PATCH_SET_LOCK locks the patch set.
   */
  private boolean isPatchSetLocked(ReviewDb db) throws OrmException {
    if (getChange().getStatus() == Change.Status.MERGED) {
      return false;
    }

    for (PatchSetApproval ap :
        approvalsUtil.byPatchSet(
            db, getNotes(), getUser(), getChange().currentPatchSetId(), null, null)) {
      LabelType type =
          getProjectControl()
              .getProjectState()
              .getLabelTypes(getNotes(), getUser())
              .byLabel(ap.getLabel());
      if (type != null
          && ap.getValue() == 1
          && type.getFunction() == LabelFunction.PATCH_SET_LOCK) {
        return true;
      }
    }
    return false;
  }

  /** Is this user the owner of the change? Anonymous users own nothing. */
  private boolean isOwner() {
    if (getUser().isIdentifiedUser()) {
      Account.Id id = getUser().asIdentifiedUser().getAccountId();
      return id.equals(getChange().getOwner());
    }
    return false;
  }

  /** Is this user assigned to this change? */
  private boolean isAssignee() {
    Account.Id currentAssignee = notes.getChange().getAssignee();
    if (currentAssignee != null && getUser().isIdentifiedUser()) {
      Account.Id id = getUser().getAccountId();
      return id.equals(currentAssignee);
    }
    return false;
  }

  /** Is this user a reviewer for the change? */
  private boolean isReviewer(ReviewDb db, @Nullable ChangeData cd) throws OrmException {
    if (getUser().isIdentifiedUser()) {
      Collection<Account.Id> results = changeData(db, cd).reviewers().all();
      return results.contains(getUser().getAccountId());
    }
    return false;
  }

  /** Can this user edit the topic name? Closed changes need the force permission. */
  private boolean canEditTopicName() {
    if (getChange().getStatus().isOpen()) {
      return isOwner() // owner (aka creator) of the change can edit topic
          || getRefControl().isOwner() // branch owner can edit topic
          || getProjectControl().isOwner() // project owner can edit topic
          || getRefControl().canEditTopicName() // user can edit topic on a specific ref
          || getProjectControl().isAdmin();
    }
    return getRefControl().canForceEditTopicName();
  }

  /** Can this user edit the description? Only while the change is open. */
  private boolean canEditDescription() {
    if (getChange().getStatus().isOpen()) {
      return isOwner() // owner (aka creator) of the change can edit desc
          || getRefControl().isOwner() // branch owner can edit desc
          || getProjectControl().isOwner() // project owner can edit desc
          || getProjectControl().isAdmin();
    }
    return false;
  }

  /** Can this user set or clear the assignee? The assignee may reassign. */
  private boolean canEditAssignee() {
    return isOwner()
        || getProjectControl().isOwner()
        || getRefControl().canEditAssignee()
        || isAssignee();
  }

  /** Can this user edit the hashtag name? */
  private boolean canEditHashtags() {
    return isOwner() // owner (aka creator) of the change can edit hashtags
        || getRefControl().isOwner() // branch owner can edit hashtags
        || getProjectControl().isOwner() // project owner can edit hashtags
        || getRefControl().canEditHashtags() // user can edit hashtag on a specific ref
        || getProjectControl().isAdmin();
  }

  /** Returns the supplied ChangeData if present, otherwise loads one. */
  private ChangeData changeData(ReviewDb db, @Nullable ChangeData cd) {
    return cd != null ? cd : changeDataFactory.create(db, getNotes());
  }

  /** Who may see a private change: owner, reviewers, privileged refs, internal users. */
  private boolean isPrivateVisible(ReviewDb db, ChangeData cd) throws OrmException {
    return isOwner()
        || isReviewer(db, cd)
        || getRefControl().canViewPrivateChanges()
        || getUser().isInternalUser();
  }

  /** Adapts this control to the PermissionBackend ForChange interface. */
  ForChange asForChange(@Nullable ChangeData cd, @Nullable Provider<ReviewDb> db) {
    return new ForChangeImpl(cd, db);
  }

  /** PermissionBackend adapter; lazily loads ChangeData and caches label ranges. */
  private class ForChangeImpl extends ForChange {
    private ChangeData cd;
    // Cache of label name -> permitted range, filled on first use.
    private Map<String, PermissionRange> labels;

    ForChangeImpl(@Nullable ChangeData cd, @Nullable Provider<ReviewDb> db) {
      this.cd = cd;
      this.db = db;
    }

    private ReviewDb db() {
      if (db != null) {
        return db.get();
      } else if (cd != null) {
        return cd.db();
      } else {
        return null;
      }
    }

    private ChangeData changeData() {
      if (cd == null) {
        ReviewDb reviewDb = db();
        checkState(reviewDb != null, "need ReviewDb");
        cd = changeDataFactory.create(reviewDb, getNotes());
      }
      return cd;
    }

    @Override
    public CurrentUser user() {
      return getUser();
    }

    @Override
    public ForChange user(CurrentUser user) {
      return user().equals(user) ? this : forUser(user).asForChange(cd, db);
    }

    @Override
    public void check(ChangePermissionOrLabel perm)
        throws AuthException, PermissionBackendException {
      if (!can(perm)) {
        throw new AuthException(perm.describeForException() + " not permitted");
      }
    }

    @Override
    public <T extends ChangePermissionOrLabel> Set<T> test(Collection<T> permSet)
        throws PermissionBackendException {
      Set<T> ok = newSet(permSet);
      for (T perm : permSet) {
        if (can(perm)) {
          ok.add(perm);
        }
      }
      return ok;
    }

    /** Dispatches a permission check to the matching typed overload. */
    private boolean can(ChangePermissionOrLabel perm) throws PermissionBackendException {
      if (perm instanceof ChangePermission) {
        return can((ChangePermission) perm);
      } else if (perm instanceof LabelPermission) {
        return can((LabelPermission) perm);
      } else if (perm instanceof LabelPermission.WithValue) {
        return can((LabelPermission.WithValue) perm);
      }
      throw new PermissionBackendException(perm + " unsupported");
    }

    private boolean can(ChangePermission perm) throws PermissionBackendException {
      try {
        switch (perm) {
          case READ:
            return isVisible(db(), changeData());
          case ABANDON:
            return canAbandon(db());
          case DELETE:
            return canDelete(getChange().getStatus());
          case ADD_PATCH_SET:
            return canAddPatchSet(db());
          case EDIT_ASSIGNEE:
            return canEditAssignee();
          case EDIT_DESCRIPTION:
            return canEditDescription();
          case EDIT_HASHTAGS:
            return canEditHashtags();
          case EDIT_TOPIC_NAME:
            return canEditTopicName();
          case REBASE:
            return canRebase(db());
          case RESTORE:
            return canRestore(db());
          case SUBMIT:
            return getRefControl().canSubmit(isOwner());

          case REMOVE_REVIEWER:
          case SUBMIT_AS:
            return getRefControl().canPerform(perm.permissionName().get());
        }
      } catch (OrmException e) {
        throw new PermissionBackendException("unavailable", e);
      }
      throw new PermissionBackendException(perm + " unsupported");
    }

    /** A label permission holds when the user's permitted range is non-empty. */
    private boolean can(LabelPermission perm) {
      return !label(perm.permissionName().get()).isEmpty();
    }

    /** A specific label value holds when it falls within the permitted range. */
    private boolean can(LabelPermission.WithValue perm) {
      PermissionRange r = label(perm.permissionName().get());
      if (perm.forUser() == ON_BEHALF_OF && r.isEmpty()) {
        return false;
      }
      return r.contains(perm.value());
    }

    /** Looks up (and memoizes) the permitted range for a label permission name. */
    private PermissionRange label(String permission) {
      if (labels == null) {
        labels = Maps.newHashMapWithExpectedSize(4);
      }
      PermissionRange r = labels.get(permission);
      if (r == null) {
        r = getRange(permission);
        labels.put(permission, r);
      }
      return r;
    }
  }

  /** Returns an empty mutable set matching the input's kind (EnumSet vs HashSet). */
  static <T extends ChangePermissionOrLabel> Set<T> newSet(Collection<T> permSet) {
    if (permSet instanceof EnumSet) {
      @SuppressWarnings({"unchecked", "rawtypes"})
      Set<T> s = ((EnumSet) permSet).clone();
      s.clear();
      return s;
    }
    return Sets.newHashSetWithExpectedSize(permSet.size());
  }
}
| |
package com.custom.ui.search.helper;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.transform;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import models.user.User;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import play.data.DynamicForm;
import utils.ExceptionHandler;
import utils.GridViewModel;
import utils.GridViewModel.PageData;
import utils.GridViewModel.RowViewModel;
import com.avaje.ebean.Expr;
import com.avaje.ebean.Expression;
import com.custom.domain.Status;
import com.custom.ui.search.proxy.UserProxyUISearch;
import com.google.common.base.Function;
import com.mnt.core.helper.ASearchContext;
import com.mnt.core.ui.component.BuildGridActionButton;
import com.mnt.core.ui.component.BuildUIButton;
import com.mnt.core.ui.component.GridActionButton;
import com.mnt.core.ui.component.UIButton;
import com.mnt.core.ui.component.UIButton.ButtonActionType;
import controllers.routes;
/**
 * Search context for the user approval/status grid: builds grid and page
 * buttons, executes the paged status search, and exports results to Excel.
 * Results are restricted to the requesting user's company, excluding the
 * requester and admin-level accounts.
 */
public class UserStatusSearchContext extends ASearchContext<UserProxyUISearch>{

    // NOTE(review): never assigned — getInstance() returns a fresh instance per
    // call, so this field is dead; kept only to avoid breaking any reflective
    // access. Candidate for removal.
    private static UserStatusSearchContext searchContext = null;

    /** Returns a new context per call (not a singleton, despite the name). */
    public static UserStatusSearchContext getInstance(){
        return new UserStatusSearchContext();
    }

    @Override
    public boolean isMultiSelectSearch() {
        return false;
    }

    /** Entity name shown by the generic search UI. */
    public String entityName(){
        return "User";
    }

    public UserStatusSearchContext() {
        super(UserProxyUISearch.class,null);
    }

    public UserStatusSearchContext(UserProxyUISearch u) {
        super(UserProxyUISearch.class,u);
    }

    /** Per-row approve (tick) and disapprove (cross) grid actions. */
    @Override
    public void buildGridButton() {
        getGridActions().add(BuildGridActionButton.me().withVisibilityTrue().withIcon(GridActionButton.IconType.Tick)
                .withUrl(routes.Users.showEdit().url()).
                withTooltip("Approve User").withTargetModal(ButtonActionType.EDIT_TYPE));
        getGridActions().add(BuildGridActionButton.me().withVisibilityTrue()
                .withIcon(GridActionButton.IconType.Cross)
                .withUrl("/userStatusdisapprove").withTooltip("Disapprove User"));
    }

    /** Bulk approve/disapprove buttons shown above the grid. */
    @Override
    protected void buildButton() {
        super.getButtonActions().add(new UIButton() {
            @Override
            public boolean visibility() {
                return true;
            }

            @Override
            public String url() {
                return "/userStatusApprove";
            }

            @Override
            public ButtonActionType target() {
                return ButtonActionType.ACTION;
            }

            @Override
            public String label() {
                return "Approve User";
            }

            @Override
            public String id() {
                return "userApproveButton";
            }
        });
        super.getButtonActions().add(new UIButton() {
            @Override
            public boolean visibility() {
                return true;
            }

            @Override
            public String url() {
                return "/userStatusdisapprove";
            }

            @Override
            public ButtonActionType target() {
                return ButtonActionType.ACTION;
            }

            @Override
            public String label() {
                return "Disapprove User";
            }

            @Override
            public String id() {
                return "userdisapproveButton";
            }
        });
    }

    @Override
    public UIButton showAddButton(){
        return BuildUIButton.me();
    }

    @Override
    public UIButton showEditButton(){
        return BuildUIButton.me();
    }

    @Override
    public String searchUrl() {
        return routes.Status.userSearch().url();
    }

    @Override
    public String editUrl() {
        return routes.Users.edit().url();
    }

    @Override
    public String showEditUrl() {
        return routes.Users.showEdit().url();
    }

    @Override
    public String generateExcel() {
        return routes.Status.excelReportUser().url();
    }

    @Override
    public String createUrl() {
        return "#";
    }

    @Override
    public String deleteUrl() {
        return "#";
    }

    /**
     * Exports matching users to an Excel workbook.
     *
     * @param form request parameters; reads "email" (the requester, used to fix
     *      the company scope) and an optional "status" filter (defaults to
     *      {@link Status#PendingApproval}).
     * @return the generated workbook, or {@code null} when the export fails.
     */
    @Override
    public HSSFWorkbook doExcel(DynamicForm form) {
        String email = form.data().get("email");
        User currentUser = User.findByEmail(email);
        Expression sameCompany = Expr.eq("companyobject.companyCode", currentUser.companyobject.getCompanyCode());
        Expression notSelf = Expr.ne("email", currentUser.email);
        List<User> result;
        if (form.get("status") == null) {
            result = User.find.where().eq("userStatus", Status.PendingApproval).add(sameCompany).add(notSelf).findList();
        } else {
            result = User.find.where().ilike("userStatus", form.get("status")).add(sameCompany).add(notSelf).findList();
        }
        List<UserProxyUISearch> userResult = new ArrayList<UserProxyUISearch>(result.size());
        for (User row : result) {
            // BUG FIX: the original used != to compare designation Strings,
            // which tests reference identity and only accidentally works for
            // interned literals; use equals() for value comparison.
            if (!"SuperAdmin".equals(row.designation) && !"Admin".equals(row.designation) && row.companyobject != null) {
                UserProxyUISearch user = new UserProxyUISearch();
                user.id = row.id;
                user.firstName = row.firstName;
                user.lastName = row.lastName;
                user.email = row.email;
                user.companyName = row.companyobject.companyName;
                userResult.add(user);
            }
        }
        try {
            return super.getExcelExport(userResult);
        } catch (Exception e) {
            ExceptionHandler.onError(e);
        }
        return null;
    }

    /**
     * Runs the paged jqGrid search over users of the requester's company.
     *
     * @param form request parameters; reads "email", optional "status",
     *      "page" (1-based) and "rows" (page size).
     * @return grid view model with the current page of rows and the total count.
     */
    public GridViewModel doSearch(DynamicForm form) {
        String email = form.data().get("email");
        User currentUser = User.findByEmail(email);
        Expression sameCompany = Expr.eq("companyobject.companyCode", currentUser.companyobject.getCompanyCode());
        Expression notSelf = Expr.ne("email", currentUser.email);
        int page = Integer.parseInt(form.get("page"));
        int limit = Integer.parseInt(form.get("rows"));
        GridViewModel.PageData pageData = new PageData(limit,
                page);
        int count = 0;
        // Page size as a double so total_pages uses floating-point ceil division.
        double min = Double.parseDouble(form.get("rows"));
        int total_pages = 0;
        if (form.get("status") == null) {
            count = User.find.where().eq("userStatus", Status.PendingApproval).add(sameCompany).add(notSelf).findRowCount();
        } else {
            count = User.find.where().ilike("userStatus", form.get("status")).add(sameCompany).add(notSelf).findRowCount();
        }
        if (count > 0) {
            total_pages = (int) Math.ceil(count / min);
        } else {
            total_pages = 0;
        }
        // Clamp the requested page into the valid range.
        if (page > total_pages) {
            page = total_pages;
        }
        // BUG FIX: when total_pages is 0 the original computed a negative first
        // row (-limit); clamp to 0 so the query offset is always valid.
        int start = Math.max(0, limit * page - limit); //orderBy(sidx+" "+sord)
        List<User> result;
        if (form.get("status") == null) {
            result = User.find.where().eq("userStatus", Status.PendingApproval).add(sameCompany).add(notSelf).setFirstRow(start).setMaxRows(limit).findList();
        } else {
            result = User.find.where().ilike("userStatus", form.get("status")).add(sameCompany).add(notSelf).setFirstRow(start).setMaxRows(limit).findList();
        }
        List<GridViewModel.RowViewModel> rows = transform(result, toJqGridFormat());
        GridViewModel gridViewModel = new GridViewModel(pageData, count, rows);
        return gridViewModel;
    }

    /** Maps a User entity to a jqGrid row; returns null (and logs) on failure. */
    private Function<User,RowViewModel> toJqGridFormat() {
        return new Function<User, RowViewModel>() {
            @Override
            public RowViewModel apply(User user) {
                try {
                    return new GridViewModel.RowViewModel((user.id).intValue(), newArrayList(getResultStr(User.class,user)));
                } catch (Exception e) {
                    ExceptionHandler.onError(e);
                }
                return null;
            }
        };
    }

    @Override
    public Map<String, String> autoCompleteUrls() {
        return User.autoCompleteAction;
    }
}
| |
package cucumber.runtime;
import cucumber.api.PendingException;
import cucumber.api.Scenario;
import cucumber.runtime.io.ClasspathResourceLoader;
import cucumber.runtime.io.Resource;
import cucumber.runtime.io.ResourceLoader;
import cucumber.runtime.model.CucumberFeature;
import gherkin.I18n;
import gherkin.formatter.JSONFormatter;
import gherkin.formatter.Reporter;
import gherkin.formatter.model.Step;
import gherkin.formatter.model.Tag;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import static cucumber.runtime.TestHelper.feature;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyCollectionOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
public class RuntimeTest {
private static final I18n ENGLISH = new I18n("en");
    // End-to-end check of the JSON formatter output for a minimal feature with a
    // background and one scenario; currently @Ignore'd (expected output is brittle).
    @Ignore
    @Test
    public void runs_feature_with_json_formatter() throws Exception {
        CucumberFeature feature = feature("test.feature", "" +
                "Feature: feature name\n" +
                "  Background: background name\n" +
                "    Given b\n" +
                "  Scenario: scenario name\n" +
                "    When s\n");
        StringBuilder out = new StringBuilder();
        JSONFormatter jsonFormatter = new JSONFormatter(out);
        List<Backend> backends = asList(mock(Backend.class));
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        RuntimeOptions runtimeOptions = new RuntimeOptions(new Env());
        Runtime runtime = new Runtime(new ClasspathResourceLoader(classLoader), classLoader, backends, runtimeOptions);
        // Steps are undefined (mocked backend), so every result status is "undefined".
        feature.run(jsonFormatter, jsonFormatter, runtime);
        jsonFormatter.done();
        String expected = "" +
                "[\n" +
                "  {\n" +
                "    \"id\": \"feature-name\",\n" +
                "    \"description\": \"\",\n" +
                "    \"name\": \"feature name\",\n" +
                "    \"keyword\": \"Feature\",\n" +
                "    \"line\": 1,\n" +
                "    \"elements\": [\n" +
                "      {\n" +
                "        \"description\": \"\",\n" +
                "        \"name\": \"background name\",\n" +
                "        \"keyword\": \"Background\",\n" +
                "        \"line\": 2,\n" +
                "        \"steps\": [\n" +
                "          {\n" +
                "            \"result\": {\n" +
                "              \"status\": \"undefined\"\n" +
                "            },\n" +
                "            \"name\": \"b\",\n" +
                "            \"keyword\": \"Given \",\n" +
                "            \"line\": 3,\n" +
                "            \"match\": {}\n" +
                "          }\n" +
                "        ],\n" +
                "        \"type\": \"background\"\n" +
                "      },\n" +
                "      {\n" +
                "        \"id\": \"feature-name;scenario-name\",\n" +
                "        \"description\": \"\",\n" +
                "        \"name\": \"scenario name\",\n" +
                "        \"keyword\": \"Scenario\",\n" +
                "        \"line\": 4,\n" +
                "        \"steps\": [\n" +
                "          {\n" +
                "            \"result\": {\n" +
                "              \"status\": \"undefined\"\n" +
                "            },\n" +
                "            \"name\": \"s\",\n" +
                "            \"keyword\": \"When \",\n" +
                "            \"line\": 5,\n" +
                "            \"match\": {}\n" +
                "          }\n" +
                "        ],\n" +
                "        \"type\": \"scenario\"\n" +
                "      }\n" +
                "    ],\n" +
                "    \"uri\": \"test.feature\"\n" +
                "  }\n" +
                "]";
        assertEquals(expected, out.toString());
    }
@Test
public void strict_without_pending_steps_or_errors() {
Runtime runtime = createStrictRuntime();
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_without_pending_steps_or_errors() {
Runtime runtime = createNonStrictRuntime();
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_undefined_steps() {
Runtime runtime = createNonStrictRuntime();
runtime.undefinedStepsTracker.addUndefinedStep(new Step(null, "Given ", "A", 1, null, null), ENGLISH);
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void strict_with_undefined_steps() {
Runtime runtime = createStrictRuntime();
runtime.undefinedStepsTracker.addUndefinedStep(new Step(null, "Given ", "A", 1, null, null), ENGLISH);
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void strict_with_pending_steps_and_no_errors() {
Runtime runtime = createStrictRuntime();
runtime.addError(new PendingException());
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void non_strict_with_pending_steps() {
Runtime runtime = createNonStrictRuntime();
runtime.addError(new PendingException());
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_failed_junit_assumption() {
Runtime runtime = createNonStrictRuntime();
runtime.addError(new AssumptionViolatedException("should be treated like pending"));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_errors() {
Runtime runtime = createNonStrictRuntime();
runtime.addError(new RuntimeException());
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void strict_with_errors() {
Runtime runtime = createStrictRuntime();
runtime.addError(new RuntimeException());
assertEquals(0x1, runtime.exitStatus());
}
/** A runtime that discovers no feature files should still exit successfully. */
@Test
public void should_pass_if_no_features_are_found() {
    ResourceLoader resourceLoader = createResourceLoaderThatFindsNoFeatures();
    Runtime runtime = createStrictRuntime(resourceLoader);
    runtime.run();
    assertEquals(0x0, runtime.exitStatus());
}

// NOTE(review): "cucumer" in the method name is a typo for "cucumber"; left
// as-is because renaming a test method changes its reported identity.
/** Constructing a Runtime with an empty backend list must fail fast. */
@Test
public void should_throw_cucumer_exception_if_no_backends_are_found() throws Exception {
    try {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        new Runtime(new ClasspathResourceLoader(classLoader), classLoader, Collections.<Backend>emptyList(),
                new RuntimeOptions(new Env()));
        fail("A CucumberException should have been thrown");
    } catch (CucumberException e) {
        assertEquals("No backends were found. Please make sure you have a backend module on your CLASSPATH.", e.getMessage());
    }
}
/** A step whose mocked match runs without throwing is counted as passed. */
@Test
public void should_add_passed_result_to_the_summary_counter() throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    StepDefinitionMatch match = mock(StepDefinitionMatch.class);
    Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 passed)%n" +
            "1 Steps (1 passed)%n")));
}

/** A step whose match throws {@code PendingException} is counted as pending. */
@Test
public void should_add_pending_result_to_the_summary_counter() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    StepDefinitionMatch match = createExceptionThrowingMatch(new PendingException());
    Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 pending)%n" +
            "1 Steps (1 pending)%n")));
}

/** A step whose match throws a generic exception is counted as failed. */
@Test
public void should_add_failed_result_to_the_summary_counter() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    StepDefinitionMatch match = createExceptionThrowingMatch(new Exception());
    Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 failed)%n" +
            "1 Steps (1 failed)%n")));
}

/** An ambiguous step definition match is reported as a failure, not a pass. */
@Test
public void should_add_ambiguous_match_as_failed_result_to_the_summary_counter() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    Runtime runtime = createRuntimeWithMockedGlueWithAmbiguousMatch("--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 failed)%n" +
            "1 Steps (1 failed)%n")));
}

/** After a failing step, the next step in the same scenario is skipped. */
@Test
public void should_add_skipped_result_to_the_summary_counter() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    StepDefinitionMatch match = createExceptionThrowingMatch(new Exception());
    Runtime runtime = createRuntimeWithMockedGlue(match, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    // First step fails; the second one should therefore be skipped.
    runStep(reporter, runtime);
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 failed)%n" +
            "2 Steps (1 failed, 1 skipped)%n")));
}

/** A null match (no step definition found) is counted as undefined. */
@Test
public void should_add_undefined_result_to_the_summary_counter() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    Runtime runtime = createRuntimeWithMockedGlue(null, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 undefined)%n" +
            "1 Steps (1 undefined)%n")));
}

/** A throwing before-hook fails the scenario and causes its step to be skipped. */
@Test
public void should_fail_the_scenario_if_before_fails() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    StepDefinitionMatch match = mock(StepDefinitionMatch.class);
    HookDefinition hook = createExceptionThrowingHook();
    // isBefore = true registers the throwing hook as a before-hook.
    Runtime runtime = createRuntimeWithMockedGlue(match, hook, true, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runtime.runBeforeHooks(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 failed)%n" +
            "1 Steps (1 skipped)%n")));
}

/** A throwing after-hook fails the scenario, but the already-run step stays passed. */
@Test
public void should_fail_the_scenario_if_after_fails() throws Throwable {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Reporter reporter = mock(Reporter.class);
    StepDefinitionMatch match = mock(StepDefinitionMatch.class);
    HookDefinition hook = createExceptionThrowingHook();
    // isBefore = false registers the throwing hook as an after-hook.
    Runtime runtime = createRuntimeWithMockedGlue(match, hook, false, "--monochrome");
    runtime.buildBackendWorlds(reporter, Collections.<Tag>emptySet());
    runStep(reporter, runtime);
    runtime.runAfterHooks(reporter, Collections.<Tag>emptySet());
    runtime.disposeBackendWorlds();
    runtime.printSummary(new PrintStream(baos));
    assertThat(baos.toString(), startsWith(String.format(
            "1 Scenarios (1 failed)%n" +
            "1 Steps (1 passed)%n")));
}
/** Creates a mocked step match whose execution throws the given exception. */
private StepDefinitionMatch createExceptionThrowingMatch(Exception exception) throws Throwable {
    StepDefinitionMatch match = mock(StepDefinitionMatch.class);
    doThrow(exception).when(match).runStep((I18n)any());
    return match;
}

/** Creates a mocked hook that matches every tag set and throws when executed. */
private HookDefinition createExceptionThrowingHook() throws Throwable {
    HookDefinition hook = mock(HookDefinition.class);
    when(hook.matches(anyCollectionOf(Tag.class))).thenReturn(true);
    doThrow(new Exception()).when(hook).execute((Scenario)any());
    return hook;
}

/** Drives a single mocked step through the runtime under test. */
public void runStep(Reporter reporter, Runtime runtime) {
    Step step = mock(Step.class);
    I18n i18n = mock(I18n.class);
    runtime.runStep("<uri>", step, reporter, i18n);
}
/** A resource loader stubbed to report zero {@code .feature} files. */
private ResourceLoader createResourceLoaderThatFindsNoFeatures() {
    ResourceLoader resourceLoader = mock(ResourceLoader.class);
    when(resourceLoader.resources(anyString(), eq(".feature"))).thenReturn(Collections.<Resource>emptyList());
    return resourceLoader;
}

/** Runtime with --strict and a dummy glue path. */
private Runtime createStrictRuntime() {
    return createRuntime("-g", "anything", "--strict");
}

/** Runtime without --strict and a dummy glue path. */
private Runtime createNonStrictRuntime() {
    return createRuntime("-g", "anything");
}

/** Strict runtime backed by the supplied (typically stubbed) resource loader. */
private Runtime createStrictRuntime(ResourceLoader resourceLoader) {
    return createRuntime(resourceLoader, Thread.currentThread().getContextClassLoader(), "-g", "anything", "--strict");
}

/** Runtime whose resource loader and class loader are plain mocks. */
private Runtime createRuntime(String... runtimeArgs) {
    ResourceLoader resourceLoader = mock(ResourceLoader.class);
    ClassLoader classLoader = mock(ClassLoader.class);
    return createRuntime(resourceLoader, classLoader, runtimeArgs);
}

/** Builds a Runtime with a single mocked backend and the given CLI args. */
private Runtime createRuntime(ResourceLoader resourceLoader, ClassLoader classLoader, String... runtimeArgs) {
    RuntimeOptions runtimeOptions = new RuntimeOptions(new Env(), runtimeArgs);
    Backend backend = mock(Backend.class);
    Collection<Backend> backends = Arrays.asList(backend);
    return new Runtime(resourceLoader, classLoader, backends, runtimeOptions);
}
/** Runtime whose glue always resolves steps to {@code match}; no hooks. */
private Runtime createRuntimeWithMockedGlue(StepDefinitionMatch match, String... runtimeArgs) {
    return createRuntimeWithMockedGlue(match, false, null, false, runtimeArgs);
}

/** Runtime with a fixed step match plus one hook (before or after, per {@code isBefore}). */
private Runtime createRuntimeWithMockedGlue(StepDefinitionMatch match, HookDefinition hook, boolean isBefore,
        String... runtimeArgs){
    return createRuntimeWithMockedGlue(match, false, hook, isBefore, runtimeArgs);
}

/** Runtime whose glue throws {@code AmbiguousStepDefinitionsException} for every step. */
private Runtime createRuntimeWithMockedGlueWithAmbiguousMatch(String... runtimeArgs) {
    return createRuntimeWithMockedGlue(mock(StepDefinitionMatch.class), true, null, false, runtimeArgs);
}

/**
 * Master factory: builds a Runtime over a fully mocked glue.
 *
 * @param match       match returned for every step; null simulates an undefined step
 * @param isAmbiguous when true, step lookup throws instead of returning {@code match}
 * @param hook        optional hook to register (may be null)
 * @param isBefore    whether {@code hook} is registered as a before- or after-hook
 */
private Runtime createRuntimeWithMockedGlue(StepDefinitionMatch match, boolean isAmbiguous, HookDefinition hook,
        boolean isBefore, String... runtimeArgs) {
    ResourceLoader resourceLoader = mock(ResourceLoader.class);
    ClassLoader classLoader = mock(ClassLoader.class);
    RuntimeOptions runtimeOptions = new RuntimeOptions(new Env(), runtimeArgs);
    Backend backend = mock(Backend.class);
    RuntimeGlue glue = mock(RuntimeGlue.class);
    mockMatch(glue, match, isAmbiguous);
    mockHook(glue, hook, isBefore);
    Collection<Backend> backends = Arrays.asList(backend);
    return new Runtime(resourceLoader, classLoader, backends, runtimeOptions, glue);
}

/** Stubs step lookup on the glue: either throws an ambiguity error or returns {@code match}. */
private void mockMatch(RuntimeGlue glue, StepDefinitionMatch match, boolean isAmbiguous) {
    if (isAmbiguous) {
        Exception exception = new AmbiguousStepDefinitionsException(Arrays.asList(match, match));
        doThrow(exception).when(glue).stepDefinitionMatch(anyString(), (Step)any(), (I18n)any());
    } else {
        when(glue.stepDefinitionMatch(anyString(), (Step)any(), (I18n)any())).thenReturn(match);
    }
}

/** Registers {@code hook} on the glue as a before- or after-hook. */
private void mockHook(RuntimeGlue glue, HookDefinition hook, boolean isBefore) {
    if (isBefore) {
        when(glue.getBeforeHooks()).thenReturn(Arrays.asList(hook));
    } else {
        when(glue.getAfterHooks()).thenReturn(Arrays.asList(hook));
    }
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import com.google.common.collect.Maps;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;
/**
*
*/
/**
 * Parses the {@code script} filter DSL element into a {@link ScriptFilter}
 * that runs a user-supplied script against each document to decide matches.
 */
public class ScriptFilterParser implements FilterParser {

    public static final String NAME = "script";

    @Inject
    public ScriptFilterParser() {
    }

    @Override
    public String[] names() {
        return new String[]{NAME};
    }

    /**
     * Parses fields: {@code script} (required), {@code lang}, {@code params},
     * {@code _name}, {@code _cache}, {@code _cache_key}/{@code _cacheKey}.
     * Any other field raises a {@link QueryParsingException}.
     */
    @Override
    public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();
        XContentParser.Token token;
        boolean cache = false; // no need to cache it by default, changes a lot?
        CacheKeyFilter.Key cacheKey = null;
        // also, when caching, since its isCacheable is false, will result in loading all bit set...
        String script = null;
        String scriptLang = null;
        Map<String, Object> params = null;
        String filterName = null;
        String currentFieldName = null;
        // Walk the object token-by-token; currentFieldName tracks the key the
        // next value belongs to.
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("params".equals(currentFieldName)) {
                    params = parser.map();
                } else {
                    throw new QueryParsingException(parseContext.index(), "[script] filter does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if ("script".equals(currentFieldName)) {
                    script = parser.text();
                } else if ("lang".equals(currentFieldName)) {
                    scriptLang = parser.text();
                } else if ("_name".equals(currentFieldName)) {
                    filterName = parser.text();
                } else if ("_cache".equals(currentFieldName)) {
                    cache = parser.booleanValue();
                } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
                    cacheKey = new CacheKeyFilter.Key(parser.text());
                } else {
                    throw new QueryParsingException(parseContext.index(), "[script] filter does not support [" + currentFieldName + "]");
                }
            }
        }
        if (script == null) {
            throw new QueryParsingException(parseContext.index(), "script must be provided with a [script] filter");
        }
        if (params == null) {
            params = Maps.newHashMap();
        }
        Filter filter = new ScriptFilter(scriptLang, script, params, parseContext.scriptService());
        if (cache) {
            filter = parseContext.cacheFilter(filter, cacheKey);
        }
        if (filterName != null) {
            parseContext.addNamedFilter(filterName, filter);
        }
        return filter;
    }

    /**
     * A Lucene {@link Filter} that accepts a document when the compiled
     * search script returns a truthy value for it.
     */
    public static class ScriptFilter extends Filter {

        private final String script;
        private final Map<String, Object> params;
        private final SearchScript searchScript;

        // NOTE(review): the scriptService parameter is unused — the script is
        // compiled via the current SearchContext instead. Confirm intended.
        private ScriptFilter(String scriptLang, String script, Map<String, Object> params, ScriptService scriptService) {
            this.script = script;
            this.params = params;
            // Requires an active SearchContext to compile the script.
            SearchContext context = SearchContext.current();
            if (context == null) {
                throw new ElasticSearchIllegalStateException("No search context on going...");
            }
            this.searchScript = context.scriptService().search(context.lookup(), scriptLang, script, params);
        }

        @Override
        public String toString() {
            StringBuilder buffer = new StringBuilder();
            buffer.append("ScriptFilter(");
            buffer.append(script);
            buffer.append(")");
            return buffer.toString();
        }

        // Equality is based on the script source and its params only; the
        // compiled searchScript is deliberately excluded.
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            ScriptFilter that = (ScriptFilter) o;
            if (params != null ? !params.equals(that.params) : that.params != null) return false;
            if (script != null ? !script.equals(that.script) : that.script != null) return false;
            return true;
        }

        @Override
        public int hashCode() {
            int result = script != null ? script.hashCode() : 0;
            result = 31 * result + (params != null ? params.hashCode() : 0);
            return result;
        }

        @Override
        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
            searchScript.setNextReader(context);
            // LUCENE 4 UPGRADE: we can simply wrap this here since it is not cacheable and if we are not top level we will get a null passed anyway
            return BitsFilteredDocIdSet.wrap(new ScriptDocSet(context.reader().maxDoc(), acceptDocs, searchScript), acceptDocs);
        }

        /** DocIdSet that evaluates the script per document id. */
        static class ScriptDocSet extends MatchDocIdSet {

            private final SearchScript searchScript;

            public ScriptDocSet(int maxDoc, @Nullable Bits acceptDocs, SearchScript searchScript) {
                super(maxDoc, acceptDocs);
                this.searchScript = searchScript;
            }

            // NOTE(review): returns true, yet the comment in parse() above
            // assumes isCacheable is false — confirm the intended caching
            // behavior for this filter.
            @Override
            public boolean isCacheable() {
                return true;
            }

            /**
             * Runs the script for {@code doc}; a Boolean result is used
             * directly, a Number matches when non-zero, null never matches,
             * and any other type is rejected with an exception.
             */
            @Override
            protected boolean matchDoc(int doc) {
                searchScript.setNextDocId(doc);
                Object val = searchScript.run();
                if (val == null) {
                    return false;
                }
                if (val instanceof Boolean) {
                    return (Boolean) val;
                }
                if (val instanceof Number) {
                    return ((Number) val).longValue() != 0;
                }
                throw new ElasticSearchIllegalArgumentException("Can't handle type [" + val + "] in script filter");
            }
        }
    }
}
| |
/*******************************************************************************
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.cloud.dataflow.sdk.runners.worker;
import static com.google.cloud.dataflow.sdk.util.Structs.getBytes;
import static com.google.cloud.dataflow.sdk.util.Structs.getString;
import com.google.api.services.dataflow.model.MultiOutputInfo;
import com.google.api.services.dataflow.model.SideInputInfo;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.transforms.Combine;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.util.CloudObject;
import com.google.cloud.dataflow.sdk.util.DoFnInfo;
import com.google.cloud.dataflow.sdk.util.NullSideInputReader;
import com.google.cloud.dataflow.sdk.util.PropertyNames;
import com.google.cloud.dataflow.sdk.util.SerializableUtils;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.util.common.worker.ParDoFn;
import com.google.cloud.dataflow.sdk.util.common.worker.StateSampler;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.common.base.Preconditions;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Nullable;
/**
 * A {@link ParDoFn} wrapping a decoded user {@link Combine.KeyedCombineFn},
 * run as one of the combiner phases defined in {@code CombinePhase}.
 */
class CombineValuesFn extends ParDoFnBase {
    /**
     * The optimizer may split the run of the user combiner into 3 separate
     * phases (ADD, MERGE, and EXTRACT), on separate VMs, as it sees
     * fit. The CombinerPhase dictates which DoFn is actually running in
     * the worker.
     */
    // TODO: These strings are part of the service definition, and
    // should be added into the definition of the ParDoInstruction,
    // but the proprietary definitions don't allow for enums yet.
    public static class CombinePhase {
        public static final String ALL = "all";
        public static final String ADD = "add";
        public static final String MERGE = "merge";
        public static final String EXTRACT = "extract";
    }

    /** Static factory mirroring the private constructor. */
    static CombineValuesFn of(
            PipelineOptions options,
            Combine.KeyedCombineFn<?, ?, ?, ?> combineFn,
            String phase,
            String stepName,
            DataflowExecutionContext executionContext,
            CounterSet.AddCounterMutator addCounterMutator)
            throws Exception {
        return new CombineValuesFn(
                options, combineFn, phase, stepName, executionContext, addCounterMutator);
    }

    /**
     * A {@link ParDoFnFactory} to create instances of {@link CombineValuesFn} according to
     * specifications from the Dataflow service.
     */
    static final class Factory implements ParDoFnFactory {
        @Override
        public ParDoFn create(
                PipelineOptions options,
                final CloudObject cloudUserFn,
                String stepName,
                @Nullable List<SideInputInfo> sideInputInfos,
                @Nullable List<MultiOutputInfo> multiOutputInfos,
                int numOutputs,
                DataflowExecutionContext executionContext,
                CounterSet.AddCounterMutator addCounterMutator,
                StateSampler stateSampler)
                throws Exception {
            // A combiner takes no side inputs and produces exactly one output.
            Preconditions.checkArgument(
                    sideInputInfos == null || sideInputInfos.size() == 0,
                    "unexpected side inputs for CombineValuesFn");
            Preconditions.checkArgument(
                    numOutputs == 1, "expected exactly one output for CombineValuesFn");
            Object deserializedFn =
                    SerializableUtils.deserializeFromByteArray(
                            getBytes(cloudUserFn, PropertyNames.SERIALIZED_FN),
                            "serialized user fn");
            Preconditions.checkArgument(
                    deserializedFn instanceof Combine.KeyedCombineFn);
            Combine.KeyedCombineFn<?, ?, ?, ?> combineFn =
                    (Combine.KeyedCombineFn<?, ?, ?, ?>) deserializedFn;
            // Get the combine phase, default to ALL. (The implementation
            // doesn't have to split the combiner).
            String phase = getString(cloudUserFn, PropertyNames.PHASE, CombinePhase.ALL);
            return CombineValuesFn.of(
                    options,
                    combineFn,
                    phase,
                    stepName,
                    executionContext,
                    addCounterMutator);
        }
    }

    /** Selects the phase-specific DoFn that this worker should run. */
    @Override
    protected DoFnInfo<?, ?> getDoFnInfo() {
        DoFn doFn = null;
        switch (phase) {
            case CombinePhase.ALL:
                doFn = new CombineValuesDoFn(combineFn);
                break;
            case CombinePhase.ADD:
                doFn = new AddInputsDoFn(combineFn);
                break;
            case CombinePhase.MERGE:
                doFn = new MergeAccumulatorsDoFn(combineFn);
                break;
            case CombinePhase.EXTRACT:
                doFn = new ExtractOutputDoFn(combineFn);
                break;
            default:
                throw new IllegalArgumentException(
                        "phase must be one of 'all', 'add', 'merge', 'extract'");
        }
        return new DoFnInfo<>(doFn, null);
    }

    // Which CombinePhase constant this instance executes.
    private final String phase;
    // The deserialized user combiner delegated to by every phase DoFn.
    private final Combine.KeyedCombineFn<?, ?, ?, ?> combineFn;

    private CombineValuesFn(
            PipelineOptions options,
            Combine.KeyedCombineFn<?, ?, ?, ?> combineFn,
            String phase,
            String stepName,
            DataflowExecutionContext executionContext,
            CounterSet.AddCounterMutator addCounterMutator) {
        super(
                options,
                NullSideInputReader.empty(),
                Arrays.asList("output"),
                stepName,
                executionContext,
                addCounterMutator);
        this.phase = phase;
        this.combineFn = combineFn;
    }

    /**
     * The ALL phase is the unsplit combiner, in case combiner lifting
     * is disabled or the optimizer chose not to lift this combiner.
     */
    private static class CombineValuesDoFn<K, InputT, OutputT>
            extends DoFn<KV<K, Iterable<InputT>>, KV<K, OutputT>>{
        private static final long serialVersionUID = 0L;

        private final Combine.KeyedCombineFn<K, InputT, ?, OutputT> combineFn;

        private CombineValuesDoFn(
                Combine.KeyedCombineFn<K, InputT, ?, OutputT> combineFn) {
            this.combineFn = combineFn;
        }

        @Override
        public void processElement(ProcessContext c) {
            KV<K, Iterable<InputT>> kv = c.element();
            K key = kv.getKey();
            c.output(KV.of(key, this.combineFn.apply(key, kv.getValue())));
        }
    }

    /*
     * ADD phase: KV<K, Iterable<InputT>> -> KV<K, AccumT>.
     */
    private static class AddInputsDoFn<K, InputT, AccumT>
            extends DoFn<KV<K, Iterable<InputT>>, KV<K, AccumT>>{
        private static final long serialVersionUID = 0L;

        private final Combine.KeyedCombineFn<K, InputT, AccumT, ?> combineFn;

        private AddInputsDoFn(
                Combine.KeyedCombineFn<K, InputT, AccumT, ?> combineFn) {
            this.combineFn = combineFn;
        }

        @Override
        public void processElement(ProcessContext c) {
            KV<K, Iterable<InputT>> kv = c.element();
            K key = kv.getKey();
            // Fold every input value into a fresh per-key accumulator.
            AccumT accum = this.combineFn.createAccumulator(key);
            for (InputT input : kv.getValue()) {
                accum = this.combineFn.addInput(key, accum, input);
            }
            c.output(KV.of(key, accum));
        }
    }

    /*
     * MERGE phase: KV<K, Iterable<AccumT>> -> KV<K, AccumT>.
     */
    private static class MergeAccumulatorsDoFn<K, AccumT>
            extends DoFn<KV<K, Iterable<AccumT>>, KV<K, AccumT>>{
        private static final long serialVersionUID = 0L;

        private final Combine.KeyedCombineFn<K, ?, AccumT, ?> combineFn;

        private MergeAccumulatorsDoFn(
                Combine.KeyedCombineFn<K, ?, AccumT, ?> combineFn) {
            this.combineFn = combineFn;
        }

        @Override
        public void processElement(ProcessContext c) {
            KV<K, Iterable<AccumT>> kv = c.element();
            K key = kv.getKey();
            AccumT accum = this.combineFn.mergeAccumulators(key, kv.getValue());
            c.output(KV.of(key, accum));
        }
    }

    /*
     * EXTRACT phase: KV<K, AccumT> -> KV<K, OutputT>.
     */
    private static class ExtractOutputDoFn<K, AccumT, OutputT>
            extends DoFn<KV<K, AccumT>, KV<K, OutputT>>{
        private static final long serialVersionUID = 0L;

        private final Combine.KeyedCombineFn<K, ?, AccumT, OutputT> combineFn;

        private ExtractOutputDoFn(
                Combine.KeyedCombineFn<K, ?, AccumT, OutputT> combineFn) {
            this.combineFn = combineFn;
        }

        @Override
        public void processElement(ProcessContext c) {
            KV<K, AccumT> kv = c.element();
            K key = kv.getKey();
            OutputT output = this.combineFn.extractOutput(key, kv.getValue());
            c.output(KV.of(key, output));
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.qjournal.client;
import static org.apache.hadoop.hdfs.qjournal.QJMTestUtil.FAKE_NSINFO;
import static org.apache.hadoop.hdfs.qjournal.QJMTestUtil.JID;
import static org.apache.hadoop.hdfs.qjournal.QJMTestUtil.writeSegment;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.SortedSet;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.hdfs.qjournal.MiniJournalCluster;
import org.apache.hadoop.hdfs.qjournal.QJMTestUtil;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol;
import org.apache.hadoop.hdfs.qjournal.server.JournalFaultInjector;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
import org.apache.hadoop.hdfs.server.namenode.EditLogOutputStream;
import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.util.Holder;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.MoreExecutors;
public class TestQJMWithFaults {
private static final Log LOG = LogFactory.getLog(
        TestQJMWithFaults.class);

// System property that pins the randomized test to a reproducible seed.
private static final String RAND_SEED_PROPERTY =
        "TestQJMWithFaults.random-seed";

// Iteration counts for testRandomized: writers spawned, segments each writes.
private static final int NUM_WRITER_ITERS = 500;
private static final int SEGMENTS_PER_WRITER = 2;

private static final Configuration conf = new Configuration();

static {
    // Don't retry connections - it just slows down the tests.
    conf.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
    // Make tests run faster by avoiding fsync()
    EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
}

// Set up fault injection mock.
private static final JournalFaultInjector faultInjector =
        JournalFaultInjector.instance = Mockito.mock(JournalFaultInjector.class);
/**
 * Run through the creation of a log without any faults injected,
 * and count how many RPCs are made to each node. This sets the
 * bounds for the other test cases, so they can exhaustively explore
 * the space of potential failures.
 *
 * @return the per-node RPC count of a fault-free workload
 */
private static long determineMaxIpcNumber() throws Exception {
    Configuration conf = new Configuration();
    MiniJournalCluster cluster = new MiniJournalCluster.Builder(conf).build();
    QuorumJournalManager qjm = null;
    long ret;
    try {
        qjm = createInjectableQJM(cluster);
        qjm.format(FAKE_NSINFO);
        doWorkload(cluster, qjm);
        SortedSet<Integer> ipcCounts = Sets.newTreeSet();
        for (AsyncLogger l : qjm.getLoggerSetForTests().getLoggersForTests()) {
            InvocationCountingChannel ch = (InvocationCountingChannel)l;
            // Quiesce outstanding async calls so counts are stable.
            ch.waitForAllPendingCalls();
            ipcCounts.add(ch.getRpcCount());
        }
        // All of the loggers should have sent the same number of RPCs, since there
        // were no failures.
        assertEquals(1, ipcCounts.size());
        ret = ipcCounts.first();
        LOG.info("Max IPC count = " + ret);
    } finally {
        IOUtils.closeStream(qjm);
        cluster.shutdown();
    }
    return ret;
}
/**
 * Sets up two of the nodes to each drop a single RPC, at all
 * possible combinations of RPCs. This may result in the
 * active writer failing to write. After this point, a new writer
 * should be able to recover and continue writing without
 * data loss.
 */
@Test
public void testRecoverAfterDoubleFailures() throws Exception {
    final long MAX_IPC_NUMBER = determineMaxIpcNumber();
    // Exhaustively try every (failA, failB) pair of dropped RPC indices.
    for (int failA = 1; failA <= MAX_IPC_NUMBER; failA++) {
        for (int failB = 1; failB <= MAX_IPC_NUMBER; failB++) {
            String injectionStr = "(" + failA + ", " + failB + ")";
            LOG.info("\n\n-------------------------------------------\n" +
                    "Beginning test, failing at " + injectionStr + "\n" +
                    "-------------------------------------------\n\n");
            MiniJournalCluster cluster = new MiniJournalCluster.Builder(conf)
                    .build();
            QuorumJournalManager qjm = null;
            try {
                qjm = createInjectableQJM(cluster);
                qjm.format(FAKE_NSINFO);
                List<AsyncLogger> loggers = qjm.getLoggerSetForTests().getLoggersForTests();
                // Drop one RPC on each of the first two journal nodes.
                failIpcNumber(loggers.get(0), failA);
                failIpcNumber(loggers.get(1), failB);
                int lastAckedTxn = doWorkload(cluster, qjm);
                if (lastAckedTxn < 6) {
                    LOG.info("Failed after injecting failures at " + injectionStr +
                            ". This is expected since we injected a failure in the " +
                            "majority.");
                }
                qjm.close();
                qjm = null;
                // Now should be able to recover
                qjm = createInjectableQJM(cluster);
                long lastRecoveredTxn = QJMTestUtil.recoverAndReturnLastTxn(qjm);
                // Recovery must never lose an acked transaction.
                assertTrue(lastRecoveredTxn >= lastAckedTxn);
                writeSegment(cluster, qjm, lastRecoveredTxn + 1, 3, true);
            } catch (Throwable t) {
                // Test failure! Rethrow with the test setup info so it can be
                // easily triaged.
                throw new RuntimeException("Test failed with injection: " + injectionStr,
                        t);
            } finally {
                cluster.shutdown();
                cluster = null;
                IOUtils.closeStream(qjm);
                qjm = null;
            }
        }
    }
}
/**
 * Test case in which three JournalNodes randomly flip flop between
 * up and down states every time they get an RPC.
 *
 * The writer keeps track of the latest ACKed edit, and on every
 * recovery operation, ensures that it recovers at least to that
 * point or higher. Since at any given point, a majority of JNs
 * may be injecting faults, any writer operation is allowed to fail,
 * so long as the exception message indicates it failed due to injected
 * faults.
 *
 * Given a random seed, the test should be entirely deterministic.
 */
@Test
public void testRandomized() throws Exception {
    long seed;
    Long userSpecifiedSeed = Long.getLong(RAND_SEED_PROPERTY);
    if (userSpecifiedSeed != null) {
        LOG.info("Using seed specified in system property");
        seed = userSpecifiedSeed;
        // If the user specifies a seed, then we should gather all the
        // IPC trace information so that debugging is easier. This makes
        // the test run about 25% slower otherwise.
        GenericTestUtils.setLogLevel(ProtobufRpcEngine.LOG, Level.ALL);
    } else {
        seed = new Random().nextLong();
    }
    LOG.info("Random seed: " + seed);
    Random r = new Random(seed);
    MiniJournalCluster cluster = new MiniJournalCluster.Builder(conf)
            .build();
    // Format the cluster using a non-faulty QJM.
    QuorumJournalManager qjmForInitialFormat =
            createInjectableQJM(cluster);
    qjmForInitialFormat.format(FAKE_NSINFO);
    qjmForInitialFormat.close();
    try {
        long txid = 0;
        long lastAcked = 0;
        for (int i = 0; i < NUM_WRITER_ITERS; i++) {
            LOG.info("Starting writer " + i + "\n-------------------");
            QuorumJournalManager qjm = createRandomFaultyQJM(cluster, r);
            try {
                long recovered;
                try {
                    recovered = QJMTestUtil.recoverAndReturnLastTxn(qjm);
                } catch (Throwable t) {
                    // Recovery may fail under injected faults; verify the
                    // failure was injected and move on to the next writer.
                    LOG.info("Failed recovery", t);
                    checkException(t);
                    continue;
                }
                // Invariant: recovery never loses an acked transaction.
                assertTrue("Recovered only up to txnid " + recovered +
                        " but had gotten an ack for " + lastAcked,
                        recovered >= lastAcked);
                txid = recovered + 1;
                // Periodically purge old data on disk so it's easier to look
                // at failure cases.
                if (txid > 100 && i % 10 == 1) {
                    qjm.purgeLogsOlderThan(txid - 100);
                }
                Holder<Throwable> thrown = new Holder<Throwable>(null);
                for (int j = 0; j < SEGMENTS_PER_WRITER; j++) {
                    lastAcked = writeSegmentUntilCrash(cluster, qjm, txid, 4, thrown);
                    if (thrown.held != null) {
                        LOG.info("Failed write", thrown.held);
                        checkException(thrown.held);
                        break;
                    }
                    txid += 4;
                }
            } finally {
                qjm.close();
            }
        }
    } finally {
        cluster.shutdown();
    }
}
/**
 * Verify that a failure seen by the test originated from an injected
 * fault ("Injected" in the message) and was not a real assertion failure.
 */
private void checkException(Throwable t) {
  // Must be an injected fault; anything else is a genuine bug.
  GenericTestUtils.assertExceptionContains("Injected", t);
  String description = t.toString();
  if (description.contains("AssertionError")) {
    throw new RuntimeException(
        "Should never see AssertionError in fault test!", t);
  }
}
/**
 * Write a log segment of {@code numTxns} transactions starting at
 * {@code txid}, recording any thrown failure into {@code thrown}.
 *
 * @return the highest txid that was ACKed before any crash
 */
private long writeSegmentUntilCrash(MiniJournalCluster cluster,
    QuorumJournalManager qjm, long txid, int numTxns, Holder<Throwable> thrown) {
  final long segmentStartTxId = txid;
  long lastAcked = txid - 1;
  try {
    EditLogOutputStream out = qjm.startLogSegment(txid,
        NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
    int written = 0;
    while (written < numTxns) {
      // Each successfully written txn counts as ACKed.
      QJMTestUtil.writeTxns(out, txid, 1);
      txid++;
      lastAcked++;
      written++;
    }
    out.close();
    qjm.finalizeLogSegment(segmentStartTxId, lastAcked);
  } catch (Throwable t) {
    // Hand the failure back to the caller rather than propagating it.
    thrown.held = t;
  }
  return lastAcked;
}
/**
 * Run a simple workload of becoming the active writer and writing
 * two log segments: 1-3 and 4-6.
 *
 * @return the highest txid known to have been ACKed (0, 3, or 6)
 */
private static int doWorkload(MiniJournalCluster cluster,
    QuorumJournalManager qjm) throws IOException {
  int lastAcked = 0;
  try {
    qjm.recoverUnfinalizedSegments();
    // First segment: txids 1..3.
    writeSegment(cluster, qjm, 1, 3, true);
    lastAcked = 3;
    // Second segment: txids 4..6.
    writeSegment(cluster, qjm, 4, 3, true);
    lastAcked = 6;
  } catch (QuorumException qe) {
    // A quorum failure is acceptable here; report how far we got.
    LOG.info("Failed to write at txid " + lastAcked, qe);
  }
  return lastAcked;
}
/**
 * Inject a failure at the given IPC number, such that the JN never
 * receives the RPC. The client side sees an IOException. Future
 * IPCs after this number will be received as usual.
 */
private void failIpcNumber(AsyncLogger logger, int idx) {
  // Loggers built by createInjectableQJM are always
  // InvocationCountingChannels, so this downcast is safe in these tests.
  ((InvocationCountingChannel)logger).failIpcNumber(idx);
}
/**
 * An IPCLoggerChannel that randomly flips between "up" and "down" states
 * on each RPC (throwing an injected IOException while down), and that
 * occasionally injects faults around persisting paxos data during
 * recovery. Behavior is deterministic for a given seed.
 */
private static class RandomFaultyChannel extends IPCLoggerChannel {
  private final Random random;
  // Per-RPC probability of flipping the up/down state, and of injecting
  // a paxos-persistence fault during acceptRecovery.
  private final float injectionProbability = 0.1f;
  private boolean isUp = true;

  public RandomFaultyChannel(Configuration conf, NamespaceInfo nsInfo,
      String journalId, InetSocketAddress addr, long seed) {
    super(conf, nsInfo, journalId, addr);
    // Private Random so each channel's fault sequence is reproducible.
    this.random = new Random(seed);
  }

  @Override
  protected QJournalProtocol createProxy() throws IOException {
    QJournalProtocol realProxy = super.createProxy();
    return mockProxy(
        new WrapEveryCall<Object>(realProxy) {
          @Override
          void beforeCall(InvocationOnMock invocation) throws Exception {
            // Randomly flip the simulated node between up and down.
            if (random.nextFloat() < injectionProbability) {
              isUp = !isUp;
              LOG.info("transitioned " + addr + " to " +
                  (isUp ? "up" : "down"));
            }
            if (!isUp) {
              throw new IOException("Injected - faking being down");
            }
            // During recovery, sometimes fail just before or just after
            // paxos data is persisted, to exercise both crash windows.
            if (invocation.getMethod().getName().equals("acceptRecovery")) {
              if (random.nextFloat() < injectionProbability) {
                Mockito.doThrow(new IOException(
                    "Injected - faking fault before persisting paxos data"))
                    .when(faultInjector).beforePersistPaxosData();
              } else if (random.nextFloat() < injectionProbability) {
                Mockito.doThrow(new IOException(
                    "Injected - faking fault after persisting paxos data"))
                    .when(faultInjector).afterPersistPaxosData();
              }
            }
          }

          @Override
          public void afterCall(InvocationOnMock invocation, boolean succeeded) {
            // Clear injected paxos faults so they affect at most one call.
            Mockito.reset(faultInjector);
          }
        });
  }

  @Override
  protected ExecutorService createSingleThreadExecutor() {
    // Run logger tasks inline on the calling thread so the test stays
    // deterministic for a given seed.
    return MoreExecutors.sameThreadExecutor();
  }
}
/**
 * An IPCLoggerChannel that counts the RPCs issued through it and can be
 * programmed to fail at a specific (1-based) IPC number.
 */
private static class InvocationCountingChannel extends IPCLoggerChannel {
  // Number of RPCs issued through this channel so far.
  private int rpcCount = 0;
  // Map from RPC number to code to run just before that RPC is delivered.
  private final Map<Integer, Callable<Void>> injections = Maps.newHashMap();

  public InvocationCountingChannel(Configuration conf, NamespaceInfo nsInfo,
      String journalId, InetSocketAddress addr) {
    super(conf, nsInfo, journalId, addr);
  }

  int getRpcCount() {
    return rpcCount;
  }

  /**
   * Arrange for the IPC with the given number to throw an IOException
   * instead of reaching the JournalNode.
   */
  void failIpcNumber(final int idx) {
    Preconditions.checkArgument(idx > 0,
        "id must be positive");
    inject(idx, new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        throw new IOException("injected failed IPC at " + idx);
      }
    });
  }

  private void inject(int beforeRpcNumber, Callable<Void> injectedCode) {
    injections.put(beforeRpcNumber, injectedCode);
  }

  @Override
  protected QJournalProtocol createProxy() throws IOException {
    final QJournalProtocol realProxy = super.createProxy();
    // Returned directly; the intermediate local added nothing.
    return mockProxy(
        new WrapEveryCall<Object>(realProxy) {
          // Fix: @Override was missing on this override of the abstract
          // WrapEveryCall.beforeCall hook.
          @Override
          void beforeCall(InvocationOnMock invocation) throws Exception {
            rpcCount++;
            String callStr = "[" + addr + "] " +
                invocation.getMethod().getName() + "(" +
                Joiner.on(", ").join(invocation.getArguments()) + ")";
            // If a failure is scheduled for this RPC number, run it now.
            Callable<Void> inject = injections.get(rpcCount);
            if (inject != null) {
              LOG.info("Injecting code before IPC #" + rpcCount + ": " +
                  callStr);
              inject.call();
            } else {
              LOG.info("IPC call #" + rpcCount + ": " + callStr);
            }
          }
        });
  }
}
/**
 * Build a QJournalProtocol mock whose every invocation is answered by the
 * given wrapper (which delegates to the real proxy with fault hooks).
 * The mock also implements Closeable so callers can close it normally.
 */
private static QJournalProtocol mockProxy(WrapEveryCall<Object> wrapper)
    throws IOException {
  return Mockito.mock(QJournalProtocol.class,
      Mockito.withSettings()
          .defaultAnswer(wrapper)
          .extraInterfaces(Closeable.class));
}
/**
 * A Mockito Answer that forwards every call to a real object, running an
 * overridable hook before and after each forwarded call. Calls declared
 * on Closeable (i.e. close()) bypass the before-hook, since they are not
 * IPC calls and must not have faults injected.
 */
private static abstract class WrapEveryCall<T> implements Answer<T> {
  // The real object that ultimately services each invocation.
  private final Object realObj;

  WrapEveryCall(Object realObj) {
    this.realObj = realObj;
  }

  @SuppressWarnings("unchecked")
  @Override
  public T answer(InvocationOnMock invocation) throws Throwable {
    // Don't want to inject an error on close() since that isn't
    // actually an IPC call!
    if (!Closeable.class.equals(
        invocation.getMethod().getDeclaringClass())) {
      beforeCall(invocation);
    }
    boolean success = false;
    try {
      T ret = (T) invocation.getMethod().invoke(realObj,
          invocation.getArguments());
      success = true;
      return ret;
    } catch (InvocationTargetException ite) {
      // Unwrap so callers see the real exception, not the reflective one.
      throw ite.getCause();
    } finally {
      // Always runs, so afterCall can clean up even when the call failed.
      afterCall(invocation, success);
    }
  }

  /** Hook run before each forwarded call; may throw to inject a fault. */
  abstract void beforeCall(InvocationOnMock invocation) throws Exception;

  /** Hook run after each forwarded call, whether or not it succeeded. */
  void afterCall(InvocationOnMock invocation, boolean succeeded) {}
}
/**
 * Create a QuorumJournalManager whose loggers are
 * InvocationCountingChannels, so individual IPCs can be made to fail.
 */
private static QuorumJournalManager createInjectableQJM(MiniJournalCluster cluster)
    throws IOException, URISyntaxException {
  AsyncLogger.Factory countingChannelFactory = new AsyncLogger.Factory() {
    @Override
    public AsyncLogger createLogger(Configuration conf, NamespaceInfo nsInfo,
        String journalId, InetSocketAddress addr) {
      return new InvocationCountingChannel(conf, nsInfo, journalId, addr);
    }
  };
  return new QuorumJournalManager(
      conf, cluster.getQuorumJournalURI(JID), FAKE_NSINFO, countingChannelFactory);
}
/**
 * Create a QuorumJournalManager whose loggers are RandomFaultyChannels,
 * each seeded from the given generator so the run is reproducible.
 */
private static QuorumJournalManager createRandomFaultyQJM(
    MiniJournalCluster cluster, final Random seedGenerator)
    throws IOException, URISyntaxException {
  AsyncLogger.Factory faultyChannelFactory = new AsyncLogger.Factory() {
    @Override
    public AsyncLogger createLogger(Configuration conf, NamespaceInfo nsInfo,
        String journalId, InetSocketAddress addr) {
      // Each channel gets its own seed drawn from the shared generator.
      long channelSeed = seedGenerator.nextLong();
      return new RandomFaultyChannel(conf, nsInfo, journalId, addr, channelSeed);
    }
  };
  return new QuorumJournalManager(
      conf, cluster.getQuorumJournalURI(JID), FAKE_NSINFO, faultyChannelFactory);
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.introduceParameter;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.lang.Language;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.util.FieldConflictsResolver;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.refactoring.util.javadoc.MethodJavaDocHelper;
import com.intellij.refactoring.util.usageInfo.DefaultConstructorImplicitUsageInfo;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.containers.MultiMap;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntProcedure;
import org.jetbrains.annotations.Nullable;
/**
 * Java-language implementation of the "introduce parameter" refactoring's
 * usage processing: updates call sites, the method signature, and (where
 * needed) generated constructors/super() calls when a parameter is added.
 *
 * Each process* method returns true when the usage is not handled here
 * (e.g. not a Java usage), and false after it has performed its update.
 *
 * @author Maxim.Medvedev
 */
public class JavaIntroduceParameterMethodUsagesProcessor implements IntroduceParameterMethodUsagesProcessor {
  private static final Logger LOG =
    Logger.getInstance("#com.intellij.refactoring.introduceParameter.JavaIntroduceParameterMethodUsagesProcessor");
  private static final JavaLanguage myLanguage = Language.findInstance(JavaLanguage.class);

  // A usage counts as "Java" when its PSI element belongs to the Java language.
  private static boolean isJavaUsage(UsageInfo usage) {
    PsiElement e = usage.getElement();
    return e != null && e.getLanguage().is(myLanguage);
  }

  public boolean isMethodUsage(UsageInfo usage) {
    return RefactoringUtil.isMethodUsage(usage.getElement()) && isJavaUsage(usage);
  }

  /**
   * Adds the new argument to a single call site of the method, choosing an
   * anchor so that for vararg methods the argument is inserted before the
   * trailing vararg arguments. Also removes any arguments corresponding to
   * parameters being deleted.
   */
  public boolean processChangeMethodUsage(IntroduceParameterData data, UsageInfo usage, UsageInfo[] usages) throws IncorrectOperationException {
    if (!isMethodUsage(usage)) return true;
    final PsiElement ref = usage.getElement();
    PsiCall callExpression = RefactoringUtil.getCallExpressionByMethodReference(ref);
    PsiExpressionList argList = RefactoringUtil.getArgumentListByMethodReference(ref);
    PsiExpression[] oldArgs = argList.getExpressions();

    // Pick the argument after which the new one is inserted (null = front).
    final PsiExpression anchor;
    if (!data.getMethodToSearchFor().isVarArgs()) {
      anchor = getLast(oldArgs);
    }
    else {
      final PsiParameter[] parameters = data.getMethodToSearchFor().getParameterList().getParameters();
      if (parameters.length > oldArgs.length) {
        // Vararg called with zero vararg values: append at the end.
        anchor = getLast(oldArgs);
      }
      else {
        // Insert after the last non-vararg argument so the new parameter
        // lands before the vararg tail.
        LOG.assertTrue(parameters.length > 0);
        final int lastNonVararg = parameters.length - 2;
        anchor = lastNonVararg >= 0 ? oldArgs[lastNonVararg] : null;
      }
    }

    //if we insert parameter in method usage which is contained in method in which we insert this parameter too, we must insert parameter name instead of its initializer
    PsiMethod method = PsiTreeUtil.getParentOfType(argList, PsiMethod.class);
    if (method != null && isMethodInUsages(data, method, usages)) {
      argList
        .addAfter(JavaPsiFacade.getElementFactory(data.getProject()).createExpressionFromText(data.getParameterName(), argList), anchor);
    }
    else {
      // Encode/decode context info so qualified references inside the
      // initializer stay valid after being copied into the call site.
      ChangeContextUtil.encodeContextInfo(data.getParameterInitializer(), true);
      PsiExpression newArg = (PsiExpression)argList.addAfter(data.getParameterInitializer(), anchor);
      ChangeContextUtil.decodeContextInfo(newArg, null, null);
      ChangeContextUtil.clearContextInfo(data.getParameterInitializer());

      // here comes some postprocessing...
      new OldReferenceResolver(callExpression, newArg, data.getMethodToReplaceIn(), data.getReplaceFieldsWithGetters(),
                               data.getParameterInitializer()).resolve();
    }

    // Drop arguments for parameters the refactoring removes.
    removeParametersFromCall(callExpression.getArgumentList(), data.getParametersToRemove());
    return false;
  }

  // True when the given method is itself one of the (non-implicit) usages
  // being updated by this refactoring.
  private static boolean isMethodInUsages(IntroduceParameterData data, PsiMethod method, UsageInfo[] usages) {
    PsiManager manager = PsiManager.getInstance(data.getProject());
    for (UsageInfo info : usages) {
      if (!(info instanceof DefaultConstructorImplicitUsageInfo) && manager.areElementsEquivalent(info.getElement(), method)) {
        return true;
      }
    }
    return false;
  }

  // Deletes call arguments by index, descending so earlier indices stay valid.
  private static void removeParametersFromCall(final PsiExpressionList argList, TIntArrayList parametersToRemove) {
    final PsiExpression[] exprs = argList.getExpressions();
    parametersToRemove.forEachDescending(new TIntProcedure() {
      public boolean execute(final int paramNum) {
        try {
          exprs[paramNum].delete();
        }
        catch (IncorrectOperationException e) {
          LOG.error(e);
        }
        return true;
      }
    });
  }

  /** Returns the last element of the array, or null when it is empty. */
  @Nullable
  private static PsiExpression getLast(PsiExpression[] oldArgs) {
    PsiExpression anchor;
    if (oldArgs.length > 0) {
      anchor = oldArgs[oldArgs.length - 1];
    }
    else {
      anchor = null;
    }
    return anchor;
  }

  // No Java-specific conflicts are reported by this processor.
  public void findConflicts(IntroduceParameterData data, UsageInfo[] usages, MultiMap<PsiElement, String> conflicts) {
  }

  /**
   * Adds the new parameter to the method declaration itself: deletes removed
   * parameters (and their javadoc tags), inserts the new parameter after the
   * anchor position, updates javadoc, and resolves field-name conflicts.
   */
  public boolean processChangeMethodSignature(IntroduceParameterData data, UsageInfo usage, UsageInfo[] usages) throws IncorrectOperationException {
    if (!(usage.getElement() instanceof PsiMethod) || !isJavaUsage(usage)) return true;
    PsiMethod method = (PsiMethod)usage.getElement();

    final FieldConflictsResolver fieldConflictsResolver = new FieldConflictsResolver(data.getParameterName(), method.getBody());
    final MethodJavaDocHelper javaDocHelper = new MethodJavaDocHelper(method);
    PsiElementFactory factory = JavaPsiFacade.getInstance(data.getProject()).getElementFactory();

    final PsiParameter[] parameters = method.getParameterList().getParameters();
    // Delete removed parameters in descending index order so the cached
    // parameters array stays valid for the remaining deletions.
    data.getParametersToRemove().forEachDescending(new TIntProcedure() {
      public boolean execute(final int paramNum) {
        try {
          PsiParameter param = parameters[paramNum];
          PsiDocTag tag = javaDocHelper.getTagForParameter(param);
          if (tag != null) {
            tag.delete();
          }
          param.delete();
        }
        catch (IncorrectOperationException e) {
          LOG.error(e);
        }
        return true;
      }
    });

    PsiParameter parameter = factory.createParameter(data.getParameterName(), data.getForcedType());
    PsiUtil.setModifierProperty(parameter, PsiModifier.FINAL, data.isDeclareFinal());
    final PsiParameter anchorParameter = getAnchorParameter(method);
    final PsiParameterList parameterList = method.getParameterList();
    parameter = (PsiParameter)parameterList.addAfter(parameter, anchorParameter);
    JavaCodeStyleManager.getInstance(data.getProject()).shortenClassReferences(parameter);
    final PsiDocTag tagForAnchorParameter = javaDocHelper.getTagForParameter(anchorParameter);
    javaDocHelper.addParameterAfter(data.getParameterName(), tagForAnchorParameter);

    fieldConflictsResolver.fix();

    return false;
  }

  /**
   * Chooses the parameter after which the new one is inserted: the last
   * parameter for normal methods, or the last non-vararg parameter for
   * vararg methods (null = insert at the front).
   */
  @Nullable
  private static PsiParameter getAnchorParameter(PsiMethod methodToReplaceIn) {
    PsiParameterList parameterList = methodToReplaceIn.getParameterList();
    final PsiParameter anchorParameter;
    final PsiParameter[] parameters = parameterList.getParameters();
    final int length = parameters.length;
    if (!methodToReplaceIn.isVarArgs()) {
      anchorParameter = length > 0 ? parameters[length - 1] : null;
    }
    else {
      LOG.assertTrue(length > 0);
      LOG.assertTrue(parameters[length - 1].isVarArgs());
      anchorParameter = length > 1 ? parameters[length - 2] : null;
    }
    return anchorParameter;
  }

  /**
   * For a subclass usage without a constructor, generates a default
   * constructor (matching the class visibility) and routes it through
   * processAddSuperCall so it passes the new argument up.
   */
  public boolean processAddDefaultConstructor(IntroduceParameterData data, UsageInfo usage, UsageInfo[] usages) {
    if (!(usage.getElement() instanceof PsiClass) || !isJavaUsage(usage)) return true;
    PsiClass aClass = (PsiClass)usage.getElement();
    if (!(aClass instanceof PsiAnonymousClass)) {
      final PsiElementFactory factory = JavaPsiFacade.getInstance(data.getProject()).getElementFactory();
      PsiMethod constructor = factory.createMethodFromText(aClass.getName() + "(){}", aClass);
      constructor = (PsiMethod)CodeStyleManager.getInstance(data.getProject()).reformat(constructor);
      constructor = (PsiMethod)aClass.add(constructor);
      PsiUtil.setModifierProperty(constructor, VisibilityUtil.getVisibilityModifier(aClass.getModifierList()), true);
      processAddSuperCall(data, new UsageInfo(constructor), usages);
    }
    else {
      // Anonymous classes cannot declare constructors; nothing to add.
      return true;
    }
    return false;
  }

  /**
   * Inserts a super() call as the first statement of the given constructor,
   * then treats that call as a method usage so the new argument is added.
   */
  public boolean processAddSuperCall(IntroduceParameterData data, UsageInfo usage, UsageInfo[] usages) throws IncorrectOperationException {
    if (!(usage.getElement() instanceof PsiMethod) || !isJavaUsage(usage)) return true;
    PsiMethod constructor = (PsiMethod)usage.getElement();
    if (!constructor.isConstructor()) return true;
    final PsiElementFactory factory = JavaPsiFacade.getInstance(data.getProject()).getElementFactory();
    PsiExpressionStatement superCall = (PsiExpressionStatement)factory.createStatementFromText("super();", constructor);
    superCall = (PsiExpressionStatement)CodeStyleManager.getInstance(data.getProject()).reformat(superCall);
    PsiCodeBlock body = constructor.getBody();
    final PsiStatement[] statements = body.getStatements();
    if (statements.length > 0) {
      superCall = (PsiExpressionStatement)body.addBefore(superCall, statements[0]);
    }
    else {
      superCall = (PsiExpressionStatement)body.add(superCall);
    }
    // The fresh super() call is an external usage: add the new argument to it.
    processChangeMethodUsage(data, new ExternalUsageInfo(((PsiMethodCallExpression)superCall.getExpression()).getMethodExpression()), usages);
    return false;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.rtrep.v1;
import java.util.Collection;
import java.util.Date;
import javax.xml.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.FaultException;
import org.apache.ode.bpel.evt.ActivityFailureEvent;
import org.apache.ode.bpel.evt.ActivityRecoveryEvent;
import org.apache.ode.bpel.evt.VariableModificationEvent;
import org.apache.ode.bpel.rtrep.v1.OFailureHandling;
import org.apache.ode.bpel.rtrep.v1.OInvoke;
import org.apache.ode.bpel.rtrep.v1.OScope;
import org.apache.ode.bpel.rtrep.v1.channels.ActivityRecoveryChannel;
import org.apache.ode.bpel.rtrep.v1.channels.ActivityRecoveryChannelListener;
import org.apache.ode.bpel.rtrep.v1.channels.FaultData;
import org.apache.ode.bpel.rtrep.v1.channels.InvokeResponseChannel;
import org.apache.ode.bpel.rtrep.v1.channels.InvokeResponseChannelListener;
import org.apache.ode.bpel.rtrep.v1.channels.TerminationChannelListener;
import org.apache.ode.utils.DOMUtils;
import org.apache.ode.bpel.evar.ExternalVariableModuleException;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
 * JacobRunnable that performs the work of the <code>invoke</code> activity.
 *
 * Handles both one-way invokes (no output variable) and two-way invokes,
 * including fault/failure handling and user-driven activity recovery
 * (retry / cancel / fault).
 */
public class INVOKE extends ACTIVITY {
    private static final long serialVersionUID = 992248281026821783L;
    private static final Log __log = LogFactory.getLog(INVOKE.class);

    private OInvoke _oinvoke;
    // Records number of invocations on the activity.
    private int _invoked;
    // Date/time of last failure.
    // NOTE(review): _lastFailure, _failureReason and _failureData are read in
    // requireRecovery() but never assigned in this class — presumably restored
    // from serialized state or set elsewhere; confirm before relying on them.
    private Date _lastFailure;
    // Reason for last failure.
    private String _failureReason;
    // Data associated with failure.
    private Element _failureData;

    public INVOKE(ActivityInfo self, ScopeFrame scopeFrame, LinkFrame linkFrame) {
        super(self, scopeFrame, linkFrame);
        _oinvoke = (OInvoke) _self.o;
        _invoked = 0;
    }

    public final void run() {
        Element outboundMsg;
        try {
            // Build the outbound message and initiate input correlations.
            outboundMsg = setupOutbound(_oinvoke, _oinvoke.initCorrelationsInput);
        } catch (FaultException e) {
            __log.error(e);
            FaultData fault = createFault(e.getQName(), _oinvoke);
            _self.parent.completed(fault, CompensationHandler.emptySet());
            return;
        } catch (ExternalVariableModuleException e) {
            __log.error(e);
            _self.parent.failure(e.toString(), null);
            return;
        }
        ++_invoked;

        // if there is no output variable, then this is a one-way invoke
        boolean isTwoWay = _oinvoke.outputVar != null;

        try {
            if (!isTwoWay) {
                // One-way: fire and complete immediately, no response expected.
                FaultData faultData = null;
                getBpelRuntime().invoke(null, _scopeFrame.resolve(_oinvoke.partnerLink), _oinvoke.operation, outboundMsg, null);
                _self.parent.completed(faultData, CompensationHandler.emptySet());
            } else /* two-way */{
                final VariableInstance outputVar = _scopeFrame.resolve(_oinvoke.outputVar);
                final InvokeResponseChannel invokeResponseChannel = newChannel(InvokeResponseChannel.class);

                final String mexId = getBpelRuntime().invoke(invokeResponseChannel.export(),
                    _scopeFrame.resolve(_oinvoke.partnerLink), _oinvoke.operation, outboundMsg, invokeResponseChannel);

                object(false, new InvokeResponseChannelListener(invokeResponseChannel) {
                    // Fix: this field was misspelled "serialVerstmptmpionUID",
                    // leaving the anonymous listener without an explicit
                    // serialVersionUID (the declared constant was unused).
                    private static final long serialVersionUID = 4496880438819196765L;

                    public void onResponse() {
                        // we don't have to write variable data -> this already
                        // happened in the nativeAPI impl
                        FaultData fault = null;

                        Element response;
                        try {
                            response = getBpelRuntime().getPartnerResponse(mexId);
                        } catch (Exception e) {
                            // A missing response at this point is a runtime bug.
                            __log.error("Exception while processing invoke response", e);
                            throw new RuntimeException(e);
                        }

                        try {
                            initializeVariable(outputVar, response);
                        } catch (ExternalVariableModuleException e) {
                            __log.error("Exception while initializing external variable", e);
                            _self.parent.failure(e.toString(), null);
                            return;
                        }

                        // Generating event
                        VariableModificationEvent se = new VariableModificationEvent(outputVar.declaration.name);
                        se.setNewValue(response);
                        if (_oinvoke.debugInfo != null)
                            se.setLineNo(_oinvoke.debugInfo.startLine);
                        sendEvent(se);

                        try {
                            // Initiate output correlations from the response.
                            for (OScope.CorrelationSet anInitCorrelationsOutput : _oinvoke.initCorrelationsOutput) {
                                initializeCorrelation(_scopeFrame.resolve(anInitCorrelationsOutput), outputVar);
                            }
                            if (_oinvoke.partnerLink.hasPartnerRole()) {
                                // Trying to initialize partner epr based on a message-provided epr/session.
                                if (!getBpelRuntime().isPartnerRoleEndpointInitialized(_scopeFrame
                                    .resolve(_oinvoke.partnerLink)) || !_oinvoke.partnerLink.initializePartnerRole) {
                                    Node fromEpr = getBpelRuntime().getSourceEPR(mexId);
                                    if (fromEpr != null) {
                                        getBpelRuntime().writeEndpointReference(
                                            _scopeFrame.resolve(_oinvoke.partnerLink), (Element) fromEpr);
                                    }
                                }

                                String partnersSessionId = getBpelRuntime().getSourceSessionId(mexId);
                                if (partnersSessionId != null)
                                    getBpelRuntime().initializePartnersSessionId(_scopeFrame.resolve(_oinvoke.partnerLink),
                                        partnersSessionId);
                            }
                        } catch (FaultException e) {
                            fault = createFault(e.getQName(), _oinvoke);
                        }

                        // TODO update output variable with data from non-initiate correlation sets

                        _self.parent.completed(fault, CompensationHandler.emptySet());
                        getBpelRuntime().releasePartnerMex(mexId, fault == null);
                    }

                    public void onFault() {
                        // Partner returned a WSDL fault: surface it as FaultData.
                        QName faultName = getBpelRuntime().getPartnerFault(mexId);
                        Element msg = getBpelRuntime().getPartnerResponse(mexId);
                        QName msgType = getBpelRuntime().getPartnerResponseType(mexId);
                        FaultData fault = createFault(faultName, msg,
                            _oinvoke.getOwner().messageTypes.get(msgType), _self.o);
                        _self.parent.completed(fault, CompensationHandler.emptySet());
                        getBpelRuntime().releasePartnerMex(mexId, false);
                    }

                    public void onFailure() {
                        // This indicates a communication failure. We don't throw a fault,
                        // because there is no fault, instead we'll re-incarnate the invoke
                        // and either retry or indicate failure condition.
                        // admin to resume the process.
                        String reason = getBpelRuntime().getPartnerFaultExplanation(mexId);
                        __log.error("Failure during invoke: " + reason);
                        try {
                            Element el = DOMUtils.stringToDOM("<invokeFailure><![CDATA["+reason+"]]></invokeFailure>");
                            _self.parent.failure(reason, el);
                        } catch (Exception e) {
                            // Reason text could not be wrapped as XML; report without data.
                            _self.parent.failure(reason, null);
                        }
                        getBpelRuntime().releasePartnerMex(mexId, false);
                    }
                }.or(new TerminationChannelListener(_self.self) {
                    private static final long serialVersionUID = 4219496341785922396L;

                    public void terminate() {
                        _self.parent.completed(null, CompensationHandler.emptySet());

                        // Keep listening for the (now irrelevant) response so the
                        // channel is drained, but ignore whatever arrives.
                        object(new InvokeResponseChannelListener(invokeResponseChannel) {
                            private static final long serialVersionUID = 688746737897792929L;

                            public void onFailure() {
                                __log.debug("Failure on invoke ignored, the invoke has already been terminated: " + _oinvoke.toString());
                            }

                            public void onFault() {
                                __log.debug("Fault on invoke ignored, the invoke has already been terminated: " + _oinvoke.toString());
                            }

                            public void onResponse() {
                                __log.debug("Response on invoke ignored, the invoke has already been terminated: " + _oinvoke.toString());
                            }
                        });
                    }
                }));
            }
        } catch (FaultException fault) {
            __log.error(fault);
            FaultData faultData = createFault(fault.getQName(), _oinvoke, fault.getMessage());
            _self.parent.completed(faultData, CompensationHandler.emptySet());
        }
    }

    /**
     * Initiate outbound correlation sets and fetch the outbound message
     * from the input variable. Returns null for operations whose input
     * message has no parts.
     */
    private Element setupOutbound(OInvoke oinvoke, Collection<OScope.CorrelationSet> outboundInitiations)
        throws FaultException, ExternalVariableModuleException {
        if (outboundInitiations.size() > 0) {
            for (OScope.CorrelationSet c : outboundInitiations) {
                initializeCorrelation(_scopeFrame.resolve(c), _scopeFrame.resolve(oinvoke.inputVar));
            }
        }

        if (oinvoke.operation.getInput().getMessage().getParts().size() > 0) {
            sendVariableReadEvent(_scopeFrame.resolve(oinvoke.inputVar));
            Node outboundMsg = fetchVariableData(_scopeFrame.resolve(oinvoke.inputVar), false);
            // TODO outbound message should be updated with non-initiate correlation sets
            assert outboundMsg instanceof Element;
            return (Element) outboundMsg;
        } else return null;
    }

    /**
     * Register this activity for user-driven recovery, offering retry,
     * cancel and fault actions, and wait for the chosen action (or scope
     * termination).
     */
    private void requireRecovery() {
        if (__log.isDebugEnabled())
            __log.debug("ActivityRecovery: Invoke activity " + _self.aId + " requires recovery");
        sendEvent(new ActivityFailureEvent(_failureReason));
        final ActivityRecoveryChannel recoveryChannel = newChannel(ActivityRecoveryChannel.class);
        getBpelRuntime().registerActivityForRecovery(recoveryChannel, _self.aId, _failureReason, _lastFailure, _failureData,
            new String[] { "retry", "cancel", "fault" }, _invoked - 1);
        object(false, new ActivityRecoveryChannelListener(recoveryChannel) {
            private static final long serialVersionUID = 8397883882810521685L;

            public void retry() {
                if (__log.isDebugEnabled())
                    __log.debug("ActivityRecovery: Retrying invoke activity " + _self.aId + " (user initiated)");
                sendEvent(new ActivityRecoveryEvent("retry"));
                getBpelRuntime().unregisterActivityForRecovery(recoveryChannel);
                // Re-incarnate this invoke so it runs again from the top.
                instance(INVOKE.this);
            }

            public void cancel() {
                if (__log.isDebugEnabled())
                    __log.debug("ActivityRecovery: Cancelling invoke activity " + _self.aId + " (user initiated)");
                sendEvent(new ActivityRecoveryEvent("cancel"));
                getBpelRuntime().unregisterActivityForRecovery(recoveryChannel);
                _self.parent.cancelled();
            }

            public void fault(FaultData faultData) {
                if (__log.isDebugEnabled())
                    __log.debug("ActivityRecovery: Faulting invoke activity " + _self.aId + " (user initiated)");
                sendEvent(new ActivityRecoveryEvent("fault"));
                getBpelRuntime().unregisterActivityForRecovery(recoveryChannel);
                if (faultData == null)
                    faultData = createFault(OFailureHandling.FAILURE_FAULT_NAME, _self.o, _failureReason);
                _self.parent.completed(faultData, CompensationHandler.emptySet());
            }
        }.or(new TerminationChannelListener(_self.self) {
            private static final long serialVersionUID = 2148587381204858397L;

            public void terminate() {
                if (__log.isDebugEnabled())
                    __log.debug("ActivityRecovery: Cancelling invoke activity " + _self.aId + " (terminated by scope)");
                getBpelRuntime().unregisterActivityForRecovery(recoveryChannel);
                _self.parent.completed(null, CompensationHandler.emptySet());
            }
        }));
    }
}
| |
/*******************************************************************************
* Copyright (c) : See the COPYRIGHT file in top-level/project directory
*******************************************************************************/
package edu.crest.dlt.transfer;
import java.net.Socket;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import edu.crest.dlt.exnode.Mapping;
import edu.crest.dlt.ibp.Depot;
/**
 * Tracks overall transfer progress plus per-host statistics, and provides
 * list-backed max-heap helpers for selecting the "best" depot/mapping.
 * All public methods are synchronized on this instance.
 */
public class Scoreboard
{
  private static Logger log = Logger.getLogger(Scoreboard.class.getName());

  /* overall progress across the whole transfer */
  private Progress transfer_progress = null;
  /* per-host statistics, keyed by depot host name */
  private Map<String, TransferStatistics> host_statistics = null;

  public Scoreboard()
  {
    transfer_progress = new Progress(0);
    host_statistics = new HashMap<String, TransferStatistics>();
  }

  /** Percentage of expected bytes transferred so far. */
  public synchronized double percent_completed()
  {
    return transfer_progress.percent_transferred();
  }

  public synchronized void progress_listener(ProgressListener progress_listener)
  {
    transfer_progress.progress_listener(progress_listener);
  }

  /** Reset progress tracking for a transfer of the given expected size. */
  public synchronized void progress_start(long bytes_expected)
  {
    transfer_progress.progress_reset(bytes_expected);
  }

  private synchronized void progress_update(Depot depot_tried, long offset_transferred,
      long bytes_transferred, String transfer_status)
  {
    transfer_progress.try_update(depot_tried, offset_transferred, bytes_transferred,
        transfer_status);
  }

  /** Record an unrecoverable error and close out the current try. */
  public synchronized void progress_error()
  {
    transfer_progress.notify_error();
    transfer_progress.try_end(0);
  }

  /**
   * Begin a transfer attempt against the given depot.
   *
   * @return a connected socket from the depot, or null when the depot is
   *         null or has no free socket
   */
  public synchronized Socket try_start(Depot depot_to_try)
  {
    transfer_progress.try_start();
    if (depot_to_try != null) {
      /* request the depot for a new free socket to read on */
      Socket socket = depot_to_try.connect();

      /* if free socket is found, start the try */
      if (socket != null) {
        if (!host_statistics.containsKey(depot_to_try.host)) {
          host_statistics.put(depot_to_try.host, new TransferStatistics());
        }
        host_statistics.get(depot_to_try.host).try_start();
      }
      return socket;
    }
    return null;
  }

  /**
   * End a transfer attempt: release the socket back to the depot, update
   * overall progress, and record per-host statistics. Exceptions during
   * cleanup are logged rather than propagated (best-effort bookkeeping).
   */
  public synchronized void try_end(Depot depot_tried, Socket socket_tried, long offset_transferred,
      long bytes_transferred, String transfer_status)
  {
    transfer_progress.try_end(bytes_transferred);
    try {
      /* release the socket for others */
      depot_tried.release(socket_tried, bytes_transferred);
      progress_update(depot_tried, offset_transferred, bytes_transferred, transfer_status);
      if (socket_tried != null && host_statistics.containsKey(depot_tried.host)) {
        /* end try on target host */
        host_statistics.get(depot_tried.host).try_end(bytes_transferred);
      }
    } catch (Exception e) {
      log.warning("Error ending try for depot=" + depot_tried + " socket=" + socket_tried
          + " bytes-transferred=" + bytes_transferred + ". " + e);
    }
  }

  /*
   * Max Heap Operations
   */

  /**
   * Parent index of pos in a 0-based array-backed binary heap.
   * Fix: was {@code pos / 2}, which is the 1-based heap formula; on these
   * 0-based lists it returned the wrong parent for even positions
   * (e.g. parent(2) was 1 instead of 0), breaking the sift-up in add().
   */
  private int parent(int pos)
  {
    return (pos - 1) / 2;
  }

  /** Insert a mapping into the max-heap (no-op if already present). */
  public synchronized void add(List<Mapping> mappings_max_heap, Mapping mapping)
  {
    if (!mappings_max_heap.contains(mapping)) {
      /* add new mapping to end of the maxHeap */
      mappings_max_heap.add(mapping);
      int current = mappings_max_heap.size() - 1;

      /* heapify starting from the newly inserted mapping */
      while (parent(current) < current
          && mappings_max_heap.get(current).allocation.depot.is(Depot.BETTER,
              mappings_max_heap.get(parent(current)).allocation.depot)) {
        Collections.swap(mappings_max_heap, current, parent(current));
        current = parent(current);
      }
    }
    // mappings_max_heap_display(mappings_max_heap);
  }

  /**
   * Return the current best mapping, or null when the heap is empty.
   * NOTE(review): remove(0) shifts every element, so the remaining list is
   * not a valid heap before the head is re-added; the re-add only sifts the
   * head up from the tail. Consider a proper sift-down here — confirm
   * intended semantics before changing.
   */
  public synchronized Mapping mapping_best(List<Mapping> mappings_max_heap)
  {
    if (mappings_max_heap.size() > 0) {
      /* remove head of MaxHeap */
      Mapping mapping_head = mappings_max_heap.remove(0);
      Mapping mapping_best = mapping_head;

      /* reinsert (so that the new depot gets re-heapified) */
      add(mappings_max_heap, mapping_head);
      return mapping_best;
    }
    return null;
  }

  public synchronized void mappings_max_heap_display(List<Mapping> mappings_max_heap)
  {
    System.out
        .println("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Depot Max Heap ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
    for (int i = 0; i < mappings_max_heap.size(); i++) {
      System.out.printf("[%s]", mappings_max_heap.get(i));
      /* printing [Parent] [Left-Child] [Right-Child] triplets */
      if ((i + 1) % 3 == 0) {
        System.out.printf("\n");
      }
    }
    System.out
        .println("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
  }

  /** Insert a depot into the max-heap (no-op if already present). */
  public synchronized void add(List<Depot> depots_max_heap, Depot depot)
  {
    if (!depots_max_heap.contains(depot)) {
      /* add new depot to end of the maxHeap */
      depots_max_heap.add(depot);
      int current = depots_max_heap.size() - 1;

      /* heapify starting from the newly inserted depot */
      while (parent(current) < current
          && depots_max_heap.get(current).is(Depot.BETTER, depots_max_heap.get(parent(current)))) {
        Collections.swap(depots_max_heap, current, parent(current));
        current = parent(current);
      }
    }
    // mappings_max_heap_display(mappings_max_heap);
  }

  /**
   * Return the current best depot, or null when the heap is empty.
   * NOTE(review): same remove(0)/re-add caveat as mapping_best().
   */
  public synchronized Depot depot_best(List<Depot> depots_max_heap)
  {
    if (depots_max_heap.size() > 0) {
      /* remove head of MaxHeap */
      Depot depot_head = depots_max_heap.remove(0);
      Depot depot_best = depot_head;

      /* reinsert (so that the new depot gets re-heapified) */
      add(depots_max_heap, depot_head);
      return depot_best;
    }
    return null;
  }

  /**
   * Return the best {@code count_wanted_depots} depots, or null when the
   * heap holds fewer than that many.
   */
  public synchronized List<Depot> depots_best(List<Depot> depots_max_heap, int count_wanted_depots)
  {
    if (depots_max_heap.size() >= count_wanted_depots) {
      List<Depot> depots_best = new ArrayList<Depot>(count_wanted_depots);
      /* remove count-max from MaxHeap */
      for (int i = 0; i < count_wanted_depots; i++) {
        depots_best.add(depots_max_heap.remove(0));
      }
      for (int i = 0; i < count_wanted_depots; i++) {
        /* reinsert (so that the new depot gets re-heapified) */
        add(depots_max_heap, depots_best.get(i));
      }
      return depots_best;
    }
    return null;
  }

  public synchronized void depots_max_heap_display(List<Depot> depots_max_heap)
  {
    System.out
        .println("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Depot Max Heap ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
    for (int i = 0; i < depots_max_heap.size(); i++) {
      System.out.printf("[%s]", depots_max_heap.get(i));
      /* printing [Parent] [Left-Child] [Right-Child] triplets */
      if ((i + 1) % 3 == 0) {
        System.out.printf("\n");
      }
    }
    System.out
        .println("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
  }

  /** Human-readable overall status plus one clause per host. */
  public String toString()
  {
    // StringBuilder: no synchronization needed for this method-local buffer.
    StringBuilder status = new StringBuilder(transfer_progress.status()).append(" ");
    for (Map.Entry<String, TransferStatistics> host_stats : host_statistics.entrySet()) {
      status.append("(").append(host_stats.getKey()).append(" ")
          .append(host_stats.getValue().bytes_transferred).append("B ")
          .append(host_stats.getValue().elapsed_seconds()).append("s ")
          .append(host_stats.getValue().megabytes_per_second()).append("MB/s").append(")");
    }
    return status.toString();
  }
}
| |
/*
* Configurate
* Copyright (C) zml and Configurate contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja.leaping.configurate;
import com.google.common.collect.ImmutableList;
import com.google.common.reflect.TypeToken;
import ninja.leaping.configurate.loader.ConfigurationLoader;
import ninja.leaping.configurate.objectmapping.ObjectMappingException;
import ninja.leaping.configurate.objectmapping.serialize.TypeSerializer;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
/**
 * A node in the configuration tree.
 *
 * <p>All aspects of a configuration's structure are represented using instances of
 * {@link ConfigurationNode}, and the links between them.</p>
 *
 * <p>{@link ConfigurationNode}s can hold different types of {@link ValueType values}. They can:</p>
 *
 * <ul>
 *     <li>Hold a single "scalar" value ({@link ValueType#SCALAR})</li>
 *     <li>Represent a "list" of child {@link ConfigurationNode}s ({@link ValueType#LIST})</li>
 *     <li>Represent a "map" of child {@link ConfigurationNode}s ({@link ValueType#MAP})</li>
 *     <li>Hold no value at all ({@link ValueType#NULL})</li>
 * </ul>
 *
 * <p>The overall configuration stems from a single "root" node, which is provided by the
 * {@link ConfigurationLoader}, or by other means programmatically.</p>
 *
 * <p>This is effectively the main class of configurate.</p>
 */
public interface ConfigurationNode {
    /** Default value returned by the primitive numeric getters when no value is set. */
    int NUMBER_DEF = 0;
    /**
     * Gets the "key" of this node.
     *
     * <p>The key determines this {@link ConfigurationNode}'s position within the overall
     * configuration structure.</p>
     *
     * <p>If this node is currently {@link #isVirtual() virtual}, this method's result may be
     * inaccurate.</p>
     *
     * <p>Note that this method only returns the nearest "link" in the hierarchy, and does not
     * return a representation of the full path. See {@link #getPath()} for that.</p>
     *
     * <p>The {@link ConfigurationNode}s returned as values from {@link #getChildrenMap()} will
     * have keys derived from their pairing in the map node.</p>
     *
     * <p>The {@link ConfigurationNode}s returned from {@link #getChildrenList()} will have keys
     * derived from their position (index) in the list node.</p>
     *
     * @return The key of this node
     */
    @Nullable
    Object getKey();
    /**
     * Gets the full path of {@link #getKey() keys} from the root node to this node.
     *
     * <p>Node implementations may not keep a full path for each node, so this method may be
     * somewhat complex to calculate.</p>
     *
     * @return An array compiled from the keys for each node up the hierarchy
     */
    @NonNull
    Object[] getPath();
    /**
     * Gets the parent of this node.
     *
     * <p>If this node is currently {@link #isVirtual() virtual}, this method's result may be
     * inaccurate.</p>
     *
     * @return The node's parent
     */
    @Nullable
    ConfigurationNode getParent();
    /**
     * Gets the node at the given (relative) path, possibly traversing multiple levels of nodes.
     *
     * <p>This is the main method used to navigate through the configuration.</p>
     *
     * <p>The path parameter effectively consumes an array of keys, which locate the unique position
     * of a given node within the structure.</p>
     *
     * <p>A node is <b>always</b> returned by this method. If the given node does not exist in the
     * structure, a {@link #isVirtual() virtual} node will be returned which represents the
     * position.</p>
     *
     * @param path The path to fetch the node at
     * @return The node at the given path, possibly virtual
     */
    @NonNull
    ConfigurationNode getNode(@NonNull Object... path);
    /**
     * Gets if this node is virtual.
     *
     * <p>Virtual nodes are nodes which are not attached to a wider configuration structure.</p>
     *
     * <p>A node is primarily "virtual" when it has no set value.</p>
     *
     * @return true if this node is virtual
     */
    boolean isVirtual();
    /**
     * Gets the options that currently apply to this node
     *
     * @return The ConfigurationOptions instance that governs the functionality of this node
     */
    @NonNull
    ConfigurationOptions getOptions();
    /**
     * Gets the value type of this node.
     *
     * @return The value type
     */
    @NonNull
    ValueType getValueType();
    /**
     * Gets if this node has "list children".
     *
     * @return if this node has children in the form of a list
     */
    default boolean hasListChildren() {
        return getValueType() == ValueType.LIST;
    }
    /**
     * Gets if this node has "map children".
     *
     * @return if this node has children in the form of a map
     */
    default boolean hasMapChildren() {
        return getValueType() == ValueType.MAP;
    }
    /**
     * Gets the "list children" attached to this node, if it has any.
     *
     * <p>If this node does not {@link #hasListChildren() have list children}, an empty list is
     * returned.</p>
     *
     * @return The list children currently attached to this node
     */
    @NonNull
    List<? extends ConfigurationNode> getChildrenList();
    /**
     * Gets the "map children" attached to this node, if it has any.
     *
     * <p>If this node does not {@link #hasMapChildren() have map children}, an empty map is
     * returned.</p>
     *
     * @return The map children currently attached to this node
     */
    @NonNull
    Map<Object, ? extends ConfigurationNode> getChildrenMap();
    /**
     * Get the current value associated with this node.
     *
     * <p>If this node has children, this method will recursively unwrap them to construct a List
     * or a Map.</p>
     *
     * @see #getValue(Object)
     * @return This configuration's current value, or null if there is none
     */
    @Nullable
    default Object getValue() {
        return getValue((Object) null);
    }
    /**
     * Get the current value associated with this node.
     *
     * <p>If this node has children, this method will recursively unwrap them to construct a List
     * or a Map.</p>
     *
     * @param def The default value to return if this node has no set value
     * @return This configuration's current value, or {@code def} if there is none
     */
    Object getValue(@Nullable Object def);
    /**
     * Get the current value associated with this node.
     *
     * <p>If this node has children, this method will recursively unwrap them to construct a List
     * or a Map.</p>
     *
     * @param defSupplier The function that will be called to calculate a default value only if
     *                    there is no existing value
     * @return This configuration's current value, or the calculated default if there is none
     */
    Object getValue(@NonNull Supplier<Object> defSupplier);
    /**
     * Gets the appropriately transformed typed version of this node's value from the provided
     * transformation function.
     *
     * @param transformer The transformation function
     * @param <T> The expected type
     * @return A transformed value of the correct type, or null either if no value is present or the
     * value could not be converted
     */
    @Nullable
    default <T> T getValue(@NonNull Function<Object, T> transformer) {
        return getValue(transformer, (T) null);
    }
    /**
     * Gets the appropriately transformed typed version of this node's value from the provided
     * transformation function.
     *
     * @param transformer The transformation function
     * @param def The default value to return if this node has no set value or is not of a
     *            convertible type
     * @param <T> The expected type
     * @return A transformed value of the correct type, or {@code def} either if no value is present
     * or the value could not be converted
     */
    <T> T getValue(@NonNull Function<Object, T> transformer, @Nullable T def);
    /**
     * Gets the appropriately transformed typed version of this node's value from the provided
     * transformation function.
     *
     * @param transformer The transformation function
     * @param defSupplier The function that will be called to calculate a default value only if
     *                    there is no existing value of the correct type
     * @param <T> The expected type
     * @return A transformed value of the correct type, or the calculated default either if no
     * value is present or the value could not be converted
     */
    <T> T getValue(@NonNull Function<Object, T> transformer, @NonNull Supplier<T> defSupplier);
    /**
     * If this node has list values, this function unwraps them and converts them to an appropriate
     * type based on the provided function.
     *
     * <p>If this node has a scalar value, this function treats it as a list with one value</p>
     *
     * @param transformer The transformation function
     * @param <T> The expected type
     * @return An immutable copy of the values contained
     */
    @NonNull
    <T> List<T> getList(@NonNull Function<Object, T> transformer);
    /**
     * If this node has list values, this function unwraps them and converts them to an appropriate
     * type based on the provided function.
     *
     * <p>If this node has a scalar value, this function treats it as a list with one value.</p>
     *
     * @param transformer The transformation function
     * @param def The default value if no appropriate value is set
     * @param <T> The expected type
     * @return An immutable copy of the values contained that could be successfully converted, or {@code def} if no
     * values could be converted
     */
    <T> List<T> getList(@NonNull Function<Object, T> transformer, @Nullable List<T> def);
    /**
     * If this node has list values, this function unwraps them and converts them to an appropriate
     * type based on the provided function.
     *
     * <p>If this node has a scalar value, this function treats it as a list with one value.</p>
     *
     * @param transformer The transformation function
     * @param defSupplier The function that will be called to calculate a default value only if there is no existing
     *                    value of the correct type
     * @param <T> The expected type
     * @return An immutable copy of the values contained that could be successfully converted, or the calculated
     * default if no values could be converted
     */
    <T> List<T> getList(@NonNull Function<Object, T> transformer, @NonNull Supplier<List<T>> defSupplier);
    /**
     * If this node has list values, this function unwraps them and converts them to an appropriate
     * type based on the provided function.
     *
     * <p>If this node has a scalar value, this function treats it as a list with one value.</p>
     *
     * @param type The expected type
     * @param <T> The expected type
     * @return An immutable copy of the values contained
     */
    @NonNull
    default <T> List<T> getList(@NonNull TypeToken<T> type) throws ObjectMappingException {
        return getList(type, ImmutableList.of());
    }
    /**
     * If this node has list values, this function unwraps them and converts them to an appropriate
     * type based on the provided function.
     *
     * <p>If this node has a scalar value, this function treats it as a list with one value.</p>
     *
     * @param type The expected type
     * @param def The default value if no appropriate value is set
     * @param <T> The expected type
     * @return An immutable copy of the values contained that could be successfully converted, or {@code def} if no
     * values could be converted
     */
    <T> List<T> getList(@NonNull TypeToken<T> type, @Nullable List<T> def) throws ObjectMappingException;
    /**
     * If this node has list values, this function unwraps them and converts them to an appropriate
     * type based on the provided function.
     *
     * <p>If this node has a scalar value, this function treats it as a list with one value.</p>
     *
     * @param type The expected type
     * @param defSupplier The function that will be called to calculate a default value only if there is no existing
     *                    value of the correct type
     * @param <T> The expected type
     * @return An immutable copy of the values contained that could be successfully converted, or the calculated
     * default if no values could be converted
     */
    <T> List<T> getList(@NonNull TypeToken<T> type, @NonNull Supplier<List<T>> defSupplier) throws ObjectMappingException;
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @see #getValue()
     * @return The appropriate type conversion, null if no appropriate value is available
     */
    @Nullable
    default String getString() {
        return getString(null);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @param def The default value if no appropriate value is set
     * @see #getValue()
     * @return The appropriate type conversion, {@code def} if no appropriate value is available
     */
    default String getString(@Nullable String def) {
        return getValue(Types::asString, def);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @see #getValue()
     * @return The appropriate type conversion, 0 if no appropriate value is available
     */
    default float getFloat() {
        return getFloat(NUMBER_DEF);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @param def The default value if no appropriate value is set
     * @see #getValue()
     * @return The appropriate type conversion, {@code def} if no appropriate value is available
     */
    default float getFloat(float def) {
        return getValue(Types::asFloat, def);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @see #getValue()
     * @return The appropriate type conversion, 0 if no appropriate value is available
     */
    default double getDouble() {
        return getDouble(NUMBER_DEF);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @param def The default value if no appropriate value is set
     * @see #getValue()
     * @return The appropriate type conversion, {@code def} if no appropriate value is available
     */
    default double getDouble(double def) {
        return getValue(Types::asDouble, def);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @see #getValue()
     * @return The appropriate type conversion, 0 if no appropriate value is available
     */
    default int getInt() {
        return getInt(NUMBER_DEF);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @param def The default value if no appropriate value is set
     * @see #getValue()
     * @return The appropriate type conversion, {@code def} if no appropriate value is available
     */
    default int getInt(int def) {
        return getValue(Types::asInt, def);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @see #getValue()
     * @return The appropriate type conversion, 0 if no appropriate value is available
     */
    default long getLong() {
        return getLong(NUMBER_DEF);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @param def The default value if no appropriate value is set
     * @see #getValue()
     * @return The appropriate type conversion, {@code def} if no appropriate value is available
     */
    default long getLong(long def) {
        return getValue(Types::asLong, def);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @see #getValue()
     * @return The appropriate type conversion, false if no appropriate value is available
     */
    default boolean getBoolean() {
        return getBoolean(false);
    }
    /**
     * Gets the value typed using the appropriate type conversion from {@link Types}
     *
     * @param def The default value if no appropriate value is set
     * @see #getValue()
     * @return The appropriate type conversion, {@code def} if no appropriate value is available
     */
    default boolean getBoolean(boolean def) {
        return getValue(Types::asBoolean, def);
    }
    /**
     * Get the current value associated with this node.
     *
     * <p>If this node has children, this method will recursively unwrap them to construct a
     * List or a Map.</p>
     *
     * <p>This method will also perform deserialization using the appropriate TypeSerializer for
     * the given type, or casting if no type serializer is found.</p>
     *
     * @param type The type to deserialize to
     * @param <T> the type to get
     * @return the value if present and of the proper type, else null
     */
    @Nullable
    default <T> T getValue(@NonNull TypeToken<T> type) throws ObjectMappingException {
        return getValue(type, (T) null);
    }
    /**
     * Get the current value associated with this node.
     *
     * <p>If this node has children, this method will recursively unwrap them to construct a
     * List or a Map.</p>
     *
     * <p>This method will also perform deserialization using the appropriate TypeSerializer for
     * the given type, or casting if no type serializer is found.</p>
     *
     * @param type The type to deserialize to
     * @param def The value to return if no value or value is not of appropriate type
     * @param <T> the type to get
     * @return the value if of the proper type, else {@code def}
     */
    <T> T getValue(@NonNull TypeToken<T> type, T def) throws ObjectMappingException;
    /**
     * Get the current value associated with this node.
     *
     * <p>If this node has children, this method will recursively unwrap them to construct a
     * List or a Map.</p>
     *
     * <p>This method will also perform deserialization using the appropriate TypeSerializer for
     * the given type, or casting if no type serializer is found.</p>
     *
     * @param type The type to deserialize to
     * @param defSupplier The function that will be called to calculate a default value only if there is no existing
     *                    value of the correct type
     * @param <T> the type to get
     * @return the value if of the proper type, else the calculated default
     */
    <T> T getValue(@NonNull TypeToken<T> type, @NonNull Supplier<T> defSupplier) throws ObjectMappingException;
    /**
     * Set this node's value to the given value.
     *
     * <p>If the provided value is a {@link Collection} or a {@link Map}, it will be unwrapped into
     * the appropriate configuration node structure.</p>
     *
     * @param value The value to set
     * @return this
     */
    @NonNull
    ConfigurationNode setValue(@Nullable Object value);
    /**
     * Set this node's value to the given value.
     *
     * <p>If the provided value is a {@link Collection} or a {@link Map}, it will be unwrapped into
     * the appropriate configuration node structure.</p>
     *
     * <p>This method will also perform serialization using the appropriate TypeSerializer for the
     * given type, or casting if no type serializer is found.</p>
     *
     * @param type The type to use for serialization type information
     * @param value The value to set
     * @param <T> The type to serialize to
     * @return this
     */
    @NonNull
    default <T> ConfigurationNode setValue(@NonNull TypeToken<T> type, @Nullable T value) throws ObjectMappingException {
        if (value == null) {
            setValue(null);
            return this;
        }
        TypeSerializer<T> serial = getOptions().getSerializers().get(type);
        if (serial != null) {
            serial.serialize(type, value, this);
        } else if (getOptions().acceptsType(value.getClass())) {
            setValue(value); // Just write if no applicable serializer exists?
        } else {
            throw new ObjectMappingException("No serializer available for type " + type);
        }
        return this;
    }
    /**
     * Set all the values from the given node that are not present in this node
     * to their values in the provided node.
     *
     * <p>Map keys will be merged. Lists and scalar values will be replaced.</p>
     *
     * @param other The node to merge values from
     * @return this
     */
    @NonNull
    ConfigurationNode mergeValuesFrom(@NonNull ConfigurationNode other);
    /**
     * Removes a direct child of this node
     *
     * @param key The key of the node to remove
     * @return If a node was removed
     */
    boolean removeChild(@NonNull Object key);
    /**
     * Gets a new child node created as the next entry in the list.
     *
     * @return A new child created as the next entry in the list when it is attached
     */
    @NonNull
    ConfigurationNode getAppendedNode();
    /**
     * Creates a deep copy of this node.
     *
     * <p>If this node has child nodes (is a list or map), the child nodes will
     * also be copied. This action is performed recursively.</p>
     *
     * <p>The resultant node will (initially) contain the same value(s) as this node,
     * and will therefore be {@link Object#equals(Object) equal}, however, changes made to
     * the original will not be reflected in the copy, and vice versa.</p>
     *
     * <p>The actual scalar values that back the configuration will
     * <strong>not</strong> be copied - only the node structure that forms the
     * configuration. This is not a problem in most cases, as the scalar values
     * stored in configurations are usually immutable. (e.g. strings, numbers, booleans).</p>
     *
     * @return A copy of this node
     */
    @NonNull
    ConfigurationNode copy();
}
| |
package us.codecraft.netty_servlet.connector.netty;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.handler.codec.http.*;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.http.Cookie;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.InetSocketAddress;
import java.security.Principal;
import java.util.*;
/**
 * Adapts a Netty 3.x {@link HttpRequest} (plus its {@link Channel}) to the
 * {@link HttpServletRequest} interface so servlet code can run on a Netty
 * connector. Only the request-reading subset is implemented; session,
 * dispatch and security methods throw {@link UnsupportedOperationException}.
 *
 * @author code4crafter@gmail.com
 */
public class NettyHttpServletRequestAdaptor implements HttpServletRequest {
    private HttpRequest httpRequest;
    private Channel channel;
    // lazily decoded and cached; null until getCookies() is first called
    private Cookie[] cookieCache;
    // lazily created from the request URI
    private QueryStringDecoder queryStringDecoder;
    // lazily built from the query string decoder
    private Map<String, String[]> parameterMap;
    private String characterEncoding;
    // per-request attributes, settable via setAttribute()
    private Map<String,Object> attributes;
    public NettyHttpServletRequestAdaptor(HttpRequest httpRequest, Channel channel) {
        this.httpRequest = httpRequest;
        this.channel = channel;
        this.attributes = new HashMap<String, Object>();
    }
    public QueryStringDecoder getQueryStringDecoder() {
        if (queryStringDecoder == null) {
            queryStringDecoder = new QueryStringDecoder(httpRequest.getUri());
        }
        return queryStringDecoder;
    }
    @Override
    public String getAuthType() {
        throw new UnsupportedOperationException();
    }
    @Override
    public Cookie[] getCookies() {
        if (cookieCache == null) {
            // BUG FIX: decode the Cookie header *value*; the previous code passed
            // the header *name* constant ("Cookie") to the decoder, so the real
            // cookies sent by the client were never parsed.
            String cookieHeader = httpRequest.headers().get(HttpHeaders.Names.COOKIE);
            if (cookieHeader == null) {
                // Servlet spec: return null when the request has no cookies.
                return null;
            }
            Set<org.jboss.netty.handler.codec.http.Cookie> cookies = new CookieDecoder().decode(cookieHeader);
            cookieCache = new Cookie[cookies.size()];
            NettyToServletCookieConvertor.convert(cookies).toArray(cookieCache);
        }
        return cookieCache;
    }
    @Override
    public long getDateHeader(String name) {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getHeader(String name) {
        return httpRequest.headers().get(name);
    }
    @Override
    public Enumeration<String> getHeaders(String name) {
        return new EnumerationIterableAdaptor(httpRequest.headers().getAll(name));
    }
    @Override
    public Enumeration<String> getHeaderNames() {
        return new EnumerationIterableAdaptor(httpRequest.headers().names());
    }
    @Override
    public int getIntHeader(String name) {
        // Servlet spec: return -1 when the header is absent. The previous code
        // passed null straight to parseInt and blew up with a
        // NumberFormatException (this also broke getContentLength() for
        // requests without a Content-Length header).
        String value = httpRequest.headers().get(name);
        return value == null ? -1 : Integer.parseInt(value);
    }
    @Override
    public String getMethod() {
        return httpRequest.getMethod().getName();
    }
    @Override
    public String getPathInfo() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getPathTranslated() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getContextPath() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getQueryString() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getRemoteUser() {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isUserInRole(String role) {
        return false;
    }
    @Override
    public Principal getUserPrincipal() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getRequestedSessionId() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getRequestURI() {
        // NOTE(review): Netty's getUri() includes the query string, whereas the
        // servlet contract for getRequestURI() excludes it — confirm whether
        // callers depend on the current behavior before changing it.
        return httpRequest.getUri();
    }
    @Override
    public StringBuffer getRequestURL() {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getServletPath() {
        throw new UnsupportedOperationException();
    }
    @Override
    public HttpSession getSession(boolean create) {
        throw new UnsupportedOperationException();
    }
    @Override
    public HttpSession getSession() {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isRequestedSessionIdValid() {
        return false;
    }
    @Override
    public boolean isRequestedSessionIdFromCookie() {
        return false;
    }
    @Override
    public boolean isRequestedSessionIdFromURL() {
        return false;
    }
    @Override
    public boolean isRequestedSessionIdFromUrl() {
        return false;
    }
    @Override
    public boolean authenticate(HttpServletResponse response) throws IOException, ServletException {
        return false;
    }
    @Override
    public void login(String username, String password) throws ServletException {
        throw new UnsupportedOperationException();
    }
    @Override
    public void logout() throws ServletException {
        throw new UnsupportedOperationException();
    }
    @Override
    public Collection<Part> getParts() throws IOException, ServletException {
        throw new UnsupportedOperationException();
    }
    @Override
    public Part getPart(String name) throws IOException, ServletException {
        throw new UnsupportedOperationException();
    }
    @Override
    public Object getAttribute(String name) {
        return attributes.get(name);
    }
    @Override
    public Enumeration<String> getAttributeNames() {
        return new EnumerationIterableAdaptor<String>(attributes.keySet());
    }
    @Override
    public String getCharacterEncoding() {
        return characterEncoding;
    }
    @Override
    public void setCharacterEncoding(String env) throws UnsupportedEncodingException {
        this.characterEncoding = env;
    }
    @Override
    public int getContentLength() {
        // returns -1 when Content-Length is absent (see getIntHeader)
        return getIntHeader(HttpHeaders.Names.CONTENT_LENGTH);
    }
    @Override
    public String getContentType() {
        return getHeader(HttpHeaders.Names.CONTENT_TYPE);
    }
    @Override
    public ServletInputStream getInputStream() throws IOException {
        return new ChannelBufferServletInputStream(httpRequest.getContent());
    }
    @Override
    public String getParameter(String name) {
        if (getParameterMap().get(name) != null && getParameterMap().get(name).length > 0) {
            return getParameterMap().get(name)[0];
        }
        return null;
    }
    @Override
    public Enumeration<String> getParameterNames() {
        return new EnumerationIterableAdaptor<String>(getParameterMap().keySet());
    }
    @Override
    public String[] getParameterValues(String name) {
        return getParameterMap().get(name);
    }
    @Override
    public Map<String, String[]> getParameterMap() {
        // NOTE: only query-string parameters are exposed; form-encoded bodies
        // are not parsed here.
        if (parameterMap == null) {
            parameterMap = new HashMap<String, String[]>(getQueryStringDecoder().getParameters().size());
            for (Map.Entry<String, List<String>> stringListEntry : getQueryStringDecoder().getParameters().entrySet()) {
                String[] strings = new String[stringListEntry.getValue().size()];
                parameterMap.put(stringListEntry.getKey(), stringListEntry.getValue().toArray(strings));
            }
        }
        return parameterMap;
    }
    @Override
    public String getProtocol() {
        return httpRequest.getProtocolVersion().getText();
    }
    @Override
    public String getScheme() {
        return httpRequest.getProtocolVersion().getProtocolName();
    }
    @Override
    public BufferedReader getReader() throws IOException {
        return new BufferedReader(new InputStreamReader(getInputStream()));
    }
    @Override
    public void setAttribute(String name, Object o) {
        this.attributes.put(name,o);
    }
    @Override
    public void removeAttribute(String name) {
        this.attributes.remove(name);
    }
    @Override
    public Locale getLocale() {
        throw new UnsupportedOperationException();
    }
    @Override
    public Enumeration<Locale> getLocales() {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isSecure() {
        return false;
    }
    @Override
    public String getRemoteAddr() {
        return ((InetSocketAddress)channel.getRemoteAddress()).getAddress().getHostAddress();
    }
    @Override
    public String getRemoteHost() {
        return ((InetSocketAddress)channel.getRemoteAddress()).getHostName();
    }
    @Override
    public int getRemotePort() {
        return ((InetSocketAddress)channel.getRemoteAddress()).getPort();
    }
    @Override
    public String getLocalName() {
        return ((InetSocketAddress)channel.getLocalAddress()).getHostName();
    }
    @Override
    public String getLocalAddr() {
        return ((InetSocketAddress)channel.getLocalAddress()).getAddress().getHostAddress();
    }
    @Override
    public int getLocalPort() {
        return ((InetSocketAddress)channel.getLocalAddress()).getPort();
    }
    @Override
    public String getServerName() {
        // local socket name stands in for the server name
        return getLocalName();
    }
    @Override
    public int getServerPort() {
        return getLocalPort();
    }
    @Override
    public ServletContext getServletContext() {
        throw new UnsupportedOperationException();
    }
    @Override
    public AsyncContext startAsync() throws IllegalStateException {
        throw new UnsupportedOperationException();
    }
    @Override
    public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse) throws IllegalStateException {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isAsyncStarted() {
        throw new UnsupportedOperationException();
    }
    @Override
    public boolean isAsyncSupported() {
        return false;
    }
    @Override
    public AsyncContext getAsyncContext() {
        throw new UnsupportedOperationException();
    }
    @Override
    public DispatcherType getDispatcherType() {
        throw new UnsupportedOperationException();
    }
    @Override
    public RequestDispatcher getRequestDispatcher(String path) {
        throw new UnsupportedOperationException();
    }
    @Override
    public String getRealPath(String path) {
        throw new UnsupportedOperationException();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service.timelineservice;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
import org.apache.hadoop.yarn.client.api.TimelineV2Client;
import org.apache.hadoop.yarn.service.ServiceContext;
import org.apache.hadoop.yarn.service.api.records.*;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import static org.apache.hadoop.yarn.service.api.records.ContainerState.READY;
import static org.apache.hadoop.yarn.service.api.records.ContainerState.STOPPED;
import static org.apache.hadoop.yarn.service.timelineservice.ServiceTimelineMetricsConstants.DIAGNOSTICS_INFO;
/**
 * A single service that publishes all the Timeline Entities to ATSv2.
 *
 * <p>Entities are pushed asynchronously through a {@link TimelineV2Client};
 * publishing failures are logged and never propagated to callers.</p>
 */
public class ServiceTimelinePublisher extends CompositeService {

  // Maximum number of bytes of config which can be published in one shot
  // to ATSv2; larger config sets are split across multiple entities.
  public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;

  // Client used to push entities. putEntity() tolerates a null client.
  private TimelineV2Client timelineClient;

  // Set once serviceStop() has run; exposed through isStopped().
  private volatile boolean stopped = false;

  private static final Logger log =
      LoggerFactory.getLogger(ServiceTimelinePublisher.class);

  @Override
  protected void serviceInit(org.apache.hadoop.conf.Configuration configuration)
      throws Exception {
    // Register the timeline client so its lifecycle follows this service's.
    addService(timelineClient);
    super.serviceInit(configuration);
  }

  @Override
  protected void serviceStop() throws Exception {
    stopped = true;
    super.serviceStop();
  }

  /** @return true once this publisher has been stopped. */
  public boolean isStopped() {
    return stopped;
  }

  /**
   * Creates a publisher backed by the given timeline client.
   *
   * @param client client used to publish entities asynchronously
   */
  public ServiceTimelinePublisher(TimelineV2Client client) {
    super(ServiceTimelinePublisher.class.getName());
    timelineClient = client;
  }

  /**
   * Publishes a service-attempt registration: the attempt entity itself,
   * the system configuration, the container configuration, and one entity
   * per component.
   *
   * @param service the service that was launched
   * @param systemConf system (YARN) configuration to publish
   */
  public void serviceAttemptRegistered(Service service,
      org.apache.hadoop.conf.Configuration systemConf) {
    // Prefer the recorded launch time; fall back to "now" if it is absent.
    long currentTimeMillis = service.getLaunchTime() == null
        ? System.currentTimeMillis() : service.getLaunchTime().getTime();
    TimelineEntity entity = createServiceAttemptEntity(service.getId());
    entity.setCreatedTime(currentTimeMillis);
    // create info keys
    Map<String, Object> entityInfos = new HashMap<String, Object>();
    entityInfos.put(ServiceTimelineMetricsConstants.NAME, service.getName());
    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
        ServiceState.STARTED.toString());
    entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_TIME,
        currentTimeMillis);
    entity.addInfo(ServiceTimelineMetricsConstants.QUICK_LINKS,
        service.getQuicklinks());
    entity.addInfo(entityInfos);
    // add an event
    TimelineEvent startEvent = new TimelineEvent();
    startEvent.setId(ServiceTimelineEvent.SERVICE_ATTEMPT_REGISTERED.toString());
    startEvent.setTimestamp(currentTimeMillis);
    entity.addEvent(startEvent);
    // publish the attempt entity before its configurations
    putEntity(entity);
    // publish system config - YarnConfiguration
    populateTimelineEntity(systemConf.iterator(), service.getId(),
        ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
    // publish container conf
    publishContainerConf(service.getConfiguration(), service.getId(),
        ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
    // publish each component as a separate entity.
    publishComponents(service.getComponents());
  }

  /** Publishes updated quicklinks for a running service attempt. */
  public void serviceAttemptUpdated(Service service) {
    TimelineEntity entity = createServiceAttemptEntity(service.getId());
    entity.addInfo(ServiceTimelineMetricsConstants.QUICK_LINKS,
        service.getQuicklinks());
    putEntity(entity);
  }

  /**
   * Publishes the unregistration (finish) event for a service attempt.
   *
   * @param context service context carrying the attempt id
   * @param diagnostics diagnostics message recorded with the finish event
   */
  public void serviceAttemptUnregistered(ServiceContext context,
      String diagnostics) {
    TimelineEntity entity = createServiceAttemptEntity(
        context.attemptId.getApplicationId().toString());
    Map<String, Object> entityInfos = new HashMap<String, Object>();
    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
        FinalApplicationStatus.ENDED);
    entityInfos.put(DIAGNOSTICS_INFO, diagnostics);
    entity.addInfo(entityInfos);
    // add an event
    TimelineEvent finishEvent = new TimelineEvent();
    finishEvent
        .setId(ServiceTimelineEvent.SERVICE_ATTEMPT_UNREGISTERED.toString());
    finishEvent.setTimestamp(System.currentTimeMillis());
    entity.addEvent(finishEvent);
    putEntity(entity);
  }

  /**
   * Publishes the registration of a component instance's container.
   *
   * @param container the container that was started
   * @param instance the component instance the container belongs to
   */
  public void componentInstanceStarted(Container container,
      ComponentInstance instance) {
    TimelineEntity entity = createComponentInstanceEntity(container.getId());
    entity.setCreatedTime(container.getLaunchTime().getTime());
    // create info keys
    Map<String, Object> entityInfos = new HashMap<String, Object>();
    entityInfos.put(ServiceTimelineMetricsConstants.BARE_HOST,
        container.getBareHost());
    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
        container.getState().toString());
    entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_TIME,
        container.getLaunchTime().getTime());
    entityInfos.put(ServiceTimelineMetricsConstants.COMPONENT_NAME,
        instance.getCompName());
    entityInfos.put(ServiceTimelineMetricsConstants.COMPONENT_INSTANCE_NAME,
        instance.getCompInstanceName());
    entity.addInfo(entityInfos);
    // add an event
    TimelineEvent startEvent = new TimelineEvent();
    startEvent
        .setId(ServiceTimelineEvent.COMPONENT_INSTANCE_REGISTERED.toString());
    startEvent.setTimestamp(container.getLaunchTime().getTime());
    entity.addEvent(startEvent);
    putEntity(entity);
  }

  /**
   * Publishes the unregistration (finish) of a component instance.
   *
   * @param containerId id of the finished container
   * @param exitCode container exit status code
   * @param diagnostics diagnostics message recorded with the finish event
   */
  public void componentInstanceFinished(ContainerId containerId,
      int exitCode, String diagnostics) {
    TimelineEntity entity = createComponentInstanceEntity(
        containerId.toString());
    // create info keys
    Map<String, Object> entityInfos = new HashMap<String, Object>();
    entityInfos.put(ServiceTimelineMetricsConstants.EXIT_STATUS_CODE,
        exitCode);
    entityInfos.put(DIAGNOSTICS_INFO, diagnostics);
    entityInfos.put(ServiceTimelineMetricsConstants.STATE, STOPPED);
    entity.addInfo(entityInfos);
    // add an event
    TimelineEvent startEvent = new TimelineEvent();
    startEvent
        .setId(ServiceTimelineEvent.COMPONENT_INSTANCE_UNREGISTERED.toString());
    startEvent.setTimestamp(System.currentTimeMillis());
    entity.addEvent(startEvent);
    putEntity(entity);
  }

  /** Publishes an IP/hostname update for a component instance's container. */
  public void componentInstanceIPHostUpdated(Container container) {
    TimelineEntity entity = createComponentInstanceEntity(container.getId());
    // create info keys
    Map<String, Object> entityInfos = new HashMap<String, Object>();
    entityInfos.put(ServiceTimelineMetricsConstants.IP, container.getIp());
    entityInfos.put(ServiceTimelineMetricsConstants.HOSTNAME,
        container.getHostname());
    entityInfos.put(ServiceTimelineMetricsConstants.STATE,
        container.getState().toString());
    entity.addInfo(entityInfos);
    TimelineEvent updateEvent = new TimelineEvent();
    updateEvent.setId(ServiceTimelineEvent.COMPONENT_INSTANCE_IP_HOST_UPDATE
        .toString());
    updateEvent.setTimestamp(System.currentTimeMillis());
    entity.addEvent(updateEvent);
    putEntity(entity);
  }

  /** Publishes a READY state transition for a component instance. */
  public void componentInstanceBecomeReady(Container container) {
    TimelineEntity entity = createComponentInstanceEntity(container.getId());
    Map<String, Object> entityInfo = new HashMap<>();
    entityInfo.put(ServiceTimelineMetricsConstants.STATE, READY);
    entity.addInfo(entityInfo);
    TimelineEvent updateEvent = new TimelineEvent();
    updateEvent.setId(ServiceTimelineEvent.COMPONENT_INSTANCE_BECOME_READY
        .toString());
    updateEvent.setTimestamp(System.currentTimeMillis());
    entity.addEvent(updateEvent);
    putEntity(entity);
  }

  // Publishes one COMPONENT entity per component, followed by each
  // component's container-specific configuration.
  private void publishComponents(List<Component> components) {
    long currentTimeMillis = System.currentTimeMillis();
    for (Component component : components) {
      TimelineEntity entity = createComponentEntity(component.getName());
      entity.setCreatedTime(currentTimeMillis);
      // create info keys; artifact/resource/launch command are optional.
      Map<String, Object> entityInfos = new HashMap<String, Object>();
      if (component.getArtifact() != null) {
        entityInfos.put(ServiceTimelineMetricsConstants.ARTIFACT_ID,
            component.getArtifact().getId());
        entityInfos.put(ServiceTimelineMetricsConstants.ARTIFACT_TYPE,
            component.getArtifact().getType().toString());
      }
      if (component.getResource() != null) {
        entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_CPU,
            component.getResource().getCpus());
        entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_MEMORY,
            component.getResource().getMemory());
        if (component.getResource().getProfile() != null) {
          entityInfos.put(ServiceTimelineMetricsConstants.RESOURCE_PROFILE,
              component.getResource().getProfile());
        }
      }
      if (component.getLaunchCommand() != null) {
        entityInfos.put(ServiceTimelineMetricsConstants.LAUNCH_COMMAND,
            component.getLaunchCommand());
      }
      entityInfos.put(ServiceTimelineMetricsConstants.RUN_PRIVILEGED_CONTAINER,
          component.getRunPrivilegedContainer().toString());
      entity.addInfo(entityInfos);
      putEntity(entity);
      // publish container specific configurations
      publishContainerConf(component.getConfiguration(), component.getName(),
          ServiceTimelineEntityType.COMPONENT.toString());
    }
  }

  // Publishes a configuration's env vars plus the properties of each of its
  // config files as config entries on the given entity.
  private void publishContainerConf(Configuration configuration,
      String entityId, String entityType) {
    populateTimelineEntity(configuration.getEnv().entrySet().iterator(),
        entityId, entityType);
    for (ConfigFile configFile : configuration.getFiles()) {
      populateTimelineEntity(configFile.getProperties().entrySet().iterator(),
          entityId, entityType);
    }
  }

  // Streams key/value pairs into one or more timeline entities, splitting
  // whenever the accumulated size passes ATS_CONFIG_PUBLISH_SIZE_BYTES.
  private void populateTimelineEntity(Iterator<Entry<String, String>> iterator,
      String entityId, String entityType) {
    int configSize = 0;
    TimelineEntity entity = createTimelineEntity(entityId, entityType);
    while (iterator.hasNext()) {
      Entry<String, String> entry = iterator.next();
      // NOTE(review): assumes entry values are never null — confirm upstream.
      int size = entry.getKey().length() + entry.getValue().length();
      configSize += size;
      // Configs are split into multiple entities if they exceed
      // ATS_CONFIG_PUBLISH_SIZE_BYTES (10 KB) in size.
      if (configSize > ATS_CONFIG_PUBLISH_SIZE_BYTES) {
        if (!entity.getConfigs().isEmpty()) {
          putEntity(entity);
          entity = createTimelineEntity(entityId, entityType);
        }
        configSize = size;
      }
      entity.addConfig(entry.getKey(), entry.getValue());
    }
    // Flush whatever remains in the current entity.
    if (configSize > 0) {
      putEntity(entity);
    }
  }

  /**
   * Called from ServiceMetricsSink at regular interval of time.
   * @param metrics of service or components
   * @param entityId Id of entity
   * @param entityType Type of entity
   * @param timestamp timestamp (millis) to record against each metric value
   */
  public void publishMetrics(Iterable<AbstractMetric> metrics, String entityId,
      String entityType, long timestamp) {
    TimelineEntity entity = createTimelineEntity(entityId, entityType);
    Set<TimelineMetric> entityMetrics = new HashSet<TimelineMetric>();
    for (AbstractMetric metric : metrics) {
      TimelineMetric timelineMetric = new TimelineMetric();
      timelineMetric.setId(metric.name());
      timelineMetric.addValue(timestamp, metric.value());
      entityMetrics.add(timelineMetric);
    }
    entity.setMetrics(entityMetrics);
    putEntity(entity);
  }

  private TimelineEntity createServiceAttemptEntity(String serviceId) {
    TimelineEntity entity = createTimelineEntity(serviceId,
        ServiceTimelineEntityType.SERVICE_ATTEMPT.toString());
    return entity;
  }

  private TimelineEntity createComponentInstanceEntity(String instanceId) {
    TimelineEntity entity = createTimelineEntity(instanceId,
        ServiceTimelineEntityType.COMPONENT_INSTANCE.toString());
    return entity;
  }

  private TimelineEntity createComponentEntity(String componentId) {
    TimelineEntity entity = createTimelineEntity(componentId,
        ServiceTimelineEntityType.COMPONENT.toString());
    return entity;
  }

  // Builds a bare entity with only id and type set.
  private TimelineEntity createTimelineEntity(String entityId,
      String entityType) {
    TimelineEntity entity = new TimelineEntity();
    entity.setId(entityId);
    entity.setType(entityType);
    return entity;
  }

  // Pushes one entity asynchronously; all failures are logged, not thrown,
  // so publishing can never break the service lifecycle.
  private void putEntity(TimelineEntity entity) {
    try {
      if (log.isDebugEnabled()) {
        log.debug("Publishing the entity {}, JSON-style content: {}",
            entity, TimelineUtils.dumpTimelineRecordtoJSON(entity));
      }
      if (timelineClient != null) {
        timelineClient.putEntitiesAsync(entity);
      } else {
        log.error("Seems like client has been removed before the entity "
            + "could be published for {}", entity);
      }
    } catch (Exception e) {
      log.error("Error when publishing entity {}", entity, e);
    }
  }
}
| |
package com.docusign.esign.model;
import java.util.Objects;
import java.util.Arrays;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
 * ENoteConfiguration.
 *
 * Plain data holder for eNote integration settings; all properties are
 * optional strings and default to {@code null}.
 */
public class ENoteConfiguration {
  @JsonProperty("apiKey")
  private String apiKey = null;
  @JsonProperty("connectConfigured")
  private String connectConfigured = null;
  @JsonProperty("eNoteConfigured")
  private String eNoteConfigured = null;
  @JsonProperty("organization")
  private String organization = null;
  @JsonProperty("password")
  private String password = null;
  @JsonProperty("userName")
  private String userName = null;

  /**
   * Fluent setter for apiKey.
   *
   * @return ENoteConfiguration this instance, for chaining
   **/
  public ENoteConfiguration apiKey(String apiKey) {
    setApiKey(apiKey);
    return this;
  }

  /**
   * Reads the current apiKey.
   * @return apiKey
   **/
  @ApiModelProperty(value = "")
  public String getApiKey() {
    return this.apiKey;
  }

  /**
   * Stores a new apiKey value.
   **/
  public void setApiKey(String value) {
    this.apiKey = value;
  }

  /**
   * Fluent setter for connectConfigured.
   *
   * @return ENoteConfiguration this instance, for chaining
   **/
  public ENoteConfiguration connectConfigured(String connectConfigured) {
    setConnectConfigured(connectConfigured);
    return this;
  }

  /**
   * Reads the current connectConfigured.
   * @return connectConfigured
   **/
  @ApiModelProperty(value = "")
  public String getConnectConfigured() {
    return this.connectConfigured;
  }

  /**
   * Stores a new connectConfigured value.
   **/
  public void setConnectConfigured(String value) {
    this.connectConfigured = value;
  }

  /**
   * Fluent setter for eNoteConfigured.
   *
   * @return ENoteConfiguration this instance, for chaining
   **/
  public ENoteConfiguration eNoteConfigured(String eNoteConfigured) {
    setENoteConfigured(eNoteConfigured);
    return this;
  }

  /**
   * Reads the current eNoteConfigured.
   * @return eNoteConfigured
   **/
  @ApiModelProperty(value = "")
  public String getENoteConfigured() {
    return this.eNoteConfigured;
  }

  /**
   * Stores a new eNoteConfigured value.
   **/
  public void setENoteConfigured(String value) {
    this.eNoteConfigured = value;
  }

  /**
   * Fluent setter for organization.
   *
   * @return ENoteConfiguration this instance, for chaining
   **/
  public ENoteConfiguration organization(String organization) {
    setOrganization(organization);
    return this;
  }

  /**
   * Reads the current organization.
   * @return organization
   **/
  @ApiModelProperty(value = "")
  public String getOrganization() {
    return this.organization;
  }

  /**
   * Stores a new organization value.
   **/
  public void setOrganization(String value) {
    this.organization = value;
  }

  /**
   * Fluent setter for password.
   *
   * @return ENoteConfiguration this instance, for chaining
   **/
  public ENoteConfiguration password(String password) {
    setPassword(password);
    return this;
  }

  /**
   * Reads the current password.
   * @return password
   **/
  @ApiModelProperty(value = "")
  public String getPassword() {
    return this.password;
  }

  /**
   * Stores a new password value.
   **/
  public void setPassword(String value) {
    this.password = value;
  }

  /**
   * Fluent setter for userName.
   *
   * @return ENoteConfiguration this instance, for chaining
   **/
  public ENoteConfiguration userName(String userName) {
    setUserName(userName);
    return this;
  }

  /**
   * Reads the current userName.
   * @return userName
   **/
  @ApiModelProperty(value = "")
  public String getUserName() {
    return this.userName;
  }

  /**
   * Stores a new userName value.
   **/
  public void setUserName(String value) {
    this.userName = value;
  }

  /**
   * Compares objects.
   *
   * @return true or false depending on comparison result.
   */
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ENoteConfiguration other = (ENoteConfiguration) o;
    return Objects.equals(this.apiKey, other.apiKey)
        && Objects.equals(this.connectConfigured, other.connectConfigured)
        && Objects.equals(this.eNoteConfigured, other.eNoteConfigured)
        && Objects.equals(this.organization, other.organization)
        && Objects.equals(this.password, other.password)
        && Objects.equals(this.userName, other.userName);
  }

  /**
   * Returns the HashCode.
   */
  @Override
  public int hashCode() {
    return Objects.hash(apiKey, connectConfigured, eNoteConfigured, organization, password, userName);
  }

  /**
   * Converts the given object to string.
   */
  @Override
  public String toString() {
    return "class ENoteConfiguration {\n"
        + " apiKey: " + toIndentedString(apiKey) + "\n"
        + " connectConfigured: " + toIndentedString(connectConfigured) + "\n"
        + " eNoteConfigured: " + toIndentedString(eNoteConfigured) + "\n"
        + " organization: " + toIndentedString(organization) + "\n"
        + " password: " + toIndentedString(password) + "\n"
        + " userName: " + toIndentedString(userName) + "\n"
        + "}";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    return (o == null) ? "null" : o.toString().replace("\n", "\n ");
  }
}
| |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: DTMException.java,v 1.3 2005/09/28 13:48:50 pvedula Exp $
*/
package com.sun.org.apache.xml.internal.dtm;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.xml.transform.SourceLocator;
import com.sun.org.apache.xml.internal.res.XMLErrorResources;
import com.sun.org.apache.xml.internal.res.XMLMessages;
/**
 * This class specifies an exceptional condition that occured
 * in the DTM module.
 */
public class DTMException extends RuntimeException {
    static final long serialVersionUID = -775576419181334734L;

    /** Field locator specifies where the error occured.
     *  @serial */
    SourceLocator locator;

    /**
     * Method getLocator retrieves an instance of a SourceLocator
     * object that specifies where an error occured.
     *
     * @return A SourceLocator object, or null if none was specified.
     */
    public SourceLocator getLocator() {
        return locator;
    }

    /**
     * Method setLocator sets an instance of a SourceLocator
     * object that specifies where an error occured.
     *
     * @param location A SourceLocator object, or null to clear the location.
     */
    public void setLocator(SourceLocator location) {
        locator = location;
    }

    /** Field containedException specifies a wrapped exception. May be null.
     *  @serial */
    Throwable containedException;

    /**
     * This method retrieves an exception that this exception wraps.
     *
     * @return An Throwable object, or null.
     * @see #getCause
     */
    public Throwable getException() {
        return containedException;
    }

    /**
     * Returns the cause of this throwable or <code>null</code> if the
     * cause is nonexistent or unknown. (The cause is the throwable that
     * caused this throwable to get thrown.)
     */
    public Throwable getCause() {
        // A self-reference means "no cause"; report it as null per contract.
        return ((containedException == this)
                ? null
                : containedException);
    }

    /**
     * Initializes the <i>cause</i> of this throwable to the specified value.
     * (The cause is the throwable that caused this throwable to get thrown.)
     *
     * <p>This method can be called at most once. It is generally called from
     * within the constructor, or immediately after creating the
     * throwable. If this throwable was created
     * with {@link #DTMException(Throwable)} or
     * {@link #DTMException(String,Throwable)}, this method cannot be called
     * even once.
     *
     * @param cause the cause (which is saved for later retrieval by the
     * {@link #getCause()} method). (A <tt>null</tt> value is
     * permitted, and indicates that the cause is nonexistent or
     * unknown.)
     * @return a reference to this <code>Throwable</code> instance.
     * @throws IllegalArgumentException if <code>cause</code> is this
     * throwable. (A throwable cannot
     * be its own cause.)
     * @throws IllegalStateException if this throwable was
     * created with {@link #DTMException(Throwable)} or
     * {@link #DTMException(String,Throwable)}, or this method has already
     * been called on this throwable.
     */
    public synchronized Throwable initCause(Throwable cause) {
        // FIX: the original condition was inverted — it rejected the first
        // legitimate call (no cause yet) and silently allowed overwriting an
        // already-set cause, contradicting the documented contract above.
        // Per the contract, throw only when a cause is already present.
        if (this.containedException != null) {
            throw new IllegalStateException(XMLMessages.createXMLMessage(XMLErrorResources.ER_CANNOT_OVERWRITE_CAUSE, null)); //"Can't overwrite cause");
        }
        if (cause == this) {
            throw new IllegalArgumentException(
                XMLMessages.createXMLMessage(XMLErrorResources.ER_SELF_CAUSATION_NOT_PERMITTED, null)); //"Self-causation not permitted");
        }
        this.containedException = cause;
        return this;
    }

    /**
     * Create a new DTMException.
     *
     * @param message The error or warning message.
     */
    public DTMException(String message) {
        super(message);
        this.containedException = null;
        this.locator = null;
    }

    /**
     * Create a new DTMException wrapping an existing exception.
     *
     * @param e The exception to be wrapped.
     */
    public DTMException(Throwable e) {
        super(e.getMessage());
        this.containedException = e;
        this.locator = null;
    }

    /**
     * Wrap an existing exception in a DTMException.
     *
     * <p>This is used for throwing processor exceptions before
     * the processing has started.</p>
     *
     * @param message The error or warning message, or null to
     * use the message from the embedded exception.
     * @param e Any exception
     */
    public DTMException(String message, Throwable e) {
        // Fall back to the wrapped exception's message when none is given.
        super(((message == null) || (message.length() == 0))
              ? e.getMessage()
              : message);
        this.containedException = e;
        this.locator = null;
    }

    /**
     * Create a new DTMException from a message and a Locator.
     *
     * <p>This constructor is especially useful when an application is
     * creating its own exception from within a DocumentHandler
     * callback.</p>
     *
     * @param message The error or warning message.
     * @param locator The locator object for the error or warning.
     */
    public DTMException(String message, SourceLocator locator) {
        super(message);
        this.containedException = null;
        this.locator = locator;
    }

    /**
     * Wrap an existing exception in a DTMException.
     *
     * @param message The error or warning message, or null to
     * use the message from the embedded exception.
     * @param locator The locator object for the error or warning.
     * @param e Any exception
     */
    public DTMException(String message, SourceLocator locator,
                        Throwable e) {
        super(message);
        this.containedException = e;
        this.locator = locator;
    }

    /**
     * Get the error message with location information
     * appended.
     */
    public String getMessageAndLocation() {
        StringBuilder sbuffer = new StringBuilder();
        String message = super.getMessage();
        if (null != message) {
            sbuffer.append(message);
        }
        if (null != locator) {
            String systemID = locator.getSystemId();
            int line = locator.getLineNumber();
            int column = locator.getColumnNumber();
            if (null != systemID) {
                sbuffer.append("; SystemID: ");
                sbuffer.append(systemID);
            }
            if (0 != line) {
                sbuffer.append("; Line#: ");
                sbuffer.append(line);
            }
            if (0 != column) {
                sbuffer.append("; Column#: ");
                sbuffer.append(column);
            }
        }
        return sbuffer.toString();
    }

    /**
     * Get the location information as a string.
     *
     * @return A string with location info, or null
     * if there is no location information.
     */
    public String getLocationAsString() {
        if (null != locator) {
            StringBuilder sbuffer = new StringBuilder();
            String systemID = locator.getSystemId();
            int line = locator.getLineNumber();
            int column = locator.getColumnNumber();
            if (null != systemID) {
                sbuffer.append("; SystemID: ");
                sbuffer.append(systemID);
            }
            if (0 != line) {
                sbuffer.append("; Line#: ");
                sbuffer.append(line);
            }
            if (0 != column) {
                sbuffer.append("; Column#: ");
                sbuffer.append(column);
            }
            return sbuffer.toString();
        } else {
            return null;
        }
    }

    /**
     * Print the the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     */
    public void printStackTrace() {
        printStackTrace(new java.io.PrintWriter(System.err, true));
    }

    /**
     * Print the the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     * @param s The stream where the dump will be sent to.
     */
    public void printStackTrace(java.io.PrintStream s) {
        printStackTrace(new java.io.PrintWriter(s));
    }

    /**
     * Print the the trace of methods from where the error
     * originated. This will trace all nested exception
     * objects, as well as this object.
     * @param s The writer where the dump will be sent to.
     */
    public void printStackTrace(java.io.PrintWriter s) {
        if (s == null) {
            s = new java.io.PrintWriter(System.err, true);
        }
        try {
            String locInfo = getLocationAsString();
            if (null != locInfo) {
                s.println(locInfo);
            }
            super.printStackTrace(s);
        } catch (Throwable e) {}
        // Probe for Throwable.getCause() to detect a chained-exception JDK.
        boolean isJdk14OrHigher = false;
        try {
            Throwable.class.getMethod("getCause", (Class[]) null);
            isJdk14OrHigher = true;
        } catch (NoSuchMethodException nsme) {
            // do nothing
        }
        // The printStackTrace method of the Throwable class in jdk 1.4
        // and higher will include the cause when printing the backtrace.
        // The following code is only required when using jdk 1.3 or lower
        if (!isJdk14OrHigher) {
            Throwable exception = getException();
            // Walk at most 10 levels of wrapped exceptions to avoid cycles.
            for (int i = 0; (i < 10) && (null != exception); i++) {
                s.println("---------");
                try {
                    if (exception instanceof DTMException) {
                        String locInfo =
                            ((DTMException) exception)
                                .getLocationAsString();
                        if (null != locInfo) {
                            s.println(locInfo);
                        }
                    }
                    exception.printStackTrace(s);
                } catch (Throwable e) {
                    s.println("Could not print stack trace...");
                }
                try {
                    // Follow the wrapped exception via reflection, since the
                    // wrapper may not be a DTMException.
                    Method meth =
                        ((Object) exception).getClass().getMethod("getException",
                            (Class[]) null);
                    if (null != meth) {
                        Throwable prev = exception;
                        exception = (Throwable) meth.invoke(exception, (Object[]) null);
                        if (prev == exception) {
                            break;
                        }
                    } else {
                        exception = null;
                    }
                } catch (InvocationTargetException ite) {
                    exception = null;
                } catch (IllegalAccessException iae) {
                    exception = null;
                } catch (NoSuchMethodException nsme) {
                    exception = null;
                }
            }
        }
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.guardduty.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains information about the service.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/guardduty-2017-11-28/Service" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Service implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Information about the activity described in a finding.
* </p>
*/
private Action action;
/**
* <p>
* An evidence object associated with the service.
* </p>
*/
private Evidence evidence;
/**
* <p>
* Indicates whether this finding is archived.
* </p>
*/
private Boolean archived;
/**
* <p>
* Total count of the occurrences of this finding type.
* </p>
*/
private Integer count;
/**
* <p>
* Detector ID for the GuardDuty service.
* </p>
*/
private String detectorId;
/**
* <p>
* First seen timestamp of the activity that prompted GuardDuty to generate this finding.
* </p>
*/
private String eventFirstSeen;
/**
* <p>
* Last seen timestamp of the activity that prompted GuardDuty to generate this finding.
* </p>
*/
private String eventLastSeen;
/**
* <p>
* Resource role information for this finding.
* </p>
*/
private String resourceRole;
/**
* <p>
* The name of the AWS service (GuardDuty) that generated a finding.
* </p>
*/
private String serviceName;
/**
* <p>
* Feedback left about the finding.
* </p>
*/
private String userFeedback;
/**
 * Records the action describing the activity in this finding.
 *
 * @param value
 *        Information about the activity described in a finding.
 */
public void setAction(Action value) {
    this.action = value;
}
/**
 * Returns the action describing the activity in this finding.
 *
 * @return Information about the activity described in a finding.
 */
public Action getAction() {
    return action;
}
/**
 * Fluent variant of {@link #setAction(Action)}.
 *
 * @param action
 *        Information about the activity described in a finding.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public Service withAction(Action action) {
    this.action = action;
    return this;
}
/**
 * Records the evidence object associated with the service.
 *
 * @param value
 *        An evidence object associated with the service.
 */
public void setEvidence(Evidence value) {
    this.evidence = value;
}
/**
 * Returns the evidence object associated with the service.
 *
 * @return An evidence object associated with the service.
 */
public Evidence getEvidence() {
    return evidence;
}
/**
 * Fluent variant of {@link #setEvidence(Evidence)}.
 *
 * @param evidence
 *        An evidence object associated with the service.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public Service withEvidence(Evidence evidence) {
    this.evidence = evidence;
    return this;
}
/**
 * Records whether this finding is archived.
 *
 * @param value
 *        Indicates whether this finding is archived.
 */
public void setArchived(Boolean value) {
    this.archived = value;
}
/**
 * Returns whether this finding is archived.
 *
 * @return Indicates whether this finding is archived.
 */
public Boolean getArchived() {
    return archived;
}
/**
 * Fluent variant of {@link #setArchived(Boolean)}.
 *
 * @param archived
 *        Indicates whether this finding is archived.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public Service withArchived(Boolean archived) {
    this.archived = archived;
    return this;
}
/**
 * Boolean-style accessor for the archived flag; same value as
 * {@link #getArchived()}.
 *
 * @return Indicates whether this finding is archived.
 */
public Boolean isArchived() {
    return archived;
}
/**
 * Records the total count of the occurrences of this finding type.
 *
 * @param value
 *        Total count of the occurrences of this finding type.
 */
public void setCount(Integer value) {
    this.count = value;
}
/**
 * Returns the total count of the occurrences of this finding type.
 *
 * @return Total count of the occurrences of this finding type.
 */
public Integer getCount() {
    return count;
}
/**
 * Fluent variant of {@link #setCount(Integer)}.
 *
 * @param count
 *        Total count of the occurrences of this finding type.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public Service withCount(Integer count) {
    this.count = count;
    return this;
}
/**
 * Records the detector ID for the GuardDuty service.
 *
 * @param value
 *        Detector ID for the GuardDuty service.
 */
public void setDetectorId(String value) {
    this.detectorId = value;
}
/**
 * Returns the detector ID for the GuardDuty service.
 *
 * @return Detector ID for the GuardDuty service.
 */
public String getDetectorId() {
    return detectorId;
}
/**
 * Fluent variant of {@link #setDetectorId(String)}.
 *
 * @param detectorId
 *        Detector ID for the GuardDuty service.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public Service withDetectorId(String detectorId) {
    this.detectorId = detectorId;
    return this;
}
/**
 * Records the first-seen timestamp of the activity behind this finding.
 *
 * @param value
 *        First seen timestamp of the activity that prompted GuardDuty to generate this finding.
 */
public void setEventFirstSeen(String value) {
    this.eventFirstSeen = value;
}
/**
 * Returns the first-seen timestamp of the activity behind this finding.
 *
 * @return First seen timestamp of the activity that prompted GuardDuty to generate this finding.
 */
public String getEventFirstSeen() {
    return eventFirstSeen;
}
/**
* <p>
* First seen timestamp of the activity that prompted GuardDuty to generate this finding.
* </p>
*
* @param eventFirstSeen
* First seen timestamp of the activity that prompted GuardDuty to generate this finding.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Service withEventFirstSeen(String eventFirstSeen) {
setEventFirstSeen(eventFirstSeen);
return this;
}
/**
 * Sets the last-seen timestamp of the activity that prompted GuardDuty to generate this finding.
 *
 * @param eventLastSeen
 *        last-seen timestamp of the triggering activity
 */
public void setEventLastSeen(String eventLastSeen) {
    this.eventLastSeen = eventLastSeen;
}

/**
 * Returns the last-seen timestamp of the activity that prompted GuardDuty to generate this finding.
 *
 * @return last-seen timestamp of the triggering activity
 */
public String getEventLastSeen() {
    return eventLastSeen;
}

/**
 * Fluent setter for the last-seen timestamp.
 *
 * @param eventLastSeen
 *        last-seen timestamp of the triggering activity
 * @return this object, so that method calls can be chained together
 */
public Service withEventLastSeen(String eventLastSeen) {
    setEventLastSeen(eventLastSeen);
    return this;
}
/**
 * Sets the resource role information for this finding.
 *
 * @param resourceRole
 *        resource role information for this finding
 */
public void setResourceRole(String resourceRole) {
    this.resourceRole = resourceRole;
}

/**
 * Returns the resource role information for this finding.
 *
 * @return resource role information for this finding
 */
public String getResourceRole() {
    return resourceRole;
}

/**
 * Fluent setter for the resource role information.
 *
 * @param resourceRole
 *        resource role information for this finding
 * @return this object, so that method calls can be chained together
 */
public Service withResourceRole(String resourceRole) {
    setResourceRole(resourceRole);
    return this;
}
/**
 * Sets the name of the AWS service (GuardDuty) that generated the finding.
 *
 * @param serviceName
 *        name of the AWS service that generated the finding
 */
public void setServiceName(String serviceName) {
    this.serviceName = serviceName;
}

/**
 * Returns the name of the AWS service (GuardDuty) that generated the finding.
 *
 * @return name of the AWS service that generated the finding
 */
public String getServiceName() {
    return serviceName;
}

/**
 * Fluent setter for the service name.
 *
 * @param serviceName
 *        name of the AWS service that generated the finding
 * @return this object, so that method calls can be chained together
 */
public Service withServiceName(String serviceName) {
    setServiceName(serviceName);
    return this;
}
/**
 * Sets the feedback left about the finding.
 *
 * @param userFeedback
 *        feedback left about the finding
 */
public void setUserFeedback(String userFeedback) {
    this.userFeedback = userFeedback;
}

/**
 * Returns the feedback left about the finding.
 *
 * @return feedback left about the finding
 */
public String getUserFeedback() {
    return userFeedback;
}

/**
 * Fluent setter for the user feedback.
 *
 * @param userFeedback
 *        feedback left about the finding
 * @return this object, so that method calls can be chained together
 */
public Service withUserFeedback(String userFeedback) {
    setUserFeedback(userFeedback);
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * NOTE(review): each non-null field except the last appends a trailing comma, so
 * the output may end with "," right before the closing brace when later fields
 * (e.g. UserFeedback) are null. Generated-code quirk; callers should not parse
 * this string.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    if (getAction() != null)
        sb.append("Action: ").append(getAction()).append(",");
    if (getEvidence() != null)
        sb.append("Evidence: ").append(getEvidence()).append(",");
    if (getArchived() != null)
        sb.append("Archived: ").append(getArchived()).append(",");
    if (getCount() != null)
        sb.append("Count: ").append(getCount()).append(",");
    if (getDetectorId() != null)
        sb.append("DetectorId: ").append(getDetectorId()).append(",");
    if (getEventFirstSeen() != null)
        sb.append("EventFirstSeen: ").append(getEventFirstSeen()).append(",");
    if (getEventLastSeen() != null)
        sb.append("EventLastSeen: ").append(getEventLastSeen()).append(",");
    if (getResourceRole() != null)
        sb.append("ResourceRole: ").append(getResourceRole()).append(",");
    if (getServiceName() != null)
        sb.append("ServiceName: ").append(getServiceName()).append(",");
    if (getUserFeedback() != null)
        sb.append("UserFeedback: ").append(getUserFeedback());
    sb.append("}");
    return sb.toString();
}
/**
 * Compares all finding-service fields for equality, treating two null values
 * of a field as equal (same contract as the original XOR/equals chain).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // Also rejects null: null is never an instance of Service.
    if (!(obj instanceof Service)) {
        return false;
    }
    Service other = (Service) obj;
    // Fully-qualified Objects.equals keeps this block import-free while giving
    // the same null-safe semantics as the original (null^null -> equal,
    // one-sided null -> not equal, otherwise delegate to equals()).
    return java.util.Objects.equals(getAction(), other.getAction())
            && java.util.Objects.equals(getEvidence(), other.getEvidence())
            && java.util.Objects.equals(getArchived(), other.getArchived())
            && java.util.Objects.equals(getCount(), other.getCount())
            && java.util.Objects.equals(getDetectorId(), other.getDetectorId())
            && java.util.Objects.equals(getEventFirstSeen(), other.getEventFirstSeen())
            && java.util.Objects.equals(getEventLastSeen(), other.getEventLastSeen())
            && java.util.Objects.equals(getResourceRole(), other.getResourceRole())
            && java.util.Objects.equals(getServiceName(), other.getServiceName())
            && java.util.Objects.equals(getUserFeedback(), other.getUserFeedback());
}
/**
 * Hash over all finding-service fields.
 *
 * java.util.Objects.hash applies exactly the original accumulation
 * (seed 1, result = 31 * result + (field == null ? 0 : field.hashCode())),
 * so the computed value is byte-for-byte identical to the hand-rolled loop.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(getAction(), getEvidence(), getArchived(), getCount(),
            getDetectorId(), getEventFirstSeen(), getEventLastSeen(), getResourceRole(),
            getServiceName(), getUserFeedback());
}
/**
 * Returns a shallow copy of this object via {@link Object#clone()}.
 *
 * @return a shallow field-for-field copy of this instance
 * @throws IllegalStateException if the JVM unexpectedly reports the class as not cloneable
 */
@Override
public Service clone() {
    try {
        return (Service) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: the enclosing class is expected to implement
        // Cloneable (the message below asserts as much) — TODO confirm against
        // the class declaration, which is outside this view.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
/**
 * Marshalls this structure via the SDK-generated {@code ServiceMarshaller}.
 * Marked {@code @SdkInternalApi}: for AWS SDK internal use, not client code.
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    com.amazonaws.services.guardduty.model.transform.ServiceMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Expirable;
import com.intellij.openapi.util.ExpirableRunnable;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
/**
 * This class receives focus requests, manages them, and delegates to the AWT focus subsystem. All focus requests
* should be done through this class. For example, to request focus on a component:
* <pre>
* IdeFocusManager.getInstance(project).requestFocus(comp, true);
* </pre>
 * This is the preferred way to request focus on components, as opposed to calling
* <pre>
* comp.requestFocus();
* </pre>
*
* This class is also responsible for delivering key events while focus transferring is in progress.
* <p>
* <code>IdeFocusManager</code> instance can be received per project or the global instance. The preferred way is
* to use instance <code>IdeFocusManager.getInstance(project)</code>. If no project instance is available, then
* <code>IdeFocusManager.getGlobalInstance()</code> can be used.
*/
public abstract class IdeFocusManager implements FocusRequestor {
  /**
   * Finds the most suitable component to request focus on. For instance, you may pass a JPanel instance;
   * this method will traverse into its children to find a focusable component.
   *
   * @return a suitable component to focus, or null if none was found
   */
  @Nullable
  public abstract JComponent getFocusTargetFor(@NotNull final JComponent comp);

  /**
   * Executes the given runnable after all focus activities are finished.
   */
  public abstract void doWhenFocusSettlesDown(@NotNull Runnable runnable);

  /**
   * Executes the given expirable runnable after all focus activities are finished.
   */
  public abstract void doWhenFocusSettlesDown(@NotNull ExpirableRunnable runnable);

  /**
   * Finds the focused component among descendants of the given component. Descendants may be in child popups and windows.
   */
  @Nullable
  public abstract Component getFocusedDescendantFor(final Component comp);

  /**
   * Dispatches the given key event. This method should not be called by user code.
   *
   * @return true if the event was dispatched, false otherwise
   */
  public abstract boolean dispatch(@NotNull KeyEvent e);

  /**
   * Aggregates all key events until the given callback object is processed.
   *
   * @param done action callback
   */
  public abstract void typeAheadUntil(ActionCallback done);

  /**
   * Reports whether any focus activity is currently being done.
   */
  public abstract boolean isFocusBeingTransferred();

  /**
   * Requests default focus. This method should not be called by user code.
   */
  @NotNull
  public abstract ActionCallback requestDefaultFocus(boolean forced);

  /**
   * Reports whether focus transfer is enabled right now. It can be disabled if the app is inactive. In this case
   * all focus requests will be either postponed or executed only if <code>FocusCommand</code> can be executed on an inactive app.
   *
   * @see com.intellij.openapi.wm.FocusCommand#canExecuteOnInactiveApp()
   */
  public abstract boolean isFocusTransferEnabled();

  /**
   * Returns an <code>Expirable</code> instance for the given counter of focus commands. As any new <code>FocusCommand</code>
   * is emitted to execute, the counter increments, thus making the previously returned <code>Expirable</code> objects expired.
   */
  @NotNull
  public abstract Expirable getTimestamp(boolean trackOnlyForcedCommands);

  /**
   * Returns a <code>FocusRequestor</code> object which will emit focus requests unless expired.
   *
   * @see #getTimestamp(boolean)
   */
  @NotNull
  public abstract FocusRequestor getFurtherRequestor();

  /**
   * Injects some procedure that will maybe do something with focus after all focus requests are fulfilled and
   * before focus transfer is reported ready.
   */
  public abstract void revalidateFocus(@NotNull ExpirableRunnable runnable);

  /**
   * Enables or disables typeahead.
   *
   * @see #typeAheadUntil(com.intellij.openapi.util.ActionCallback)
   */
  public abstract void setTypeaheadEnabled(boolean enabled);

  /**
   * Computes the effective focus owner.
   */
  public abstract Component getFocusOwner();

  /**
   * Runs a runnable for which <code>DataContext</code> will not be computed from the current focus owner;
   * the given context will be used instead.
   */
  public abstract void runOnOwnContext(@NotNull DataContext context, @NotNull Runnable runnable);

  /**
   * Returns the last focused component for the given <code>IdeFrame</code>.
   */
  @Nullable
  public abstract Component getLastFocusedFor(@Nullable IdeFrame frame);

  /**
   * Returns the last focused <code>IdeFrame</code>.
   */
  @Nullable
  public abstract IdeFrame getLastFocusedFrame();

  /**
   * Puts the container window to front. May not execute if the app is inactive or under some other conditions. This
   * is preferred over finding the container window yourself and unconditionally calling <code>window.toFront()</code>.
   */
  public abstract void toFront(JComponent c);

  // Falls back to the global instance when the project is null, disposed, or not yet initialized.
  public static IdeFocusManager getInstance(@Nullable Project project) {
    if (project == null || project.isDisposed() || !project.isInitialized()) return getGlobalInstance();
    return project.getComponent(IdeFocusManager.class);
  }

  // Resolution order: project taken from the data context, then the active window's
  // enclosing IdeFrame, then the global instance. Never returns null.
  @NotNull
  public static IdeFocusManager findInstanceByContext(@Nullable DataContext context) {
    IdeFocusManager instance = null;
    if (context != null) {
      instance = getInstanceSafe(CommonDataKeys.PROJECT.getData(context));
    }
    if (instance == null) {
      instance = findByComponent(KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow());
    }
    if (instance == null) {
      instance = getGlobalInstance();
    }
    return instance;
  }

  // Resolves via the component's enclosing IdeFrame, falling back to context-free lookup.
  @NotNull
  public static IdeFocusManager findInstanceByComponent(@NotNull Component c) {
    final IdeFocusManager instance = findByComponent(c);
    return instance != null ? instance : findInstanceByContext(null);
  }

  // Maps a component to the focus manager of the IdeFrame that ultimately contains it, if any.
  @Nullable
  private static IdeFocusManager findByComponent(Component c) {
    final Component parent = UIUtil.findUltimateParent(c);
    if (parent instanceof IdeFrame) {
      return getInstanceSafe(((IdeFrame)parent).getProject());
    }
    return null;
  }

  // Like getInstance(), but returns null (instead of the global instance) for an unusable project.
  @Nullable
  private static IdeFocusManager getInstanceSafe(@Nullable Project project) {
    if (project != null && !project.isDisposed() && project.isInitialized()) {
      return getInstance(project);
    }
    return null;
  }

  // Resolves from the current keyboard focus owner, falling back to context-free lookup.
  @NotNull
  public static IdeFocusManager findInstance() {
    final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
    return owner != null ? findInstanceByComponent(owner) : findInstanceByContext(null);
  }

  @NotNull
  public static IdeFocusManager getGlobalInstance() {
    IdeFocusManager fm = null;
    Application app = ApplicationManager.getApplication();
    if (app != null && app.hasComponent(IdeFocusManager.class)) {
      fm = app.getComponent(IdeFocusManager.class);
    }
    if (fm == null) {
      // happens when app is semi-initialized (e.g. when IDEA server dialog is shown)
      fm = PassThroughIdeFocusManager.getInstance();
    }
    return fm;
  }
}
| |
package org.common.jfunk;
import static org.common.jfunk.Pair.pair;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
* Functions analogous to those available in the Ruby's Enumerable module.
* Return values can be the following:
* {@link java.util.ArrayList}, {@link java.util.HashSet}
*
* This class is called <b>Enumerables</b> and not <b>Collections</b> in order not to confuse
* it with {@link java.util.Collections}
*
*/
public class Enumerables {

    /**
     * Returns {@code true} if predicate {@code p} holds for every element of {@code c}.
     * Vacuously true for an empty collection.
     */
    public static <T> Boolean all(Collection<T> c, Predicate<T> p) {
        verifyArguments(c, p);
        for (T e : c) {
            if (!p.call(e)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns {@code true} if predicate {@code p} holds for at least one element of {@code c}.
     */
    public static <T> Boolean any(Collection<T> c, Predicate<T> p) {
        verifyArguments(c, p);
        for (T e : c) {
            if (p.call(e)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Maps each element of {@code c} through {@code f}, collecting the results.
     *
     * @param resultType optional concrete collection class for the result;
     *                   defaults to {@link java.util.ArrayList}
     */
    @SuppressWarnings("unchecked")
    public static <T, U, Y extends Collection<U>> Y collect(Collection<T> c, Function<T, U> f, Class<?>... resultType) {
        verifyArguments(c, f);
        Y result = (Y) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<U>());
        for (T e : c) {
            result.add(f.call(e));
        }
        return result;
    }

    /**
     * Groups consecutive elements of {@code c} that map to the same key under {@code f}
     * into (key, chunk) pairs, preserving encounter order.
     *
     * NOTE: null chunk keys are not supported — a null key restarts the chunk on every
     * element and a later non-first null key would NPE on comparison.
     *
     * @param resultType optional concrete collection class for each chunk;
     *                   defaults to {@link java.util.ArrayList}
     */
    @SuppressWarnings("unchecked")
    public static <T, U, Y extends Collection<T>> Collection<Pair<U, Y>> chunk(Collection<T> c, Function<T, U> f, Class<?>... resultType) {
        verifyArguments(c, f);
        // BUGFIX: the pair list itself is always an ArrayList; resultType only describes
        // the per-chunk collections. Previously resultType was also used to instantiate
        // this list, which threw ClassCastException for any non-List result type
        // (e.g. HashSet.class).
        List<Pair<U, Y>> pairs = new ArrayList<Pair<U, Y>>();
        Y currentChunk = (Y) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<T>());
        U currentKey = null;
        for (T e : c) {
            U key = f.call(e);
            if (currentKey == null || !key.equals(currentKey)) {
                // Key changed: flush the chunk collected so far and start a new one.
                if (currentChunk.size() > 0) {
                    pairs.add(new Pair<U, Y>(currentKey, currentChunk));
                }
                currentChunk = (Y) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<T>());
                currentKey = key;
            }
            currentChunk.add(e);
        }
        if (currentChunk.size() > 0) {
            pairs.add(new Pair<U, Y>(currentKey, currentChunk));
        }
        return pairs;
    }

    /**
     * Returns a new collection containing all elements of {@code c1} followed by all
     * elements of {@code c2}.
     */
    @SuppressWarnings("unchecked")
    public static <T, Y extends Collection<T>> Y concat(Collection<T> c1, Collection<T> c2, Class<?>... resultType) {
        // Consistent with the rest of the class: reject nulls explicitly.
        verifyArguments(c1, c2);
        Y result = (Y) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<T>());
        result.addAll(c1);
        result.addAll(c2);
        return result;
    }

    /**
     * Maps each element through {@code f} (which yields a collection) and concatenates
     * all resulting collections into a single one.
     */
    @SuppressWarnings("unchecked")
    public static <T, U, Y extends Collection<U>> Y collectConcat(Collection<T> c, Function<T, Collection<U>> f, Class<?>... resultType) {
        verifyArguments(c, f);
        Y result = (Y) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<U>());
        // PERF: accumulate directly instead of re-concatenating; the previous version
        // rebuilt the accumulator collection on every part, which was O(n^2).
        Collection<Collection<U>> collectedParts = collect(c, f);
        for (Collection<U> part : collectedParts) {
            result.addAll(part);
        }
        return result;
    }

    /**
     * Equivalent of {@link #collect}: maps each element of {@code c} through {@code f}.
     */
    @SuppressWarnings("unchecked")
    public static <T, U, Y extends Collection<U>> Y map(Collection<T> c, Function<T, U> f, Class<?>... resultType) {
        verifyArguments(c, f);
        Y result = (Y) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<U>());
        for (T e : c) {
            result.add(f.call(e));
        }
        return result;
    }

    /**
     * Left-folds {@code c} with {@code f}, starting from accumulator {@code acc}.
     */
    public static <T, U> U reduce(Collection<T> c, Function<Pair<U, T>, U> f, U acc) {
        verifyArguments(c, f, acc);
        for (T e : c) {
            acc = f.call(new Pair<U, T>(acc, e));
        }
        return acc;
    }

    /**
     * Joins the string representations of the elements of {@code c}, separated by
     * {@code separator} (defaults to ",").
     */
    public static <T> String join(Collection<T> c, String... separator) {
        verifyArguments(c);
        String sep = (separator.length > 0) ? separator[0] : ",";
        StringBuilder result = new StringBuilder();
        for (T e : c) {
            result.append(e.toString()).append(sep);
        }
        // Drop the trailing separator appended by the loop, if anything was appended.
        if (result.length() >= sep.length()) {
            result.setLength(result.length() - sep.length());
        }
        return result.toString();
    }

    /**
     * Counts the elements of {@code c} that satisfy {@code p}.
     */
    public static <T> int count(Collection<T> c, Predicate<T> p) {
        verifyArguments(c, p);
        int result = 0;
        for (T e : c) {
            if (p.call(e)) {
                result++;
            }
        }
        return result;
    }

    /**
     * Performs {@code a} on every element of {@code c}.
     */
    public static <T> void each(Collection<T> c, Action<T> a) {
        verifyArguments(c, a);
        for (T e : c) {
            a.perform(e);
        }
    }

    /**
     * Performs {@code a} on every element of {@code c}, {@code times} times over.
     *
     * @throws IllegalArgumentException if {@code times} is negative
     */
    public static <T> void cycle(Collection<T> c, Action<T> a, int times) {
        verifyArguments(c, a);
        if (times < 0) {
            // BUGFIX: the message previously said "> 0" although 0 is accepted.
            throw new IllegalArgumentException("'times' should be >= 0");
        }
        for (int i = 0; i < times; i++) {
            each(c, a);
        }
    }

    /**
     * Performs {@code a} on each (element, index) pair; indices start at 0 and follow
     * iteration order.
     */
    public static <T> void eachWithIndex(Collection<T> c, Action<Pair<T, Integer>> a) {
        verifyArguments(c, a);
        int i = 0;
        for (T e : c) {
            a.perform(pair(e, i++));
        }
    }

    /**
     * Performs {@code a} on each (element, acc) pair and returns {@code acc}.
     */
    public static <T, U> U eachWithObject(Collection<T> c, Action<Pair<T, U>> a, U acc) {
        verifyArguments(c, a, acc);
        for (T e : c) {
            a.perform(pair(e, acc));
        }
        return acc;
    }

    /**
     * Returns the elements of {@code c} that satisfy {@code p}, in iteration order.
     */
    @SuppressWarnings("unchecked")
    public static <T> Collection<T> filter(Collection<T> c, Predicate<T> p, Class<?>... resultType) {
        verifyArguments(c, p);
        Collection<T> result = (Collection<T>) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<T>());
        for (T e : c) {
            if (p.call(e)) {
                result.add(e);
            }
        }
        return result;
    }

    /**
     * Returns the elements of {@code c} sorted by {@code comp}.
     */
    @SuppressWarnings("unchecked")
    public static <T> Collection<T> sortBy(Collection<T> c, Comparator<T> comp, Class<?>... resultType) {
        verifyArguments(c, comp);
        Collection<T> result = (Collection<T>) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<T>());
        List<T> sorted = new ArrayList<T>(c);
        Collections.sort(sorted, comp);
        result.addAll(sorted);
        return result;
    }

    /**
     * Returns the elements of {@code c} sorted by their natural ordering.
     */
    @SuppressWarnings("unchecked")
    public static <T extends Comparable<? super T>> Collection<T> sort(Collection<T> c, Class<?>... resultType) {
        verifyArguments(c);
        Collection<T> result = (Collection<T>) (resultType.length > 0 ? instantiate(resultType[0]) : new ArrayList<T>());
        List<T> sorted = new ArrayList<T>(c);
        Collections.sort(sorted);
        result.addAll(sorted);
        return result;
    }

    //TODO (some of the methods listed below):
    // #detect #drop #drop_while #each_cons #each_entry #each_slice #entries
    // #find #find_all #find_index #first #flat_map #grep #group_by #include?
    // #inject #max #max_by #member? #min #min_by #minmax #minmax_by #none?
    // #one? #partition #reject #reverse_each #select #slice_before #take
    // #take_while #to_a #zip
    //TODO: Re-factor commonality between the different functions

    /**
     * Reflectively instantiates the requested result-collection class via its
     * public no-arg constructor.
     */
    private static <T> T instantiate(Class<T> type) {
        try {
            return type.newInstance();
        } catch (InstantiationException exception) {
            throw new RuntimeException(exception);
        } catch (IllegalAccessException exception) {
            throw new RuntimeException(exception);
        }
    }

    /**
     * Rejects null arguments, reporting the 1-based position of the offender.
     */
    private static void verifyArguments(Object... args) {
        for (int i = 0; i < args.length; i++) {
            if (null == args[i]) {
                throw new IllegalArgumentException("Null argument number " + (i + 1));
            }
        }
    }
}
| |
/*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage.miniclient;
/**
*
* @author Narflex
*/
public class MediaCmd
{
public static final int MEDIACMD_INIT = 0;
public static final int MEDIACMD_DEINIT = 1;
public static final int MEDIACMD_OPENURL = 16;
// length, url
public static final int MEDIACMD_GETMEDIATIME = 17;
public static final int MEDIACMD_SETMUTE = 18;
// mute
public static final int MEDIACMD_STOP = 19;
public static final int MEDIACMD_PAUSE = 20;
public static final int MEDIACMD_PLAY = 21;
public static final int MEDIACMD_FLUSH = 22;
public static final int MEDIACMD_PUSHBUFFER = 23;
// size, flags, data
public static final int MEDIACMD_GETVIDEORECT = 24;
// returns 16bit width, 16bit height
public static final int MEDIACMD_SETVIDEORECT = 25;
// x, y, width, height, x, y, width, height
public static final int MEDIACMD_GETVOLUME = 26;
public static final int MEDIACMD_SETVOLUME = 27;
// volume
public static final int MEDIACMD_FRAMESTEP = 28;
public static final int MEDIACMD_SEEK = 29;
// 64-bit time (for pull mode only)
private static MediaCmd globalMediaCmd;
public static MediaCmd getInstance() { return globalMediaCmd; }
private MiniMPlayerPlugin playa;
private java.io.File buffFile;
private java.io.FileOutputStream buffStream;
//private java.io.RandomAccessFile buffRaf;
private long pushDataLeftBeforeInit;
private long bufferFilePushedBytes;
private boolean pushMode;
private int numPushedBuffers;
private int DESIRED_VIDEO_PREBUFFER_SIZE = 4*1024*1024;
private int DESIRED_AUDIO_PREBUFFER_SIZE = 2*1024*1024;
private int maxPrebufferSize;
private MiniClientConnection myConn;
private int statsChannelBWKbps;
private int statsStreamBWKbps;
private int statsTargetBWKbps;
private long serverMuxTime;
private long prebufferTime;
/**
 * Creates a new instance of MediaCmd bound to the given client connection,
 * and publishes it as the process-wide instance returned by getInstance().
 */
public MediaCmd(MiniClientConnection myConn)
{
    this.myConn = myConn;
    // Last-constructed instance wins as the global singleton.
    globalMediaCmd = this;
}
/**
 * Returns the active media player plugin, or null when no player has been
 * created yet or playback has been torn down (see close()).
 */
public MiniMPlayerPlugin getPlaya()
{
    return playa;
}
/**
 * Writes a 32-bit value into {@code data} at {@code offset} in big-endian
 * byte order (most significant byte first).
 */
public static void writeInt(int value, byte[] data, int offset)
{
    for (int i = 0; i < 4; i++)
    {
        data[offset + i] = (byte) (value >>> (24 - 8 * i));
    }
}
/**
 * Writes a 16-bit value into {@code data} at {@code offset} in big-endian
 * byte order (most significant byte first).
 */
public static void writeShort(short value, byte[] data, int offset)
{
    data[offset] = (byte) (value >>> 8);
    data[offset + 1] = (byte) value;
}
/**
 * Reads a big-endian 32-bit value from {@code cmddata} starting at {@code pos}.
 */
public static int readInt(int pos, byte[] cmddata)
{
    int result = 0;
    for (int i = 0; i < 4; i++)
    {
        result = (result << 8) | (cmddata[pos + i] & 0xFF);
    }
    return result;
}
/**
 * Reads a big-endian 16-bit value from {@code cmddata} starting at {@code pos}.
 */
public static short readShort(int pos, byte[] cmddata)
{
    int hi = cmddata[pos] & 0xFF;
    int lo = cmddata[pos + 1] & 0xFF;
    return (short) ((hi << 8) | lo);
}
/**
 * Tears down playback state: detaches the video region from the renderer,
 * frees the player plugin, closes the push-buffer stream and deletes the
 * temporary buffer file. Safe to call when nothing is open.
 */
public void close()
{
    if (myConn.getGfxCmd() != null)
        myConn.getGfxCmd().setVideoBounds(null, null);
    if (playa != null)
        playa.free();
    playa = null;
    // BUGFIX: guard the null case explicitly instead of letting close() throw a
    // NullPointerException into the swallowing catch block.
    if (buffStream != null)
    {
        try
        {
            buffStream.close();
        }
        catch (Exception e)
        {
            // Best-effort close; nothing useful to do on failure.
        }
    }
    buffStream = null;
    if (buffFile != null)
        buffFile.delete();
}
private Process ogleProcess;
public static BufferStatsFrame bufferStatsFrame;
public int ExecuteMediaCommand(int cmd, int len, byte[] cmddata, byte[] retbuf)
{
// TODO verify sizes...
if(cmd!=MEDIACMD_PUSHBUFFER)
System.out.println("Execute media command " + cmd);
switch(cmd)
{
case MEDIACMD_INIT:
try
{
DESIRED_VIDEO_PREBUFFER_SIZE = Integer.parseInt(MiniClient.myProperties.getProperty("video_buffer_size", "" + (4*1024*1024)));
DESIRED_AUDIO_PREBUFFER_SIZE = Integer.parseInt(MiniClient.myProperties.getProperty("audio_buffer_size", "" + (2*1024*1024)));
}
catch (Exception e)
{
System.out.println("ERROR:" + e);
}
readInt(0, cmddata); // video format code
writeInt(1, retbuf, 0);
return 4;
case MEDIACMD_DEINIT:
writeInt(1, retbuf, 0);
if (ogleProcess != null)
{
ogleProcess.destroy();
ogleProcess = null;
}
close();
return 4;
case MEDIACMD_OPENURL:
int strLen = readInt(0, cmddata);
String urlString = "";
maxPrebufferSize = DESIRED_VIDEO_PREBUFFER_SIZE;
if (strLen > 1)
urlString = new String(cmddata, 4, strLen - 1);
if (!urlString.startsWith("push:"))
{
if (urlString.startsWith("dvd:"))
{
try
{
ogleProcess = Runtime.getRuntime().exec("ogle");
}
catch (Exception e)
{
System.out.println("ERROR Launching Ogle:" + e);
}
}
else if (urlString.startsWith("file://"))
{
playa = new MiniMPlayerPlugin(myConn.getGfxCmd(), myConn);
playa.setPushMode(false);
playa.load((byte)0, (byte)0, "", urlString, null, false, 0);
pushDataLeftBeforeInit = 0;
pushMode = false;
}
else
{
playa = new MiniMPlayerPlugin(myConn.getGfxCmd(), myConn);
// We always set it to be an active file because it'll get turned off by the streaming code if it is not.
// It's safe to say it's active when it's not (as long as it's a streamable file format), but the opposite is not true.
// So we always say it's active to avoid any problems loading the file if it's a streamable file format.
boolean isActive = urlString.toLowerCase().endsWith(".mpg") || urlString.toLowerCase().endsWith(".ts") ||
urlString.toLowerCase().endsWith(".flv");
playa.setPushMode(false);
playa.load((byte)0, (byte)0, "", urlString, myConn.getServerName(), isActive, 0);
pushDataLeftBeforeInit = 0;
pushMode = false;
}
}
else
{
if (MiniClientConnection.detailedBufferStats)
{
if (bufferStatsFrame == null)
{
bufferStatsFrame = new BufferStatsFrame();
// If we don't rethread this I've seen it deadlock the JVM on Linux
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
bufferStatsFrame.pack();
}
});
}
}
pushMode = true;
if (MiniMPlayerPlugin.USE_STDIN)
{
playa = new MiniMPlayerPlugin(myConn.getGfxCmd(), myConn);
playa.setPushMode(true);
playa.load((byte)0, (byte)0, "", buffFile.toString(), null, true, 0);
pushDataLeftBeforeInit = 0;
}
else
{
try
{
buffFile = java.io.File.createTempFile("stvbuff", ".dat");
buffStream = new java.io.FileOutputStream(buffFile);
//buffRaf = new java.io.RandomAccessFile(buffFile, "rw");
buffFile.deleteOnExit();
}
catch (java.io.IOException e)
{
System.out.println("Error with streaming: " + e);
e.printStackTrace();
return 0;
}
if (urlString.indexOf("audio") != -1 && urlString.indexOf("bf=vid") == -1)
{
pushDataLeftBeforeInit = 1024*16;
maxPrebufferSize = DESIRED_AUDIO_PREBUFFER_SIZE;
}
else
{
pushDataLeftBeforeInit = 1024*64;
maxPrebufferSize = DESIRED_VIDEO_PREBUFFER_SIZE;
}
playa = new MiniMPlayerPlugin(myConn.getGfxCmd(), myConn);
playa.setPushMode(true);
// playa.load((byte)0, (byte)0, "", buffFile, null, true, 0);
}
}
writeInt(1, retbuf, 0);
return 4;
case MEDIACMD_GETMEDIATIME:
if (playa == null)
return 0;
long theTime = playa.getMediaTimeMillis();
writeInt((int)theTime, retbuf, 0);
if (MiniClientConnection.detailedBufferStats)
{
if (playa != null)
{
retbuf[4] = (byte)(playa.getState() & 0xFF);
}
else
{
retbuf[4] = 0;
}
return 5;
}
else
return 4;
case MEDIACMD_SETMUTE:
writeInt(1, retbuf, 0);
if (playa == null)
return 4;
playa.setMute(readInt(0, cmddata) != 0);
return 4;
case MEDIACMD_STOP:
writeInt(1, retbuf, 0);
if (playa == null)
return 4;
playa.stop();
return 4;
case MEDIACMD_PAUSE:
writeInt(1, retbuf, 0);
if (playa == null)
return 4;
playa.pause();
return 4;
case MEDIACMD_PLAY:
writeInt(1, retbuf, 0);
if (playa == null)
return 4;
playa.play();
return 4;
case MEDIACMD_FLUSH:
writeInt(1, retbuf, 0);
// TODO
if (playa != null && pushMode && numPushedBuffers > 0)
{
numPushedBuffers = 0;
// Be sure all data is written to disk that we've gotten already.
try
{
if (buffStream != null)
buffStream.getFD().sync();
}catch (Exception e){}
//playa.beginFlush();
// try
{
//buffStream.close();
//buffStream = new java.io.FileOutputStream(buffFile);
//buffRaf.seek(0);
// buffFile = java.io.File.createTempFile("stvbuff", ".dat");
//buffStream = new java.io.FileOutputStream(buffFile);
// buffRaf = new java.io.RandomAccessFile(buffFile, "rw");
// buffFile.deleteOnExit();
playa.seek(Long.MAX_VALUE);
}
// catch (java.io.IOException e)
{
// System.out.println("Error zeroing out buffered stream from disk:" + e);
}
//playa.seek(0);
//playa.play();
}
return 4;
case MEDIACMD_PUSHBUFFER:
int buffSize = readInt(0, cmddata);
int flags = readInt(4, cmddata);
int bufDataOffset = 8;
if (MiniClientConnection.detailedBufferStats && buffSize > 0 && len > buffSize + 13)
{
bufDataOffset += 10;
statsChannelBWKbps = readShort(8, cmddata);
statsStreamBWKbps = readShort(10, cmddata);
statsTargetBWKbps = readShort(12, cmddata);
serverMuxTime = readInt(14, cmddata);
if (playa != null)
{
prebufferTime = serverMuxTime - playa.getMediaTimeMillis();
}
System.out.println("STATS chanBW=" + statsChannelBWKbps + " streamBW=" + statsStreamBWKbps + " targetBW=" + statsTargetBWKbps + " pretime=" + prebufferTime);
if (bufferStatsFrame != null)
{
bufferStatsFrame.addNewStats(statsChannelBWKbps, statsStreamBWKbps, statsTargetBWKbps, prebufferTime);
myConn.getGfxCmd().getWindow().updateStats();
}
}
if (buffSize > 0)
{
numPushedBuffers++;
try
{
if (MiniMPlayerPlugin.USE_STDIN)
playa.pushData(cmddata, bufDataOffset, buffSize);
else if (buffStream != null)
{
buffStream.write(cmddata, bufDataOffset, buffSize);
buffStream.flush();
// DISABLE THIS buffStream.getFD().sync();
bufferFilePushedBytes += buffSize;
//buffRaf.write(cmddata, 8, buffSize);
}
}
catch (java.io.IOException e)
{
System.out.println("IO Error:" + e);
}
}
if (!MiniMPlayerPlugin.USE_STDIN && pushDataLeftBeforeInit > 0)
{
pushDataLeftBeforeInit -= buffSize;
if (pushDataLeftBeforeInit <= 0)
{
//playa = new MiniMPlayerPlugin(myConn.getGfxCmd(), myConn);
//playa.setPushMode(true);
playa.load((byte)0, (byte)0, "", buffFile.toString(), null, true, 0);
}
}
if (flags == 0x80 && playa != null)
{
playa.inactiveFile();
}
int rv;
// Always indicate we have at least 512K of buffer...there's NO reason to stop buffering additional
// data since as playback goes on we keep writing to the filesystem anyways. Yeah, we could recover some bandwidth
// but that's not how any online video players work and we shouldn't be any different than that.
if (playa == null)
rv = maxPrebufferSize;
else
rv = (int)Math.max(131072*4, maxPrebufferSize - (bufferFilePushedBytes - playa.getLastFileReadPos()));
System.out.println("Finished pushing current data buffer of " + buffSize + " availSize=" + rv + " totalPushed=" + bufferFilePushedBytes +
" fileSize=" + (buffFile == null ? 0 : buffFile.length()));
writeInt(rv, retbuf, 0);
if (MiniClientConnection.detailedBufferStats)
{
if (playa != null)
{
writeInt((int)playa.getMediaTimeMillis(), retbuf, 4);
retbuf[8] = (byte)(playa.getState() & 0xFF);
}
else
{
writeInt(0, retbuf, 4);
retbuf[8] = 0;
}
if (flags == 0x80 && (playa == null || pushDataLeftBeforeInit > 0))
{
retbuf[8] = (byte)(MiniMPlayerPlugin.EOS_STATE & 0xFF);
}
return 9;
}
else
return 4;
case MEDIACMD_GETVOLUME:
if (playa == null)
writeInt(65535, retbuf, 0);
else
writeInt(Math.round(playa.getVolume() * 65535), retbuf, 0);
return 4;
case MEDIACMD_SETVOLUME:
if (playa == null)
writeInt(65535, retbuf, 0);
else
writeInt(Math.round(playa.setVolume(readInt(0, cmddata) / 65535.0f) * 65535), retbuf, 0);
return 4;
case MEDIACMD_SETVIDEORECT:
java.awt.Rectangle srcRect = new java.awt.Rectangle(readInt(0, cmddata), readInt(4, cmddata),
readInt(8, cmddata), readInt(12, cmddata));
java.awt.Rectangle destRect = new java.awt.Rectangle(readInt(16, cmddata), readInt(20, cmddata),
readInt(24, cmddata), readInt(28, cmddata));
if (playa != null)
playa.setVideoRectangles(srcRect, destRect, false);
myConn.getGfxCmd().setVideoBounds(srcRect, destRect);
writeInt(0, retbuf, 0);
return 4;
case MEDIACMD_GETVIDEORECT:
java.awt.Dimension vidRect = null;
if (playa != null)
{
vidRect = playa.getVideoDimensions();
writeShort((short)vidRect.width, retbuf, 0);
writeShort((short)vidRect.height, retbuf, 2);
}
else
{
writeInt(0, retbuf, 0);
}
return 4;
case MEDIACMD_SEEK:
long seekTime = ((long)readInt(0, cmddata) << 32) | readInt(4, cmddata);
if (playa != null)
playa.seek(seekTime);
return 0;
default:
return -1;
}
}
}
| |
/**
*
*/
package com.aol.webservice_base.validator;
import java.util.ArrayList;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import com.aol.webservice_base.state.Constants;
import com.aol.webservice_base.state.RequestState;
import com.aol.webservice_base.support.MockHttpServletRequest;
import com.aol.webservice_base.validator.parameter.ParameterValidator;
import com.aol.webservice_base.validator.parameter.types.AbstractParameterType;
import com.aol.webservice_base.validator.parameter.types.ParameterTypeInteger;
import com.aol.webservice_base.validator.support.MockServletRequestFacade;
/**
* @author human
*
*/
/**
 * Unit tests for {@link ParameterValidator}: verifies that required, optional,
 * unexpected, and mutually-precluding request parameters produce the expected
 * status code on the {@link RequestState} attached to the request.
 *
 * NOTE(review): {@code junit.framework.Assert} is deprecated under JUnit 4;
 * migrating the import to {@code org.junit.Assert} (identical signatures) is
 * recommended when the import block can be touched.
 */
public class ParameterValidatorTest {

    MockHttpServletRequest fakeReq;
    protected ParameterValidator validator;
    // Optional (not required) validators for parameters "one" and "two".
    ParameterTypeInteger validNoReq1;
    ParameterTypeInteger validNoReq2;
    // Required validators for parameters "one" and "two".
    ParameterTypeInteger validReq1;
    ParameterTypeInteger validReq2;

    @Before
    public void init() {
        fakeReq = new MockHttpServletRequest();
        fakeReq.setAttribute(Constants.REQUEST_STATE, new RequestState(null));
        validator = new ParameterValidator();
        validReq1 = makeIntParam("one", true);
        validReq2 = makeIntParam("two", true);
        validNoReq1 = makeIntParam("one", false);
        validNoReq2 = makeIntParam("two", false);
    }

    /** Builds an integer parameter validator with the given name and required flag. */
    private static ParameterTypeInteger makeIntParam(String name, boolean required) {
        ParameterTypeInteger param = new ParameterTypeInteger();
        param.setName(name);
        param.setRequired(required);
        return param;
    }

    /** Registers the given child validators on the validator under test. */
    private void useValidators(AbstractParameterType... types) {
        ArrayList<AbstractParameterType> childValidators = new ArrayList<AbstractParameterType>();
        for (AbstractParameterType type : types) {
            childValidators.add(type);
        }
        validator.setValidators(childValidators);
    }

    /** Runs validation against the fake request and returns its resulting state. */
    private RequestState validate() {
        validator.validateRequest(new MockServletRequestFacade(fakeReq));
        return (RequestState) fakeReq.getAttribute(Constants.REQUEST_STATE);
    }

    @Test
    public void findValueFirst() {
        useValidators(validNoReq1, validNoReq2);
        fakeReq.addParameter("one", "1");
        Assert.assertEquals(Constants.SC_OK, validate().getStatusCode());
    }

    @Test
    public void findValueFirstInvalid() {
        // Value 1 violates the min constraint of 2 -> invalid parameter.
        validNoReq1.setMin(2);
        useValidators(validNoReq1);
        fakeReq.addParameter("one", "1");
        Assert.assertEquals(Constants.SC_INVALID_PARAMETER, validate().getStatusCode());
    }

    @Test
    public void findValueFirstInvalidOverride() {
        // An explicit override code must win over the default invalid-parameter code.
        validNoReq1.setMin(2);
        validNoReq1.setInvalidErrorCodeOverride(99);
        useValidators(validNoReq1);
        fakeReq.addParameter("one", "1");
        Assert.assertEquals(99, validate().getStatusCode());
    }

    @Test
    public void findValueSecond() {
        useValidators(validNoReq1, validNoReq2);
        fakeReq.addParameter("two", "2");
        Assert.assertEquals(Constants.SC_OK, validate().getStatusCode());
    }

    @Test
    public void findValueBoth() {
        useValidators(validNoReq1, validNoReq2);
        fakeReq.addParameter("one", "1");
        fakeReq.addParameter("two", "2");
        Assert.assertEquals(Constants.SC_OK, validate().getStatusCode());
    }

    @Test
    public void findValueFirstReqFail() {
        // "two" is required but absent -> must not report success.
        // (Was compared against magic number 200; use the named constant instead.)
        useValidators(validReq1, validReq2);
        fakeReq.addParameter("one", "1");
        Assert.assertFalse("missing required parameter must not yield SC_OK",
                validate().getStatusCode() == Constants.SC_OK);
    }

    @Test
    public void findValueSecondReqFail() {
        // "one" is required but absent -> must not report success.
        useValidators(validReq1, validReq2);
        fakeReq.addParameter("two", "2");
        Assert.assertFalse("missing required parameter must not yield SC_OK",
                validate().getStatusCode() == Constants.SC_OK);
    }

    @Test
    public void findValueBothReq() {
        useValidators(validReq1, validReq2);
        fakeReq.addParameter("one", "1");
        fakeReq.addParameter("two", "2");
        Assert.assertEquals(Constants.SC_OK, validate().getStatusCode());
    }

    @Test
    public void unexpectedParameterAlone() {
        // No validators registered at all: any parameter is unexpected.
        fakeReq.addParameter("notExpected", "1");
        Assert.assertEquals(Constants.SC_INVALID_PARAMETER, validate().getStatusCode());
    }

    @Test
    public void findValueFirstUnexpectedAlso() {
        // A valid parameter plus an unknown one -> still an invalid request.
        useValidators(validNoReq1, validNoReq2);
        fakeReq.addParameter("one", "1");
        fakeReq.addParameter("notExpected", "1");
        Assert.assertEquals(Constants.SC_INVALID_PARAMETER, validate().getStatusCode());
    }

    @Test
    public void bothRequiredOnePrecludesOther() {
        // NOTE(review): despite the method name, this uses the non-required
        // validators; only the precludes relationship is exercised here.
        validNoReq1.setPrecludes("two");
        useValidators(validNoReq1, validNoReq2);
        fakeReq.addParameter("one", "1");
        Assert.assertEquals(Constants.SC_OK, validate().getStatusCode());
    }

    @Test
    public void bothRequiredOnePrecludesOtherBothExist() {
        // Supplying both "one" and "two" when "one" precludes "two" is invalid.
        validNoReq1.setPrecludes("two");
        useValidators(validNoReq1, validNoReq2);
        fakeReq.addParameter("one", "1");
        fakeReq.addParameter("two", "2");
        Assert.assertEquals(Constants.SC_INVALID_PARAMETER, validate().getStatusCode());
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.