gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.threeten.extra;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.time.DateTimeException;
import java.time.Duration;
import java.time.LocalDate;
import java.time.Period;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoUnit;
import java.time.temporal.IsoFields;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAmount;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import com.tngtech.junit.dataprovider.DataProvider;
import com.tngtech.junit.dataprovider.UseDataProvider;
/**
 * Test class for {@link Years}.
 */
public class TestYears {

    //-----------------------------------------------------------------------
    @Test
    public void test_isSerializable() {
        assertTrue(Serializable.class.isAssignableFrom(Years.class));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_deserializationSingleton() throws Exception {
        // ZERO must deserialize to the same cached instance, not a copy.
        Years test = Years.ZERO;
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(test);
        }
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
            assertSame(test, ois.readObject());
        }
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_ZERO() {
        assertSame(Years.of(0), Years.ZERO);
        assertEquals(Years.of(0), Years.ZERO);
        assertEquals(0, Years.ZERO.getAmount());
        assertFalse(Years.ZERO.isNegative());
        assertTrue(Years.ZERO.isZero());
        assertFalse(Years.ZERO.isPositive());
    }

    @Test
    public void test_ONE() {
        assertSame(Years.of(1), Years.ONE);
        assertEquals(Years.of(1), Years.ONE);
        assertEquals(1, Years.ONE.getAmount());
        assertFalse(Years.ONE.isNegative());
        assertFalse(Years.ONE.isZero());
        assertTrue(Years.ONE.isPositive());
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_of() {
        assertEquals(1, Years.of(1).getAmount());
        assertEquals(2, Years.of(2).getAmount());
        assertEquals(Integer.MAX_VALUE, Years.of(Integer.MAX_VALUE).getAmount());
        assertEquals(-1, Years.of(-1).getAmount());
        assertEquals(-2, Years.of(-2).getAmount());
        assertEquals(Integer.MIN_VALUE, Years.of(Integer.MIN_VALUE).getAmount());
    }

    @Test
    public void test_ofMinusOne() {
        assertEquals(-1, Years.of(-1).getAmount());
        assertTrue(Years.of(-1).isNegative());
        assertFalse(Years.of(-1).isZero());
        assertFalse(Years.of(-1).isPositive());
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_from_P0Y() {
        assertEquals(Years.of(0), Years.from(Period.ofYears(0)));
    }

    @Test
    public void test_from_P2Y() {
        assertEquals(Years.of(2), Years.from(Period.ofYears(2)));
    }

    @Test
    public void test_from_P24M() {
        // Whole multiples of 12 months convert exactly to years.
        assertEquals(Years.of(2), Years.from(Period.ofMonths(24)));
    }

    @Test
    public void test_from_yearsAndMonths() {
        // 3 years + 24 months = 5 years.
        assertEquals(Years.of(5), Years.from(Period.of(3, 24, 0)));
    }

    @Test
    public void test_from_decadesAndMonths() {
        // 2 decades - 12 months = 20 years - 1 year = 19 years.
        assertEquals(Years.of(19), Years.from(new MockDecadesMonths(2, -12)));
    }

    @Test
    public void test_from_wrongUnit_remainder() {
        // Months that do not form whole years cannot be converted.
        assertThrows(DateTimeException.class, () -> Years.from(Period.ofMonths(3)));
    }

    @Test
    public void test_from_wrongUnit_noConversion() {
        assertThrows(DateTimeException.class, () -> Years.from(Period.ofDays(2)));
    }

    @Test
    public void test_from_null() {
        assertThrows(NullPointerException.class, () -> Years.from((TemporalAmount) null));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_parse_CharSequence() {
        assertEquals(Years.of(0), Years.parse("P0Y"));
        assertEquals(Years.of(1), Years.parse("P1Y"));
        assertEquals(Years.of(2), Years.parse("P2Y"));
        assertEquals(Years.of(123456789), Years.parse("P123456789Y"));
        // The sign may appear on the amount, on the whole period, or both.
        assertEquals(Years.of(-2), Years.parse("P-2Y"));
        assertEquals(Years.of(-2), Years.parse("-P2Y"));
        assertEquals(Years.of(2), Years.parse("-P-2Y"));
    }

    @DataProvider
    public static Object[][] data_invalid() {
        return new Object[][] {
            {"P3M"},
            {"P3W"},
            {"P3D"},
            {"3"},
            {"-3"},
            {"3Y"},
            {"-3Y"},
            {"P3"},
            {"P-3"},
            {"PY"},
        };
    }

    @ParameterizedTest
    @UseDataProvider("data_invalid")
    public void test_parse_CharSequence_invalid(String str) {
        assertThrows(DateTimeParseException.class, () -> Years.parse(str));
    }

    @Test
    public void test_parse_CharSequence_null() {
        assertThrows(NullPointerException.class, () -> Years.parse((CharSequence) null));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_between() {
        assertEquals(Years.of(2), Years.between(LocalDate.of(2019, 1, 1), LocalDate.of(2021, 1, 1)));
    }

    @Test
    public void test_between_date_null() {
        assertThrows(NullPointerException.class, () -> Years.between(LocalDate.now(), (Temporal) null));
    }

    @Test
    public void test_between_null_date() {
        assertThrows(NullPointerException.class, () -> Years.between((Temporal) null, LocalDate.now()));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_get() {
        assertEquals(6, Years.of(6).get(ChronoUnit.YEARS));
    }

    @Test
    public void test_get_invalidType() {
        assertThrows(DateTimeException.class, () -> Years.of(6).get(IsoFields.QUARTER_YEARS));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_plus_TemporalAmount_Years() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(5), test5.plus(Years.of(0)));
        assertEquals(Years.of(7), test5.plus(Years.of(2)));
        assertEquals(Years.of(3), test5.plus(Years.of(-2)));
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE - 1).plus(Years.of(1)));
        assertEquals(Years.of(Integer.MIN_VALUE), Years.of(Integer.MIN_VALUE + 1).plus(Years.of(-1)));
    }

    @Test
    public void test_plus_TemporalAmount_Period() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(5), test5.plus(Period.ofYears(0)));
        assertEquals(Years.of(7), test5.plus(Period.ofYears(2)));
        assertEquals(Years.of(3), test5.plus(Period.ofYears(-2)));
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE - 1).plus(Period.ofYears(1)));
        assertEquals(Years.of(Integer.MIN_VALUE), Years.of(Integer.MIN_VALUE + 1).plus(Period.ofYears(-1)));
    }

    @Test
    public void test_plus_TemporalAmount_PeriodMonths() {
        assertThrows(DateTimeException.class, () -> Years.of(1).plus(Period.ofMonths(2)));
    }

    @Test
    public void test_plus_TemporalAmount_Duration() {
        assertThrows(DateTimeException.class, () -> Years.of(1).plus(Duration.ofHours(2)));
    }

    @Test
    public void test_plus_TemporalAmount_overflowTooBig() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MAX_VALUE - 1).plus(Years.of(2)));
    }

    @Test
    public void test_plus_TemporalAmount_overflowTooSmall() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE + 1).plus(Years.of(-2)));
    }

    @Test
    public void test_plus_TemporalAmount_null() {
        assertThrows(NullPointerException.class, () -> Years.of(Integer.MIN_VALUE + 1).plus(null));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_plus_int() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(5), test5.plus(0));
        assertEquals(Years.of(7), test5.plus(2));
        assertEquals(Years.of(3), test5.plus(-2));
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE - 1).plus(1));
        assertEquals(Years.of(Integer.MIN_VALUE), Years.of(Integer.MIN_VALUE + 1).plus(-1));
    }

    @Test
    public void test_plus_int_overflowTooBig() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MAX_VALUE - 1).plus(2));
    }

    @Test
    public void test_plus_int_overflowTooSmall() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE + 1).plus(-2));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_minus_TemporalAmount_Years() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(5), test5.minus(Years.of(0)));
        assertEquals(Years.of(3), test5.minus(Years.of(2)));
        assertEquals(Years.of(7), test5.minus(Years.of(-2)));
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE - 1).minus(Years.of(-1)));
        assertEquals(Years.of(Integer.MIN_VALUE), Years.of(Integer.MIN_VALUE + 1).minus(Years.of(1)));
    }

    @Test
    public void test_minus_TemporalAmount_Period() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(5), test5.minus(Period.ofYears(0)));
        assertEquals(Years.of(3), test5.minus(Period.ofYears(2)));
        assertEquals(Years.of(7), test5.minus(Period.ofYears(-2)));
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE - 1).minus(Period.ofYears(-1)));
        assertEquals(Years.of(Integer.MIN_VALUE), Years.of(Integer.MIN_VALUE + 1).minus(Period.ofYears(1)));
    }

    @Test
    public void test_minus_TemporalAmount_PeriodMonths() {
        assertThrows(DateTimeException.class, () -> Years.of(1).minus(Period.ofMonths(2)));
    }

    @Test
    public void test_minus_TemporalAmount_Duration() {
        assertThrows(DateTimeException.class, () -> Years.of(1).minus(Duration.ofHours(2)));
    }

    @Test
    public void test_minus_TemporalAmount_overflowTooBig() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MAX_VALUE - 1).minus(Years.of(-2)));
    }

    @Test
    public void test_minus_TemporalAmount_overflowTooSmall() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE + 1).minus(Years.of(2)));
    }

    @Test
    public void test_minus_TemporalAmount_null() {
        assertThrows(NullPointerException.class, () -> Years.of(Integer.MIN_VALUE + 1).minus(null));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_minus_int() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(5), test5.minus(0));
        assertEquals(Years.of(3), test5.minus(2));
        assertEquals(Years.of(7), test5.minus(-2));
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE - 1).minus(-1));
        assertEquals(Years.of(Integer.MIN_VALUE), Years.of(Integer.MIN_VALUE + 1).minus(1));
    }

    @Test
    public void test_minus_int_overflowTooBig() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MAX_VALUE - 1).minus(-2));
    }

    @Test
    public void test_minus_int_overflowTooSmall() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE + 1).minus(2));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_multipliedBy() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(0), test5.multipliedBy(0));
        assertEquals(Years.of(5), test5.multipliedBy(1));
        assertEquals(Years.of(10), test5.multipliedBy(2));
        assertEquals(Years.of(15), test5.multipliedBy(3));
        assertEquals(Years.of(-15), test5.multipliedBy(-3));
    }

    @Test
    public void test_multipliedBy_negate() {
        Years test5 = Years.of(5);
        assertEquals(Years.of(-15), test5.multipliedBy(-3));
    }

    @Test
    public void test_multipliedBy_overflowTooBig() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MAX_VALUE / 2 + 1).multipliedBy(2));
    }

    @Test
    public void test_multipliedBy_overflowTooSmall() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE / 2 - 1).multipliedBy(2));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_dividedBy() {
        // Division truncates toward zero.
        Years test12 = Years.of(12);
        assertEquals(Years.of(12), test12.dividedBy(1));
        assertEquals(Years.of(6), test12.dividedBy(2));
        assertEquals(Years.of(4), test12.dividedBy(3));
        assertEquals(Years.of(3), test12.dividedBy(4));
        assertEquals(Years.of(2), test12.dividedBy(5));
        assertEquals(Years.of(2), test12.dividedBy(6));
        assertEquals(Years.of(-4), test12.dividedBy(-3));
    }

    @Test
    public void test_dividedBy_negate() {
        Years test12 = Years.of(12);
        assertEquals(Years.of(-4), test12.dividedBy(-3));
    }

    @Test
    public void test_dividedBy_divideByZero() {
        assertThrows(ArithmeticException.class, () -> Years.of(1).dividedBy(0));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_negated() {
        assertEquals(Years.of(0), Years.of(0).negated());
        assertEquals(Years.of(-12), Years.of(12).negated());
        assertEquals(Years.of(12), Years.of(-12).negated());
        assertEquals(Years.of(-Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE).negated());
    }

    @Test
    public void test_negated_overflow() {
        // -Integer.MIN_VALUE is not representable as an int.
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE).negated());
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_abs() {
        assertEquals(Years.of(0), Years.of(0).abs());
        assertEquals(Years.of(12), Years.of(12).abs());
        assertEquals(Years.of(12), Years.of(-12).abs());
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(Integer.MAX_VALUE).abs());
        assertEquals(Years.of(Integer.MAX_VALUE), Years.of(-Integer.MAX_VALUE).abs());
    }

    @Test
    public void test_abs_overflow() {
        assertThrows(ArithmeticException.class, () -> Years.of(Integer.MIN_VALUE).abs());
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_addTo() {
        assertEquals(LocalDate.of(2019, 1, 10), Years.of(0).addTo(LocalDate.of(2019, 1, 10)));
        assertEquals(LocalDate.of(2024, 1, 10), Years.of(5).addTo(LocalDate.of(2019, 1, 10)));
    }

    @Test
    public void test_subtractFrom() {
        assertEquals(LocalDate.of(2019, 1, 10), Years.of(0).subtractFrom(LocalDate.of(2019, 1, 10)));
        assertEquals(LocalDate.of(2014, 1, 10), Years.of(5).subtractFrom(LocalDate.of(2019, 1, 10)));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_toPeriod() {
        for (int i = -20; i < 20; i++) {
            assertEquals(Period.ofYears(i), Years.of(i).toPeriod());
        }
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_compareTo() {
        Years test5 = Years.of(5);
        Years test6 = Years.of(6);
        assertEquals(0, test5.compareTo(test5));
        assertEquals(-1, test5.compareTo(test6));
        assertEquals(1, test6.compareTo(test5));
    }

    @Test
    public void test_compareTo_null() {
        Years test5 = Years.of(5);
        assertThrows(NullPointerException.class, () -> test5.compareTo(null));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_equals() {
        Years test5 = Years.of(5);
        Years test6 = Years.of(6);
        assertTrue(test5.equals(test5));
        assertFalse(test5.equals(test6));
        assertFalse(test6.equals(test5));
    }

    @Test
    public void test_equals_null() {
        Years test5 = Years.of(5);
        assertFalse(test5.equals(null));
    }

    @Test
    public void test_equals_otherClass() {
        Years test5 = Years.of(5);
        Object obj = "";
        assertFalse(test5.equals(obj));
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_hashCode() {
        Years test5 = Years.of(5);
        Years test6 = Years.of(6);
        assertTrue(test5.hashCode() == test5.hashCode());
        // Not guaranteed by the hashCode contract in general, but this
        // implementation hashes distinct amounts to distinct values.
        assertFalse(test5.hashCode() == test6.hashCode());
    }

    //-----------------------------------------------------------------------
    @Test
    public void test_toString() {
        Years test5 = Years.of(5);
        assertEquals("P5Y", test5.toString());
        Years testM1 = Years.of(-1);
        assertEquals("P-1Y", testM1.toString());
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search.type;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.ReduceSearchPhaseException;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.action.SearchServiceListener;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.ShardSearchTransportRequest;
import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Transport action implementing the DFS_QUERY_THEN_FETCH search type: a
 * three-phase distributed search where (1) a "dfs" phase collects distributed
 * term frequencies from every shard, (2) a query phase runs on each shard
 * using the aggregated statistics, and (3) a fetch phase loads only the
 * documents selected after reducing the query results.
 */
public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeAction {

    @Inject
    public TransportSearchDfsQueryThenFetchAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
            SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController, ActionFilters actionFilters) {
        super(settings, threadPool, clusterService, searchService, searchPhaseController, actionFilters);
    }

    @Override
    protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        new AsyncAction(searchRequest, listener).start();
    }

    /**
     * Drives one search request through the dfs, query and fetch phases.
     * The base class runs the first ("dfs") phase; this class implements the
     * two follow-up phases. Per-shard results are stored in AtomicArrays
     * indexed by shard index, and AtomicInteger countdowns decide when each
     * phase is complete.
     */
    private class AsyncAction extends BaseAsyncAction<DfsSearchResult> {

        // Per-shard query-phase results, indexed like firstResults.
        final AtomicArray<QuerySearchResult> queryResults;
        // Per-shard fetch-phase results, indexed like firstResults.
        final AtomicArray<FetchSearchResult> fetchResults;
        // Per-shard doc ids selected by the reduce step, to be fetched.
        final AtomicArray<IntArrayList> docIdsToLoad;

        private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
            super(request, listener);
            queryResults = new AtomicArray<>(firstResults.length());
            fetchResults = new AtomicArray<>(firstResults.length());
            docIdsToLoad = new AtomicArray<>(firstResults.length());
        }

        @Override
        protected String firstPhaseName() {
            return "dfs";
        }

        @Override
        protected void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request, SearchServiceListener<DfsSearchResult> listener) {
            searchService.sendExecuteDfs(node, request, listener);
        }

        /**
         * Called when all dfs results are in: aggregates the term statistics
         * and issues one query request per successful dfs shard.
         */
        @Override
        protected void moveToSecondPhase() {
            final AggregatedDfs dfs = searchPhaseController.aggregateDfs(firstResults);
            final AtomicInteger counter = new AtomicInteger(firstResults.asList().size());
            for (final AtomicArray.Entry<DfsSearchResult> entry : firstResults.asList()) {
                DfsSearchResult dfsResult = entry.value;
                DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId());
                QuerySearchRequest querySearchRequest = new QuerySearchRequest(request, dfsResult.id(), dfs);
                executeQuery(entry.index, dfsResult, counter, querySearchRequest, node);
            }
        }

        // Sends the query request for one shard; the last shard to complete
        // (success or failure) triggers the fetch phase.
        void executeQuery(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter, final QuerySearchRequest querySearchRequest, DiscoveryNode node) {
            searchService.sendExecuteQuery(node, querySearchRequest, new SearchServiceListener<QuerySearchResult>() {
                @Override
                public void onResult(QuerySearchResult result) {
                    result.shardTarget(dfsResult.shardTarget());
                    queryResults.set(shardIndex, result);
                    if (counter.decrementAndGet() == 0) {
                        executeFetchPhase();
                    }
                }

                @Override
                public void onFailure(Throwable t) {
                    onQueryFailure(t, querySearchRequest, shardIndex, dfsResult, counter);
                }
            });
        }

        // Records a query-phase shard failure; if this was the last pending
        // shard, either fails the whole request (no shard succeeded) or
        // proceeds to fetch with the partial results.
        void onQueryFailure(Throwable t, QuerySearchRequest querySearchRequest, int shardIndex, DfsSearchResult dfsResult, AtomicInteger counter) {
            if (logger.isDebugEnabled()) {
                logger.debug("[{}] Failed to execute query phase", t, querySearchRequest.id());
            }
            this.addShardFailure(shardIndex, dfsResult.shardTarget(), t);
            successfulOps.decrementAndGet();
            if (counter.decrementAndGet() == 0) {
                if (successfulOps.get() == 0) {
                    listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", buildShardFailures()));
                } else {
                    executeFetchPhase();
                }
            }
        }

        void executeFetchPhase() {
            try {
                innerExecuteFetchPhase();
            } catch (Throwable e) {
                // NOTE(review): the phase name here is "query" although the
                // failure occurred while starting the fetch phase — looks
                // like a copy-paste of the query-phase handler; confirm
                // against other TransportSearch*Action classes.
                listener.onFailure(new ReduceSearchPhaseException("query", "", e, buildShardFailures()));
            }
        }

        /**
         * Reduce step: sorts/merges the per-shard query results, computes
         * which doc ids to fetch from which shard, and issues the fetch
         * requests. Finishes immediately when nothing needs fetching.
         */
        void innerExecuteFetchPhase() throws Exception {
            boolean useScroll = !useSlowScroll && request.scroll() != null;
            sortedShardList = searchPhaseController.sortDocs(useScroll, queryResults);
            searchPhaseController.fillDocIdsToLoad(docIdsToLoad, sortedShardList);
            if (docIdsToLoad.asList().isEmpty()) {
                finishHim();
                return;
            }
            final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(
                request, sortedShardList, firstResults.length()
            );
            final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
            for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
                QuerySearchResult queryResult = queryResults.get(entry.index);
                DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
                ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult, entry, lastEmittedDocPerShard);
                executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
            }
        }

        // Sends the fetch request for one shard; the last shard to complete
        // triggers the final merge.
        void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, final AtomicInteger counter, final ShardFetchSearchRequest fetchSearchRequest, DiscoveryNode node) {
            searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() {
                @Override
                public void onResult(FetchSearchResult result) {
                    result.shardTarget(shardTarget);
                    fetchResults.set(shardIndex, result);
                    if (counter.decrementAndGet() == 0) {
                        finishHim();
                    }
                }

                @Override
                public void onFailure(Throwable t) {
                    onFetchFailure(t, fetchSearchRequest, shardIndex, shardTarget, counter);
                }
            });
        }

        // Records a fetch-phase shard failure; unlike the query phase, the
        // request is still answered (partially) even if all fetches fail.
        void onFetchFailure(Throwable t, ShardFetchSearchRequest fetchSearchRequest, int shardIndex, SearchShardTarget shardTarget, AtomicInteger counter) {
            if (logger.isDebugEnabled()) {
                logger.debug("[{}] Failed to execute fetch phase", t, fetchSearchRequest.id());
            }
            this.addShardFailure(shardIndex, shardTarget, t);
            successfulOps.decrementAndGet();
            if (counter.decrementAndGet() == 0) {
                finishHim();
            }
        }

        /**
         * Final merge of query and fetch results into a SearchResponse,
         * executed on the SEARCH thread pool. Search contexts that are no
         * longer needed are released in all paths (success, merge failure,
         * and thread-pool rejection).
         */
        private void finishHim() {
            try {
                threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
                            String scrollId = null;
                            if (request.scroll() != null) {
                                scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
                            }
                            listener.onResponse(new SearchResponse(internalResponse, scrollId, expectedSuccessfulOps, successfulOps.get(), buildTookInMillis(), buildShardFailures()));
                        } catch (Throwable e) {
                            ReduceSearchPhaseException failure = new ReduceSearchPhaseException("merge", "", e, buildShardFailures());
                            if (logger.isDebugEnabled()) {
                                logger.debug("failed to reduce search", failure);
                            }
                            listener.onFailure(failure);
                        } finally {
                            releaseIrrelevantSearchContexts(queryResults, docIdsToLoad);
                        }
                    }
                });
            } catch (EsRejectedExecutionException ex) {
                // The SEARCH pool rejected the merge task: still release
                // contexts, then fail the request with the rejection.
                try {
                    releaseIrrelevantSearchContexts(queryResults, docIdsToLoad);
                } finally {
                    listener.onFailure(ex);
                }
            }
        }
    }
}
| |
package com.suscipio_solutions.consecro_mud.core;
import java.util.Enumeration;
import java.util.List;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CharStats;
import com.suscipio_solutions.consecro_mud.Items.interfaces.RawMaterial;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Wearable;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.AbilityMapper;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.AbilityParameters;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.AreaGenerationLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.AutoTitlesLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMFlagLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMMiscUtils;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CatalogLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ChannelsLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CharCreationLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ClanManager;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ColorLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CombatLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.CommonCommands;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.DatabaseEngine;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.DiceLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.EnglishParsing;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ExpLevelLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ExpertiseLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.FactionManager;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.GenericBuilder;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.GenericEditor;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.HelpLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.I3Interface;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ItemBalanceLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.JournalsLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.LanguageLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.LegalLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ListingLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.MaskingLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.MaterialLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.MoneyLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.PlayerLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.PollManager;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ProtocolLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.QuestManager;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.SMTPLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.SessionsList;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ShoppingLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.SlaveryLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.SocialsList;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.StatisticsLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.TechLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.TelnetFilter;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.TextEncoders;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ThreadEngine;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.TimeManager;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.TrackingLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.WebMacroLibrary;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.WorldMap;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.XMLLibrary;
import com.suscipio_solutions.consecro_mud.core.CMSecurity.DbgFlag;
import com.suscipio_solutions.consecro_mud.core.collections.SVector;
import com.suscipio_solutions.consecro_mud.core.interfaces.MudHost;
import com.suscipio_solutions.consecro_mud.core.threads.CMFactoryThread;
public class CMLib
{
// MudHost instances registered with this VM.  NOTE(review): not referenced
// anywhere in the visible portion of this file -- presumably maintained by
// code outside this chunk; confirm before removing.
private static final SVector<MudHost> mudThreads=new SVector<MudHost>();
// One CMLib instance per thread group, keyed by the first character of the
// thread group's name (see the constructor).
private static final CMLib[] libs=new CMLib[256];
/**
 * Constructs a new CMLib object for the current thread group.
 * Registers this instance in the static registry, keyed by the first
 * character of the current thread group's name; only the first instance
 * built for a given key is retained.
 */
public CMLib()
{
    super();
    // NOTE(review): libs has 256 slots, so a thread-group name whose first
    // character is above '\u00ff' would cause ArrayIndexOutOfBoundsException;
    // presumably group names are constrained elsewhere -- confirm.
    final char c=Thread.currentThread().getThreadGroup().getName().charAt(0);
    if(libs[c]==null) libs[c]=this;
}
/**
 * Returns the CMLib instance assigned to the current thread group, or null
 * if none has been constructed for it yet.  (The original comment said
 * "log object", but this method returns the per-thread-group CMLib.)
 * @return the CMLib instance, or null
 */
private static final CMLib l()
{
    return libs[Thread.currentThread().getThreadGroup().getName().charAt(0)];
}
/**
 * Returns a CMLib object for the current thread group.  If one is not
 * assigned, it will be instantiated, thus guaranteeing that a CMLib object
 * always returns from this method.
 * @return a CMLib object
 */
public static final CMLib initialize()
{
    final CMLib existing = l();
    if (existing != null)
        return existing;
    // Constructing a CMLib registers it for this thread group as a side
    // effect (see the constructor).
    return new CMLib();
}
// Registered library implementations, indexed by Library ordinal.
private final CMLibrary[] libraries=new CMLibrary[Library.values().length];
// Whether each Library slot has been registered, indexed by Library ordinal.
private final boolean[] registered=new boolean[Library.values().length];
/**
 * Collection of all the different official CoffeeMud libraries.
 * Each entry names the interface that its registered implementation must
 * honor.  Ordinals are used to index the libraries/registered arrays, so
 * do not reorder the constants casually.
 * @author BZ
 */
public static enum Library
{
    DATABASE(DatabaseEngine.class),
    THREADS(ThreadEngine.class),
    INTERMUD(I3Interface.class),
    WEBMACS(WebMacroLibrary.class),
    LISTER(ListingLibrary.class),
    MONEY(MoneyLibrary.class),
    SHOPS(ShoppingLibrary.class),
    COMBAT(CombatLibrary.class),
    HELP(HelpLibrary.class),
    TRACKING(TrackingLibrary.class),
    MASKING(MaskingLibrary.class),
    CHANNELS(ChannelsLibrary.class),
    COMMANDS(CommonCommands.class),
    ENGLISH(EnglishParsing.class),
    SLAVERY(SlaveryLibrary.class),
    JOURNALS(JournalsLibrary.class),
    FLAGS(CMFlagLibrary.class),
    OBJBUILDERS(GenericBuilder.class),
    SESSIONS(SessionsList.class),
    TELNET(TelnetFilter.class),
    XML(XMLLibrary.class),
    SOCIALS(SocialsList.class),
    UTENSILS(CMMiscUtils.class),
    STATS(StatisticsLibrary.class),
    MAP(WorldMap.class),
    QUEST(QuestManager.class),
    ABLEMAP(AbilityMapper.class),
    ENCODER(TextEncoders.class),
    SMTP(SMTPLibrary.class),
    DICE(DiceLibrary.class),
    FACTIONS(FactionManager.class),
    CLANS(ClanManager.class),
    POLLS(PollManager.class),
    TIME(TimeManager.class),
    COLOR(ColorLibrary.class),
    LOGIN(CharCreationLibrary.class),
    TIMS(ItemBalanceLibrary.class),
    LEVELS(ExpLevelLibrary.class),
    EXPERTISES(ExpertiseLibrary.class),
    MATERIALS(MaterialLibrary.class),
    LEGAL(LegalLibrary.class),
    LANGUAGE(LanguageLibrary.class),
    CATALOG(CatalogLibrary.class),
    PLAYERS(PlayerLibrary.class),
    TITLES(AutoTitlesLibrary.class),
    ABLEPARMS(AbilityParameters.class),
    GENEDITOR(GenericEditor.class),
    AREAGEN(AreaGenerationLibrary.class),
    TECH(TechLibrary.class),
    PROTOCOL(ProtocolLibrary.class)
    ;
    /** The library interface that implementations of this entry must extend. */
    public final Class<?> ancestor;
    private Library(Class<?> ancestorC1)
    {
        this.ancestor=ancestorC1;
    }
}
/**
 * Returns the shared math utility class instance.
 * @see com.suscipio_solutions.consecro_mud.core.CMath
 * @return the CMath instance
 */
public static final CMath math()
{
	return CMath.instance();
}
/**
 * Returns the shared string parameter utility class instance.
 * @see com.suscipio_solutions.consecro_mud.core.CMParms
 * @return the CMParms instance
 */
public static final CMParms parms()
{
	return CMParms.instance();
}
/**
 * Returns the shared string utility class instance.
 * @see com.suscipio_solutions.consecro_mud.core.CMStrings
 * @return the CMStrings instance
 */
public static final CMStrings strings()
{
	return CMStrings.instance();
}
/**
 * Returns the shared class loader instance.
 * @see com.suscipio_solutions.consecro_mud.core.CMClass
 * @return the CMClass instance
 */
public static final CMClass classes()
{
	return CMClass.instance();
}
/**
 * Returns the shared security class instance.
 * @see com.suscipio_solutions.consecro_mud.core.CMSecurity
 * @return the CMSecurity instance
 */
public static final CMSecurity security()
{
	return CMSecurity.instance();
}
/**
 * Returns the shared directions class instance.
 * @see com.suscipio_solutions.consecro_mud.core.Directions
 * @return the Directions instance
 */
public static final Directions directions()
{
	return Directions.instance();
}
/**
 * Returns the shared logger instance.
 * @see com.suscipio_solutions.consecro_mud.core.Log
 * @return the Log instance
 */
public static final Log log()
{
	return Log.instance();
}
/**
 * Returns the shared resources storage class instance.
 * @see com.suscipio_solutions.consecro_mud.core.Resources
 * @return the Resources instance
 */
public static final Resources resources()
{
	return Resources.instance();
}
/**
 * Returns the shared properties ini file class instance.
 * @see com.suscipio_solutions.consecro_mud.core.CMProps
 * @return the CMProps instance
 */
public static final CMProps props()
{
	return CMProps.instance();
}
/**
 * Returns a list of all the registered mud hosts running.
 * NOTE(review): this returns the internal mutable list itself, not a
 * copy; additions/removals by callers directly alter the registry —
 * presumably how hosts register themselves, so do not wrap/copy here.
 * @see com.suscipio_solutions.consecro_mud.application.MUD
 * @return list of the registered mud hosts running.
 */
public static final List<MudHost> hosts()
{
	return mudThreads;
}
/**
 * Returns the mud host running on the given port, or null if none is
 * found.  A port of zero or less matches the first registered host.
 * @see com.suscipio_solutions.consecro_mud.core.interfaces.MudHost
 * @param port port to search for, or &lt;=0 for the first host
 * @return the mud host running on that port, or null
 */
public static final MudHost mud(int port)
{
	if(mudThreads.size()==0)
		return null;
	if(port<=0)
		return mudThreads.firstElement();
	for(int i=0;i<mudThreads.size();i++)
	{
		final MudHost host=mudThreads.elementAt(i);
		if(host.getPort()==port)
			return host;
	}
	return null;
}
/**
 * Returns an enumeration of the basic code libraries registered
 * with this thread group so far.
 * @see com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMLibrary
 * @return an enumeration of the registered code libraries
 */
public static final Enumeration<CMLibrary> libraries()
{
	final Vector<CMLibrary> found=new Vector<CMLibrary>();
	final CMLib lib=l();
	for(final Library entry : Library.values())
	{
		final CMLibrary registeredLib=lib.libraries[entry.ordinal()];
		if(registeredLib!=null)
			found.add(registeredLib);
	}
	return found.elements();
}
/**
 * Fetches the calling thread group's registered library occupying
 * the given Library slot.
 * @param code the Library Enum slot to look up
 * @return the registered CMLibrary in that slot, or null
 */
private static final CMLibrary lib(final Library code)
{
	return l().libraries[code.ordinal()];
}
/** @return this thread group's {@link DatabaseEngine} (database access) library. */
public static final DatabaseEngine database()
{
	return (DatabaseEngine)lib(Library.DATABASE);
}
/** @return this thread group's {@link ThreadEngine} (thread access) library. */
public static final ThreadEngine threads()
{
	return (ThreadEngine)lib(Library.THREADS);
}
/** @return this thread group's {@link I3Interface} (Intermud3 access) library. */
public static final I3Interface intermud()
{
	return (I3Interface)lib(Library.INTERMUD);
}
/** @return this thread group's {@link ItemBalanceLibrary} (item balancing) library. */
public static final ItemBalanceLibrary itemBuilder()
{
	return (ItemBalanceLibrary)lib(Library.TIMS);
}
/** @return this thread group's {@link WebMacroLibrary} (web macro filtering) library. */
public static final WebMacroLibrary webMacroFilter()
{
	return (WebMacroLibrary)lib(Library.WEBMACS);
}
/** @return this thread group's {@link ListingLibrary} (string/item/object listing) library. */
public static final ListingLibrary lister()
{
	return (ListingLibrary)lib(Library.LISTER);
}
/** @return this thread group's {@link MoneyLibrary} (money handling) library. */
public static final MoneyLibrary beanCounter()
{
	return (MoneyLibrary)lib(Library.MONEY);
}
/** @return this thread group's {@link ShoppingLibrary} (store front/shopping) library. */
public static final ShoppingLibrary coffeeShops()
{
	return (ShoppingLibrary)lib(Library.SHOPS);
}
/** @return this thread group's {@link MaterialLibrary} (raw resource/material item) library. */
public static final MaterialLibrary materials()
{
	return (MaterialLibrary)lib(Library.MATERIALS);
}
/** @return this thread group's {@link CombatLibrary} (combat) library. */
public static final CombatLibrary combat()
{
	return (CombatLibrary)lib(Library.COMBAT);
}
/** @return this thread group's {@link HelpLibrary} (help file) library. */
public static final HelpLibrary help()
{
	return (HelpLibrary)lib(Library.HELP);
}
/** @return this thread group's {@link TrackingLibrary} (mob tracking/movement) library. */
public static final TrackingLibrary tracking()
{
	return (TrackingLibrary)lib(Library.TRACKING);
}
/** @return this thread group's {@link LegalLibrary} (legal and property) library. */
public static final LegalLibrary law()
{
	return (LegalLibrary)lib(Library.LEGAL);
}
/** @return this thread group's {@link MaskingLibrary} (object masking/filtering) library. */
public static final MaskingLibrary masking()
{
	return (MaskingLibrary)lib(Library.MASKING);
}
/** @return this thread group's {@link ChannelsLibrary} (chat channel) library. */
public static final ChannelsLibrary channels()
{
	return (ChannelsLibrary)lib(Library.CHANNELS);
}
/** @return this thread group's {@link CommonCommands} (command shortcut and common event handler) library. */
public static final CommonCommands commands()
{
	return (CommonCommands)lib(Library.COMMANDS);
}
/** @return this thread group's {@link EnglishParsing} (english grammar and input utility) library. */
public static final EnglishParsing english()
{
	return (EnglishParsing)lib(Library.ENGLISH);
}
/** @return this thread group's {@link SlaveryLibrary} (slavery and geas) library. */
public static final SlaveryLibrary slavery()
{
	return (SlaveryLibrary)lib(Library.SLAVERY);
}
/** @return this thread group's {@link JournalsLibrary} (message board and journal) library. */
public static final JournalsLibrary journals()
{
	return (JournalsLibrary)lib(Library.JOURNALS);
}
/** @return this thread group's {@link TelnetFilter} (telnet input/output filtering) library. */
public static final TelnetFilter coffeeFilter()
{
	return (TelnetFilter)lib(Library.TELNET);
}
/** @return this thread group's {@link GenericBuilder} (GenObject low level construction) library. */
public static final GenericBuilder coffeeMaker()
{
	return (GenericBuilder)lib(Library.OBJBUILDERS);
}
/** @return this thread group's {@link SessionsList} (telnet session management) library. */
public static final SessionsList sessions()
{
	return (SessionsList)lib(Library.SESSIONS);
}
/** @return this thread group's {@link CMFlagLibrary} (flag checking shortcut) library. */
public static final CMFlagLibrary flags()
{
	return (CMFlagLibrary)lib(Library.FLAGS);
}
/** @return this thread group's {@link XMLLibrary} (xml parsing) library. */
public static final XMLLibrary xml()
{
	return (XMLLibrary)lib(Library.XML);
}
/** @return this thread group's {@link SocialsList} (social command collection/management) library. */
public static final SocialsList socials()
{
	return (SocialsList)lib(Library.SOCIALS);
}
/** @return this thread group's {@link CMMiscUtils} (random world utilities) library. */
public static final CMMiscUtils utensils()
{
	return (CMMiscUtils)lib(Library.UTENSILS);
}
/** @return this thread group's {@link StatisticsLibrary} (statistics) library. */
public static final StatisticsLibrary coffeeTables()
{
	return (StatisticsLibrary)lib(Library.STATS);
}
/** @return this thread group's {@link ExpLevelLibrary} (leveling and experience gaining) library. */
public static final ExpLevelLibrary leveler()
{
	return (ExpLevelLibrary)lib(Library.LEVELS);
}
/** @return this thread group's {@link WorldMap} (areas and rooms access/management) library. */
public static final WorldMap map()
{
	return (WorldMap)lib(Library.MAP);
}
/** @return this thread group's {@link QuestManager} (quest collection/management) library. */
public static final QuestManager quests()
{
	return (QuestManager)lib(Library.QUEST);
}
/** @return this thread group's {@link AreaGenerationLibrary} (random map/object generation) library. */
public static final AreaGenerationLibrary percolator()
{
	return (AreaGenerationLibrary)lib(Library.AREAGEN);
}
/** @return this thread group's {@link AbilityMapper} (abilities collection/management) library. */
public static final AbilityMapper ableMapper()
{
	return (AbilityMapper)lib(Library.ABLEMAP);
}
/** @return this thread group's {@link TextEncoders} (string hashing and compression) library. */
public static final TextEncoders encoder()
{
	return (TextEncoders)lib(Library.ENCODER);
}
/** @return this thread group's {@link SMTPLibrary} (email sending) library. */
public static final SMTPLibrary smtp()
{
	return (SMTPLibrary)lib(Library.SMTP);
}
/** @return this thread group's {@link LanguageLibrary} (localization) library. */
public static final LanguageLibrary lang()
{
	return (LanguageLibrary)lib(Library.LANGUAGE);
}
/** @return this thread group's {@link DiceLibrary} (random dice roll) library. */
public static final DiceLibrary dice()
{
	return (DiceLibrary)lib(Library.DICE);
}
/** @return this thread group's {@link FactionManager} (faction collection/management) library. */
public static final FactionManager factions()
{
	return (FactionManager)lib(Library.FACTIONS);
}
/** @return this thread group's {@link ClanManager} (clan collection/management) library. */
public static final ClanManager clans()
{
	return (ClanManager)lib(Library.CLANS);
}
/** @return this thread group's {@link PollManager} (player poll collection/management) library. */
public static final PollManager polls()
{
	return (PollManager)lib(Library.POLLS);
}
/** @return this thread group's {@link TimeManager} (real time utility) library. */
public static final TimeManager time()
{
	return (TimeManager)lib(Library.TIME);
}
/** @return this thread group's {@link ColorLibrary} (ansi color) library. */
public static final ColorLibrary color()
{
	return (ColorLibrary)lib(Library.COLOR);
}
/** @return this thread group's {@link CharCreationLibrary} (login and char creation) library. */
public static final CharCreationLibrary login()
{
	return (CharCreationLibrary)lib(Library.LOGIN);
}
/** @return this thread group's {@link ExpertiseLibrary} (expertise collection/management) library. */
public static final ExpertiseLibrary expertises()
{
	return (ExpertiseLibrary)lib(Library.EXPERTISES);
}
/** @return this thread group's {@link PlayerLibrary} (player and account collection/management) library. */
public static final PlayerLibrary players()
{
	return (PlayerLibrary)lib(Library.PLAYERS);
}
/** @return this thread group's {@link CatalogLibrary} (cataloged mob/item collection/management) library. */
public static final CatalogLibrary catalog()
{
	return (CatalogLibrary)lib(Library.CATALOG);
}
/** @return this thread group's {@link AutoTitlesLibrary} (player titles collection/management) library. */
public static final AutoTitlesLibrary titles()
{
	return (AutoTitlesLibrary)lib(Library.TITLES);
}
/** @return this thread group's {@link AbilityParameters} (recipe maker and skill parameter) library. */
public static final AbilityParameters ableParms()
{
	return (AbilityParameters)lib(Library.ABLEPARMS);
}
/** @return this thread group's {@link GenericEditor} (generic object builder/editor and prompting) library. */
public static final GenericEditor genEd()
{
	return (GenericEditor)lib(Library.GENEDITOR);
}
/** @return this thread group's {@link TechLibrary} (tech and electricity) library. */
public static final TechLibrary tech()
{
	return (TechLibrary)lib(Library.TECH);
}
/** @return this thread group's {@link ProtocolLibrary} (mud protocol mxp/msdp/etc) library. */
public static final ProtocolLibrary protocol()
{
	return (ProtocolLibrary)lib(Library.PROTOCOL);
}
/**
 * Return the Library Enum entry whose ancestor class/interface the
 * given library object descends from.
 * @see CMLib.Library
 * @param O the library object to identify
 * @return the matching Library Enum entry, or null if none matches
 */
public static final Library convertToLibraryCode(final Object O)
{
	if(O!=null)
	{
		for(final Library lbry : Library.values())
		{
			if(CMClass.checkAncestry(O.getClass(),lbry.ancestor))
				return lbry;
		}
	}
	return null;
}
/**
 * Register the given library object as belonging to the thread
 * group that called this method.  Libraries not private to this host
 * are shared through the main host's CMLib instance: the first such
 * registration populates the main host's slot, and later ones reuse it.
 * @see com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMLibrary
 * @param O the library to register
 */
public static final void registerLibrary(final CMLibrary O)
{
	final Library lbry=convertToLibraryCode(O);
	if(lbry!=null)
	{
		final int code=lbry.ordinal();
		// ensure this thread group has a CMLib instance to register into
		if(l()==null) CMLib.initialize();
		// shared (non-private) library being registered by a non-main host
		if((!CMProps.isPrivateToMe(lbry.toString())
		&&(libs[MudHost.MAIN_HOST]!=l())))
		{
			// first registration fills the main host's slot...
			if(libs[MudHost.MAIN_HOST].libraries[code]==null)
				libs[MudHost.MAIN_HOST].libraries[code]=O;
			else
				// ...subsequent hosts point at the main host's copy instead
				l().libraries[code]=libs[MudHost.MAIN_HOST].libraries[code];
		}
		else
			l().libraries[code]=O;
		// mark the slot registered either way
		l().registered[code]=true;
	}
}
/**
 * Do your best to shut down the given thread, trying for at most sleepTime ms, and
 * making as many number attempts as given.  This is strictly best-effort: all
 * exceptions raised during the kill attempt are deliberately swallowed, and a
 * thread that survives is only logged (with its stack trace), not forced dead.
 * @param t the thread to kill
 * @param sleepTime ms to wait for the thread to die between attempts
 * @param attempts the number of attempts to make (ignored for CMFactoryThread; see below)
 */
public static final void killThread(final Thread t, final long sleepTime, final int attempts)
{
	if(t==null) return;
	// refuse to kill the calling thread itself; let the caller die instead
	if(t==Thread.currentThread())
		throw new java.lang.ThreadDeath();
	try
	{
		boolean stillAlive=false;
		if(t instanceof CMFactoryThread)
		{
			// a pooled factory thread is "dead" when its current runnable
			// changes; poll once per ms for up to sleepTime ms (the
			// attempts parameter is not used on this branch)
			final Runnable r=CMLib.threads().findRunnableByThread(t);
			t.interrupt();
			for(int i=0;i<sleepTime;i++)
			{
				Thread.sleep(1);
				if(CMLib.threads().findRunnableByThread(t)!=r)
					return;
			}
			stillAlive=(CMLib.threads().findRunnableByThread(t)==r);
		}
		else
		{
			// ordinary thread: interrupt, then re-interrupt up to
			// 'attempts' times while it remains alive
			t.interrupt();
			try{Thread.sleep(sleepTime);}catch(final Exception e){}
			int att=0;
			while((att++<attempts)&&t.isAlive())
			{
				try { Thread.sleep(sleepTime); }catch(final Exception e){}
				try { t.interrupt(); }catch(final Exception e){}
			}
			stillAlive=t.isAlive();
		}
		try
		{
			// couldn't kill it: log the survivor's stack for diagnosis
			if(stillAlive)
			{
				final java.lang.StackTraceElement[] s=t.getStackTrace();
				final StringBuffer dump = new StringBuffer("Unable to kill thread "+t.getName()+". It is still running.\n\r");
				for (final StackTraceElement element : s)
					dump.append("\n   "+element.getClassName()+": "+element.getMethodName()+"("+element.getFileName()+": "+element.getLineNumber()+")");
				Log.errOut(dump.toString());
			}
		}
		catch(final java.lang.ThreadDeath td) {}
	}
	// best-effort by design: any failure during the kill is ignored
	catch(final Throwable th){}
}
/**
 * Sleep for the given number of milliseconds without throwing an
 * InterruptedException.
 * @param millis the number of milliseconds to sleep
 * @return true if the sleep completed normally, false if it was interrupted
 */
public static final boolean s_sleep(final long millis)
{
	try
	{
		Thread.sleep(millis);
	}
	catch(final java.lang.InterruptedException ex)
	{
		// restore the interrupt flag so callers that poll it still see it
		Thread.currentThread().interrupt();
		return false;
	}
	return true;
}
/**
 * Signify to this library registry that the ini file has been loaded:
 * every library owned by this thread group is notified via its own
 * propertiesLoaded(), and the global stat/material/wearable code
 * tables are reset.
 */
public static final void propertiesLoaded()
{
	final CMLib lib=l();
	for(final Library lbry : Library.values())
	{
		// slots shared from the main host are notified by the main host
		final boolean sharedFromMainHost=(!CMProps.isPrivateToMe(lbry.toString()))
										&&(libs[MudHost.MAIN_HOST]!=lib);
		if(sharedFromMainHost)
			continue;
		final CMLibrary registeredLib=lib.libraries[lbry.ordinal()];
		if(registeredLib!=null)
			registeredLib.propertiesLoaded();
	}
	CharStats.CODES.reset();
	RawMaterial.CODES.reset();
	Wearable.CODES.reset();
}
/**
 * Signify to this library registry that all of the library classes have
 * been registered: non-private slots on secondary hosts are pointed at
 * the main host's libraries, missing local libraries are logged as
 * errors, and everything else is activated.
 */
public static final void activateLibraries()
{
	final CMLib lib=l();
	for(final Library lbry : Library.values())
	{
		final int slot=lbry.ordinal();
		final boolean shareWithMainHost=(!CMProps.isPrivateToMe(lbry.toString()))
										&&(libs[MudHost.MAIN_HOST]!=lib);
		if(shareWithMainHost)
		{
			if(CMSecurity.isDebugging(DbgFlag.BOOTSTRAPPER))
				Log.debugOut("HOST"+Thread.currentThread().getThreadGroup().getName().charAt(0)+" sharing library "+lbry.toString());
			lib.libraries[slot]=libs[MudHost.MAIN_HOST].libraries[slot];
		}
		else
		if(lib.libraries[slot]==null)
			Log.errOut("Unable to find library "+lbry.toString());
		else
			lib.libraries[slot].activate();
	}
}
/**
 * Return the library belonging to the given thread group code, and the given
 * Library Enum.
 * @see CMLib.Library
 * @see com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMLibrary
 * @param tcode the thread group code
 * @param lcode the Library Enum
 * @return the matching library, or null if that thread group has no CMLib
 */
public final static CMLibrary library(final char tcode, final Library lcode)
{
	final CMLib lib=libs[tcode];
	return (lib==null)?null:lib.libraries[lcode.ordinal()];
}
/**
 * Returns an enumeration of all library objects of the Library Enum type given
 * across all thread groups, with duplicates (shared libraries) removed.
 * @see CMLib.Library
 * @param code the Library Enum
 * @return an enumeration of all distinct library objects of that type
 */
public final static Enumeration<CMLibrary> libraries(final Library code)
{
	final Vector<CMLibrary> found=new Vector<CMLibrary>();
	final int slot=code.ordinal();
	for(final CMLib lib : libs)
	{
		if(lib==null)
			continue;
		final CMLibrary lbry=lib.libraries[slot];
		if((lbry!=null)&&(!found.contains(lbry)))
			found.add(lbry);
	}
	return found.elements();
}
/**
 * Calls registerLibrary on each of the given CMLibrary objects.
 * @see com.suscipio_solutions.consecro_mud.Libraries.interfaces.CMLibrary
 * @see CMLib#registerLibrary(CMLibrary)
 * @param e an enumeration of CMLibrary objects
 */
public static final void registerLibraries(final Enumeration<CMLibrary> e)
{
	while(e.hasMoreElements())
		registerLibrary(e.nextElement());
}
/**
 * Returns how many CMLibrary objects have been registered for this
 * thread group.
 * @return a count of CMLibrary objects registered
 */
public static final int countRegistered()
{
	final boolean[] registeredSlots=l().registered;
	int count=0;
	for(int i=0;i<registeredSlots.length;i++)
	{
		if(registeredSlots[i])
			count++;
	}
	return count;
}
/**
 * Returns a comma-delimited list (including a trailing separator) of the
 * ordinal numbers of those libraries which have not been registered for
 * this thread group, e.g. "3, 17, ".
 * @return a string list of unregistered library ordinals
 */
public static final String unregistered()
{
	// StringBuilder: this buffer is method-local, so the synchronized
	// StringBuffer previously used here bought nothing
	final StringBuilder str=new StringBuilder("");
	final boolean[] registeredSlots=l().registered;
	for(int i=0;i<registeredSlots.length;i++)
	{
		if(!registeredSlots[i])
			str.append(i).append(", ");
	}
	return str.toString();
}
}
// (stray "|" character removed — appears to be a file-concatenation artifact)
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.avs;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.HttpPipelinePosition;
import com.azure.core.http.policy.AddDatePolicy;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.http.policy.HttpLoggingPolicy;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.http.policy.HttpPolicyProviders;
import com.azure.core.http.policy.RequestIdPolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;
import com.azure.core.management.http.policy.ArmChallengeAuthenticationPolicy;
import com.azure.core.management.profile.AzureProfile;
import com.azure.core.util.Configuration;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.avs.fluent.AvsClient;
import com.azure.resourcemanager.avs.implementation.AddonsImpl;
import com.azure.resourcemanager.avs.implementation.AuthorizationsImpl;
import com.azure.resourcemanager.avs.implementation.AvsClientBuilder;
import com.azure.resourcemanager.avs.implementation.CloudLinksImpl;
import com.azure.resourcemanager.avs.implementation.ClustersImpl;
import com.azure.resourcemanager.avs.implementation.DatastoresImpl;
import com.azure.resourcemanager.avs.implementation.GlobalReachConnectionsImpl;
import com.azure.resourcemanager.avs.implementation.HcxEnterpriseSitesImpl;
import com.azure.resourcemanager.avs.implementation.LocationsImpl;
import com.azure.resourcemanager.avs.implementation.OperationsImpl;
import com.azure.resourcemanager.avs.implementation.PlacementPoliciesImpl;
import com.azure.resourcemanager.avs.implementation.PrivateCloudsImpl;
import com.azure.resourcemanager.avs.implementation.ScriptCmdletsImpl;
import com.azure.resourcemanager.avs.implementation.ScriptExecutionsImpl;
import com.azure.resourcemanager.avs.implementation.ScriptPackagesImpl;
import com.azure.resourcemanager.avs.implementation.VirtualMachinesImpl;
import com.azure.resourcemanager.avs.implementation.WorkloadNetworksImpl;
import com.azure.resourcemanager.avs.models.Addons;
import com.azure.resourcemanager.avs.models.Authorizations;
import com.azure.resourcemanager.avs.models.CloudLinks;
import com.azure.resourcemanager.avs.models.Clusters;
import com.azure.resourcemanager.avs.models.Datastores;
import com.azure.resourcemanager.avs.models.GlobalReachConnections;
import com.azure.resourcemanager.avs.models.HcxEnterpriseSites;
import com.azure.resourcemanager.avs.models.Locations;
import com.azure.resourcemanager.avs.models.Operations;
import com.azure.resourcemanager.avs.models.PlacementPolicies;
import com.azure.resourcemanager.avs.models.PrivateClouds;
import com.azure.resourcemanager.avs.models.ScriptCmdlets;
import com.azure.resourcemanager.avs.models.ScriptExecutions;
import com.azure.resourcemanager.avs.models.ScriptPackages;
import com.azure.resourcemanager.avs.models.VirtualMachines;
import com.azure.resourcemanager.avs.models.WorkloadNetworks;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/** Entry point to AvsManager. Azure VMware Solution API. */
public final class AvsManager {
    // Service collection wrappers; presumably initialized lazily by their
    // accessor methods (not visible in this chunk) — TODO confirm.
    private Operations operations;
    private Locations locations;
    private PrivateClouds privateClouds;
    private Clusters clusters;
    private Datastores datastores;
    private HcxEnterpriseSites hcxEnterpriseSites;
    private Authorizations authorizations;
    private GlobalReachConnections globalReachConnections;
    private WorkloadNetworks workloadNetworks;
    private CloudLinks cloudLinks;
    private Addons addons;
    private VirtualMachines virtualMachines;
    private PlacementPolicies placementPolicies;
    private ScriptPackages scriptPackages;
    private ScriptCmdlets scriptCmdlets;
    private ScriptExecutions scriptExecutions;
    // The generated low-level client, built once in the constructor.
    private final AvsClient clientObject;
private AvsManager(HttpPipeline httpPipeline, AzureProfile profile, Duration defaultPollInterval) {
Objects.requireNonNull(httpPipeline, "'httpPipeline' cannot be null.");
Objects.requireNonNull(profile, "'profile' cannot be null.");
this.clientObject =
new AvsClientBuilder()
.pipeline(httpPipeline)
.endpoint(profile.getEnvironment().getResourceManagerEndpoint())
.subscriptionId(profile.getSubscriptionId())
.defaultPollInterval(defaultPollInterval)
.buildClient();
}
/**
* Creates an instance of Avs service API entry point.
*
* @param credential the credential to use.
* @param profile the Azure profile for client.
* @return the Avs service API instance.
*/
public static AvsManager authenticate(TokenCredential credential, AzureProfile profile) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
Objects.requireNonNull(profile, "'profile' cannot be null.");
return configure().authenticate(credential, profile);
}
/**
* Gets a Configurable instance that can be used to create AvsManager with optional configuration.
*
* @return the Configurable instance allowing configurations.
*/
public static Configurable configure() {
return new AvsManager.Configurable();
}
/** The Configurable allowing configurations to be set. */
public static final class Configurable {
private final ClientLogger logger = new ClientLogger(Configurable.class);
private HttpClient httpClient;
private HttpLogOptions httpLogOptions;
private final List<HttpPipelinePolicy> policies = new ArrayList<>();
private final List<String> scopes = new ArrayList<>();
private RetryPolicy retryPolicy;
private Duration defaultPollInterval;
private Configurable() {
}
/**
* Sets the http client.
*
* @param httpClient the HTTP client.
* @return the configurable object itself.
*/
public Configurable withHttpClient(HttpClient httpClient) {
this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
return this;
}
/**
* Sets the logging options to the HTTP pipeline.
*
* @param httpLogOptions the HTTP log options.
* @return the configurable object itself.
*/
public Configurable withLogOptions(HttpLogOptions httpLogOptions) {
this.httpLogOptions = Objects.requireNonNull(httpLogOptions, "'httpLogOptions' cannot be null.");
return this;
}
/**
* Adds the pipeline policy to the HTTP pipeline.
*
* @param policy the HTTP pipeline policy.
* @return the configurable object itself.
*/
public Configurable withPolicy(HttpPipelinePolicy policy) {
this.policies.add(Objects.requireNonNull(policy, "'policy' cannot be null."));
return this;
}
/**
* Adds the scope to permission sets.
*
* @param scope the scope.
* @return the configurable object itself.
*/
public Configurable withScope(String scope) {
this.scopes.add(Objects.requireNonNull(scope, "'scope' cannot be null."));
return this;
}
/**
* Sets the retry policy to the HTTP pipeline.
*
* @param retryPolicy the HTTP pipeline retry policy.
* @return the configurable object itself.
*/
public Configurable withRetryPolicy(RetryPolicy retryPolicy) {
this.retryPolicy = Objects.requireNonNull(retryPolicy, "'retryPolicy' cannot be null.");
return this;
}
/**
* Sets the default poll interval, used when service does not provide "Retry-After" header.
*
* @param defaultPollInterval the default poll interval.
* @return the configurable object itself.
*/
public Configurable withDefaultPollInterval(Duration defaultPollInterval) {
this.defaultPollInterval = Objects.requireNonNull(defaultPollInterval, "'retryPolicy' cannot be null.");
if (this.defaultPollInterval.isNegative()) {
throw logger.logExceptionAsError(new IllegalArgumentException("'httpPipeline' cannot be negative"));
}
return this;
}
/**
* Creates an instance of Avs service API entry point.
*
* @param credential the credential to use.
* @param profile the Azure profile for client.
* @return the Avs service API instance.
*/
public AvsManager authenticate(TokenCredential credential, AzureProfile profile) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
Objects.requireNonNull(profile, "'profile' cannot be null.");
StringBuilder userAgentBuilder = new StringBuilder();
userAgentBuilder
.append("azsdk-java")
.append("-")
.append("com.azure.resourcemanager.avs")
.append("/")
.append("1.0.0-beta.3");
if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) {
userAgentBuilder
.append(" (")
.append(Configuration.getGlobalConfiguration().get("java.version"))
.append("; ")
.append(Configuration.getGlobalConfiguration().get("os.name"))
.append("; ")
.append(Configuration.getGlobalConfiguration().get("os.version"))
.append("; auto-generated)");
} else {
userAgentBuilder.append(" (auto-generated)");
}
if (scopes.isEmpty()) {
scopes.add(profile.getEnvironment().getManagementEndpoint() + "/.default");
}
if (retryPolicy == null) {
retryPolicy = new RetryPolicy("Retry-After", ChronoUnit.SECONDS);
}
List<HttpPipelinePolicy> policies = new ArrayList<>();
policies.add(new UserAgentPolicy(userAgentBuilder.toString()));
policies.add(new RequestIdPolicy());
policies
.addAll(
this
.policies
.stream()
.filter(p -> p.getPipelinePosition() == HttpPipelinePosition.PER_CALL)
.collect(Collectors.toList()));
HttpPolicyProviders.addBeforeRetryPolicies(policies);
policies.add(retryPolicy);
policies.add(new AddDatePolicy());
policies.add(new ArmChallengeAuthenticationPolicy(credential, scopes.toArray(new String[0])));
policies
.addAll(
this
.policies
.stream()
.filter(p -> p.getPipelinePosition() == HttpPipelinePosition.PER_RETRY)
.collect(Collectors.toList()));
HttpPolicyProviders.addAfterRetryPolicies(policies);
policies.add(new HttpLoggingPolicy(httpLogOptions));
HttpPipeline httpPipeline =
new HttpPipelineBuilder()
.httpClient(httpClient)
.policies(policies.toArray(new HttpPipelinePolicy[0]))
.build();
return new AvsManager(httpPipeline, profile, defaultPollInterval);
}
}
/** @return Resource collection API of Operations. */
public Operations operations() {
if (this.operations == null) {
this.operations = new OperationsImpl(clientObject.getOperations(), this);
}
return operations;
}
/** @return Resource collection API of Locations. */
public Locations locations() {
if (this.locations == null) {
this.locations = new LocationsImpl(clientObject.getLocations(), this);
}
return locations;
}
/** @return Resource collection API of PrivateClouds. */
public PrivateClouds privateClouds() {
if (this.privateClouds == null) {
this.privateClouds = new PrivateCloudsImpl(clientObject.getPrivateClouds(), this);
}
return privateClouds;
}
/** @return Resource collection API of Clusters. */
public Clusters clusters() {
if (this.clusters == null) {
this.clusters = new ClustersImpl(clientObject.getClusters(), this);
}
return clusters;
}
/** @return Resource collection API of Datastores. */
public Datastores datastores() {
if (this.datastores == null) {
this.datastores = new DatastoresImpl(clientObject.getDatastores(), this);
}
return datastores;
}
/** @return Resource collection API of HcxEnterpriseSites. */
public HcxEnterpriseSites hcxEnterpriseSites() {
if (this.hcxEnterpriseSites == null) {
this.hcxEnterpriseSites = new HcxEnterpriseSitesImpl(clientObject.getHcxEnterpriseSites(), this);
}
return hcxEnterpriseSites;
}
/** @return Resource collection API of Authorizations. */
public Authorizations authorizations() {
if (this.authorizations == null) {
this.authorizations = new AuthorizationsImpl(clientObject.getAuthorizations(), this);
}
return authorizations;
}
/** @return Resource collection API of GlobalReachConnections. */
public GlobalReachConnections globalReachConnections() {
if (this.globalReachConnections == null) {
this.globalReachConnections =
new GlobalReachConnectionsImpl(clientObject.getGlobalReachConnections(), this);
}
return globalReachConnections;
}
/** @return Resource collection API of WorkloadNetworks. */
public WorkloadNetworks workloadNetworks() {
if (this.workloadNetworks == null) {
this.workloadNetworks = new WorkloadNetworksImpl(clientObject.getWorkloadNetworks(), this);
}
return workloadNetworks;
}
/** @return Resource collection API of CloudLinks. */
public CloudLinks cloudLinks() {
if (this.cloudLinks == null) {
this.cloudLinks = new CloudLinksImpl(clientObject.getCloudLinks(), this);
}
return cloudLinks;
}
/** @return Resource collection API of Addons. */
public Addons addons() {
if (this.addons == null) {
this.addons = new AddonsImpl(clientObject.getAddons(), this);
}
return addons;
}
/** @return Resource collection API of VirtualMachines. */
public VirtualMachines virtualMachines() {
if (this.virtualMachines == null) {
this.virtualMachines = new VirtualMachinesImpl(clientObject.getVirtualMachines(), this);
}
return virtualMachines;
}
/** @return Resource collection API of PlacementPolicies. */
public PlacementPolicies placementPolicies() {
if (this.placementPolicies == null) {
this.placementPolicies = new PlacementPoliciesImpl(clientObject.getPlacementPolicies(), this);
}
return placementPolicies;
}
/** @return Resource collection API of ScriptPackages. */
public ScriptPackages scriptPackages() {
if (this.scriptPackages == null) {
this.scriptPackages = new ScriptPackagesImpl(clientObject.getScriptPackages(), this);
}
return scriptPackages;
}
/** @return Resource collection API of ScriptCmdlets. */
public ScriptCmdlets scriptCmdlets() {
if (this.scriptCmdlets == null) {
this.scriptCmdlets = new ScriptCmdletsImpl(clientObject.getScriptCmdlets(), this);
}
return scriptCmdlets;
}
/** @return Resource collection API of ScriptExecutions. */
public ScriptExecutions scriptExecutions() {
if (this.scriptExecutions == null) {
this.scriptExecutions = new ScriptExecutionsImpl(clientObject.getScriptExecutions(), this);
}
return scriptExecutions;
}
/**
* @return Wrapped service client AvsClient providing direct access to the underlying auto-generated API
* implementation, based on Azure REST API.
*/
public AvsClient serviceClient() {
return this.clientObject;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.aggregates.serializable.std;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import org.apache.asterix.om.base.ADouble;
import org.apache.asterix.om.base.AInt64;
import org.apache.asterix.om.base.AMutableDouble;
import org.apache.asterix.om.base.AMutableInt64;
import org.apache.asterix.om.base.ANull;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.EnumDeserializer;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
import org.apache.asterix.runtime.evaluators.common.AccessibleByteArrayEval;
import org.apache.asterix.runtime.evaluators.common.ClosedRecordConstructorEvalFactory.ClosedRecordConstructorEval;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluator;
import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
import org.apache.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunction;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
/**
 * Base class for serializable AVG aggregate functions. The aggregation state is a
 * fixed-layout byte buffer managed by the caller:
 * <ul>
 *   <li>offset {@code SUM_OFFSET} (0): 8-byte double running sum</li>
 *   <li>offset {@code COUNT_OFFSET} (8): 8-byte long running count</li>
 *   <li>offset {@code AGG_TYPE_OFFSET} (16): 1-byte serialized {@link ATypeTag} of
 *       the values seen so far ({@code SYSTEM_NULL} until the first value arrives)</li>
 * </ul>
 * Subclasses decide how NULLs are handled ({@link #processNull}) and whether a step
 * is skipped ({@link #skipStep}).
 */
public abstract class AbstractSerializableAvgAggregateFunction implements ICopySerializableAggregateFunction {
    // Field ids inside the partial-result record { "sum": double, "count": int64 }.
    private static final int SUM_FIELD_ID = 0;
    private static final int COUNT_FIELD_ID = 1;
    // Byte offsets into the serialized aggregation state (see class Javadoc).
    private static final int SUM_OFFSET = 0;
    private static final int COUNT_OFFSET = 8;
    protected static final int AGG_TYPE_OFFSET = 16;
    // Scratch storage for the evaluated input value of the current tuple.
    private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
    private ICopyEvaluator eval;
    // Reusable mutable wrappers to avoid per-tuple allocation during serialization.
    private AMutableDouble aDouble = new AMutableDouble(0);
    private AMutableInt64 aInt64 = new AMutableInt64(0);
    // Buffers used to assemble the partial-result record (sum/count fields).
    private ArrayBackedValueStorage avgBytes = new ArrayBackedValueStorage();
    private ByteArrayAccessibleOutputStream sumBytes = new ByteArrayAccessibleOutputStream();
    private DataOutput sumBytesOutput = new DataOutputStream(sumBytes);
    private ByteArrayAccessibleOutputStream countBytes = new ByteArrayAccessibleOutputStream();
    private DataOutput countBytesOutput = new DataOutputStream(countBytes);
    private ICopyEvaluator evalSum = new AccessibleByteArrayEval(avgBytes.getDataOutput(), sumBytes);
    private ICopyEvaluator evalCount = new AccessibleByteArrayEval(avgBytes.getDataOutput(), countBytes);
    // Lazily created in finishPartialResults (needs the result DataOutput).
    private ClosedRecordConstructorEval recordEval;
    @SuppressWarnings("unchecked")
    private ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
            .getSerializerDeserializer(BuiltinType.ADOUBLE);
    @SuppressWarnings("unchecked")
    private ISerializerDeserializer<AInt64> longSerde = AqlSerializerDeserializerProvider.INSTANCE
            .getSerializerDeserializer(BuiltinType.AINT64);
    @SuppressWarnings("unchecked")
    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
            .getSerializerDeserializer(BuiltinType.ANULL);
    public AbstractSerializableAvgAggregateFunction(ICopyEvaluatorFactory[] args) throws AlgebricksException {
        // Only the first argument is aggregated; it is evaluated into inputVal.
        eval = args[0].createEvaluator(inputVal);
    }
    /**
     * Writes the initial state: sum = 0.0, count = 0, type tag = SYSTEM_NULL
     * (meaning "no value seen yet").
     */
    @Override
    public void init(DataOutput state) throws AlgebricksException {
        try {
            state.writeDouble(0.0);
            state.writeLong(0);
            state.writeByte(ATypeTag.SYSTEM_NULL.serialize());
        } catch (IOException e) {
            throw new AlgebricksException(e);
        }
    }
    public abstract void step(IFrameTupleReference tuple, byte[] state, int start, int len) throws AlgebricksException;
    public abstract void finish(byte[] state, int start, int len, DataOutput result) throws AlgebricksException;
    public abstract void finishPartial(byte[] state, int start, int len, DataOutput result) throws AlgebricksException;
    /** Hook invoked when a NULL input value is encountered; subclass defines semantics. */
    protected abstract void processNull(byte[] state, int start);
    /**
     * Accumulates one raw (non-partial) input value into the state: evaluates the
     * tuple, validates/promotes the aggregate type tag, then adds the numeric value
     * to the running sum and increments the count.
     */
    protected void processDataValues(IFrameTupleReference tuple, byte[] state, int start, int len)
            throws AlgebricksException {
        if (skipStep(state, start)) {
            return;
        }
        inputVal.reset();
        eval.evaluate(tuple);
        double sum = BufferSerDeUtil.getDouble(state, start + SUM_OFFSET);
        long count = BufferSerDeUtil.getLong(state, start + COUNT_OFFSET);
        // First byte of the evaluated value is its serialized type tag.
        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]);
        ATypeTag aggType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(state[start + AGG_TYPE_OFFSET]);
        if (typeTag == ATypeTag.NULL) {
            processNull(state, start);
            return;
        } else if (aggType == ATypeTag.SYSTEM_NULL) {
            // First value seen: adopt its type.
            aggType = typeTag;
        } else if (typeTag != ATypeTag.SYSTEM_NULL && !ATypeHierarchy.isCompatible(typeTag, aggType)) {
            throw new AlgebricksException("Unexpected type " + typeTag + " in aggregation input stream. Expected type "
                    + aggType + ".");
        } else if (ATypeHierarchy.canPromote(aggType, typeTag)) {
            // Widen the aggregate type (e.g. INT32 -> DOUBLE) when the new value is wider.
            aggType = typeTag;
        }
        ++count;
        // Numeric payload starts at offset 1 (after the type tag byte).
        switch (typeTag) {
            case INT8: {
                byte val = AInt8SerializerDeserializer.getByte(inputVal.getByteArray(), 1);
                sum += val;
                break;
            }
            case INT16: {
                short val = AInt16SerializerDeserializer.getShort(inputVal.getByteArray(), 1);
                sum += val;
                break;
            }
            case INT32: {
                int val = AInt32SerializerDeserializer.getInt(inputVal.getByteArray(), 1);
                sum += val;
                break;
            }
            case INT64: {
                long val = AInt64SerializerDeserializer.getLong(inputVal.getByteArray(), 1);
                sum += val;
                break;
            }
            case FLOAT: {
                float val = AFloatSerializerDeserializer.getFloat(inputVal.getByteArray(), 1);
                sum += val;
                break;
            }
            case DOUBLE: {
                double val = ADoubleSerializerDeserializer.getDouble(inputVal.getByteArray(), 1);
                sum += val;
                break;
            }
            default: {
                throw new NotImplementedException("Cannot compute AVG for values of type " + typeTag);
            }
        }
        inputVal.reset();
        // Write the updated running totals back into the caller-owned state buffer.
        BufferSerDeUtil.writeDouble(sum, state, start + SUM_OFFSET);
        BufferSerDeUtil.writeLong(count, state, start + COUNT_OFFSET);
        state[start + AGG_TYPE_OFFSET] = aggType.serialize();
    }
    /**
     * Emits the partial result as a closed record { sum: double, count: int64 },
     * or SYSTEM_NULL / NULL tags when no data / only nulls were seen.
     */
    protected void finishPartialResults(byte[] state, int start, int len, DataOutput result) throws AlgebricksException {
        double sum = BufferSerDeUtil.getDouble(state, start + SUM_OFFSET);
        long count = BufferSerDeUtil.getLong(state, start + COUNT_OFFSET);
        ATypeTag aggType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(state[start + AGG_TYPE_OFFSET]);
        if (recordEval == null) {
            // Build the record constructor lazily: it needs the result DataOutput.
            ARecordType recType;
            try {
                recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] { BuiltinType.ADOUBLE,
                        BuiltinType.AINT64 }, false);
            } catch (AsterixException | HyracksDataException e) {
                throw new AlgebricksException(e);
            }
            recordEval = new ClosedRecordConstructorEval(recType, new ICopyEvaluator[] { evalSum, evalCount },
                    avgBytes, result);
        }
        try {
            if (aggType == ATypeTag.SYSTEM_NULL) {
                if (GlobalConfig.DEBUG) {
                    GlobalConfig.ASTERIX_LOGGER.finest("AVG aggregate ran over empty input.");
                }
                result.writeByte(ATypeTag.SYSTEM_NULL.serialize());
            } else if (aggType == ATypeTag.NULL) {
                result.writeByte(ATypeTag.NULL.serialize());
            } else {
                // Serialize sum and count into their field buffers, then emit the record.
                sumBytes.reset();
                aDouble.setValue(sum);
                doubleSerde.serialize(aDouble, sumBytesOutput);
                countBytes.reset();
                aInt64.setValue(count);
                longSerde.serialize(aInt64, countBytesOutput);
                recordEval.evaluate(null);
            }
        } catch (IOException e) {
            throw new AlgebricksException(e);
        }
    }
    /**
     * Merges one partial result (a { sum, count } record produced by
     * {@link #finishPartialResults}) into the local state.
     */
    protected void processPartialResults(IFrameTupleReference tuple, byte[] state, int start, int len)
            throws AlgebricksException {
        if (skipStep(state, start)) {
            return;
        }
        double sum = BufferSerDeUtil.getDouble(state, start + SUM_OFFSET);
        long count = BufferSerDeUtil.getLong(state, start + COUNT_OFFSET);
        ATypeTag aggType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(state[start + AGG_TYPE_OFFSET]);
        inputVal.reset();
        eval.evaluate(tuple);
        byte[] serBytes = inputVal.getByteArray();
        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]);
        switch (typeTag) {
            case NULL: {
                processNull(state, start);
                break;
            }
            case SYSTEM_NULL: {
                // Ignore and return.
                break;
            }
            case RECORD: {
                // Expected.
                aggType = ATypeTag.DOUBLE;
                // Closed record with no nullable fields -> null bitmap is empty.
                int nullBitmapSize = 0;
                int offset1 = ARecordSerializerDeserializer.getFieldOffsetById(serBytes, SUM_FIELD_ID, nullBitmapSize,
                        false);
                sum += ADoubleSerializerDeserializer.getDouble(serBytes, offset1);
                int offset2 = ARecordSerializerDeserializer.getFieldOffsetById(serBytes, COUNT_FIELD_ID,
                        nullBitmapSize, false);
                count += AInt64SerializerDeserializer.getLong(serBytes, offset2);
                BufferSerDeUtil.writeDouble(sum, state, start + SUM_OFFSET);
                BufferSerDeUtil.writeLong(count, state, start + COUNT_OFFSET);
                state[start + AGG_TYPE_OFFSET] = aggType.serialize();
                break;
            }
            default: {
                throw new AlgebricksException("Global-Avg is not defined for values of type "
                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]));
            }
        }
    }
    /**
     * Emits the final average (sum / count) as a double, or NULL when no values
     * were aggregated or a NULL was observed.
     */
    protected void finishFinalResults(byte[] state, int start, int len, DataOutput result) throws AlgebricksException {
        double sum = BufferSerDeUtil.getDouble(state, start + SUM_OFFSET);
        long count = BufferSerDeUtil.getLong(state, start + COUNT_OFFSET);
        ATypeTag aggType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(state[start + AGG_TYPE_OFFSET]);
        try {
            if (count == 0 || aggType == ATypeTag.NULL)
                nullSerde.serialize(ANull.NULL, result);
            else {
                aDouble.setValue(sum / count);
                doubleSerde.serialize(aDouble, result);
            }
        } catch (IOException e) {
            throw new AlgebricksException(e);
        }
    }
    /** Subclasses may return true to skip accumulation for the current state (default: never skip). */
    protected boolean skipStep(byte[] state, int start) {
        return false;
    }
}
| |
/*
* Copyright 2014-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.metrics.alerter;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.hawkular.metrics.alerter.ConditionExpression.Function;
import org.hawkular.metrics.model.AvailabilityBucketPoint;
import org.hawkular.metrics.model.BucketPoint;
import org.hawkular.metrics.model.NumericBucketPoint;
import org.hawkular.metrics.model.Percentile;
import org.jboss.logging.Logger;
import com.udojava.evalex.Expression;
/**
* This class is responsible for resolving the {@link ConditionExpression} eval expression string. See
* <a href="http://github.com/uklimaschewski/EvalEx">here</a> for the supported expression format. The
* expression syntax is extended with supported query variables of the form <code>q(queryName,functionName)</code>
* where the <i>queryName</i> is a variable referring to a query defined for the {@link ConditionExpression} and
* the <i>functionName</i> is a supported aggregate function for the query's MetricType. The query variables are
* replaced with actual data at query time, before the eval expression is resolved.
*
* @author Jay Shaughnessy
* @author Lucas Ponce
*/
public class ConditionEvaluator {
    private static final Logger log = Logger.getLogger(ConditionEvaluator.class);
    // Parses "q(queryName, functionName)" into its two components.
    private static final Pattern PATTERN_QUERY_FUNC = Pattern.compile("q\\((.*?),\\s*(.*?)\\)");
    // Matches any query variable occurrence inside the eval expression.
    private static final Pattern PATTERN_QUERY_VAR = Pattern.compile("q\\(.*?\\)");
    // Map variableName:query
    private Map<String, QueryFunc> queryVars;
    private String eval;
    private Expression expression;
    public ConditionEvaluator(String eval) {
        super();
        this.eval = eval;
        this.queryVars = new HashMap<>();
        this.expression = new Expression(replaceQueriesWithVariables(eval));
        log.debugf("eval [%s] produced [%s] with variables %s", eval, expression.getOriginalExpression(), queryVars);
        // Do a test evaluation to validate the expression
        try {
            for (String var : queryVars.keySet()) {
                this.expression.setVariable(var, "1");
            }
            this.expression.eval();
        } catch (Exception e) {
            throw new IllegalArgumentException("Invalid eval expression [" + eval + "]: " + e.getMessage());
        }
    }
    public String getEval() {
        return eval;
    }
    /**
     * Prepare to evaluate by supplying the required query data.
     * @param queryMap Map queryName => queryResults
     * @return Map of the query variable replacement values used in the evaluation
     */
    public Map<String, String> prepare(Map<String, BucketPoint> queryMap) throws IllegalArgumentException {
        Map<String, String> result = new HashMap<>();
        for (Map.Entry<String, QueryFunc> entry : queryVars.entrySet()) {
            String var = entry.getKey();
            QueryFunc queryFunc = entry.getValue();
            BucketPoint bucketPoint = queryMap.get(queryFunc.getQueryName());
            if (null == bucketPoint) {
                throw new IllegalArgumentException("No data found for query name [" + queryFunc.getQueryName() + "]");
            }
            BigDecimal value = null;
            try {
                value = BigDecimal.valueOf(getValue(queryFunc.getFunction(), bucketPoint));
            } catch (Exception e) {
                throw new IllegalArgumentException("Could not prepare evaluation query ["
                        + queryFunc.getQueryName() + "]: " + e.getMessage());
            }
            this.expression.setVariable(var, value);
            result.put(queryFunc.getCanonical(), value.toString());
        }
        return result;
    }
    /**
     * Evaluate the prepared condition {@link #prepare(Map)}.
     * @return true if expression resolves to true, otherwise false
     */
    public boolean evaluate() {
        // Use signum() for a value comparison; '!= BigDecimal.ZERO' was a reference
        // comparison and could wrongly report true for a computed zero result.
        return (this.expression.eval().signum() != 0);
    }
    /**
     * Just a convenience method to prepare and evaluate in one call.
     * @return true if expression resolves to true, otherwise false
     */
    public boolean prepareAndEvaluate(Map<String, BucketPoint> queryMap) {
        prepare(queryMap);
        return evaluate();
    }
    /** Dispatches to the numeric or availability extractor based on the bucket type. */
    private double getValue(String func, BucketPoint bucketPoint) throws Exception {
        if (bucketPoint instanceof AvailabilityBucketPoint) {
            return getAvailabilityValue(func, (AvailabilityBucketPoint) bucketPoint);
        }
        return getNumericValue(func, (NumericBucketPoint) bucketPoint);
    }
    /**
     * Extracts the requested aggregate from a numeric bucket. A func starting with
     * '%' is treated as a percentile quantile (e.g. "%95").
     */
    private double getNumericValue(String func, NumericBucketPoint data) {
        if (null == data || null == data.getSamples() || 0 == data.getSamples()) {
            throw new IllegalArgumentException("NumericBucketPoint has no samples");
        }
        if (func.startsWith("%")) {
            return funcToPercentile(func, data.getPercentiles()).getValue();
        }
        try {
            switch (Function.valueOf(func)) {
                case avg:
                    return data.getAvg().doubleValue();
                case max:
                    return data.getMax().doubleValue();
                case median:
                    return data.getMedian().doubleValue();
                case min:
                    return data.getMin().doubleValue();
                case samples:
                    return data.getSamples() * 1.0;
                case sum:
                    return data.getSum().doubleValue();
                default:
                    throw new IllegalArgumentException("Unexpected func [" + func + "]");
            }
        } catch (IllegalArgumentException e) {
            throw e;
        } catch (Exception e) {
            // e.g. NPE when the aggregate is absent for this bucket
            throw new IllegalArgumentException("Invalid value for func [" + func + "] in " + data.toString());
        }
    }
    /** Extracts the requested aggregate from an availability bucket. */
    private double getAvailabilityValue(String func, AvailabilityBucketPoint data) {
        if (null == data || null == data.getSamples() || 0 == data.getSamples()) {
            throw new IllegalArgumentException("AvailabilityBucketPoint has no samples");
        }
        try {
            switch (Function.valueOf(func)) {
                case notUpCount:
                    return data.getNotUpCount().doubleValue();
                case notUpDuration:
                    return data.getNotUpDuration().doubleValue();
                case upCount:
                    return data.getUpCount().doubleValue();
                case uptimeRatio:
                    return data.getUptimeRatio().doubleValue();
                case samples:
                    return data.getSamples() * 1.0;
                default:
                    throw new IllegalArgumentException("Unexpected func [" + func + "]");
            }
        } catch (IllegalArgumentException e) {
            throw e;
        } catch (Exception e) {
            throw new IllegalArgumentException("Invalid value for func [" + func + "] in " + data.toString());
        }
    }
    /** Resolves a "%quantile" func to its matching Percentile, by original quantile string. */
    Percentile funcToPercentile(String func, List<Percentile> percentiles) {
        String quantile = func.substring(1);
        for (Percentile p : percentiles) {
            if (p.getOriginalQuantile().equals(quantile)) {
                return p;
            }
        }
        throw new IllegalArgumentException("Failed to find Percentile for [" + func + "] in " + percentiles);
    }
    public Map<String, QueryFunc> getQueryVars() {
        return queryVars;
    }
    public Expression getExpression() {
        return expression;
    }
    /**
     * Replaces each q(name,func) occurrence in the eval string with a generated
     * variable (q0, q1, ...), registering the mapping in {@link #queryVars}.
     * Identical queries share a single variable.
     */
    private String replaceQueriesWithVariables(String eval) {
        if (null == queryVars) {
            queryVars = new HashMap<>();
        }
        queryVars.clear();
        // First, collect the queries
        List<String> queries = new ArrayList<String>();
        Matcher m = PATTERN_QUERY_VAR.matcher(eval);
        while (m.find()) {
            queries.add(m.group());
        }
        // Next, replace them with variables
        int varNum = 0;
        for (String q : queries) {
            String var = null;
            QueryFunc qf = new QueryFunc(q);
            if (queryVars.containsValue(qf)) {
                // Reuse the variable already assigned to an equal query
                for (Map.Entry<String, QueryFunc> e : queryVars.entrySet()) {
                    if (e.getValue().equals(qf)) {
                        var = e.getKey();
                        break;
                    }
                }
            } else {
                var = "q" + varNum++;
                queryVars.put(var, qf);
            }
            eval = eval.replaceFirst("q\\(.*?\\)", var);
        }
        return eval;
    }
    /** Immutable-ish value holder for a (queryName, function) pair parsed from the eval string. */
    static class QueryFunc {
        private String queryName;
        private String function;
        private String canonical;
        public QueryFunc(String queryName, String function) {
            super();
            this.queryName = queryName.trim();
            this.function = function.trim();
        }
        public QueryFunc(String query) {
            super();
            Matcher m = PATTERN_QUERY_FUNC.matcher(query);
            if (!m.matches() || m.groupCount() != 2) {
                throw new IllegalArgumentException(
                        "Query segment [" + query + "] failed to parse. Groups=[" + m.groupCount() + "]");
            }
            this.queryName = m.group(1).trim();
            this.function = m.group(2).trim();
        }
        public String getQueryName() {
            return queryName;
        }
        public String getFunction() {
            return function;
        }
        public String getCanonical() {
            if (null == this.canonical) {
                this.canonical = "q(" + queryName + "," + function + ")";
            }
            return canonical;
        }
        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((function == null) ? 0 : function.hashCode());
            result = prime * result + ((queryName == null) ? 0 : queryName.hashCode());
            return result;
        }
        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            QueryFunc other = (QueryFunc) obj;
            if (function == null) {
                if (other.function != null)
                    return false;
            } else if (!function.equals(other.function))
                return false;
            if (queryName == null) {
                if (other.queryName != null)
                    return false;
            } else if (!queryName.equals(other.queryName))
                return false;
            return true;
        }
        @Override
        public String toString() {
            return "QueryFunc [queryName=" + queryName + ", function=" + function + "]";
        }
    }
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer.impl.rate;
import static com.opengamma.strata.basics.date.DayCounts.ACT_ACT_ISDA;
import static com.opengamma.strata.basics.index.OvernightIndices.CHF_TOIS;
import static com.opengamma.strata.basics.index.OvernightIndices.GBP_SONIA;
import static com.opengamma.strata.basics.index.OvernightIndices.USD_FED_FUND;
import static com.opengamma.strata.collect.TestHelper.assertThrows;
import static com.opengamma.strata.collect.TestHelper.date;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.time.LocalDate;
import org.testng.annotations.Test;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.CurrencyAmount;
import com.opengamma.strata.basics.index.OvernightIndexObservation;
import com.opengamma.strata.collect.array.DoubleArray;
import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries;
import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeriesBuilder;
import com.opengamma.strata.market.curve.Curve;
import com.opengamma.strata.market.curve.Curves;
import com.opengamma.strata.market.curve.InterpolatedNodalCurve;
import com.opengamma.strata.market.curve.interpolator.CurveInterpolator;
import com.opengamma.strata.market.curve.interpolator.CurveInterpolators;
import com.opengamma.strata.market.explain.ExplainKey;
import com.opengamma.strata.market.explain.ExplainMap;
import com.opengamma.strata.market.explain.ExplainMapBuilder;
import com.opengamma.strata.market.param.CurrencyParameterSensitivities;
import com.opengamma.strata.market.sensitivity.PointSensitivityBuilder;
import com.opengamma.strata.pricer.PricingException;
import com.opengamma.strata.pricer.rate.ImmutableRatesProvider;
import com.opengamma.strata.pricer.rate.OvernightIndexRates;
import com.opengamma.strata.pricer.rate.OvernightRateSensitivity;
import com.opengamma.strata.pricer.rate.SimpleRatesProvider;
import com.opengamma.strata.pricer.sensitivity.RatesFiniteDifferenceSensitivityCalculator;
import com.opengamma.strata.product.rate.OvernightCompoundedRateComputation;
/**
* Test {@link ForwardOvernightCompoundedRateComputationFn}.
*/
@Test
public class ForwardOvernightCompoundedRateComputationFnTest {
// Shared fixture data for all tests in this class.
private static final ReferenceData REF_DATA = ReferenceData.standard();
private static final LocalDate DUMMY_ACCRUAL_START_DATE = date(2015, 1, 1); // Accrual dates irrelevant for the rate
private static final LocalDate DUMMY_ACCRUAL_END_DATE = date(2015, 1, 31); // Accrual dates irrelevant for the rate
private static final LocalDate FIXING_START_DATE = date(2015, 1, 8);
private static final LocalDate FIXING_END_DATE = date(2015, 1, 15); // 1w only to decrease data
private static final LocalDate FIXING_FINAL_DATE = date(2015, 1, 14);
// Business days Wed 7 Jan 2015 .. Thu 15 Jan 2015 (weekend 10-11 Jan skipped).
private static final LocalDate[] FIXING_DATES = new LocalDate[] {
date(2015, 1, 7),
date(2015, 1, 8),
date(2015, 1, 9),
date(2015, 1, 12),
date(2015, 1, 13),
date(2015, 1, 14),
date(2015, 1, 15)};
// Index observations aligned one-to-one with FIXING_DATES, per index.
private static final OvernightIndexObservation[] USD_OBS = new OvernightIndexObservation[] {
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 7), REF_DATA),
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 8), REF_DATA),
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 9), REF_DATA),
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 12), REF_DATA),
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 13), REF_DATA),
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 14), REF_DATA),
OvernightIndexObservation.of(USD_FED_FUND, date(2015, 1, 15), REF_DATA)};
private static final OvernightIndexObservation[] GBP_OBS = new OvernightIndexObservation[] {
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 7), REF_DATA),
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 8), REF_DATA),
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 9), REF_DATA),
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 12), REF_DATA),
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 13), REF_DATA),
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 14), REF_DATA),
OvernightIndexObservation.of(GBP_SONIA, date(2015, 1, 15), REF_DATA)};
private static final OvernightIndexObservation[] CHF_OBS = new OvernightIndexObservation[] {
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 7), REF_DATA),
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 8), REF_DATA),
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 9), REF_DATA),
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 12), REF_DATA),
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 13), REF_DATA),
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 14), REF_DATA),
OvernightIndexObservation.of(CHF_TOIS, date(2015, 1, 15), REF_DATA)};
// FIXING_RATES[i] is the historic fixing for FIXING_DATES[i]; FORWARD_RATES[i] the forward estimate.
private static final double[] FIXING_RATES = {
0.0012, 0.0023, 0.0034,
0.0045, 0.0056, 0.0067, 0.0078};
private static final double[] FORWARD_RATES = {
0.0112, 0.0123, 0.0134,
0.0145, 0.0156, 0.0167, 0.0178};
// Absolute tolerance for rate comparison, and bump size for finite-difference sensitivities.
private static final double TOLERANCE_RATE = 1.0E-10;
private static final double EPS_FD = 1.0E-7;
// Function under test.
private static final ForwardOvernightCompoundedRateComputationFn OBS_FWD_ONCMP =
ForwardOvernightCompoundedRateComputationFn.DEFAULT;
/**
 * No cutoff period and the period entirely forward: tests the forward part only.
 * The computed rate must equal the mocked compounded period rate for both valuation
 * dates, and the explain map must expose only the combined rate (no per-fixing
 * observations).
 */
public void rateFedFundNoCutOffForward() { // publication=1, cutoff=0, effective offset=0, Forward
LocalDate[] valuationDate = {date(2015, 1, 1), date(2015, 1, 8)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
double rateCmp = 0.0123;
// entirely-forward period: the pricer should query a single compounded period rate
when(mockRates.periodRate(USD_OBS[1], FIXING_END_DATE)).thenReturn(rateCmp);
double rateExpected = rateCmp;
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
// explain: a fully-forward period is reported as one combined rate, no observation list
ExplainMapBuilder builder = ExplainMap.builder();
double explainedRate = OBS_FWD_ONCMP.explainRate(
ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv, builder);
assertEquals(explainedRate, rateExpected, TOLERANCE_RATE);
ExplainMap built = builder.build();
assertEquals(built.get(ExplainKey.OBSERVATIONS).isPresent(), false);
assertEquals(built.get(ExplainKey.COMBINED_RATE).get().doubleValue(), rateExpected, TOLERANCE_RATE);
}
/**
 * No cutoff period and the period entirely forward: tests the sensitivity of the
 * forward part against a central finite-difference approximation obtained from
 * bumped-up and bumped-down providers.
 */
public void rateFedFundNoCutOffForwardSensitivity() { // publication=1, cutoff=0, effective offset=0, Forward
LocalDate[] valuationDate = {date(2015, 1, 1), date(2015, 1, 8)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
double rateCmp = 0.0123;
when(mockRates.periodRate(USD_OBS[1], FIXING_END_DATE)).thenReturn(rateCmp);
// unit sensitivity returned by the base provider; scaled by the pricer internally
PointSensitivityBuilder rateSensitivity = OvernightRateSensitivity.ofPeriod(USD_OBS[1], FIXING_END_DATE, 1.0);
when(mockRates.periodRatePointSensitivity(USD_OBS[1], FIXING_END_DATE)).thenReturn(
rateSensitivity);
// bumped providers for the central finite-difference estimate
OvernightIndexRates mockRatesUp = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvUp = new SimpleRatesProvider(mockRatesUp);
when(mockRatesUp.periodRate(USD_OBS[1], FIXING_END_DATE)).thenReturn(
rateCmp + EPS_FD);
OvernightIndexRates mockRatesDw = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvDw = new SimpleRatesProvider(mockRatesDw);
when(mockRatesDw.periodRate(USD_OBS[1], FIXING_END_DATE)).thenReturn(
rateCmp - EPS_FD);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesUp.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesDw.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvUp);
double rateDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvDw);
// central difference: (f(x+eps) - f(x-eps)) / (2*eps)
double sensitivityExpected = 0.5 * (rateUp - rateDw) / EPS_FD;
PointSensitivityBuilder sensitivityBuilderExpected =
OvernightRateSensitivity.ofPeriod(USD_OBS[1], FIXING_END_DATE, sensitivityExpected);
PointSensitivityBuilder sensitivityBuilderComputed = OBS_FWD_ONCMP.rateSensitivity(ro,
DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertTrue(sensitivityBuilderComputed.build().normalized().equalWithTolerance(
sensitivityBuilderExpected.build().normalized(), EPS_FD));
}
}
/**
 * Two-day cutoff and the period entirely forward: tests the forward part plus the
 * cutoff-specific handling. Almost all overnight compounded coupons (OIS) do not
 * use a cutoff period.
 */
public void rateFedFund2CutOffForward() { // publication=1, cutoff=2, effective offset=0, Forward
LocalDate[] valuationDate = {date(2015, 1, 1), date(2015, 1, 8)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 2, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
for (int i = 0; i < FIXING_DATES.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// compound the non-cutoff sub-period (fixings 1..4) to build the expected period rate
double investmentFactor = 1.0;
double afNonCutoff = 0.0;
for (int i = 1; i < 5; i++) {
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNonCutoff += af;
investmentFactor *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactor - 1.0d) / afNonCutoff;
when(mockRates.periodRate(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateCmp);
// the cutoff day re-uses the last pre-cutoff rate (FORWARD_RATES[4]) over the final accrual factor
LocalDate fixingCutOff = FIXING_DATES[5];
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(fixingCutOff, REF_DATA);
double afCutOff = USD_FED_FUND.getDayCount().yearFraction(fixingCutOff, endDate);
double rateExpected = ((1.0 + rateCmp * afNonCutoff) * (1.0d + FORWARD_RATES[4] * afCutOff) - 1.0d)
/ (afNonCutoff + afCutOff);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/**
 * Two-day cutoff and the period entirely forward: tests the sensitivity of the
 * forward part plus the cutoff handling against a finite-difference approximation.
 * Almost all overnight compounded coupons (OIS) do not use a cutoff period.
 */
public void rateFedFund2CutOffForwardSensitivity() { // publication=1, cutoff=2, effective offset=0, Forward
LocalDate[] valuationDate = {date(2015, 1, 1), date(2015, 1, 8)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 2, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
int nFixings = FIXING_DATES.length;
// one bumped-up/bumped-down provider pair per individual fixing (for the cutoff rate),
// plus one pair for the compounded period rate
OvernightIndexRates[] mockRatesUp = new OvernightIndexRates[nFixings];
SimpleRatesProvider[] simpleProvUp = new SimpleRatesProvider[nFixings];
OvernightIndexRates[] mockRatesDw = new OvernightIndexRates[nFixings];
SimpleRatesProvider[] simpleProvDw = new SimpleRatesProvider[nFixings];
OvernightIndexRates mockRatesPeriodUp = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvPeriodUp = new SimpleRatesProvider(mockRatesPeriodUp);
OvernightIndexRates mockRatesPeriodDw = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvPeriodDw = new SimpleRatesProvider(mockRatesPeriodDw);
// forwardRatesUp[i][j]: forward rate for fixing j when only fixing i is bumped up
double[][] forwardRatesUp = new double[nFixings][nFixings];
double[][] forwardRatesDw = new double[nFixings][nFixings];
for (int i = 0; i < nFixings; i++) {
mockRatesUp[i] = mock(OvernightIndexRates.class);
simpleProvUp[i] = new SimpleRatesProvider(mockRatesUp[i]);
mockRatesDw[i] = mock(OvernightIndexRates.class);
simpleProvDw[i] = new SimpleRatesProvider(mockRatesDw[i]);
for (int j = 0; j < nFixings; j++) {
double rateForUp = i == j ? FORWARD_RATES[j] + EPS_FD : FORWARD_RATES[j];
double rateForDw = i == j ? FORWARD_RATES[j] - EPS_FD : FORWARD_RATES[j];
forwardRatesUp[i][j] = rateForUp;
forwardRatesDw[i][j] = rateForDw;
}
}
for (int i = 0; i < nFixings; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
when(mockRatesPeriodUp.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
when(mockRatesPeriodDw.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
LocalDate fixingStartDate = USD_FED_FUND.calculateEffectiveFromFixing(FIXING_DATES[i], REF_DATA);
LocalDate fixingEndDate = USD_FED_FUND.calculateMaturityFromEffective(fixingStartDate, REF_DATA);
PointSensitivityBuilder rateSensitivity = OvernightRateSensitivity.ofPeriod(USD_OBS[i], fixingEndDate, 1.0);
when(mockRates.ratePointSensitivity(USD_OBS[i])).thenReturn(rateSensitivity);
for (int j = 0; j < nFixings; ++j) {
when(mockRatesUp[j].rate(USD_OBS[i])).thenReturn(forwardRatesUp[j][i]);
when(mockRatesDw[j].rate(USD_OBS[i])).thenReturn(forwardRatesDw[j][i]);
}
}
// expected compounded rate over the non-cutoff sub-period (fixings 1..4)
double investmentFactor = 1.0;
double afNonCutoff = 0.0;
for (int i = 1; i < 5; i++) {
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNonCutoff += af;
investmentFactor *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactor - 1.0d) / afNonCutoff;
when(mockRates.periodRate(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateCmp);
when(mockRatesPeriodUp.periodRate(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateCmp + EPS_FD);
when(mockRatesPeriodDw.periodRate(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateCmp - EPS_FD);
PointSensitivityBuilder rateSensitivity = OvernightRateSensitivity.ofPeriod(USD_OBS[1], FIXING_FINAL_DATE, 1.0);
when(mockRates.periodRatePointSensitivity(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateSensitivity);
// per-fixing bumped providers keep the period rate unbumped; only the cutoff fixing matters
for (int i = 0; i < nFixings; ++i) {
when(mockRatesUp[i].periodRate(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateCmp);
when(mockRatesDw[i].periodRate(USD_OBS[1], FIXING_FINAL_DATE)).thenReturn(rateCmp);
}
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesPeriodUp.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesPeriodDw.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
// FD sensitivity to each individual fixing (only the cutoff fixing is non-zero)
PointSensitivityBuilder sensitivityBuilderExpected1 = PointSensitivityBuilder.none();
for (int i = 0; i < nFixings; ++i) {
when(mockRatesUp[i].getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesDw[i].getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvUp[i]);
double rateDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvDw[i]);
double cutoffSensitivity = 0.5 * (rateUp - rateDw) / EPS_FD; // [4] is nonzero
LocalDate fixingStartDate = USD_FED_FUND.calculateEffectiveFromFixing(FIXING_DATES[i], REF_DATA);
LocalDate fixingEndDate = USD_FED_FUND.calculateMaturityFromEffective(fixingStartDate, REF_DATA);
sensitivityBuilderExpected1 = cutoffSensitivity == 0.0 ? sensitivityBuilderExpected1
: sensitivityBuilderExpected1.combinedWith(
OvernightRateSensitivity.ofPeriod(USD_OBS[i], fixingEndDate, cutoffSensitivity));
}
// FD sensitivity to the compounded period rate
double ratePeriodUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvPeriodUp);
double ratePeriodDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvPeriodDw);
double periodSensitivity = 0.5 * (ratePeriodUp - ratePeriodDw) / EPS_FD;
PointSensitivityBuilder sensitivityBuilderExpected2 =
OvernightRateSensitivity.ofPeriod(USD_OBS[1], FIXING_FINAL_DATE, periodSensitivity);
PointSensitivityBuilder sensitivityBuilderExpected = sensitivityBuilderExpected1
.combinedWith(sensitivityBuilderExpected2);
PointSensitivityBuilder sensitivityBuilderComputed = OBS_FWD_ONCMP.rateSensitivity(ro,
DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertTrue(sensitivityBuilderComputed.build().normalized().equalWithTolerance(
sensitivityBuilderExpected.build().normalized(), EPS_FD));
}
}
/**
 * No cutoff and one already-fixed ON rate: tests the already-fixed portion with
 * only one fixed ON rate, combined with the compounded forward remainder.
 */
public void rateFedFund0CutOffValuation1() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing 1
LocalDate[] valuationDate = {date(2015, 1, 9), date(2015, 1, 12)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
// time series holds the first two fixings; only fixing 1 lies inside the computation period
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
for (int i = 0; i < 2; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < 2; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = 2; i < USD_OBS.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// accrual factor for the single known fixing
LocalDate fixingknown = FIXING_DATES[1];
LocalDate endDateKnown = USD_FED_FUND.calculateMaturityFromEffective(fixingknown, REF_DATA);
double afKnown = USD_FED_FUND.getDayCount().yearFraction(fixingknown, endDateKnown);
// compound the forward remainder (fixings 2..5)
double investmentFactor = 1.0;
double afNoCutoff = 0.0;
for (int i = 2; i < 6; i++) {
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNoCutoff += af;
investmentFactor *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactor - 1.0d) / afNoCutoff;
when(mockRates.periodRate(USD_OBS[2], FIXING_END_DATE)).thenReturn(rateCmp);
// expected rate chains the known fixing with the forward compounded rate
double rateExpected = ((1.0d + FIXING_RATES[1] * afKnown) * (1.0 + rateCmp * afNoCutoff) - 1.0d)
/ (afKnown + afNoCutoff);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/**
 * No cutoff and one already-fixed ON rate: tests the sensitivity of the
 * already-fixed-plus-forward combination against a finite-difference approximation.
 * Only the forward period rate carries sensitivity; fixed rates do not.
 */
public void rateFedFund0CutOffValuation1Sensitivity() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing 1
LocalDate[] valuationDate = {date(2015, 1, 9), date(2015, 1, 12)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
for (int i = 0; i < 2; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
// bumped providers share the same fixing time series; only the period rate is bumped
OvernightIndexRates mockRatesUp = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvUp = new SimpleRatesProvider(mockRatesUp);
OvernightIndexRates mockRatesDw = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvDw = new SimpleRatesProvider(mockRatesDw);
when(mockRatesUp.getFixings()).thenReturn(tsb.build());
when(mockRatesDw.getFixings()).thenReturn(tsb.build());
double investmentFactor = 1.0;
double afNoCutoff = 0.0;
for (int i = 2; i < 6; i++) {
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNoCutoff += af;
investmentFactor *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactor - 1.0d) / afNoCutoff;
when(mockRates.periodRate(USD_OBS[2], FIXING_END_DATE)).thenReturn(rateCmp);
when(mockRatesUp.periodRate(USD_OBS[2], FIXING_END_DATE)).thenReturn(rateCmp + EPS_FD);
when(mockRatesDw.periodRate(USD_OBS[2], FIXING_END_DATE)).thenReturn(rateCmp - EPS_FD);
PointSensitivityBuilder periodSensitivity = OvernightRateSensitivity.ofPeriod(USD_OBS[2], FIXING_END_DATE, 1.0d);
when(mockRates.periodRatePointSensitivity(USD_OBS[2], FIXING_END_DATE)).thenReturn(periodSensitivity);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesUp.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesDw.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvUp);
double rateDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvDw);
// central difference w.r.t. the compounded period rate
double sensitivityExpected = 0.5 * (rateUp - rateDw) / EPS_FD;
PointSensitivityBuilder sensitivityBuilderExpected =
OvernightRateSensitivity.ofPeriod(USD_OBS[2], FIXING_END_DATE, sensitivityExpected);
PointSensitivityBuilder sensitivityBuilderComputed = OBS_FWD_ONCMP.rateSensitivity(ro,
DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertTrue(sensitivityBuilderComputed.build().normalized().equalWithTolerance(
sensitivityBuilderExpected.build().normalized(), EPS_FD));
}
}
//-------------------------------------------------------------------------
/**
 * No cutoff period and two already-fixed ON rates; ON index is SONIA
 * (same-day publication). Chains the known compounded fixings with the
 * compounded forward remainder.
 */
public void rateSonia0CutOffValuation2() {
// publication=0, cutoff=0, effective offset=0, TS: Fixing 2
LocalDate[] valuationDate = {date(2015, 1, 9), date(2015, 1, 12)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(GBP_SONIA, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(GBP_SONIA);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 3;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(GBP_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < GBP_OBS.length; i++) {
when(mockRates.rate(GBP_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// compound the already-fixed portion (fixings 1..2; fixing 0 precedes the period)
double afKnown = 0.0d;
double investmentFactorKnown = 1.0d;
for (int i = 0; i < lastFixing - 1; i++) {
LocalDate fixingknown = FIXING_DATES[i + 1];
LocalDate endDateKnown = GBP_SONIA.calculateMaturityFromEffective(fixingknown, REF_DATA);
double af = GBP_SONIA.getDayCount().yearFraction(fixingknown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + FIXING_RATES[i + 1] * af;
}
// compound the forward remainder (fixings 3..5)
double afNoCutoff = 0.0d;
double investmentFactorNoCutoff = 1.0d;
for (int i = lastFixing; i < 6; i++) {
LocalDate endDate = GBP_SONIA.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = GBP_SONIA.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNoCutoff += af;
investmentFactorNoCutoff *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactorNoCutoff - 1.0d) / afNoCutoff;
when(mockRates.periodRate(GBP_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp);
double rateExpected = (investmentFactorKnown * (1.0 + rateCmp * afNoCutoff) - 1.0d)
/ (afKnown + afNoCutoff);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/**
 * Tests rate sensitivity against a finite-difference approximation.
 * No cutoff period and two already-fixed ON rates; ON index is SONIA.
 * Only the forward period rate carries sensitivity; fixed rates do not.
 */
public void rateSonia0CutOffValuation2Sensitivity() {
// publication=0, cutoff=0, effective offset=0, TS: Fixing 2
LocalDate[] valuationDate = {date(2015, 1, 9), date(2015, 1, 12)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(GBP_SONIA, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(GBP_SONIA);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 3;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
// bumped providers share the fixings; only the forward period rate is bumped
OvernightIndexRates mockRatesUp = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvUp = new SimpleRatesProvider(mockRatesUp);
OvernightIndexRates mockRatesDw = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvDw = new SimpleRatesProvider(mockRatesDw);
when(mockRatesUp.getFixings()).thenReturn(tsb.build());
when(mockRatesDw.getFixings()).thenReturn(tsb.build());
double afNoCutoff = 0.0d;
double investmentFactorNoCutoff = 1.0d;
for (int i = lastFixing; i < 6; i++) {
LocalDate endDate = GBP_SONIA.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = GBP_SONIA.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNoCutoff += af;
investmentFactorNoCutoff *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactorNoCutoff - 1.0d) / afNoCutoff;
when(mockRates.periodRate(GBP_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp);
when(mockRatesUp.periodRate(GBP_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp + EPS_FD);
when(mockRatesDw.periodRate(GBP_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp - EPS_FD);
OvernightRateSensitivity periodSensitivity = OvernightRateSensitivity.ofPeriod(GBP_OBS[lastFixing], FIXING_DATES[6], 1.0d);
when(mockRates.periodRatePointSensitivity(GBP_OBS[lastFixing], FIXING_DATES[6]))
.thenReturn(periodSensitivity);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesUp.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesDw.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvUp);
double rateDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvDw);
// central difference w.r.t. the compounded forward period rate
double sensitivityExpected = 0.5 * (rateUp - rateDw) / EPS_FD;
OvernightRateSensitivity sensitivityBuilderExpected =
OvernightRateSensitivity.ofPeriod(GBP_OBS[lastFixing], FIXING_DATES[6], sensitivityExpected);
PointSensitivityBuilder sensitivityBuilderComputed = OBS_FWD_ONCMP.rateSensitivity(ro,
DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertTrue(sensitivityBuilderComputed.build().normalized().equalWithTolerance(
sensitivityBuilderExpected.build().normalized(), EPS_FD));
}
}
//-------------------------------------------------------------------------
/**
 * No cutoff period and two already-fixed ON rates; ON index is TOIS
 * (with an effective offset of 1; TN rate). Accrual dates are shifted by the
 * effective offset, so effective/maturity dates are derived from each fixing.
 */
public void rateTois0CutOffValuation2() {
// publication=0, cutoff=0, effective offset=1, TS: Fixing 2
LocalDate[] valuationDate = {date(2015, 1, 9), date(2015, 1, 12)};
// start/end dates are effective dates, offset from the fixing dates
OvernightCompoundedRateComputation ro = OvernightCompoundedRateComputation.of(
CHF_TOIS,
CHF_TOIS.calculateEffectiveFromFixing(FIXING_START_DATE, REF_DATA),
CHF_TOIS.calculateEffectiveFromFixing(FIXING_END_DATE, REF_DATA),
0,
REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(CHF_TOIS);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 3;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(CHF_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < CHF_OBS.length; i++) {
when(mockRates.rate(CHF_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// compound the already-fixed portion, using effective/maturity dates per fixing
double afKnown = 0.0d;
double investmentFactorKnown = 1.0d;
for (int i = 1; i < lastFixing; i++) {
LocalDate fixingknown = FIXING_DATES[i];
LocalDate startDateKnown = CHF_TOIS.calculateEffectiveFromFixing(fixingknown, REF_DATA);
LocalDate endDateKnown = CHF_TOIS.calculateMaturityFromEffective(startDateKnown, REF_DATA);
double af = CHF_TOIS.getDayCount().yearFraction(startDateKnown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + af * FIXING_RATES[i];
}
// compound the forward remainder (fixings 3..5)
double afNoCutoff = 0.0d;
double investmentFactorNoCutoff = 1.0d;
for (int i = lastFixing; i < 6; i++) {
LocalDate fixing = FIXING_DATES[i];
LocalDate startDate = CHF_TOIS.calculateEffectiveFromFixing(fixing, REF_DATA);
LocalDate endDate = CHF_TOIS.calculateMaturityFromEffective(startDate, REF_DATA);
double af = CHF_TOIS.getDayCount().yearFraction(startDate, endDate);
afNoCutoff += af;
investmentFactorNoCutoff *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactorNoCutoff - 1.0d) / afNoCutoff;
LocalDate dateMat = CHF_TOIS.calculateMaturityFromFixing(FIXING_DATES[5], REF_DATA);
OvernightIndexObservation obs = OvernightIndexObservation.of(CHF_TOIS, FIXING_DATES[lastFixing], REF_DATA);
when(mockRates.periodRate(obs, dateMat)).thenReturn(rateCmp);
double rateExpected = (investmentFactorKnown * (1.0 + rateCmp * afNoCutoff) - 1.0d)
/ (afKnown + afNoCutoff);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/**
 * Tests rate sensitivity against a finite-difference approximation.
 * No cutoff period and two already-fixed ON rates; ON index is TOIS
 * (with an effective offset of 1; TN rate).
 */
public void rateTois0CutOffValuation2Sensitivity() {
// publication=0, cutoff=0, effective offset=1, TS: Fixing 2
LocalDate[] valuationDate = {date(2015, 1, 9), date(2015, 1, 12)};
OvernightCompoundedRateComputation ro = OvernightCompoundedRateComputation.of(
CHF_TOIS,
CHF_TOIS.calculateEffectiveFromFixing(FIXING_START_DATE, REF_DATA),
CHF_TOIS.calculateEffectiveFromFixing(FIXING_END_DATE, REF_DATA),
0,
REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(CHF_TOIS);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 3;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
// bumped providers share the fixings; only the forward period rate is bumped
OvernightIndexRates mockRatesUp = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvUp = new SimpleRatesProvider(mockRatesUp);
OvernightIndexRates mockRatesDw = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvDw = new SimpleRatesProvider(mockRatesDw);
when(mockRatesUp.getFixings()).thenReturn(tsb.build());
when(mockRatesDw.getFixings()).thenReturn(tsb.build());
double afNoCutoff = 0.0d;
double investmentFactorNoCutoff = 1.0d;
for (int i = lastFixing; i < 6; i++) {
LocalDate fixing = FIXING_DATES[i];
LocalDate startDate = CHF_TOIS.calculateEffectiveFromFixing(fixing, REF_DATA);
LocalDate endDate = CHF_TOIS.calculateMaturityFromEffective(startDate, REF_DATA);
double af = CHF_TOIS.getDayCount().yearFraction(startDate, endDate);
afNoCutoff += af;
investmentFactorNoCutoff *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactorNoCutoff - 1.0d) / afNoCutoff;
LocalDate dateMat = CHF_TOIS.calculateMaturityFromFixing(FIXING_DATES[5], REF_DATA);
OvernightIndexObservation obs = OvernightIndexObservation.of(CHF_TOIS, FIXING_DATES[lastFixing], REF_DATA);
when(mockRates.periodRate(obs, dateMat)).thenReturn(rateCmp);
when(mockRatesUp.periodRate(obs, dateMat)).thenReturn(rateCmp + EPS_FD);
when(mockRatesDw.periodRate(obs, dateMat)).thenReturn(rateCmp - EPS_FD);
OvernightRateSensitivity periodSensitivity = OvernightRateSensitivity.ofPeriod(obs, dateMat, 1.0d);
when(mockRates.periodRatePointSensitivity(obs, dateMat)).thenReturn(periodSensitivity);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesUp.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesDw.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvUp);
double rateDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvDw);
// central difference w.r.t. the compounded forward period rate
double sensitivityExpected = 0.5 * (rateUp - rateDw) / EPS_FD;
OvernightRateSensitivity sensitivityBuilderExpected =
OvernightRateSensitivity.ofPeriod(obs, dateMat, sensitivityExpected);
PointSensitivityBuilder sensitivityBuilderComputed = OBS_FWD_ONCMP.rateSensitivity(ro,
DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertTrue(sensitivityBuilderComputed.build().normalized().equalWithTolerance(
sensitivityBuilderExpected.build().normalized(), EPS_FD));
}
}
//-------------------------------------------------------------------------
/** No cutoff and two already fixed ON rate. ON index is Fed Fund. */
public void rateFedFund0CutOffValuation2() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing 2
// Valuation on and after the publication date of the last stored fixing.
LocalDate[] valuationDate = {date(2015, 1, 12), date(2015, 1, 13)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
// Time series holds the first three fixings; later dates are forward-looking.
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 3;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
// Stub individual ON rates: known fixings first, forward rates afterwards.
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < USD_OBS.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// Accrual factor and investment factor over the already-fixed sub-period
// (fixings with indices 1..lastFixing-1).
double afKnown = 0.0d;
double investmentFactorKnown = 1.0d;
for (int i = 0; i < lastFixing - 1; i++) {
LocalDate fixingknown = FIXING_DATES[i + 1];
LocalDate endDateKnown = USD_FED_FUND.calculateMaturityFromEffective(fixingknown, REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(fixingknown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + FIXING_RATES[i + 1] * af;
}
// Compounded factor over the remaining (forward) period; no rate cutoff applies.
double investmentFactor = 1.0;
double afNoCutoff = 0.0;
for (int i = lastFixing; i < 6; i++) {
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNoCutoff += af;
investmentFactor *= 1.0d + af * FORWARD_RATES[i];
}
// The pricer is expected to query a single period rate for the forward part.
double rateCmp = (investmentFactor - 1.0d) / afNoCutoff;
when(mockRates.periodRate(USD_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp);
// Expected rate: compose the fixed and forward investment factors over the full accrual.
double rateExpected = (investmentFactorKnown * (1.0 + rateCmp * afNoCutoff) - 1.0d)
/ (afKnown + afNoCutoff);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/** Test rate sensitivity against FD approximation.
* No cutoff and two already fixed ON rate. ON index is Fed Fund. */
public void rateFedFund0CutOffValuation2Sensitivity() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing 2
LocalDate[] valuationDate = {date(2015, 1, 12), date(2015, 1, 13)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
// Time series holds the first three fixings; the rest of the period is forward-looking.
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 3;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
// Bumped-up and bumped-down providers used to form the central finite difference.
OvernightIndexRates mockRatesUp = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvUp = new SimpleRatesProvider(mockRatesUp);
OvernightIndexRates mockRatesDw = mock(OvernightIndexRates.class);
SimpleRatesProvider simpleProvDw = new SimpleRatesProvider(mockRatesDw);
when(mockRatesUp.getFixings()).thenReturn(tsb.build());
when(mockRatesDw.getFixings()).thenReturn(tsb.build());
// Compounded factor over the forward period (no cutoff).
double investmentFactor = 1.0;
double afNoCutoff = 0.0;
for (int i = lastFixing; i < 6; i++) {
LocalDate endDate = USD_FED_FUND.calculateMaturityFromEffective(FIXING_DATES[i], REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(FIXING_DATES[i], endDate);
afNoCutoff += af;
investmentFactor *= 1.0d + af * FORWARD_RATES[i];
}
double rateCmp = (investmentFactor - 1.0d) / afNoCutoff;
// Base provider returns the period rate; bumped providers shift it by +/-EPS_FD.
when(mockRates.periodRate(USD_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp);
when(mockRatesUp.periodRate(USD_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp + EPS_FD);
when(mockRatesDw.periodRate(USD_OBS[lastFixing], FIXING_DATES[6])).thenReturn(rateCmp - EPS_FD);
// Unit point sensitivity to the forward period rate, returned by the base provider.
OvernightRateSensitivity periodSensitivity =
OvernightRateSensitivity.ofPeriod(USD_OBS[lastFixing], FIXING_DATES[6], 1.0d);
when(mockRates.periodRatePointSensitivity(USD_OBS[lastFixing], FIXING_DATES[6])).thenReturn(periodSensitivity);
for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesUp.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
when(mockRatesDw.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateUp = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvUp);
double rateDw = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProvDw);
// Central finite-difference estimate of the sensitivity to the period rate.
double sensitivityExpected = 0.5 * (rateUp - rateDw) / EPS_FD;
OvernightRateSensitivity sensitivityBuilderExpected =
OvernightRateSensitivity.ofPeriod(USD_OBS[lastFixing], FIXING_DATES[6], sensitivityExpected);
PointSensitivityBuilder sensitivityBuilderComputed = OBS_FWD_ONCMP.rateSensitivity(ro,
DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertTrue(sensitivityBuilderComputed.build().normalized().equalWithTolerance(
sensitivityBuilderExpected.build().normalized(), EPS_FD));
}
}
/** No cutoff, all ON rates already fixed. Time series up to 14-Jan (last fixing date used). */
public void rateFedFund0CutOffValuationEndTs14() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing all
LocalDate[] valuationDate = {date(2015, 1, 15), date(2015, 1, 16), date(2015, 1, 17)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
// All six fixings of the period are already present in the time series.
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 6;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < USD_OBS.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// Expected rate: compound the known fixings (indices 1..5) over their accrual factors.
double afKnown = 0.0d;
double investmentFactorKnown = 1.0d;
for (int i = 0; i < 5; i++) {
LocalDate fixingknown = FIXING_DATES[i + 1];
LocalDate endDateKnown = USD_FED_FUND.calculateMaturityFromEffective(fixingknown, REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(fixingknown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + FIXING_RATES[i + 1] * af;
}
double rateExpected = (investmentFactorKnown - 1.0d) / afKnown;
// The rate is fully determined by the fixings, whatever the valuation date.
for (int loopvaldate = 0; loopvaldate < valuationDate.length; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/**
 * Test rate sensitivity. No cutoff, all ON rates already fixed. Thus expected sensitivity is none.
 * Time series up to 14-Jan (last fixing date used).
 */
public void rateFedFund0CutOffValuationEndTs14Sensitivity() {
  // publication=1, cutoff=0, effective offset=0, TS: Fixing all
  LocalDate[] valuationDates = {date(2015, 1, 15), date(2015, 1, 16), date(2015, 1, 17)};
  OvernightCompoundedRateComputation computation =
      OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
  OvernightIndexRates ratesMock = mock(OvernightIndexRates.class);
  when(ratesMock.getIndex()).thenReturn(USD_FED_FUND);
  SimpleRatesProvider provider = new SimpleRatesProvider(ratesMock);
  // every fixing of the period is already in the time series
  LocalDateDoubleTimeSeriesBuilder seriesBuilder = LocalDateDoubleTimeSeries.builder();
  for (int i = 0; i < 6; i++) {
    seriesBuilder.put(FIXING_DATES[i], FIXING_RATES[i]);
  }
  when(ratesMock.getFixings()).thenReturn(seriesBuilder.build());
  // a fully-fixed period has no remaining sensitivity to the forward curve
  for (LocalDate valuation : valuationDates) {
    when(ratesMock.getValuationDate()).thenReturn(valuation);
    PointSensitivityBuilder computed = OBS_FWD_ONCMP.rateSensitivity(
        computation, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, provider);
    assertEquals(computed, PointSensitivityBuilder.none());
  }
}
/** No cutoff, all ON rates already fixed. Time series up to 15-Jan (one day after the last fixing date). */
public void rateFedFund0CutOffValuationEndTs15() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing all
LocalDate[] valuationDate = {date(2015, 1, 16), date(2015, 1, 17)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
// Seven entries: every fixing of the period plus one extra day past the period.
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 7;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < USD_OBS.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// Expected rate uses only fixings 1..5; the extra series entry is irrelevant.
double afKnown = 0.0d;
double investmentFactorKnown = 1.0d;
for (int i = 0; i < 5; i++) {
LocalDate fixingknown = FIXING_DATES[i + 1];
LocalDate endDateKnown = USD_FED_FUND.calculateMaturityFromEffective(fixingknown, REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(fixingknown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + FIXING_RATES[i + 1] * af;
}
double rateExpected = (investmentFactorKnown - 1.0d) / afKnown;
for (int loopvaldate = 0; loopvaldate < valuationDate.length; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/**
 * Test rate sensitivity. No cutoff, all ON rates already fixed. Thus expected sensitivity is none.
 * Time series up to 15-Jan (one day after the last fixing date).
 */
public void rateFedFund0CutOffValuationEndTs15Sensitivity() {
  // publication=1, cutoff=0, effective offset=0, TS: Fixing all
  LocalDate[] valuationDates = {date(2015, 1, 16), date(2015, 1, 17)};
  OvernightCompoundedRateComputation computation =
      OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
  OvernightIndexRates ratesMock = mock(OvernightIndexRates.class);
  when(ratesMock.getIndex()).thenReturn(USD_FED_FUND);
  SimpleRatesProvider provider = new SimpleRatesProvider(ratesMock);
  // seven entries: every fixing of the period plus one extra day
  LocalDateDoubleTimeSeriesBuilder seriesBuilder = LocalDateDoubleTimeSeries.builder();
  for (int i = 0; i < 7; i++) {
    seriesBuilder.put(FIXING_DATES[i], FIXING_RATES[i]);
  }
  when(ratesMock.getFixings()).thenReturn(seriesBuilder.build());
  // a fully-fixed period has no remaining sensitivity to the forward curve
  for (LocalDate valuation : valuationDates) {
    when(ratesMock.getValuationDate()).thenReturn(valuation);
    PointSensitivityBuilder computed = OBS_FWD_ONCMP.rateSensitivity(
        computation, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, provider);
    assertEquals(computed, PointSensitivityBuilder.none());
  }
}
/** Two days cutoff, all ON rates already fixed. */
public void rateFedFund2CutOffValuationEnd() {
// publication=1, cutoff=2, effective offset=0, TS: Fixing all
LocalDate[] valuationDate = {date(2015, 1, 14), date(2015, 1, 15), date(2015, 1, 16)};
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 2, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(mockRates);
// With a 2-day cutoff the last required rate is already fixed: five fixings stored.
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 5;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < USD_OBS.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// Compound fixings 1..4 normally.
double afKnown = 0.0d;
double investmentFactorKnown = 1.0d;
for (int i = 0; i < 4; i++) {
LocalDate fixingknown = FIXING_DATES[i + 1];
LocalDate endDateKnown = USD_FED_FUND.calculateMaturityFromEffective(fixingknown, REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(fixingknown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + FIXING_RATES[i + 1] * af;
}
// Last sub-period: accrual factor of fixing 5 but the RATE of fixing 4,
// because the 2-day cutoff repeats the penultimate rate.
LocalDate fixingknown = FIXING_DATES[5];
LocalDate endDateKnown = USD_FED_FUND.calculateMaturityFromEffective(fixingknown, REF_DATA);
double af = USD_FED_FUND.getDayCount().yearFraction(fixingknown, endDateKnown);
afKnown += af;
investmentFactorKnown *= 1.0d + FIXING_RATES[4] * af; //Cutoff
double rateExpected = (investmentFactorKnown - 1.0d) / afKnown;
for (int loopvaldate = 0; loopvaldate < 3; loopvaldate++) {
when(mockRates.getValuationDate()).thenReturn(valuationDate[loopvaldate]);
double rateComputed = OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv);
assertEquals(rateExpected, rateComputed, TOLERANCE_RATE);
}
}
/** Test rate sensitivity. Two days cutoff, all ON rates already fixed. Thus none is expected. */
public void rateFedFund2CutOffValuationEndSensitivity() {
  // publication=1, cutoff=2, effective offset=0, TS: Fixing all
  LocalDate[] valuationDates = {date(2015, 1, 14), date(2015, 1, 15), date(2015, 1, 16)};
  OvernightCompoundedRateComputation computation =
      OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 2, REF_DATA);
  OvernightIndexRates ratesMock = mock(OvernightIndexRates.class);
  when(ratesMock.getIndex()).thenReturn(USD_FED_FUND);
  SimpleRatesProvider provider = new SimpleRatesProvider(ratesMock);
  // with the 2-day cutoff the last required rate is already fixed: five fixings stored
  LocalDateDoubleTimeSeriesBuilder seriesBuilder = LocalDateDoubleTimeSeries.builder();
  for (int i = 0; i < 5; i++) {
    seriesBuilder.put(FIXING_DATES[i], FIXING_RATES[i]);
  }
  when(ratesMock.getFixings()).thenReturn(seriesBuilder.build());
  // nothing forward-looking remains, so the sensitivity must be none
  for (LocalDate valuation : valuationDates) {
    when(ratesMock.getValuationDate()).thenReturn(valuation);
    PointSensitivityBuilder computed = OBS_FWD_ONCMP.rateSensitivity(
        computation, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, provider);
    assertEquals(computed, PointSensitivityBuilder.none());
  }
}
/** One past fixing missing. Checking the error thrown. */
public void rateFedFund0CutOffValuation2MissingFixing() {
// publication=1, cutoff=0, effective offset=0, TS: Fixing 2
// NOTE(review): the comment above and the method name say cutoff=0, but the
// computation below is built with a 2-day cutoff — confirm which is intended.
// Valuation one day after the publication of the last stored fixing: the pricer
// appears to need the fixing for FIXING_DATES[2], which is absent from the series.
LocalDate valuationDate = date(2015, 1, 13);
OvernightCompoundedRateComputation ro =
OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 2, REF_DATA);
OvernightIndexRates mockRates = mock(OvernightIndexRates.class);
when(mockRates.getIndex()).thenReturn(USD_FED_FUND);
SimpleRatesProvider simpleProv = new SimpleRatesProvider(valuationDate, mockRates);
when(mockRates.getValuationDate()).thenReturn(valuationDate);
// Only the first two fixings are stored.
LocalDateDoubleTimeSeriesBuilder tsb = LocalDateDoubleTimeSeries.builder();
int lastFixing = 2;
for (int i = 0; i < lastFixing; i++) {
tsb.put(FIXING_DATES[i], FIXING_RATES[i]);
}
when(mockRates.getFixings()).thenReturn(tsb.build());
for (int i = 0; i < lastFixing; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FIXING_RATES[i]);
}
for (int i = lastFixing; i < USD_OBS.length; i++) {
when(mockRates.rate(USD_OBS[i])).thenReturn(FORWARD_RATES[i]);
}
// Both the rate and its sensitivity must fail with a PricingException.
assertThrows(
() -> OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv),
PricingException.class);
assertThrows(
() -> OBS_FWD_ONCMP.rateSensitivity(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, simpleProv),
PricingException.class);
}
//-------------------------------------------------------------------------
// Interpolator used to build the zero-rate curves of the parameter sensitivity tests.
private static final CurveInterpolator INTERPOLATOR = CurveInterpolators.DOUBLE_QUADRATIC;
// Time series containing every available fixing, shared by the parameter sensitivity tests.
private static final LocalDateDoubleTimeSeries TIME_SERIES;
static {
LocalDateDoubleTimeSeriesBuilder builder = LocalDateDoubleTimeSeries.builder();
for (int i = 0; i < FIXING_DATES.length; i++) {
builder.put(FIXING_DATES[i], FIXING_RATES[i]);
}
TIME_SERIES = builder.build();
}
// Finite-difference calculator producing the benchmark sensitivities (shift EPS_FD).
private static final RatesFiniteDifferenceSensitivityCalculator CAL_FD =
new RatesFiniteDifferenceSensitivityCalculator(EPS_FD);
/**
 * Test parameter sensitivity with fd calculator. No cutoff.
 * <p>
 * Compares the pricer's analytic parameter sensitivity with a finite-difference
 * benchmark, for two valuation dates.
 */
public void rateNoCutOffForwardParameterSensitivity() { // publication=1, cutoff=0, effective offset=0, Forward
  LocalDate[] valuationDate = {date(2015, 1, 1), date(2015, 1, 8)};
  DoubleArray time_usd = DoubleArray.of(0.0, 0.5, 1.0, 2.0, 5.0, 10.0);
  DoubleArray rate_usd = DoubleArray.of(0.0100, 0.0110, 0.0115, 0.0130, 0.0135, 0.0135);
  OvernightCompoundedRateComputation ro =
      OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 0, REF_DATA);
  // Hoisted out of the loop: the curve does not depend on the valuation date.
  Curve fedFundCurve = InterpolatedNodalCurve.of(
      Curves.zeroRates("USD-Fed-Fund", ACT_ACT_ISDA), time_usd, rate_usd, INTERPOLATOR);
  for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder(valuationDate[loopvaldate])
        .overnightIndexCurve(USD_FED_FUND, fedFundCurve, TIME_SERIES)
        .build();
    // Analytic sensitivity from the pricer, projected onto curve parameters.
    PointSensitivityBuilder sensitivityBuilderComputed =
        OBS_FWD_ONCMP.rateSensitivity(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, prov);
    CurrencyParameterSensitivities parameterSensitivityComputed =
        prov.parameterSensitivity(sensitivityBuilderComputed.build());
    // Finite-difference benchmark of the same rate function.
    CurrencyParameterSensitivities parameterSensitivityExpected =
        CAL_FD.sensitivity(prov, (p) -> CurrencyAmount.of(USD_FED_FUND.getCurrency(),
            OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, (p))));
    assertTrue(parameterSensitivityComputed.equalWithTolerance(parameterSensitivityExpected, EPS_FD * 10.0));
  }
}
/**
 * Test parameter sensitivity with fd calculator. Two days cutoff.
 * <p>
 * Compares the pricer's analytic parameter sensitivity with a finite-difference
 * benchmark, for two valuation dates.
 */
public void rate2CutOffForwardParameterSensitivity() { // publication=1, cutoff=2, effective offset=0, Forward
  LocalDate[] valuationDate = {date(2015, 1, 1), date(2015, 1, 8)};
  DoubleArray time_usd = DoubleArray.of(0.0, 0.5, 1.0, 2.0, 5.0, 10.0);
  DoubleArray rate_usd = DoubleArray.of(0.0100, 0.0110, 0.0115, 0.0130, 0.0135, 0.0135);
  OvernightCompoundedRateComputation ro =
      OvernightCompoundedRateComputation.of(USD_FED_FUND, FIXING_START_DATE, FIXING_END_DATE, 2, REF_DATA);
  // Hoisted out of the loop: the curve does not depend on the valuation date.
  Curve fedFundCurve = InterpolatedNodalCurve.of(
      Curves.zeroRates("USD-Fed-Fund", ACT_ACT_ISDA), time_usd, rate_usd, INTERPOLATOR);
  for (int loopvaldate = 0; loopvaldate < 2; loopvaldate++) {
    ImmutableRatesProvider prov = ImmutableRatesProvider.builder(valuationDate[loopvaldate])
        .overnightIndexCurve(USD_FED_FUND, fedFundCurve, TIME_SERIES)
        .build();
    // Analytic sensitivity from the pricer, projected onto curve parameters.
    PointSensitivityBuilder sensitivityBuilderComputed =
        OBS_FWD_ONCMP.rateSensitivity(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, prov);
    CurrencyParameterSensitivities parameterSensitivityComputed =
        prov.parameterSensitivity(sensitivityBuilderComputed.build());
    // Finite-difference benchmark of the same rate function.
    CurrencyParameterSensitivities parameterSensitivityExpected =
        CAL_FD.sensitivity(prov, (p) -> CurrencyAmount.of(USD_FED_FUND.getCurrency(),
            OBS_FWD_ONCMP.rate(ro, DUMMY_ACCRUAL_START_DATE, DUMMY_ACCRUAL_END_DATE, (p))));
    assertTrue(parameterSensitivityComputed.equalWithTolerance(parameterSensitivityExpected, EPS_FD * 10.0));
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.internal.util.rb_trees;
import org.apache.uima.util.impl.Constants;
/**
 * map&lt;int, int&gt; uses separate objects (IntRBTNode) as nodes
*
* See the {@link org.apache.uima.internal.util.rb_trees.RedBlackTree RedBlackTree} class. This is a
* specialized instance with ints as elements.
*
*
*/
public class IntRedBlackTree {

  // A note on the implementation: we closely follow CLR, down to
  // function and variable names. Places where we depart from CLR are
  // specifically commented in the code. The main difference is that
  // we don't use a NIL sentinel, but null pointers instead. This
  // makes the code somewhat less elegant in places. The meat of the
  // implementation is in IntRBTNode.

  // The root node of the tree; null iff the tree is empty.
  // Package access: the rebalancing code in IntRBTNode updates it directly.
  IntRBTNode root = null;

  // A counter to keep track of the size of the tree.
  int size = 0;

  /** Default constructor, does nothing. */
  public IntRedBlackTree() {
  }

  /**
   * @return the number of key/value pairs currently in the tree.
   */
  public final int size() {
    return this.size;
  }

  // ////////////////////////////////////////////////////////////////
  // Map interface methods //
  // ////////////////////////////////////////////////////////////////

  /** Removes all entries; afterwards {@link #isEmpty()} is {@code true}. */
  public final void clear() {
    this.root = null;
    this.size = 0;
  }

  /**
   * @param key the key to look for
   * @return {@code true} iff an entry with the given key exists.
   */
  public final boolean containsKey(int key) {
    return IntRBTNode.find(this.root, key) != null;
  }

  /**
   * Linear scan over all values in key order.
   *
   * @param o the value to look for
   * @return {@code true} iff some entry maps to {@code o}.
   */
  public final boolean containsValue(int o) {
    IntRBTIterator it = this.iterator();
    while (it.hasNext()) {
      if (o == it.next()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Insert an object with a given key into the tree.
   *
   * @param key The key under which the int is to be inserted.
   * @param el The int to be inserted.
   * @return <code>true</code>, if the key was not in the tree; <code>false</code>, if an element
   *         with that key was already in the tree. The old element is overwritten with the new one.
   */
  public final boolean put(int key, int el) {
    if (put(new IntRBTNode(key, el))) {
      this.size++;
      return true;
    }
    return false;
  }

  /**
   * Delete the node with the given key from the tree, if it exists.
   *
   * @param key The key to be deleted.
   * @return the value that was associated with {@code key}.
   * @throws java.util.NoSuchElementException if the key is not in the tree.
   */
  public final int remove(int key) throws java.util.NoSuchElementException {
    IntRBTNode node = IntRBTNode.find(this.root, key);
    int ret;
    if (node != null) {
      ret = node.element;
      // Grab the value and adjust the size before delete() rebalances the tree.
      this.size--;
      IntRBTNode.delete(this, node);
    } else {
      throw new java.util.NoSuchElementException();
    }
    return ret;
  }

  /**
   * @param key the key to look up
   * @return the value associated with {@code key}.
   * @throws java.util.NoSuchElementException if the key is not in the tree.
   */
  public final int get(int key) throws java.util.NoSuchElementException {
    // No separate empty-tree check needed: IntRBTNode.find() accepts a null root
    // (containsKey relies on the same behavior) and returns null in that case.
    IntRBTNode node = IntRBTNode.find(this.root, key);
    if (node == null) {
      throw new java.util.NoSuchElementException();
    }
    return node.element;
  }

  /**
   * @return {@code true} iff the tree contains no entries.
   */
  public final boolean isEmpty() {
    return (this.root == null);
  }

  /**
   * @return the keys of all entries, in the order produced by {@code IntRBTNode.keys}.
   */
  public final int[] keySet() {
    int[] set = new int[this.size];
    if (this.root != null) {
      this.root.keys(0, set);
    }
    return set;
  }

  /** Insert a IntRBTNode into the tree. Only used internally. */
  private final boolean put(IntRBTNode node) {
    return IntRBTNode.insert(this, node);
  }

  /**
   * @return the value of the entry with the smallest key.
   *         NOTE(review): throws a NullPointerException if the tree is empty —
   *         confirm that callers guarantee non-emptiness.
   */
  public final int getFirst() {
    return this.getFirstNode().element;
  }

  // Locate the leftmost (smallest-key) node, or null if the tree is empty.
  private final IntRBTNode getFirstNode() {
    if (this.root == null) {
      return null;
    }
    IntRBTNode x = this.root;
    while (x.left != null) {
      x = x.left;
    }
    return x;
  }

  /**
   * @return an iterator over the values of the tree, in ascending key order.
   */
  public IntRBTIterator iterator() {
    return new IntRBTIterator(this);
  }

  /** Iterator over the values of the tree; removal is not supported. */
  public static class IntRBTIterator {

    // The node whose value the next call to next() returns; null when exhausted.
    IntRBTNode current;

    IntRBTIterator(IntRedBlackTree tree) {
      this.current = tree.getFirstNode();
    }

    public boolean hasNext() {
      return (this.current != null);
    }

    public int next() {
      if (this.current == null) {
        throw new java.util.NoSuchElementException();
      }
      int ret = this.current.element;
      this.current = this.current.successor();
      return ret;
    }

    /** Not supported; always throws. */
    public void remove() {
      throw new UnsupportedOperationException();
    }
  }

  /** Debugging aid. */
  public void printKeys() {
    if (this.root != null) {
      this.root.printKeys(0);
    }
    System.out.println("Size: " + this.size);
  }

  /**
   * Provides an array representation of the IntRedBlackTree. See
   * {@link org.apache.uima.internal.util.rb_trees.IntRBTArray IntRBTArray} for the memory layout of
   * the array. Note that the red-black information is lost in the translation. The resulting array
   * is only meant to be read, not grown. The array is meant as input to construct an
   * {@link org.apache.uima.internal.util.rb_trees.IntRBTArray IntRBTArray} object.
   *
   * @param offset
   *          An offset for internal addressing. If <code>offset &gt; 0</code>, the addresses
   *          generated for right daughters in two-daughter nodes are shifted to the right. This is
   *          useful if the resulting array will be copied to a certain <code>offset</code> position
   *          in a different array.
   * @return The resulting array representation.
   */
  public int[] toArray(int offset) {
    if (this.root == null) {
      return Constants.EMPTY_INT_ARRAY;
    }
    return this.root.toArray(offset);
  }

  /**
   * @return a copy of this tree; nodes are duplicated via {@code IntRBTNode.copyNode}.
   */
  public IntRedBlackTree copy() {
    IntRedBlackTree c = new IntRedBlackTree();
    c.root = (null == root) ? null : root.copyNode(null);
    c.size = size;
    return c;
  }
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.lookup;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.metamx.common.ISE;
import com.metamx.common.StringUtils;
import io.druid.concurrent.Execs;
import io.druid.jackson.DefaultObjectMapper;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.TimeUnit;
public class LookupReferencesManagerTest
{
// Number of worker threads used by the concurrency tests.
private static final int CONCURRENT_THREADS = 16;
// Instance under test; recreated and started in setUp() for every test.
LookupReferencesManager lookupReferencesManager;
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
// Mapper used to register the lookup factory subtypes the manager (de)serializes.
ObjectMapper mapper = new DefaultObjectMapper();
// Executor shared by the concurrency tests; shut down in tearDown().
private final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Execs.multiThreaded(
CONCURRENT_THREADS,
"hammer-time-%s"
));
@Before
public void setUp() throws IOException
{
// Register the concrete factory type so the manager can deserialize lookup specs.
mapper.registerSubtypes(MapLookupExtractorFactory.class);
lookupReferencesManager = new LookupReferencesManager(
new LookupConfig(Files.createTempDir().getAbsolutePath()),
mapper
);
// A freshly-constructed manager is closed; start() must open it.
Assert.assertTrue("must be closed before start call", lookupReferencesManager.isClosed());
lookupReferencesManager.start();
Assert.assertFalse("must start after start call", lookupReferencesManager.isClosed());
}
@After
public void tearDown()
{
// Stopping must transition the manager to the closed state.
lookupReferencesManager.stop();
Assert.assertTrue("stop call should close it", lookupReferencesManager.isClosed());
executorService.shutdownNow();
}
// Reading from a stopped manager must fail fast with an ISE.
@Test(expected = ISE.class)
public void testGetExceptionWhenClosed()
{
lookupReferencesManager.stop();
lookupReferencesManager.get("test");
}
// Registering a lookup on a stopped manager must fail fast with an ISE.
@Test(expected = ISE.class)
public void testAddExceptionWhenClosed()
{
lookupReferencesManager.stop();
lookupReferencesManager.put("test", EasyMock.createMock(LookupExtractorFactory.class));
}
@Test
public void testPutGetRemove()
{
  // a full lifecycle calls start() exactly once and close() exactly once
  LookupExtractorFactory factory = EasyMock.createMock(LookupExtractorFactory.class);
  EasyMock.expect(factory.start()).andReturn(true).once();
  EasyMock.expect(factory.close()).andReturn(true).once();
  EasyMock.replay(factory);

  Assert.assertNull(lookupReferencesManager.get("test"));
  lookupReferencesManager.put("test", factory);
  Assert.assertEquals(factory, lookupReferencesManager.get("test"));
  Assert.assertTrue(lookupReferencesManager.remove("test"));
  Assert.assertNull(lookupReferencesManager.get("test"));
}
@Test
public void testCloseIsCalledAfterStopping() throws IOException
{
  // strict mock: start() must precede close(), each invoked exactly once
  LookupExtractorFactory factory = EasyMock.createStrictMock(LookupExtractorFactory.class);
  EasyMock.expect(factory.start()).andReturn(true).once();
  EasyMock.expect(factory.close()).andReturn(true).once();
  EasyMock.replay(factory);

  lookupReferencesManager.put("testMock", factory);
  // stopping the manager is expected to close every registered factory
  lookupReferencesManager.stop();
  EasyMock.verify(factory);
}
@Test
public void testCloseIsCalledAfterRemove() throws IOException
{
  // strict mock: start() must precede close(), each invoked exactly once
  LookupExtractorFactory factory = EasyMock.createStrictMock(LookupExtractorFactory.class);
  EasyMock.expect(factory.start()).andReturn(true).once();
  EasyMock.expect(factory.close()).andReturn(true).once();
  EasyMock.replay(factory);

  lookupReferencesManager.put("testMock", factory);
  // removing a registered lookup is expected to close its factory
  lookupReferencesManager.remove("testMock");
  EasyMock.verify(factory);
}
@Test
public void testRemoveInExisting()
{
// Removing a lookup that was never registered reports failure, not an exception.
Assert.assertFalse(lookupReferencesManager.remove("notThere"));
}
@Test
public void testGetNotThere()
{
// Fetching an unknown lookup name returns null rather than throwing.
Assert.assertNull(lookupReferencesManager.get("notThere"));
}
@Test
public void testAddingWithSameLookupName()
{
LookupExtractorFactory lookupExtractorFactory = EasyMock.createNiceMock(LookupExtractorFactory.class);
EasyMock.expect(lookupExtractorFactory.start()).andReturn(true).once();
LookupExtractorFactory lookupExtractorFactory2 = EasyMock.createNiceMock(LookupExtractorFactory.class);
EasyMock.expect(lookupExtractorFactory2.start()).andReturn(true).times(2);
EasyMock.replay(lookupExtractorFactory, lookupExtractorFactory2);
// First registration wins; a second put under the same name reports false.
Assert.assertTrue(lookupReferencesManager.put("testName", lookupExtractorFactory));
Assert.assertFalse(lookupReferencesManager.put("testName", lookupExtractorFactory2));
ImmutableMap<String, LookupExtractorFactory> extractorImmutableMap = ImmutableMap.of(
"testName",
lookupExtractorFactory2
);
// The assertion below expects the ORIGINAL factory to still be registered after the
// bulk put, i.e. the map-based put does not replace existing entries.
// NOTE(review): confirm against LookupReferencesManager.put(Map) semantics.
lookupReferencesManager.put(extractorImmutableMap);
Assert.assertEquals(lookupExtractorFactory, lookupReferencesManager.get("testName"));
}
@Test
public void testAddLookupsThenGetAll()
{
LookupExtractorFactory lookupExtractorFactory = EasyMock.createNiceMock(LookupExtractorFactory.class);
EasyMock.expect(lookupExtractorFactory.start()).andReturn(true).once();
LookupExtractorFactory lookupExtractorFactory2 = EasyMock.createNiceMock(LookupExtractorFactory.class);
EasyMock.expect(lookupExtractorFactory2.start()).andReturn(true).once();
EasyMock.replay(lookupExtractorFactory, lookupExtractorFactory2);
ImmutableMap<String, LookupExtractorFactory> extractorImmutableMap = ImmutableMap.of(
"name1",
lookupExtractorFactory,
"name2",
lookupExtractorFactory2
);
lookupReferencesManager.put(extractorImmutableMap);
Assert.assertEquals(extractorImmutableMap, lookupReferencesManager.getAll());
}
@Test(expected = ISE.class)
public void testExceptionWhenStartFail()
{
LookupExtractorFactory lookupExtractorFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
EasyMock.expect(lookupExtractorFactory.start()).andReturn(false).once();
EasyMock.replay(lookupExtractorFactory);
lookupReferencesManager.put("testMock", lookupExtractorFactory);
}
@Test(expected = ISE.class)
public void testputAllExceptionWhenStartFail()
{
LookupExtractorFactory lookupExtractorFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
EasyMock.expect(lookupExtractorFactory.start()).andReturn(false).once();
ImmutableMap<String, LookupExtractorFactory> extractorImmutableMap = ImmutableMap.of(
"name1",
lookupExtractorFactory
);
lookupReferencesManager.put(extractorImmutableMap);
}
  @Test
  public void testUpdateIfNewOnlyIfIsNew()
  {
    // updateIfNew must install a factory only when its replaces() reports that
    // it supersedes the currently registered one. Strict mocks pin the exact
    // call sequence — the record order of these expectations matters.
    final String lookupName = "some lookup";
    LookupExtractorFactory oldFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
    LookupExtractorFactory newFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
    // First update: nothing registered yet, so oldFactory replaces null and starts.
    EasyMock.expect(oldFactory.replaces(EasyMock.<LookupExtractorFactory>isNull())).andReturn(true).once();
    EasyMock.expect(oldFactory.start()).andReturn(true).once();
    // Second update with the same factory: replaces(itself) is false -> rejected.
    EasyMock.expect(oldFactory.replaces(EasyMock.eq(oldFactory))).andReturn(false).once();
    // Add new
    EasyMock.expect(newFactory.replaces(EasyMock.eq(oldFactory))).andReturn(true).once();
    EasyMock.expect(newFactory.start()).andReturn(true).once();
    EasyMock.expect(oldFactory.close()).andReturn(true).once();
    EasyMock.expect(newFactory.close()).andReturn(true).once();
    EasyMock.replay(oldFactory, newFactory);
    Assert.assertTrue(lookupReferencesManager.updateIfNew(lookupName, oldFactory));
    Assert.assertFalse(lookupReferencesManager.updateIfNew(lookupName, oldFactory));
    Assert.assertTrue(lookupReferencesManager.updateIfNew(lookupName, newFactory));
    // Remove now or else EasyMock gets confused on lazy lookup manager stop handling
    lookupReferencesManager.remove(lookupName);
    EasyMock.verify(oldFactory, newFactory);
  }
@Test(expected = ISE.class)
public void testUpdateIfNewExceptional()
{
final String lookupName = "some lookup";
LookupExtractorFactory newFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
EasyMock.expect(newFactory.replaces(EasyMock.<LookupExtractorFactory>isNull())).andReturn(true).once();
EasyMock.expect(newFactory.start()).andReturn(false).once();
EasyMock.replay(newFactory);
try {
lookupReferencesManager.updateIfNew(lookupName, newFactory);
}
finally {
EasyMock.verify(newFactory);
}
}
  @Test
  public void testUpdateIfNewSuppressOldCloseProblem()
  {
    // A failed close() on the replaced factory must be suppressed: the update
    // still succeeds and the new factory is installed. Strict mocks pin the
    // exact call sequence, so the record order of these expectations matters.
    final String lookupName = "some lookup";
    LookupExtractorFactory oldFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
    LookupExtractorFactory newFactory = EasyMock.createStrictMock(LookupExtractorFactory.class);
    EasyMock.expect(oldFactory.replaces(EasyMock.<LookupExtractorFactory>isNull())).andReturn(true).once();
    EasyMock.expect(oldFactory.start()).andReturn(true).once();
    // Add new
    EasyMock.expect(newFactory.replaces(EasyMock.eq(oldFactory))).andReturn(true).once();
    EasyMock.expect(newFactory.start()).andReturn(true).once();
    // close() of the replaced factory reports failure — must not propagate.
    EasyMock.expect(oldFactory.close()).andReturn(false).once();
    EasyMock.expect(newFactory.close()).andReturn(true).once();
    EasyMock.replay(oldFactory, newFactory);
    lookupReferencesManager.updateIfNew(lookupName, oldFactory);
    lookupReferencesManager.updateIfNew(lookupName, newFactory);
    // Remove now or else EasyMock gets confused on lazy lookup manager stop handling
    lookupReferencesManager.remove(lookupName);
    EasyMock.verify(oldFactory, newFactory);
  }
@Test
public void testBootstrapFromFile() throws IOException
{
LookupExtractorFactory lookupExtractorFactory = new MapLookupExtractorFactory(ImmutableMap.<String, String>of(
"key",
"value"
), true);
lookupReferencesManager.put("testMockForBootstrap", lookupExtractorFactory);
lookupReferencesManager.stop();
lookupReferencesManager.start();
Assert.assertEquals(lookupExtractorFactory, lookupReferencesManager.get("testMockForBootstrap"));
}
@Test
public void testConcurrencyStaaaaaaaaaaartStop() throws Exception
{
lookupReferencesManager.stop();
final CyclicBarrier cyclicBarrier = new CyclicBarrier(CONCURRENT_THREADS);
final Runnable start = new Runnable()
{
@Override
public void run()
{
try {
cyclicBarrier.await();
}
catch (InterruptedException | BrokenBarrierException e) {
throw Throwables.propagate(e);
}
lookupReferencesManager.start();
}
};
final Collection<ListenableFuture<?>> futures = new ArrayList<>(CONCURRENT_THREADS);
for (int i = 0; i < CONCURRENT_THREADS; ++i) {
futures.add(executorService.submit(start));
}
lookupReferencesManager.stop();
Futures.allAsList(futures).get(100, TimeUnit.MILLISECONDS);
for (ListenableFuture future : futures) {
Assert.assertNull(future.get());
}
}
@Test
public void testConcurrencyStartStoooooooooop() throws Exception
{
lookupReferencesManager.stop();
lookupReferencesManager.start();
final CyclicBarrier cyclicBarrier = new CyclicBarrier(CONCURRENT_THREADS);
final Runnable start = new Runnable()
{
@Override
public void run()
{
try {
cyclicBarrier.await();
}
catch (InterruptedException | BrokenBarrierException e) {
throw Throwables.propagate(e);
}
lookupReferencesManager.stop();
}
};
final Collection<ListenableFuture<?>> futures = new ArrayList<>(CONCURRENT_THREADS);
for (int i = 0; i < CONCURRENT_THREADS; ++i) {
futures.add(executorService.submit(start));
}
Futures.allAsList(futures).get(100, TimeUnit.MILLISECONDS);
for (ListenableFuture future : futures) {
Assert.assertNull(future.get());
}
}
  @Test(timeout = 10000L)
  public void testConcurrencySequentialChaos()
```

**human** (Jan 1, 2024, 12:00 AM)

Please continue.

**assistant** (Jan 1, 2024, 12:00 AM)

 throws Exception
  {
    // Chaos test: 100k randomized updateIfNew() calls across 100 lookup names
    // race on the manager while the main thread polls. The fixed random seed
    // makes the schedule of factories deterministic; maxNumber records, per
    // name, the highest position whose factory will successfully start — that
    // factory must be the one left registered when the dust settles.
    final CountDownLatch runnableStartBarrier = new CountDownLatch(1);
    final Random random = new Random(478137498L);
    final int numUpdates = 100000;
    final int numNamespaces = 100;
    final CountDownLatch runnablesFinishedBarrier = new CountDownLatch(numUpdates);
    final List<Runnable> runnables = new ArrayList<>(numUpdates);
    final Map<String, Integer> maxNumber = new HashMap<>();
    for (int i = 1; i <= numUpdates; ++i) {
      final boolean shouldStart = random.nextInt(10) == 1;
      final boolean shouldClose = random.nextInt(10) == 1;
      final String name = Integer.toString(random.nextInt(numNamespaces));
      final int position = i;
      // Each factory carries its creation index ("position"); replaces() makes
      // higher positions win so the final state is predictable per name.
      final LookupExtractorFactory lookupExtractorFactory = new LookupExtractorFactory()
      {
        @Override
        public boolean start()
        {
          return shouldStart;
        }
        @Override
        public boolean close()
        {
          return shouldClose;
        }
        @Override
        public boolean replaces(@Nullable LookupExtractorFactory other)
        {
          if (other == null) {
            return true;
          }
          final NamedIntrospectionHandler introspectionHandler = (NamedIntrospectionHandler) other.getIntrospectHandler();
          return position > introspectionHandler.position;
        }
        @Nullable
        @Override
        public LookupIntrospectHandler getIntrospectHandler()
        {
          return new NamedIntrospectionHandler(position);
        }
        @Override
        public String toString()
        {
          // NOTE(review): "TestFactroy" is a typo ("TestFactory") in this
          // test-only diagnostic string.
          return String.format("TestFactroy position %d", position);
        }
        @Override
        public LookupExtractor get()
        {
          return null;
        }
      };
      // Track the expected winner: highest position whose start() succeeds.
      if (shouldStart && (!maxNumber.containsKey(name) || maxNumber.get(name) < position)) {
        maxNumber.put(name, position);
      }
      runnables.add(new LookupUpdatingRunnable(
          name,
          lookupExtractorFactory,
          runnableStartBarrier,
          lookupReferencesManager
      ));
    }
    ////// Add some CHAOS!
    Collections.shuffle(runnables, random);
    final Runnable decrementFinished = new Runnable()
    {
      @Override
      public void run()
      {
        runnablesFinishedBarrier.countDown();
      }
    };
    for (Runnable runnable : runnables) {
      executorService.submit(runnable).addListener(decrementFinished, MoreExecutors.sameThreadExecutor());
    }
    // Release all updating runnables at once, then poll concurrently until
    // they are done; during the race only a weak invariant can be asserted.
    runnableStartBarrier.countDown();
    do {
      for (String name : maxNumber.keySet()) {
        final LookupExtractorFactory factory;
        try {
          factory = lookupReferencesManager.get(name);
        }
        catch (ISE e) {
          continue;
        }
        if (null == factory) {
          continue;
        }
        final NamedIntrospectionHandler introspectionHandler = (NamedIntrospectionHandler) factory.getIntrospectHandler();
        Assert.assertTrue(introspectionHandler.position >= 0);
      }
    } while (runnablesFinishedBarrier.getCount() > 0);
    // After all updates have finished, each name must hold exactly the
    // highest-position factory that was able to start.
    lookupReferencesManager.start();
    for (String name : maxNumber.keySet()) {
      final LookupExtractorFactory factory = lookupReferencesManager.get(name);
      if (null == factory) {
        continue;
      }
      final NamedIntrospectionHandler introspectionHandler = (NamedIntrospectionHandler) factory.getIntrospectHandler();
      Assert.assertNotNull(introspectionHandler);
      Assert.assertEquals(
          StringUtils.safeFormat("Named position %s failed", name),
          maxNumber.get(name),
          Integer.valueOf(introspectionHandler.position)
      );
    }
    Assert.assertEquals(maxNumber.size(), lookupReferencesManager.getAll().size());
  }
  @Test(timeout = 10000L)
  public void testConcurrencyStartStopChaos() throws Exception
  {
    // Like testConcurrencySequentialChaos, but ~0.1% of the scheduled tasks
    // randomly start() or stop() the manager itself mid-stream. Because the
    // manager may be stopped at the end, only the weak during-race invariant
    // is asserted — no final-state check is possible here.
    // Don't want to exercise snapshot here
    final LookupReferencesManager manager = new LookupReferencesManager(new LookupConfig(null), mapper);
    final Runnable chaosStart = new Runnable()
    {
      @Override
      public void run()
      {
        manager.start();
      }
    };
    final Runnable chaosStop = new Runnable()
    {
      @Override
      public void run()
      {
        manager.stop();
      }
    };
    final CountDownLatch runnableStartBarrier = new CountDownLatch(1);
    final Random random = new Random(478137498L);
    final int numUpdates = 100000;
    final int numNamespaces = 100;
    final CountDownLatch runnablesFinishedBarrier = new CountDownLatch(numUpdates);
    final List<Runnable> runnables = new ArrayList<>(numUpdates);
    final Map<String, Integer> maxNumber = new HashMap<>();
    for (int i = 1; i <= numUpdates; ++i) {
      final boolean shouldStart = random.nextInt(10) == 1;
      final boolean shouldClose = random.nextInt(10) == 1;
      final String name = Integer.toString(random.nextInt(numNamespaces));
      final int position = i;
      // Each factory carries its creation index ("position"); replaces() makes
      // higher positions win.
      final LookupExtractorFactory lookupExtractorFactory = new LookupExtractorFactory()
      {
        @Override
        public boolean start()
        {
          return shouldStart;
        }
        @Override
        public boolean close()
        {
          return shouldClose;
        }
        @Override
        public boolean replaces(@Nullable LookupExtractorFactory other)
        {
          if (other == null) {
            return true;
          }
          final NamedIntrospectionHandler introspectionHandler = (NamedIntrospectionHandler) other.getIntrospectHandler();
          return position > introspectionHandler.position;
        }
        @Nullable
        @Override
        public LookupIntrospectHandler getIntrospectHandler()
        {
          return new NamedIntrospectionHandler(position);
        }
        @Override
        public String toString()
        {
          // NOTE(review): "TestFactroy" is a typo ("TestFactory") in this
          // test-only diagnostic string.
          return String.format("TestFactroy position %d", position);
        }
        @Override
        public LookupExtractor get()
        {
          return null;
        }
      };
      // Rarely substitute a manager start/stop for an update task.
      if (random.nextFloat() < 0.001) {
        if (random.nextBoolean()) {
          runnables.add(chaosStart);
        } else {
          runnables.add(chaosStop);
        }
      } else {
        if (shouldStart && (!maxNumber.containsKey(name) || maxNumber.get(name) < position)) {
          maxNumber.put(name, position);
        }
        runnables.add(new LookupUpdatingRunnable(
            name,
            lookupExtractorFactory,
            runnableStartBarrier,
            manager
        ));
      }
    }
    ////// Add some CHAOS!
    Collections.shuffle(runnables, random);
    final Runnable decrementFinished = new Runnable()
    {
      @Override
      public void run()
      {
        runnablesFinishedBarrier.countDown();
      }
    };
    for (Runnable runnable : runnables) {
      executorService.submit(runnable).addListener(decrementFinished, MoreExecutors.sameThreadExecutor());
    }
    // Release all tasks at once, then poll until they are done; a stopped
    // manager throws ISE from get(), which is tolerated during the race.
    runnableStartBarrier.countDown();
    do {
      for (String name : maxNumber.keySet()) {
        final LookupExtractorFactory factory;
        try {
          factory = manager.get(name);
        }
        catch (ISE e) {
          continue;
        }
        if (null == factory) {
          continue;
        }
        final NamedIntrospectionHandler introspectionHandler = (NamedIntrospectionHandler) factory.getIntrospectHandler();
        Assert.assertTrue(introspectionHandler.position >= 0);
      }
    } while (runnablesFinishedBarrier.getCount() > 0);
  }
}
/**
 * Test helper: waits on a shared latch and then races to register its factory
 * with the given manager via {@code updateIfNew}.
 */
class LookupUpdatingRunnable implements Runnable
{
  // Lookup name under which the factory is registered.
  final String name;
  // Factory to offer to the manager.
  final LookupExtractorFactory factory;
  // Latch released by the test to start all runnables at the same time.
  final CountDownLatch startLatch;
  // Manager under test.
  final LookupReferencesManager lookupReferencesManager;

  LookupUpdatingRunnable(
      String name,
      LookupExtractorFactory factory,
      CountDownLatch startLatch,
      LookupReferencesManager lookupReferencesManager
  )
  {
    this.name = name;
    this.factory = factory;
    this.startLatch = startLatch;
    this.lookupReferencesManager = lookupReferencesManager;
  }

  @Override
  public void run()
  {
    try {
      startLatch.await();
    }
    catch (InterruptedException e) {
      // FIX: restore the interrupt flag and avoid the deprecated
      // Throwables.propagate (which just wrapped in RuntimeException).
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
    lookupReferencesManager.updateIfNew(name, factory);
  }
}
/**
 * Test helper introspection handler that records the sequence number of the
 * factory that produced it, so the chaos tests' {@code replaces()} overrides
 * can order competing factories (higher position wins).
 */
class NamedIntrospectionHandler implements LookupIntrospectHandler
{
  // creation index of the owning factory; read by replaces() and assertions
  final int position;
  NamedIntrospectionHandler(final int position)
  {
    this.position = position;
  }
}
| |
/**
* Copyright (C) 2015-2019 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.as4.servlet;
import java.security.cert.X509Certificate;
import java.time.LocalDateTime;
import java.util.Locale;
import javax.annotation.CheckForSigned;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.w3c.dom.Document;
import com.helger.as4.attachment.EAS4CompressionMode;
import com.helger.as4.attachment.WSS4JAttachment;
import com.helger.as4.model.mpc.IMPC;
import com.helger.as4.model.pmode.IPMode;
import com.helger.as4.model.pmode.leg.PModeLeg;
import com.helger.as4.soap.ESOAPVersion;
import com.helger.as4.util.AS4ResourceHelper;
import com.helger.as4lib.ebms3header.Ebms3MessageInfo;
import com.helger.as4lib.ebms3header.Ebms3Messaging;
import com.helger.commons.collection.attr.IAttributeContainer;
import com.helger.commons.collection.impl.ICommonsList;
import com.helger.commons.collection.impl.ICommonsMap;
import com.helger.commons.string.StringHelper;
/**
 * Read-only AS4 message state, collected while an incoming AS4 message is
 * processed. Implementations also act as a generic attribute container for
 * additional, untyped state.
 *
 * @author Philip Helger
 */
public interface IAS4MessageState extends IAttributeContainer <String, Object>
{
  /**
   * @return Date and time when the receipt started. This is constantly set in
   *         the constructor and never <code>null</code>.
   */
  @Nonnull
  LocalDateTime getReceiptDT ();
  /**
   * @return The SOAP version of the current request as specified in the
   *         constructor. Never <code>null</code>.
   */
  @Nonnull
  ESOAPVersion getSOAPVersion ();
  /**
   * @return The resource manager as specified in the constructor. Never
   *         <code>null</code>.
   */
  @Nonnull
  AS4ResourceHelper getResourceHelper ();
  /**
   * @return The request locale to use. Never <code>null</code>.
   */
  @Nonnull
  Locale getLocale ();
  /**
   * @return The parent of the usermessage/signal message for further
   *         evaluation. May be <code>null</code>.
   */
  @Nullable
  Ebms3Messaging getMessaging ();
  /**
   * @return The message ID of the first user message (preferred) or the first
   *         signal message.
   */
  @Nullable
  default String getRefToMessageID ()
  {
    // NOTE(review): despite the @Nullable annotation this default
    // implementation returns "" (never null) when no message info is present
    // — confirm the intended contract before relying on a null check.
    Ebms3MessageInfo aMsgInfo = null;
    final Ebms3Messaging aMessaging = getMessaging ();
    if (aMessaging != null)
      if (aMessaging.hasUserMessageEntries ())
        aMsgInfo = aMessaging.getUserMessageAtIndex (0).getMessageInfo ();
      else
        if (aMessaging.hasSignalMessageEntries ())
          aMsgInfo = aMessaging.getSignalMessageAtIndex (0).getMessageInfo ();
    return aMsgInfo != null ? aMsgInfo.getMessageId () : "";
  }
  /**
   * @return the PMode that is used with the current message
   */
  @Nullable
  IPMode getPMode ();
  /**
   * @return has saved the original attachment, can be encrypted or not depends
   *         if encryption is used or not
   */
  @Nullable
  ICommonsList <WSS4JAttachment> getOriginalAttachments ();
  /**
   * @return <code>true</code> if at least one original attachment is present.
   */
  default boolean hasOriginalAttachments ()
  {
    final ICommonsList <WSS4JAttachment> aAttachments = getOriginalAttachments ();
    return aAttachments != null && aAttachments.isNotEmpty ();
  }
  /**
   * @return get the decrypted SOAP document, only the entire document no
   *         attachment
   */
  @Nullable
  Document getDecryptedSOAPDocument ();
  /**
   * @return <code>true</code> if a decrypted SOAP document is present.
   */
  default boolean hasDecryptedSOAPDocument ()
  {
    return getDecryptedSOAPDocument () != null;
  }
  /**
   * @return getting decrypted attachment, if there were encrypted attachments
   *         to begin with
   */
  @Nullable
  ICommonsList <WSS4JAttachment> getDecryptedAttachments ();
  /**
   * @return <code>true</code> if at least one decrypted attachment is present.
   */
  default boolean hasDecryptedAttachments ()
  {
    final ICommonsList <WSS4JAttachment> aAttachments = getDecryptedAttachments ();
    return aAttachments != null && aAttachments.isNotEmpty ();
  }
  /**
   * @return IDs from all compressed attachments and/or payload
   */
  @Nullable
  ICommonsMap <String, EAS4CompressionMode> getCompressedAttachmentIDs ();
  /**
   * @return <code>true</code> if a compressed-attachment ID map is present.
   */
  default boolean hasCompressedAttachmentIDs ()
  {
    // NOTE(review): unlike the other has* helpers, this returns true even for
    // an empty (but non-null) map.
    return getCompressedAttachmentIDs () != null;
  }
  /**
   * @param sID
   *        id to look up
   * @return Looks up if a compression mode with the id sID exists and returns
   *         the mode else null
   */
  @Nullable
  default EAS4CompressionMode getAttachmentCompressionMode (@Nullable final String sID)
  {
    final ICommonsMap <String, EAS4CompressionMode> aIDs = getCompressedAttachmentIDs ();
    return aIDs == null ? null : aIDs.get (sID);
  }
  /**
   * @param sID
   *        the id to look up
   * @return looks up if the compressed attachment contain the given ID
   */
  default boolean containsCompressedAttachmentID (@Nullable final String sID)
  {
    final ICommonsMap <String, EAS4CompressionMode> aIDs = getCompressedAttachmentIDs ();
    return aIDs != null && aIDs.containsKey (sID);
  }
  /**
   * @return the MPC that is used in the current message exchange
   */
  @Nullable
  IMPC getMPC ();
  /**
   * @return <code>true</code> if an MPC is present.
   */
  default boolean hasMPC ()
  {
    return getMPC () != null;
  }
  /**
   * @return true if a payload in the soap body is present, else false
   */
  boolean isSoapBodyPayloadPresent ();
  /**
   * @return initiator set in the usermessage if the incoming message is a
   *         usermessage
   */
  @Nullable
  String getInitiatorID ();
  /**
   * @return <code>true</code> if a non-empty initiator ID is present.
   */
  default boolean hasInitiatorID ()
  {
    return StringHelper.hasText (getInitiatorID ());
  }
  /**
   * @return responder set in the usermessage if the incoming message is a
   *         usermessage
   */
  @Nullable
  String getResponderID ();
  /**
   * @return <code>true</code> if a non-empty responder ID is present.
   */
  default boolean hasResponderID ()
  {
    return StringHelper.hasText (getResponderID ());
  }
  /**
   * @return The first provided certificate in the incoming message. May be
   *         <code>null</code>. Usually the certificate that was used for
   *         signing.
   */
  @Nullable
  X509Certificate getUsedCertificate ();
  /**
   * @return <code>true</code> if a certificate is present.
   */
  default boolean hasUsedCertificate ()
  {
    return getUsedCertificate () != null;
  }
  /**
   * @return The effective leg to use. May be leg 1 or leg 2 of the PMode.
   * @see #getPMode()
   */
  @Nullable
  PModeLeg getEffectivePModeLeg ();
  /**
   * @return 1 or 2, depending on the used leg. Any other value indicates
   *         "undefined".
   */
  @CheckForSigned
  int getEffectivePModeLegNumber ();
  /**
   * @return <code>true</code> if the incoming message was signed and the
   *         signature was verified, <code>false</code> otherwise.
   */
  boolean isSoapSignatureChecked ();
  /**
   * @return <code>true</code> if the incoming message was decrypted,
   *         <code>false</code> otherwise.
   */
  boolean isSoapDecrypted ();
}
| |
/**
* Simple Stack Machine
*
* Written by Atze Dijkstra, atze@cs.uu.nl,
* Copyright Utrecht University.
*
*/
package nl.uu.cs.ssm ;
import java.util.Enumeration;
import java.util.Vector;
public class Registers extends AbstractMemoryCellModel
    implements MemoryCellModel
{
    /** Register number of the program counter. */
    public static final int PC = 0 ;
    /** Register number of the stack pointer. */
    public static final int SP = 1 ;
    /** Register number of the mark (frame) pointer. */
    public static final int MP = 2 ;
    /** Register number of the heap pointer. */
    public static final int HP = 3 ;
    //public static final int RR = 4 ;

    /** Total number of registers available. */
    private static final int nrRegs = 8 ;

    /** Plain register names, indexed by register number. */
    private static final String[] regNames = { "R0", "R1", "R2", "R3", "R4", "R5", "R6", "R7" } ;

    /** Alias names for the low registers; index 4 ("RR") is the return register. */
    private static final String[] regNamesAlias = { "PC", "SP", "MP", "HP", "RR" } ;

    /** Register contents, indexed by register number. */
    private int[] cells ;

    /** Main memory, used for register-indirect reads and writes. */
    private Memory memory ;

    /** Destination for user-visible diagnostics. */
    private Messenger messenger ;

    /**
     * Creates the register bank.
     *
     * @param m    main memory used for indirect access
     * @param msgr sink for error messages
     */
    protected Registers( Memory m, Messenger msgr )
    {
        messenger = msgr ;
        // A freshly allocated int[] is already all zero; the explicit loop the
        // original ran here was redundant with the reset() call below.
        cells = new int[ nrRegs ] ;
        memory = m ;
        reset() ;
    }

    /** Sets every register back to zero. */
    public void reset()
    {
        for ( int i = 0 ; i < cells.length ; i++ )
            cells[ i ] = 0 ;
    }

    /** @return the number of registers */
    public static int getNrRegs( )
    {
        return nrRegs ;
    }

    /**
     * @return the plain name of register r (e.g. "R0"); out-of-range values
     *         yield a synthesized "R" + r name
     */
    public static String getRegName( int r )
    {
        return (r < regNames.length && r >= 0) ? regNames[ r ] : ("R"+r) ;
    }

    /** @return the alias (PC, SP, MP, HP, RR) when defined, else the plain name */
    public static String getRegOrAliasName( int r )
    {
        return r < regNamesAlias.length ? regNamesAlias[r] : getRegName( r ) ;
    }

    /** @return the plain name, followed by the alias in parentheses when defined */
    public static String getRegNAliasName( int r )
    {
        return getRegName(r) + ( r < regNamesAlias.length ? ("(" + regNamesAlias[r] + ")") : "" ) ;
    }

    /** @return an enumeration of all plain names followed by all alias names */
    public static Enumeration<String> getRegNAliasNames( )
    {
        Vector<String> v = new Vector<String>() ;
        Utils.addAllTo( v, Utils.asVector( regNames ) ) ;
        Utils.addAllTo( v, Utils.asVector( regNamesAlias ) ) ;
        return v.elements() ;
    }

    /**
     * Looks up a register by plain or alias name.
     *
     * @param nm plain ("R0") or alias ("PC") name
     * @return the register number, or -1 when the name is unknown
     */
    public static int findRegOfName( String nm ) // should be in some kinda environment
    {
        for ( int i = 0 ; i < regNames.length ; i++ )
        {
            if ( regNames[i].equals( nm ) )
                return i ;
        }
        for ( int i = 0 ; i < regNamesAlias.length ; i++ )
        {
            if ( regNamesAlias[i].equals( nm ) )
                return i ;
        }
        return -1 ;
    }

    /**
     * Validates a register number, reporting invalid ones via the messenger.
     * (Despite its name this checks the register file, not main memory.)
     *
     * @return true when r denotes an existing register
     */
    private boolean checkWithinMemory( int r )
    {
        // Rewritten from the original assignment-inside-condition for clarity;
        // also uses Integer.valueOf instead of the deprecated Integer(int) ctor.
        final boolean outOfRange = r < 0 || r >= cells.length ;
        if ( outOfRange )
        {
            messenger.println
                ( java.text.MessageFormat.format
                    ( "attempt to access nonexisting register {0,number,integer}"
                    , new Object[] { Integer.valueOf( r ) }
                    ) ) ;
        }
        return ! outOfRange ;
    }

    /** @return the value of register r, or 0 when r is invalid (after reporting) */
    public int getReg( int r )
    {
        int res = 0 ;
        if ( checkWithinMemory( r ) )
        {
            res = cells[ r ] ;
        }
        return res ;
    }

    /** @return the value of register r plus displacement d */
    public int getRegDispl( int r, int d )
    {
        return getReg( r ) + d ;
    }

    /** @return the memory word addressed by register r plus displacement d */
    public int getRegDisplInd( int r, int d )
    {
        return memory.getAt( getRegDispl( r, d ) ) ;
    }

    /** @return the memory word addressed by register r */
    public int getRegInd( int r )
    {
        return getRegDisplInd( r, 0 ) ;
    }

    /** Undo record that restores a register to a previous value. */
    class UndoRegistersModification implements Modification
    {
        private int offset, value ;

        UndoRegistersModification( int o, int v )
        {
            offset = o ;
            value = v ;
        }

        public void modify()
        {
            setReg( offset, value ) ;
        }
    }

    /**
     * Stores v into register r (when valid) and fires a change event carrying
     * an undo record for the previous value.
     */
    public void setReg( int r, int v )
    {
        if ( checkWithinMemory( r ) )
        {
            int oldv = cells[ r ] ;
            cells[ r ] = v ;
            fireCellChange( this, r, oldv, new UndoRegistersModification( r, oldv ) ) ;
        }
    }

    /** Writes v to the memory word addressed by register r plus displacement d. */
    public void setRegDisplInd( int r, int d, int v )
    {
        memory.setAt( getRegDispl( r, d ), v ) ;
    }

    /** Writes v to the memory word addressed by register r. */
    public void setRegInd( int r, int v )
    {
        setRegDisplInd( r, 0, v ) ;
    }

    /** Stores v into register r and returns the previous value. */
    public int swapReg( int r, int v )
    {
        int oldVal = getReg( r ) ;
        setReg( r, v ) ;
        return oldVal ;
    }

    /** Adds v to register r. */
    public void adjustReg( int r, int v )
    {
        setReg( r, getReg( r ) + v ) ;
    }

    public void setPC( int v )
    {
        setReg( PC, v ) ;
    }

    public int getPC( )
    {
        return getReg( PC ) ;
    }

    public void adjustPC( int v )
    {
        setPC( getPC() + v ) ;
    }

    public void setSP( int v )
    {
        setReg( SP, v ) ;
    }

    public int getSP( )
    {
        return getReg( SP ) ;
    }

    public void adjustSP( int v )
    {
        setSP( getSP() + v ) ;
    }

    public void setMP( int v )
    {
        setReg( MP, v ) ;
    }

    public int getMP( )
    {
        return getReg( MP ) ;
    }

    public void adjustMP( int v )
    {
        setMP( getMP() + v ) ;
    }

    public void setHP(int v) {
        setReg (HP, v);
    }

    public int getHP() {
        return getReg(HP);
    }

    public void adjustHP(int v) {
        setHP(getHP() + v);
    }

    /** @return the memory word at the top of the stack (addressed by SP) */
    public int getTOS()
    {
        return getRegInd( SP ) ;
    }

    public String toString()
    {
        return "Register PC=" + getReg( PC ) + " SP=" + getReg( SP ) + " MP=" + getReg( MP ) ;
    }
}
| |
/**
* Licensed under the GNU LESSER GENERAL PUBLIC LICENSE, version 2.1, dated February 1999.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the latest version of the GNU Lesser General
* Public License as published by the Free Software Foundation;
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program (LICENSE.txt); if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package org.jamwiki.model;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.jamwiki.WikiMessage;
import org.jamwiki.utils.Utilities;
/**
* Provides an object representing a Wiki log entry.
*/
public class LogItem implements Serializable {
public static final int LOG_TYPE_ALL = -1;
public static final int LOG_TYPE_BLOCK = 13;
public static final int LOG_TYPE_DELETE = 1;
public static final int LOG_TYPE_IMPORT = 2;
public static final int LOG_TYPE_MOVE = 3;
public static final int LOG_TYPE_PERMISSION = 4;
public static final int LOG_TYPE_UPLOAD = 6;
public static final int LOG_TYPE_USER_CREATION = 7;
public static final int LOG_SUBTYPE_DELETE_DELETE = 10;
public static final int LOG_SUBTYPE_DELETE_UNDELETE = 11;
public static final int LOG_SUBTYPE_DELETE_PURGE = 12;
public static final int LOG_SUBTYPE_BLOCK_BLOCK = 130;
public static final int LOG_SUBTYPE_BLOCK_UNBLOCK = 131;
public static Map<Integer, String> LOG_TYPES = new LinkedHashMap<Integer, String>();
static {
LOG_TYPES.put(LOG_TYPE_ALL, "log.caption.log.all");
LOG_TYPES.put(LOG_TYPE_BLOCK, "log.caption.log.block");
LOG_TYPES.put(LOG_TYPE_DELETE, "log.caption.log.deletion");
LOG_TYPES.put(LOG_TYPE_IMPORT, "log.caption.log.import");
LOG_TYPES.put(LOG_TYPE_MOVE, "log.caption.log.move");
LOG_TYPES.put(LOG_TYPE_PERMISSION, "log.caption.log.permission");
LOG_TYPES.put(LOG_TYPE_UPLOAD, "log.caption.log.upload");
LOG_TYPES.put(LOG_TYPE_USER_CREATION, "log.caption.log.user");
}
private String logComment;
private Timestamp logDate;
private List<String> logParams;
private Integer logSubType;
private int logType = -1;
private Integer topicId;
private Integer topicVersionId;
private String userDisplayName;
private Integer userId;
private String virtualWiki;
/**
* Create a log item from a topic, topic version and author name. If the topic
* version is not valid for logging this method will return <code>null</code>.
*/
public static LogItem initLogItem(Topic topic, TopicVersion topicVersion, String authorName) {
LogItem logItem = new LogItem();
if (!topicVersion.isLoggable() || !topicVersion.isRecentChangeAllowed()) {
return null;
}
logItem.setLogParams(topicVersion.getVersionParams());
switch (topicVersion.getEditType()) {
case TopicVersion.EDIT_DELETE:
logItem.setLogType(LOG_TYPE_DELETE);
logItem.setLogSubType(LOG_SUBTYPE_DELETE_DELETE);
break;
case TopicVersion.EDIT_UNDELETE:
logItem.setLogType(LOG_TYPE_DELETE);
logItem.setLogSubType(LOG_SUBTYPE_DELETE_UNDELETE);
break;
case TopicVersion.EDIT_MOVE:
if (StringUtils.isBlank(topic.getRedirectTo())) {
// add an additional check to ensure that reloading values does not create a bogus entry
return null;
}
logItem.setLogType(LOG_TYPE_MOVE);
break;
case TopicVersion.EDIT_PERMISSION:
logItem.setLogType(LOG_TYPE_PERMISSION);
break;
case TopicVersion.EDIT_IMPORT:
if (topic.getCurrentVersionId() != topicVersion.getTopicVersionId()) {
// only log the current version as an import item
return null;
}
logItem.setLogType(LOG_TYPE_IMPORT);
break;
case TopicVersion.EDIT_UPLOAD:
logItem.setLogType(LOG_TYPE_UPLOAD);
break;
default:
// not valid for logging
return null;
}
logItem.setLogComment(topicVersion.getEditComment());
logItem.setLogDate(topicVersion.getEditDate());
logItem.setTopicId(topic.getTopicId());
logItem.setTopicVersionId(topicVersion.getTopicVersionId());
logItem.setUserDisplayName(authorName);
logItem.setUserId(topicVersion.getAuthorId());
logItem.setVirtualWiki(topic.getVirtualWiki());
return logItem;
}
/**
* Create a log item from a wiki user.
*/
public static LogItem initLogItem(WikiUser wikiUser, String virtualWiki) {
LogItem logItem = new LogItem();
logItem.setLogType(LOG_TYPE_USER_CREATION);
logItem.setLogDate(wikiUser.getCreateDate());
logItem.setUserDisplayName(wikiUser.getUsername());
logItem.setUserId(wikiUser.getUserId());
logItem.setVirtualWiki(virtualWiki);
// format user log is "New user account created" (no params needed)
return logItem;
}
/**
* Create a log item from a user block record.
*/
public static LogItem initLogItem(UserBlock userBlock, String virtualWiki) {
LogItem logItem = new LogItem();
logItem.setLogType(LOG_TYPE_BLOCK);
if (userBlock.getUnblockDate() == null) {
logItem.setLogSubType(LOG_SUBTYPE_BLOCK_BLOCK);
logItem.setLogDate(userBlock.getBlockDate());
logItem.setUserDisplayName(userBlock.getBlockedByUsername());
logItem.setUserId(userBlock.getBlockedByUserId());
logItem.setLogComment(userBlock.getBlockReason());
// format block log is "{0} blocked until {1}"
logItem.addLogParam(userBlock.getBlockedUsernameOrIpAddress());
if (userBlock.getBlockEndDate() != null) {
logItem.addLogParam(userBlock.getBlockEndDate().toString());
}
} else {
logItem.setLogSubType(LOG_SUBTYPE_BLOCK_UNBLOCK);
logItem.setLogDate(userBlock.getUnblockDate());
logItem.setUserDisplayName(userBlock.getUnblockedByUsername());
logItem.setUserId(userBlock.getUnblockedByUserId());
logItem.setLogComment(userBlock.getUnblockReason());
// format block log is "{0} unblocked"
logItem.addLogParam(userBlock.getBlockedUsernameOrIpAddress());
}
logItem.setVirtualWiki(virtualWiki);
return logItem;
}
/**
* Create a log item from a topic, topic version and author name for the case of
* a topic version deletion.
*/
public static LogItem initLogItemPurge(Topic topic, TopicVersion topicVersion, WikiUser user, String ipAddress) {
LogItem logItem = new LogItem();
logItem.addLogParam(topic.getName());
logItem.addLogParam(Integer.toString(topicVersion.getTopicVersionId()));
logItem.setLogType(LOG_TYPE_DELETE);
logItem.setLogSubType(LOG_SUBTYPE_DELETE_PURGE);
logItem.setLogDate(new Timestamp(System.currentTimeMillis()));
logItem.setTopicId(topic.getTopicId());
if (user != null && user.getUserId() > 0) {
logItem.setUserId(user.getUserId());
logItem.setUserDisplayName(user.getUsername());
} else {
logItem.setUserDisplayName(ipAddress);
}
logItem.setVirtualWiki(topic.getVirtualWiki());
return logItem;
}
/**
 * Builds the localized WikiMessage used when rendering a log entry.
 *
 * @param virtualWiki The virtual wiki of the log entry, used when building
 *  links to user pages.
 * @param logType The log type (block, delete, move, upload, etc).
 * @param logSubType The log sub-type, or null. Distinguishes, for example,
 *  block/unblock and delete/undelete/purge records.
 * @param logParamString Pipe-delimited message parameters, may be null or blank.
 * @param topicVersionId The topic version for the log entry, if any. Not
 *  currently used when formatting, but retained for interface compatibility.
 * @return The formatted WikiMessage, or null if the log type is unrecognized.
 */
public static WikiMessage retrieveLogWikiMessage(String virtualWiki, int logType, Integer logSubType, String logParamString, Integer topicVersionId) {
    String[] logParams = null;
    if (!StringUtils.isBlank(logParamString)) {
        logParams = logParamString.split("\\|");
    }
    WikiMessage logWikiMessage = null;
    if (logType == LogItem.LOG_TYPE_BLOCK) {
        // null-check the sub-type for consistency with the delete branch below
        // (previously a NullPointerException when the sub-type was missing)
        if (logSubType != null && logSubType.intValue() == LOG_SUBTYPE_BLOCK_BLOCK) {
            // an infinite block carries one param (the blocked user); a timed
            // block carries a second param holding the block expiration
            if (logParams == null || logParams.length == 1) {
                logWikiMessage = new WikiMessage("log.message.blockinfinite");
            } else {
                logWikiMessage = new WikiMessage("log.message.block");
            }
            if (logParams != null) {
                String username = logParams[0];
                String userPage = Namespace.namespace(Namespace.USER_ID).getLabel(virtualWiki) + Namespace.SEPARATOR + username;
                logWikiMessage.addWikiLinkParam(userPage, username);
                if (logParams.length > 1) {
                    logWikiMessage.addParam(logParams[1]);
                }
            }
        } else {
            logWikiMessage = new WikiMessage("log.message.unblock");
            // param is the unblocked user (guard against missing params,
            // previously a NullPointerException)
            if (logParams != null) {
                String username = logParams[0];
                String userPage = Namespace.namespace(Namespace.USER_ID).getLabel(virtualWiki) + Namespace.SEPARATOR + username;
                logWikiMessage.addWikiLinkParam(userPage, username);
            }
        }
    } else if (logType == LogItem.LOG_TYPE_DELETE) {
        if (logSubType != null && logSubType.intValue() == LOG_SUBTYPE_DELETE_UNDELETE) {
            logWikiMessage = new WikiMessage("log.message.undeletion");
        } else if (logSubType != null && logSubType.intValue() == LOG_SUBTYPE_DELETE_PURGE) {
            logWikiMessage = new WikiMessage("log.message.purge");
            // first param is the topic name, second is the version number
            if (logParams != null && logParams.length > 0) {
                logWikiMessage.addWikiLinkParam(logParams[0]);
            }
            if (logParams != null && logParams.length > 1) {
                logWikiMessage.addParam(logParams[1]);
            }
        } else {
            logWikiMessage = new WikiMessage("log.message.deletion");
        }
    } else if (logType == LogItem.LOG_TYPE_IMPORT) {
        logWikiMessage = new WikiMessage("log.message.import");
    } else if (logType == LogItem.LOG_TYPE_MOVE) {
        logWikiMessage = new WikiMessage("log.message.move");
    } else if (logType == LogItem.LOG_TYPE_PERMISSION) {
        logWikiMessage = new WikiMessage("log.message.permission");
    } else if (logType == LogItem.LOG_TYPE_UPLOAD) {
        logWikiMessage = new WikiMessage("log.message.upload");
    } else if (logType == LogItem.LOG_TYPE_USER_CREATION) {
        logWikiMessage = new WikiMessage("log.message.user");
    }
    // format params as links if they haven't already been set; also guard
    // against an unrecognized log type, which previously triggered a
    // NullPointerException here whenever params were present
    if (logParams != null && logWikiMessage != null && logWikiMessage.getParamsLength() == 0) {
        for (String logParam : logParams) {
            logWikiMessage.addWikiLinkParam(logParam);
        }
    }
    return logWikiMessage;
}
/**
 * Returns the free-form comment recorded with this log entry, or null.
 */
public String getLogComment() {
return this.logComment;
}
/**
 * Sets the free-form comment recorded with this log entry.
 */
public void setLogComment(String logComment) {
this.logComment = logComment;
}
/**
 * Returns the timestamp at which the logged action occurred.
 */
public Timestamp getLogDate() {
return this.logDate;
}
/**
 * Sets the timestamp at which the logged action occurred.
 */
public void setLogDate(Timestamp logDate) {
this.logDate = logDate;
}
/**
 * Appends a single message parameter to this log item, lazily creating
 * the underlying parameter list on first use.
 *
 * @param param The parameter value to append.
 */
private void addLogParam(String param) {
    List<String> params = this.logParams;
    if (params == null) {
        params = new ArrayList<String>();
        this.logParams = params;
    }
    params.add(param);
}
/**
 * Returns the list of message parameters for this log entry, or null if
 * none have been added.
 */
public List<String> getLogParams() {
return this.logParams;
}
/**
 * Sets the list of message parameters for this log entry.
 */
public void setLogParams(List<String> logParams) {
this.logParams = logParams;
}
/**
 * Utility method for converting the log params to a pipe-delimited string,
 * suitable for single-column database storage.
 */
public String getLogParamString() {
return Utilities.listToDelimitedString(this.logParams, "|");
}
/**
 * Utility method for converting a pipe-delimited log params string (as
 * produced by {@link #getLogParamString}) back to a list.
 */
public void setLogParamString(String logParamsString) {
this.setLogParams(Utilities.delimitedStringToList(logParamsString, "|"));
}
/**
 * Returns the log sub-type, or null if none was set. In most cases the log
 * type is sufficient to classify a log item, but in some cases further
 * granularity is required; one such example is deletion/undeletion, which
 * are both part of the "deletion" log type.
 */
public Integer getLogSubType() {
return this.logSubType;
}
/**
 * Sets the log sub-type. In most cases the log type is sufficient to
 * classify a log item, but in some cases further granularity is required;
 * one such example is deletion/undeletion, which are both part of the
 * "deletion" log type.
 */
public void setLogSubType(Integer logSubType) {
this.logSubType = logSubType;
}
/**
 * Returns the log type, which determines what log (deletion, upload, etc)
 * the log record is classified under.
 */
public int getLogType() {
return this.logType;
}
/**
 * Sets the log type, which determines what log (deletion, upload, etc)
 * the log record is classified under.
 */
public void setLogType(int logType) {
this.logType = logType;
}
/**
 * Utility method for retrieving the log type caption for this item's log
 * type, looked up from the LOG_TYPES map.
 */
public String getLogWikiLinkCaption() {
return LOG_TYPES.get(this.logType);
}
/**
 * Utility method for displaying a formatted log message specific to this
 * item's log type, sub-type and params. Delegates to
 * {@link #retrieveLogWikiMessage}.
 */
public WikiMessage getLogWikiMessage() {
return LogItem.retrieveLogWikiMessage(this.getVirtualWiki(), this.getLogType(), this.getLogSubType(), this.getLogParamString(), this.getTopicVersionId());
}
/**
 * Returns the ID of the topic this log entry refers to, or null.
 */
public Integer getTopicId() {
return this.topicId;
}
/**
 * Sets the ID of the topic this log entry refers to.
 */
public void setTopicId(Integer topicId) {
this.topicId = topicId;
}
/**
 * Returns the ID of the topic version this log entry refers to, or null.
 */
public Integer getTopicVersionId() {
return this.topicVersionId;
}
/**
 * Sets the ID of the topic version this log entry refers to.
 */
public void setTopicVersionId(Integer topicVersionId) {
this.topicVersionId = topicVersionId;
}
/**
 * Returns the display name of the acting user; for anonymous users this
 * holds the IP address instead.
 */
public String getUserDisplayName() {
return this.userDisplayName;
}
/**
 * Sets the display name of the acting user (an IP address for anonymous
 * users).
 */
public void setUserDisplayName(String userDisplayName) {
this.userDisplayName = userDisplayName;
}
/**
 * Returns the ID of the acting user, or null for anonymous actions.
 */
public Integer getUserId() {
return this.userId;
}
/**
 * Sets the ID of the acting user.
 */
public void setUserId(Integer userId) {
this.userId = userId;
}
/**
 * Returns the virtual wiki name this log entry belongs to.
 */
public String getVirtualWiki() {
return this.virtualWiki;
}
/**
 * Sets the virtual wiki name this log entry belongs to.
 */
public void setVirtualWiki(String virtualWiki) {
this.virtualWiki = virtualWiki;
}
/**
 * Returns true if this entry belongs to the user block log.
 */
public boolean isBlock() {
return this.logType == LOG_TYPE_BLOCK;
}
/**
 * Returns true if this entry belongs to the deletion log.
 */
public boolean isDelete() {
return this.logType == LOG_TYPE_DELETE;
}
/**
 * Returns true if this entry belongs to the import log.
 */
public boolean isImportLog() {
return this.logType == LOG_TYPE_IMPORT;
}
/**
 * Returns true if this entry belongs to the page move log.
 */
public boolean isMove() {
return this.logType == LOG_TYPE_MOVE;
}
/**
 * Returns true if this entry belongs to the permission change log.
 */
public boolean isPermission() {
return this.logType == LOG_TYPE_PERMISSION;
}
/**
 * Returns true if this entry belongs to the file upload log.
 */
public boolean isUpload() {
return this.logType == LOG_TYPE_UPLOAD;
}
/**
 * Returns true if this entry belongs to the user creation log.
 */
public boolean isUser() {
return this.logType == LOG_TYPE_USER_CREATION;
}
}
| |
/*
* Copyright: (c) Mayo Foundation for Medical Education and
* Research (MFMER). All rights reserved. MAYO, MAYO CLINIC, and the
* triple-shield Mayo logo are trademarks and service marks of MFMER.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/lexevs-grid/LICENSE.txt for details.
*/
package org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeGraph.service;
import java.rmi.RemoteException;
import java.util.Calendar;
import java.util.GregorianCalendar;
import javax.xml.namespace.QName;
import org.LexGrid.LexBIG.DataModel.Collections.LocalNameList;
import org.LexGrid.LexBIG.DataModel.Collections.SortOptionList;
import org.LexGrid.LexBIG.DataModel.Core.ConceptReference;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeGraph;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet;
import org.LexGrid.LexBIG.LexBIGService.CodedNodeSet.PropertyType;
import org.LexGrid.LexBIG.cagrid.Utils;
import org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeGraph.common.CodedNodeGraphConstants;
import org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeGraph.service.globus.resource.CodedNodeGraphResource;
import org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.common.CodedNodeSetConstants;
import org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.service.globus.resource.CodedNodeSetResource;
import org.LexGrid.LexBIG.cagrid.iso21090.converter.ConvertUtils;
import org.apache.axis.message.MessageElement;
import org.globus.wsrf.ResourceKey;
import org.globus.wsrf.impl.SimpleResourceKey;
/**
* TODO:I am the service side implementation class. IMPLEMENT AND DOCUMENT ME
*
* @created by Introduce Toolkit version 1.1
*
*/
public class CodedNodeGraphImpl extends CodedNodeGraphImplBase {
public CodedNodeGraphImpl() throws RemoteException {
super();
}
public org.LexGrid.LexBIG.iso21090.DataModel.Collections.ResolvedConceptReferenceList resolveAsList(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.GraphResolutionPolicy graphResolutionPolicy) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
ConceptReference graphFocus = ConvertUtils.convert(graphResolutionPolicy.getGraphFocus(), ConceptReference.class);
boolean resolveForward = graphResolutionPolicy.getResolveForward().getValue();
boolean resolveBackward = graphResolutionPolicy.getResolveBackwards().getValue();
int resolveCodedEntryDepth = graphResolutionPolicy.getResolveCodedEntryDepth().getValue();
int resolveAssociationDepth = graphResolutionPolicy.getResolveAssociationDepth().getValue();
LocalNameList propertyNames = ConvertUtils.convert(graphResolutionPolicy.getPropertyNames(), LocalNameList.class);
PropertyType[] propertyTypes = Utils.convertPropertyType(graphResolutionPolicy.getPropertyTypes());
SortOptionList sortOptions = ConvertUtils.convert(graphResolutionPolicy.getSortOptions(), SortOptionList.class);
int maxToReturn = graphResolutionPolicy.getMaximumToReturn().getValue();
org.LexGrid.LexBIG.DataModel.Collections.ResolvedConceptReferenceList
list = getResourceHome().getAddressedResource().getCodedNodeGraph()
.resolveAsList(graphFocus, resolveForward, resolveBackward,
resolveCodedEntryDepth, resolveAssociationDepth,
propertyNames,
propertyTypes,
sortOptions, maxToReturn);
return ConvertUtils.convert(list, org.LexGrid.LexBIG.iso21090.DataModel.Collections.ResolvedConceptReferenceList.class);
} catch (Exception e) {
Utils.processException(e);
return null;
}
}
public org.LexGrid.LexBIG.DataModel.Collections.ConceptReferenceList listCodeRelationships(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.RelationshipTypeBasedPolicy relationshipTypeBasedPolicy) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
ConceptReference sourceCode = ConvertUtils.convert(relationshipTypeBasedPolicy.getSourceConcept(), ConceptReference.class);
ConceptReference targetCode = ConvertUtils.convert(relationshipTypeBasedPolicy.getTargetConcept(), ConceptReference.class);
boolean directOnly = relationshipTypeBasedPolicy.getDirectOnly().getValue();
try {
return
ConvertUtils.convert(getResourceHome().getAddressedResource().getCodedNodeGraph()
.listCodeRelationships(
sourceCode,
targetCode,
directOnly), org.LexGrid.LexBIG.DataModel.Collections.ConceptReferenceList.class);
} catch (Exception e) {
Utils.processException(e);
return null;
}
}
public void restrictToTargetCodeSystem(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.CodingSchemeIdentification codingSchemeIdentification) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
CodedNodeGraph cng = getResourceHome().getAddressedResource().getCodedNodeGraph();
cng = cng.restrictToTargetCodeSystem(codingSchemeIdentification.getName().getValue());
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public void restrictToCodeSystem(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.CodingSchemeIdentification codingSchemeIdentification) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
CodedNodeGraph cng = getResourceHome().getAddressedResource().getCodedNodeGraph();
cng = cng.restrictToCodeSystem(codingSchemeIdentification.getName().getValue());
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public void restrictToTargetCodes(org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.stubs.types.CodedNodeSetReference codes) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
QName name = CodedNodeSetConstants.RESOURCE_KEY;
MessageElement returnedValue = codes.getEndpointReference()
.getProperties().get(name);
String value = returnedValue.getValue();
ResourceKey newKey = new SimpleResourceKey(name, value);
CodedNodeSetResource cnsr = this.getCodedNodeSetResourceHome()
.getResource((newKey));
CodedNodeSet cns = cnsr.getCodedNodeSet();
getResourceHome().getAddressedResource().getCodedNodeGraph()
.restrictToTargetCodes(cns);
} catch (Exception e) {
Utils.processException(e);
}
}
public void restrictToSourceCodes(org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.stubs.types.CodedNodeSetReference codes) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
QName name = CodedNodeSetConstants.RESOURCE_KEY;
MessageElement returnedValue = codes.getEndpointReference()
.getProperties().get(name);
String value = returnedValue.getValue();
ResourceKey newKey = new SimpleResourceKey(name, value);
CodedNodeSetResource cnsr = this.getCodedNodeSetResourceHome()
.getResource((newKey));
CodedNodeSet cns = cnsr.getCodedNodeSet();
CodedNodeGraph cng = getResourceHome().getAddressedResource().getCodedNodeGraph();
cng = cng.restrictToSourceCodes(cns);
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public void restrictToDirectionalNames(org.LexGrid.LexBIG.iso21090.DataModel.Collections.NameAndValueList directionalNames,org.LexGrid.LexBIG.iso21090.DataModel.Collections.NameAndValueList associationQualifiers) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
CodedNodeGraph cng =
getResourceHome().getAddressedResource().getCodedNodeGraph();
cng = cng.restrictToDirectionalNames(
ConvertUtils.convert(directionalNames, org.LexGrid.LexBIG.DataModel.Collections.NameAndValueList.class),
ConvertUtils.convert(associationQualifiers, org.LexGrid.LexBIG.DataModel.Collections.NameAndValueList.class)
);
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public void restrictToAssociations(org.LexGrid.LexBIG.iso21090.DataModel.Collections.NameAndValueList associations,org.LexGrid.LexBIG.iso21090.DataModel.Collections.NameAndValueList associationQualifiers) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
CodedNodeGraph cng = getResourceHome()
.getAddressedResource()
.getCodedNodeGraph();
cng = cng.restrictToAssociations(
ConvertUtils.convert(associations, org.LexGrid.LexBIG.DataModel.Collections.NameAndValueList.class),
ConvertUtils.convert(associationQualifiers, org.LexGrid.LexBIG.DataModel.Collections.NameAndValueList.class));
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public void restrictToCodes(org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.stubs.types.CodedNodeSetReference codes) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
QName name = CodedNodeSetConstants.RESOURCE_KEY;
MessageElement returnedValue = codes.getEndpointReference()
.getProperties().get(name);
String value = returnedValue.getValue();
ResourceKey newKey = new SimpleResourceKey(name, value);
CodedNodeSetResource cnsr = this.getCodedNodeSetResourceHome()
.getResource((newKey));
CodedNodeSet cns = cnsr.getCodedNodeSet();
CodedNodeGraph cng = getResourceHome().getAddressedResource().getCodedNodeGraph();
cng = cng.restrictToCodes(cns);
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public void intersect(org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeGraph.stubs.types.CodedNodeGraphReference graph) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
QName name = CodedNodeGraphConstants.RESOURCE_KEY;
MessageElement returnedValue = graph.getEndpointReference()
.getProperties().get(name);
String value = returnedValue.getValue();
ResourceKey newKey = new SimpleResourceKey(name, value);
CodedNodeGraphResource cngr = this.getResourceHome().getResource(
(newKey));
CodedNodeGraph cng = cngr.getCodedNodeGraph();
CodedNodeGraph resource = getResourceHome().getAddressedResource().getCodedNodeGraph();
resource = resource.intersect(cng);
this.getResourceHome().getAddressedResource().setCodedNodeGraph(resource);
} catch (Exception e){
Utils.processException(e);
}
}
public void union(org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeGraph.stubs.types.CodedNodeGraphReference graph) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
QName name = CodedNodeGraphConstants.RESOURCE_KEY;
MessageElement returnedValue = graph.getEndpointReference()
.getProperties().get(name);
String value = returnedValue.getValue();
ResourceKey newKey = new SimpleResourceKey(name, value);
CodedNodeGraphResource cngr = this.getResourceHome().getResource(
(newKey));
CodedNodeGraph cng = cngr.getCodedNodeGraph();
CodedNodeGraph resource = getResourceHome().getAddressedResource().getCodedNodeGraph();
resource = resource.union(cng);
this.getResourceHome().getAddressedResource().setCodedNodeGraph(resource);
} catch (Exception e){
Utils.processException(e);
}
}
public void restrictToSourceCodeSystem(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.CodingSchemeIdentification codingSchemeIdentification) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
CodedNodeGraph cng = getResourceHome().getAddressedResource().getCodedNodeGraph();
cng = cng.restrictToSourceCodeSystem(codingSchemeIdentification.getName().getValue());
this.getResourceHome().getAddressedResource().setCodedNodeGraph(cng);
} catch (Exception e) {
Utils.processException(e);
}
}
public org.LexGrid.LexBIG.iso21090.DataModel.cagrid.CodeExistence isCodeInGraph(org.LexGrid.LexBIG.iso21090.DataModel.Core.ConceptReference code) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
try {
boolean isInGraph = getResourceHome().getAddressedResource().getCodedNodeGraph()
.isCodeInGraph(
ConvertUtils.convert(code, org.LexGrid.LexBIG.DataModel.Core.ConceptReference.class));
return Utils.wrapCodeExistence(isInGraph);
} catch (Exception e) {
Utils.processException(e);
return null;
}
}
public org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.stubs.types.CodedNodeSetReference toNodeList(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.NodeListPolicy nodeListPolicy) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
org.apache.axis.message.addressing.EndpointReferenceType epr = new org.apache.axis.message.addressing.EndpointReferenceType();
org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.service.globus.resource.CodedNodeSetResourceHome home = null;
org.globus.wsrf.ResourceKey resourceKey = null;
org.apache.axis.MessageContext ctx = org.apache.axis.MessageContext
.getCurrentContext();
String servicePath = ctx.getTargetService();
String homeName = org.globus.wsrf.Constants.JNDI_SERVICES_BASE_NAME
+ servicePath + "/" + "codedNodeSetHome";
try {
javax.naming.Context initialContext = new javax.naming.InitialContext();
home = (org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.service.globus.resource.CodedNodeSetResourceHome) initialContext
.lookup(homeName);
resourceKey = home.createResource();
// Grab the newly created resource
org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.service.globus.resource.CodedNodeSetResource thisResource = (org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.service.globus.resource.CodedNodeSetResource) home
.find(resourceKey);
// This is where the creator of this resource type can set whatever
// needs
// to be set on the resource so that it can function appropriatly
// for instance
// if you want the resouce to only have the query string then there
// is where you would
// give it the query string.
ConceptReference graphFocus = ConvertUtils.convert(nodeListPolicy.getGraphFocus(), ConceptReference.class);
boolean resolveForward = nodeListPolicy.getResolveForward().getValue();
boolean resolveBackward = nodeListPolicy.getResolveBackward().getValue();
int resolveAssociationDepth = nodeListPolicy.getResolveAssociationDepth().getValue();
int maxToReturn = nodeListPolicy.getMaximumToReturn().getValue();
CodedNodeSet cns = getResourceHome().getAddressedResource()
.getCodedNodeGraph().toNodeList(graphFocus, resolveForward,
resolveBackward, resolveAssociationDepth,
maxToReturn);
thisResource.setCodedNodeSet(cns);
// sample of setting creator only security. This will only allow the
// caller that created
// this resource to be able to use it.
// thisResource.setSecurityDescriptor(gov.nih.nci.cagrid.introduce.servicetools.security.SecurityUtils.createCreatorOnlyResourceSecurityDescriptor());
Calendar cal = new GregorianCalendar();
cal.add(Calendar.MINUTE, 5);
thisResource.setTerminationTime(cal);
String transportURL = (String) ctx
.getProperty(org.apache.axis.MessageContext.TRANS_URL);
transportURL = transportURL.substring(0, transportURL
.lastIndexOf('/') + 1);
transportURL += "CodedNodeSet";
epr = org.globus.wsrf.utils.AddressingUtils
.createEndpointReference(transportURL, resourceKey);
} catch (Exception e) {
Utils.processException(e);
}
// return the typed EPR
org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.stubs.types.CodedNodeSetReference ref = new org.LexGrid.LexBIG.cagrid.LexEVSGridService.CodedNodeSet.stubs.types.CodedNodeSetReference();
ref.setEndpointReference(epr);
return ref;
}
public org.LexGrid.LexBIG.iso21090.DataModel.cagrid.CodeRelationship areCodesRelated(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.RelationshipTypeBasedPolicy relationshipTypeBasedPolicy,org.LexGrid.LexBIG.iso21090.DataModel.Core.NameAndValue nameAndValue) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
ConceptReference sourceCode = ConvertUtils.convert(relationshipTypeBasedPolicy.getSourceConcept(), ConceptReference.class);
ConceptReference targetCode = ConvertUtils.convert(relationshipTypeBasedPolicy.getTargetConcept(), ConceptReference.class);
boolean directOnly = relationshipTypeBasedPolicy.getDirectOnly().getValue();
try {
boolean areRelated = getResourceHome().getAddressedResource().getCodedNodeGraph()
.areCodesRelated(
ConvertUtils.convert(nameAndValue, org.LexGrid.LexBIG.DataModel.Core.NameAndValue.class),
sourceCode,
targetCode,
directOnly);
return Utils.wrapCodeRelationship(areRelated);
} catch (Exception e) {
Utils.processException(e);
return null;
}
}
public org.LexGrid.LexBIG.DataModel.Collections.ConceptReferenceList listCodeRelationships2(org.LexGrid.LexBIG.iso21090.DataModel.cagrid.RelationshipDistanceBasedPolicy relationshipDistanceBasedPolicy) throws RemoteException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.InvalidServiceContextAccess, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBInvocationException, org.LexGrid.LexBIG.cagrid.LexEVSGridService.stubs.types.LBParameterException {
ConceptReference sourceCode = ConvertUtils.convert(relationshipDistanceBasedPolicy.getSourceConcept(), ConceptReference.class);
ConceptReference targetCode = ConvertUtils.convert(relationshipDistanceBasedPolicy.getTargetConcept(), ConceptReference.class);
int distance = relationshipDistanceBasedPolicy.getDistance().getValue();
try {
return
ConvertUtils.convert(getResourceHome().getAddressedResource().getCodedNodeGraph()
.listCodeRelationships(
sourceCode,
targetCode,
distance), org.LexGrid.LexBIG.DataModel.Collections.ConceptReferenceList.class);
} catch (Exception e) {
Utils.processException(e);
return null;
}
}
}
| |
package com.thetransactioncompany.cors;
import java.util.Iterator;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Handles incoming cross-origin (CORS) requests according to the configured
* access policy. Encapsulates the CORS processing logic as specified by the
* <a href="http://www.w3.org/TR/access-control/">W3C draft</a> from
* 2010-07-27.
*
* <p>Note that the actual CORS exception handling (which is outside the CORS
* specification scope) is left to the invoking class to implement.
*
* @author Vladimir Dzhuvinov
* @version $version$ (2011-12-02)
*/
public class CORSRequestHandler {
/**
 * The CORS filter configuration, containing details of the cross-origin
 * access policy (allowed origins, supported methods/headers, credentials).
 */
private CORSConfiguration config;
/**
 * Pre-computed comma-separated string of the CORS supported methods.
 * Presumably emitted in "Access-Control-Allow-Methods" preflight
 * responses — confirm in handlePreflightRequest.
 */
private String supportedMethods;
/**
 * Pre-computed comma-separated string of the CORS supported headers.
 * Presumably emitted in "Access-Control-Allow-Headers" preflight
 * responses — confirm in handlePreflightRequest.
 */
private String supportedHeaders;
/**
 * Pre-computed comma-separated string of the CORS exposed headers, emitted
 * in "Access-Control-Expose-Headers" actual-request responses.
 */
private String exposedHeaders;
/**
 * Creates a new CORS request handler and pre-computes the serialised
 * response header values derived from the configuration.
 *
 * @param config Specifies the cross-origin access policy.
 */
public CORSRequestHandler(final CORSConfiguration config) {
    this.config = config;
    // Serialise the configured sets once so responses can reuse the strings
    this.supportedMethods = serialize(config.supportedMethods, ", ");
    this.supportedHeaders = serialize(config.supportedHeaders, ", ");
    this.exposedHeaders = serialize(config.exposedHeaders, ", ");
}
/**
 * Serialises the items of a set into a string. Each item must have a
 * meaningful {@code toString()} method.
 *
 * @param set The set to serialise.
 * @param sep The string separator to apply.
 *
 * @return The serialised set as string (empty string for an empty set).
 */
private static String serialize(final Set set, final String sep) {
    // Use a StringBuilder instead of repeated String concatenation, which
    // was quadratic in the number of items.
    StringBuilder sb = new StringBuilder();
    Iterator it = set.iterator();
    while (it.hasNext()) {
        sb.append(it.next().toString());
        if (it.hasNext())
            sb.append(sep);
    }
    return sb.toString();
}
/**
 * Parses a header value consisting of zero or more space / comma /
 * space+comma separated strings. The input string is trimmed before
 * splitting.
 *
 * @param headerValue The header value, may be {@code null}.
 *
 * @return A string array of the parsed string items, empty if none were
 *         found or the input was {@code null}.
 */
private static String[] parseMultipleHeaderValues(final String headerValue) {
    if (headerValue == null)
        return new String[0];
    final String trimmed = headerValue.trim();
    // split on a comma (with optional surrounding whitespace) or bare whitespace
    return trimmed.isEmpty() ? new String[0] : trimmed.split("\\s*,\\s*|\\s+");
}
/**
 * Matches the list of request origins against the allowed origins. This is
 * done by invoking the configuration helper method
 * {@link CORSConfiguration#isAllowedOrigin}.
 *
 * @param requestOrigins The request origins (zero or more).
 *
 * @return The first case-insensitive match, or {@code null} if nothing
 *         matched.
 */
protected String checkOrigin(final String[] requestOrigins) {
    for (int i = 0; i < requestOrigins.length; i++) {
        final String candidate = requestOrigins[i];
        if (config.isAllowedOrigin(candidate)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Tags an HTTP servlet request to provide CORS information to downstream
 * handlers.
 *
 * <p>Tagging is provided via {@code HttpServletRequest.setAttribute()}.
 *
 * <ul>
 *     <li>{@code cors.isCorsRequest} set to {@code true} or {@code false}.
 *     <li>{@code cors.origin} set to the value of the "Origin" header,
 *         {@code null} if undefined.
 *     <li>{@code cors.requestType} set to "actual" or "preflight" (for
 *         CORS requests).
 *     <li>{@code cors.requestHeaders} set to the value of the
 *         "Access-Control-Request-Headers" or {@code null} if
 *         undefined (added for preflight CORS requests only).
 * </ul>
 *
 * @param request The servlet request to inspect and tag. Must not be
 *                {@code null}.
 */
public void tagRequest(final HttpServletRequest request) {
    final CORSRequestType type = CORSRequestType.detect(request);
    if (type == CORSRequestType.ACTUAL || type == CORSRequestType.PREFLIGHT) {
        // both CORS request flavours share the base attributes
        request.setAttribute("cors.isCorsRequest", true);
        request.setAttribute("cors.origin", request.getHeader("Origin"));
        if (type == CORSRequestType.ACTUAL) {
            request.setAttribute("cors.requestType", "actual");
        } else {
            request.setAttribute("cors.requestType", "preflight");
            request.setAttribute("cors.requestHeaders", request.getHeader("Access-Control-Request-Headers"));
        }
    } else if (type == CORSRequestType.OTHER) {
        request.setAttribute("cors.isCorsRequest", false);
    }
}
/**
 * Handles a simple or actual CORS request.
 *
 * <p>CORS specification: <a href="http://www.w3.org/TR/access-control/#resource-requests">Simple Cross-Origin Request, Actual Request, and Redirects</a>
 *
 * @param request  The HTTP request.
 * @param response The HTTP response.
 *
 * @throws InvalidCORSRequestException   If not a valid CORS simple/
 *                                       actual request.
 * @throws CORSOriginDeniedException     If the origin is not allowed.
 * @throws UnsupportedHTTPMethodException If the requested HTTP method
 *                                        is not supported by the CORS
 *                                        policy.
 */
public void handleActualRequest(final HttpServletRequest request, final HttpServletResponse response)
    throws InvalidCORSRequestException,
           CORSOriginDeniedException,
           UnsupportedHTTPMethodException {
    if (CORSRequestType.detect(request) != CORSRequestType.ACTUAL)
        throw new InvalidCORSRequestException("Invalid simple/actual CORS request");
    // Get request origins
    final String originHeader = request.getHeader("Origin");
    String[] requestOrigins = parseMultipleHeaderValues(originHeader);
    // Check origin against allow list
    String matchedOrigin = checkOrigin(requestOrigins);
    if (matchedOrigin == null)
        throw new CORSOriginDeniedException("CORS origin denied", requestOrigins);
    // Check method
    HTTPMethod method = null;
    try {
        method = HTTPMethod.valueOf(request.getMethod());
    } catch (Exception e) {
        // Parse exception
        throw new UnsupportedHTTPMethodException("Unsupported HTTP method: " + request.getMethod());
    }
    if (! config.isSupportedMethod(method))
        throw new UnsupportedHTTPMethodException("Unsupported HTTP method", method);
    // Success, append response headers. BUG FIX: echo the single matched
    // origin rather than the raw "Origin" header value, which may contain a
    // space-separated origin list that is not a valid value for the
    // Access-Control-Allow-Origin response header (browsers require a
    // single origin, especially when credentials are allowed).
    response.addHeader("Access-Control-Allow-Origin", matchedOrigin);
    if (config.supportsCredentials)
        response.addHeader("Access-Control-Allow-Credentials", "true");
    if (! exposedHeaders.isEmpty())
        response.addHeader("Access-Control-Expose-Headers", exposedHeaders);
    // Tag request
    request.setAttribute("cors.origin", originHeader);
    request.setAttribute("cors.requestType", "actual");
}
/**
 * Handles a preflight CORS request.
 *
 * <p>CORS specification: <a href="http://www.w3.org/TR/access-control/#resource-preflight-requests">Preflight Request</a>
 *
 * @param request  The HTTP request.
 * @param response The HTTP response.
 *
 * @throws InvalidCORSRequestException    If not a valid CORS preflight
 *                                        request.
 * @throws CORSOriginDeniedException      If the origin is not allowed.
 * @throws UnsupportedHTTPMethodException If the requested HTTP method is not
 *                                        supported by the CORS policy.
 * @throws UnsupportedHTTPHeaderException If the requested HTTP header is not
 *                                        supported by the CORS policy.
 */
public void handlePreflightRequest(final HttpServletRequest request, final HttpServletResponse response)
        throws InvalidCORSRequestException,
               CORSOriginDeniedException,
               UnsupportedHTTPMethodException,
               UnsupportedHTTPHeaderException {

    if (CORSRequestType.detect(request) != CORSRequestType.PREFLIGHT)
        throw new InvalidCORSRequestException("Invalid preflight CORS request");

    // Get request origins (the Origin header may carry several values)
    final String originHeader = request.getHeader("Origin");
    String[] requestOrigins = parseMultipleHeaderValues(originHeader);

    // Check origin against allow list
    String matchedOrigin = checkOrigin(requestOrigins);
    if (matchedOrigin == null)
        throw new CORSOriginDeniedException("CORS origin denied", requestOrigins);

    // Parse requested method
    // Note: method checking must be done after header parsing, see CORS spec
    String requestMethodHeader = request.getHeader("Access-Control-Request-Method");
    if (requestMethodHeader == null)
        throw new InvalidCORSRequestException("Invalid preflight CORS request: Missing Access-Control-Request-Method header");

    // Enum.valueOf throws IllegalArgumentException for an unrecognised method
    // name; catch only that instead of a blanket Exception so unrelated
    // runtime errors are not masked as "unsupported method".
    HTTPMethod requestedMethod;
    try {
        requestedMethod = HTTPMethod.valueOf(requestMethodHeader.toUpperCase());
    } catch (IllegalArgumentException e) {
        throw new UnsupportedHTTPMethodException("Unsupported HTTP method: " + requestMethodHeader);
    }

    // Parse the author (custom) request headers announced by the browser
    final String[] requestHeaderValues = parseMultipleHeaderValues(request.getHeader("Access-Control-Request-Headers"));
    final HeaderFieldName[] requestHeaders = new HeaderFieldName[requestHeaderValues.length];

    for (int i = 0; i < requestHeaders.length; i++) {
        try {
            requestHeaders[i] = new HeaderFieldName(requestHeaderValues[i]);
        } catch (IllegalArgumentException e) {
            // Invalid header name
            throw new InvalidCORSRequestException("Invalid preflight CORS request: Bad request header value");
        }
    }

    // Now, do method check
    if (! config.isSupportedMethod(requestedMethod))
        throw new UnsupportedHTTPMethodException("Unsupported HTTP method", requestedMethod);

    // Author request headers check
    for (HeaderFieldName requestHeader : requestHeaders) {
        if (! config.supportedHeaders.contains(requestHeader))
            throw new UnsupportedHTTPHeaderException("Unsupported HTTP request header", requestHeader);
    }

    // Success, append response headers. With credentials the origin must be
    // echoed back verbatim (the wildcard "*" is not permitted by the spec).
    if (config.supportsCredentials) {
        response.addHeader("Access-Control-Allow-Origin", originHeader);
        response.addHeader("Access-Control-Allow-Credentials", "true");
    }
    else {
        if (config.allowAnyOrigin)
            response.addHeader("Access-Control-Allow-Origin", "*");
        else
            response.addHeader("Access-Control-Allow-Origin", originHeader);
    }

    if (config.maxAge > 0)
        response.addHeader("Access-Control-Max-Age", Integer.toString(config.maxAge));

    response.addHeader("Access-Control-Allow-Methods", supportedMethods);

    if (! supportedHeaders.isEmpty())
        response.addHeader("Access-Control-Allow-Headers", supportedHeaders);
}
}
| |
package com.jbooktrader.platform.chart;
import com.jbooktrader.platform.optimizer.*;
import com.jbooktrader.platform.preferences.*;
import com.jbooktrader.platform.strategy.*;
import com.jbooktrader.platform.util.ui.*;
import org.jfree.chart.*;
import org.jfree.chart.axis.*;
import org.jfree.chart.block.*;
import org.jfree.chart.plot.*;
import org.jfree.chart.renderer.*;
import org.jfree.chart.renderer.xy.*;
import org.jfree.chart.title.*;
import org.jfree.data.xy.*;
import org.jfree.ui.*;
import javax.swing.*;
import javax.swing.border.*;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
import static com.jbooktrader.platform.preferences.JBTPreferences.*;
/**
 * Contour plot of optimization results.
 *
 * <p>Plots two selected strategy parameters on the X/Y axes and colors each
 * cell by the selected performance metric (best or worst case when several
 * results share the same parameter pair).
 *
 * @author Eugene Kononov
 */
public class OptimizationMap {
    private final PreferencesHolder prefs;
    private final PerformanceMetric sortPerformanceMetric;
    private final Strategy strategy;
    private final JDialog parent;
    private final List<OptimizationResult> optimizationResults;
    private JFreeChart chart;
    private JComboBox<String> horizontalCombo, verticalCombo;
    // Typed combo boxes (previously declared as raw JComboBox)
    private JComboBox<String> caseCombo, colorMapCombo;
    // Observed range of the selected metric, used by the paint scales
    private double min, max;
    private ChartPanel chartPanel;

    public OptimizationMap(JDialog parent, Strategy strategy, List<OptimizationResult> optimizationResults,
                           PerformanceMetric sortPerformanceMetric) {
        prefs = PreferencesHolder.getInstance();
        this.parent = parent;
        this.strategy = strategy;
        this.optimizationResults = optimizationResults;
        this.sortPerformanceMetric = sortPerformanceMetric;
        chart = createChart();
    }

    /**
     * Builds the modal dialog containing the map, the option combo boxes,
     * and the listeners that redraw the chart when an option changes.
     */
    public JDialog getChartFrame() {
        final JDialog chartFrame = new JDialog(parent);
        chartFrame.setTitle("Optimization Map - " + strategy);
        chartFrame.setModal(true);

        JPanel northPanel = new JPanel(new SpringLayout());
        JPanel centerPanel = new JPanel(new SpringLayout());
        JPanel chartOptionsPanel = new JPanel(new SpringLayout());

        Border etchedBorder = BorderFactory.createEtchedBorder(EtchedBorder.LOWERED);
        TitledBorder border = BorderFactory.createTitledBorder(etchedBorder, "Optimization Map Options");
        border.setTitlePosition(TitledBorder.TOP);
        chartOptionsPanel.setBorder(border);

        JLabel horizontalLabel = new JLabel("Horizontal:", SwingConstants.TRAILING);
        horizontalCombo = new JComboBox<>();
        horizontalLabel.setLabelFor(horizontalCombo);

        JLabel verticalLabel = new JLabel("Vertical:", SwingConstants.TRAILING);
        verticalCombo = new JComboBox<>();
        verticalLabel.setLabelFor(verticalCombo);

        // Populate both axis selectors with all strategy parameter names
        StrategyParams params = optimizationResults.get(0).getParams();
        for (StrategyParam param : params.getAll()) {
            horizontalCombo.addItem(param.getName());
            verticalCombo.addItem(param.getName());
        }
        horizontalCombo.setSelectedIndex(0);
        verticalCombo.setSelectedIndex(1);

        JLabel caseLabel = new JLabel("Case:", SwingConstants.TRAILING);
        caseCombo = new JComboBox<>(new String[]{"Best", "Worst"});
        caseCombo.setSelectedIndex(0);
        caseLabel.setLabelFor(caseCombo);

        JLabel colorMapLabel = new JLabel("Color map:", SwingConstants.TRAILING);
        colorMapCombo = new JComboBox<>(new String[]{"Heat", "Gray"});
        colorMapLabel.setLabelFor(colorMapCombo);

        chartOptionsPanel.add(horizontalLabel);
        chartOptionsPanel.add(horizontalCombo);
        chartOptionsPanel.add(verticalLabel);
        chartOptionsPanel.add(verticalCombo);
        chartOptionsPanel.add(caseLabel);
        chartOptionsPanel.add(caseCombo);
        chartOptionsPanel.add(colorMapLabel);
        chartOptionsPanel.add(colorMapCombo);
        SpringUtilities.makeOneLineGrid(chartOptionsPanel);

        northPanel.add(chartOptionsPanel);
        SpringUtilities.makeTopOneLineGrid(northPanel);

        chartPanel = new ChartPanel(chart);
        TitledBorder chartBorder = BorderFactory.createTitledBorder(etchedBorder, "Optimization Map");
        chartBorder.setTitlePosition(TitledBorder.TOP);
        chartPanel.setBorder(chartBorder);
        centerPanel.add(chartPanel);
        SpringUtilities.makeOneLineGrid(centerPanel);

        int chartWidth = prefs.getInt(OptimizationMapWidth);
        int chartHeight = prefs.getInt(OptimizationMapHeight);

        // Persist the window size when the user closes the dialog.
        // NOTE(review): setDefaultCloseOperation is invoked here (kept from
        // the original); windowClosing fires before the default close action,
        // so disposal still takes effect — confirm this is intentional.
        chartFrame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent e) {
                prefs.set(OptimizationMapWidth, chartFrame.getWidth());
                prefs.set(OptimizationMapHeight, chartFrame.getHeight());
                chartFrame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
            }
        });

        // A single shared listener replaces four identical anonymous classes
        ActionListener repaintListener = new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                repaint();
            }
        };
        horizontalCombo.addActionListener(repaintListener);
        verticalCombo.addActionListener(repaintListener);
        caseCombo.addActionListener(repaintListener);
        colorMapCombo.addActionListener(repaintListener);

        repaint();

        chartFrame.getContentPane().add(northPanel, BorderLayout.NORTH);
        chartFrame.getContentPane().add(centerPanel, BorderLayout.CENTER);
        chartFrame.getContentPane().setMinimumSize(new Dimension(720, 550));
        chartFrame.pack();
        chartFrame.setSize(chartWidth, chartHeight);
        chartFrame.setLocationRelativeTo(null);
        return chartFrame;
    }

    /** Rebuilds the chart from the current combo-box selections. */
    private void repaint() {
        chart = createChart();
        chartPanel.setChart(chart);
    }

    /** Returns the sort metric value for one optimization result. */
    private double getMetric(OptimizationResult optimizationResult) {
        return optimizationResult.get(sortPerformanceMetric);
    }

    /**
     * Builds the (x, y, z) dataset from the optimization results.
     * When several results map to the same (x, y) cell, keeps the best or
     * worst z depending on the "Case" selection. Also records the observed
     * min/max of z for the paint scales.
     */
    private XYZDataset createOptimizationDataset() {
        int size = optimizationResults.size();
        double[] x = new double[size];
        double[] y = new double[size];
        double[] z = new double[size];

        Map<String, Double> values = new HashMap<>();
        int xParameterIndex = (horizontalCombo == null) ? 0 : horizontalCombo.getSelectedIndex();
        int yParameterIndex = (verticalCombo == null) ? 1 : verticalCombo.getSelectedIndex();
        int selectedCase = (caseCombo == null) ? 0 : caseCombo.getSelectedIndex();

        // Guard against an empty result list (would otherwise throw on get(0))
        min = max = (size == 0) ? 0 : getMetric(optimizationResults.get(0));

        int index = 0;
        synchronized (optimizationResults) {
            for (OptimizationResult optimizationResult : optimizationResults) {
                StrategyParams params = optimizationResult.getParams();
                x[index] = params.get(xParameterIndex).getValue();
                y[index] = params.get(yParameterIndex).getValue();
                z[index] = getMetric(optimizationResult);

                // Collapse duplicate (x, y) cells into best or worst case
                String key = x[index] + "," + y[index];
                Double value = values.get(key);
                if (value != null) {
                    if (selectedCase == 0) {
                        z[index] = Math.max(value, z[index]);
                    } else if (selectedCase == 1) {
                        z[index] = Math.min(value, z[index]);
                    }
                }
                values.put(key, z[index]);

                min = Math.min(min, z[index]);
                max = Math.max(max, z[index]);
                index++;
            }
        }

        DefaultXYZDataset dataset = new DefaultXYZDataset();
        dataset.addSeries("optimization", new double[][]{x, y, z});
        return dataset;
    }

    /** Creates the block-rendered contour chart with a paint-scale legend. */
    private JFreeChart createChart() {
        XYZDataset dataset = createOptimizationDataset();

        NumberAxis xAxis = new NumberAxis();
        xAxis.setAutoRangeIncludesZero(false);
        NumberAxis yAxis = new NumberAxis();
        yAxis.setAutoRangeIncludesZero(false);
        xAxis.setLabel(horizontalCombo == null ? null : (String) horizontalCombo.getSelectedItem());
        yAxis.setLabel(verticalCombo == null ? null : (String) verticalCombo.getSelectedItem());

        XYBlockRenderer renderer = new XYBlockRenderer();
        int paintScaleIndex = (colorMapCombo == null) ? 0 : colorMapCombo.getSelectedIndex();
        PaintScale paintScale = (paintScaleIndex == 0) ? new HeatPaintScale() : new GrayPaintScale();
        renderer.setPaintScale(paintScale);

        XYPlot plot = new XYPlot(dataset, xAxis, yAxis, renderer);
        chart = new JFreeChart(plot);
        chart.removeLegend();
        chart.getPlot().setOutlineStroke(new BasicStroke(1.0f));

        NumberAxis scaleAxis = new NumberAxis(sortPerformanceMetric.getName());
        scaleAxis.setRange(min, max);
        PaintScaleLegend legend = new PaintScaleLegend(paintScale, scaleAxis);
        legend.setFrame(new BlockBorder(Color.GRAY));
        legend.setPadding(new RectangleInsets(5, 5, 5, 5));
        legend.setMargin(new RectangleInsets(4, 6, 40, 6));
        legend.setPosition(RectangleEdge.RIGHT);
        chart.addSubtitle(legend);

        return chart;
    }

    /** Base paint scale bound to the dataset's observed metric range. */
    private abstract class PaintScaleAdapter implements PaintScale {
        public double getUpperBound() {
            return max;
        }

        public double getLowerBound() {
            return min;
        }

        /**
         * Maps a metric value into [0, 1]. Returns 0 when all metric values
         * are identical (max == min) to avoid 0/0 = NaN arithmetic that
         * previously produced undefined colors.
         */
        final double normalize(double value) {
            return (max == min) ? 0 : (value - min) / (max - min);
        }
    }

    /** Blue-to-red heat map: low values blue, high values red. */
    private class HeatPaintScale extends PaintScaleAdapter {
        public Paint getPaint(double value) {
            double normalizedValue = normalize(value);
            // Saturation dips toward the middle of the range, floor of 0.1
            double saturation = Math.max(0.1, Math.abs(2 * normalizedValue - 1));
            double red = 0;
            double blue = 0.7;
            double hue = blue - normalizedValue * (blue - red);
            return Color.getHSBColor((float) hue, (float) saturation, 1);
        }
    }

    /** Gray scale: low values white, high values black. */
    public class GrayPaintScale extends PaintScaleAdapter {
        public Paint getPaint(double value) {
            int color = (int) (255 - normalize(value) * 255.0);
            // Clamp so out-of-range values can never crash the Color ctor
            color = Math.min(255, Math.max(0, color));
            return new Color(color, color, color, 255);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.atteo.xmlcombiner;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.assertj.core.api.Assertions;
import org.assertj.core.util.Lists;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import com.google.common.io.Files;
import static java.util.Collections.emptyList;
import static org.xmlunit.assertj.XmlAssert.assertThat;
public class XmlCombinerTest {
@Test
public void identity() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String content = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(content, content)).and(content).areSimilar();
}
@Test
public void mergeChildren() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void appendChildren() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1' combine.children='append'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void commentPropagation() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <!-- Service 1 -->\n"
+ " <service id='1'>\n"
+ " <!-- This comment will be removed -->\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <!-- Service 1 with different configuration -->\n"
+ " <service id='1'>\n"
+ " <!-- Changed value -->\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ " <!-- End of configuration file -->\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <!-- Service 1 with different configuration -->\n"
+ " <service id='1'>\n"
+ " <!-- Changed value -->\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ " <!-- End of configuration file -->\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void attributes() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1' parameter='parameter' parameter2='parameter2'/>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1' parameter='other value' parameter3='parameter3'/>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='1' parameter='other value' parameter2='parameter2' parameter3='parameter3'/>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void remove() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ " <service id='2' combine.self='remove'/>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1' combine.self='remove'/>\n"
+ " <service id='2'/>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='2'/>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void override() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1' combine.self='override'>\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>other value</parameter>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void multipleChildren() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter9>parameter2</parameter9>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='1'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter9>parameter2</parameter9>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void defaults() throws SAXException, IOException, ParserConfigurationException, TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1' combine.self='DEFAULTS'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter9>parameter2</parameter9>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ " <service id='2' combine.self='DEFAULTS'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='2'>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='2'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
}
@Test
public void overridable() throws SAXException, IOException, ParserConfigurationException, TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='id1' combine.self='overridable'>\n"
+ " <test/>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='id1'>\n"
+ " <test/>\n"
+ " </service>\n"
+ "</config>";
String dominant2 = "\n"
+ "<config>\n"
+ " <service id='id1'/>\n"
+ "</config>";
String dominant3 = "\n"
+ "<config>\n"
+ " <service id='id2'/>\n"
+ "</config>";
String result3 = "\n"
+ "<config>\n"
+ " <service id='id1'>\n"
+ " <test/>\n"
+ " </service>\n"
+ " <service id='id2'/>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
assertThat(combineWithIdKey(recessive, dominant2)).and(dominant2).areSimilar();
assertThat(combineWithIdKey(recessive, dominant3)).and(result3).areSimilar();
assertThat(combineWithIdKey(recessive, dominant, dominant3)).and(result3).areSimilar();
}
@Test
public void overridableByTag() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='id1' combine.self='overridable_by_tag'>\n"
+ " <test/>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='id1'>\n"
+ " <test/>\n"
+ " </service>\n"
+ "</config>";
String dominant2 = "\n"
+ "<config>\n"
+ " <service id='id1'/>\n"
+ "</config>";
String dominant3 = "\n"
+ "<config>\n"
+ " <service id='id2'/>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
assertThat(combineWithIdKey(recessive, dominant2)).and(dominant2).areSimilar();
assertThat(combineWithIdKey(recessive, dominant3)).and(dominant3).areSimilar();
}
@Test
public void subnodes() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<outer>\n"
+ " <inner>\n"
+ " content\n"
+ " </inner>\n"
+ " <inner2>\n"
+ " content2\n"
+ " </inner2>\n"
+ "</outer>";
String dominant = "\n"
+ "<outer>\n"
+ " <inner>\n"
+ " content3\n"
+ " </inner>\n"
+ "</outer>";
String result = "\n"
+ "<outer>\n"
+ " <inner>\n"
+ " content3\n"
+ " </inner>\n"
+ " <inner2>\n"
+ " content2\n"
+ " </inner2>\n"
+ "</outer>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areSimilar();
String dominant2 = "\n"
+ "<outer combine.children='APPEND'>\n"
+ " <inner>\n"
+ " content3\n"
+ " </inner>\n"
+ "</outer>";
String result2 = "\n"
+ "<outer>\n"
+ " <inner>\n"
+ " content\n"
+ " </inner>\n"
+ " <inner2>\n"
+ " content2\n"
+ " </inner2>\n"
+ " <inner>\n"
+ " content3\n"
+ " </inner>\n"
+ "</outer>";
assertThat(combineWithIdKey(recessive, dominant2)).and(result2).areSimilar();
String dominant3 = "\n"
+ "<outer combine.self='override'>\n"
+ " <inner>\n"
+ " content3\n"
+ " </inner>\n"
+ "</outer>";
String result3 = "\n"
+ "<outer>\n"
+ " <inner>\n"
+ " content3\n"
+ " </inner>\n"
+ "</outer>";
assertThat(combineWithIdKey(recessive, dominant3)).and(result3).areSimilar();
}
@Test
public void threeDocuments() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1' combine.self='DEFAULTS'>\n"
+ " <parameter>parameter</parameter>\n"
+ " </service>\n"
+ " <service id='2' combine.self='DEFAULTS'>\n"
+ " <parameter>parameter</parameter>\n"
+ " </service>\n"
+ " <service id='3'>\n"
+ " <parameter>parameter</parameter>\n"
+ " </service>\n"
+ "</config>";
String middle = "\n"
+ "<config>\n"
+ " <service id='1' combine.self='DEFAULTS'>\n"
+ " <parameter3>parameter3</parameter3>\n"
+ " </service>\n"
+ " <service id='2' combine.self='DEFAULTS'>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ " <service id='3' combine.self='DEFAULTS'>\n"
+ " <parameter2>parameter</parameter2>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='2'>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='2'>\n"
+ " <parameter>parameter</parameter>\n"
+ " <parameter2>parameter2</parameter2>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, middle, dominant)).and(result).areSimilar();
}
@Test
public void shouldWorkWithCustomKeys() throws IOException, SAXException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service name='a'>\n"
+ " <parameter>old value2</parameter>\n"
+ " </service>\n"
+ " <service name='b'>\n"
+ " <parameter>old value</parameter>\n"
+ " </service>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service name='b'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ " <service name='a'>\n"
+ " <parameter>new value2</parameter>\n"
+ " </service>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service name='a'>\n"
+ " <parameter>new value2</parameter>\n"
+ " </service>\n"
+ " <service name='b'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ "</config>";
assertThat(combineWithIdKey(recessive, dominant)).and(result).areNotIdentical();
assertThat(combineWithKey("n", recessive, dominant)).and(result).areNotIdentical();
assertThat(combineWithKey("name", recessive, dominant)).and(result).areSimilar();
}
@Test
public void shouldWorkWithCustomIdAttribute2() throws IOException, SAXException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <nested>\n"
+ " <service name='a'>\n"
+ " <parameter>old value2</parameter>\n"
+ " </service>\n"
+ " </nested>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <nested>\n"
+ " <service name='a'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ " </nested>\n"
+ "</config>";
assertThat(combineWithKey("name", recessive, dominant)).and(dominant).areSimilar();
}
@Test
public void shouldSupportManyCustomKeys() throws IOException, SAXException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <nested>\n"
+ " <service name='a'>\n"
+ " <parameter>old value2</parameter>\n"
+ " </service>\n"
+ " <service name='b' id='2'>\n"
+ " <parameter>old value2</parameter>\n"
+ " </service>\n"
+ " </nested>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <nested>\n"
+ " <service name='a' id='2'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ " <service name='b' id='2'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ " </nested>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <nested>\n"
+ " <service name='a'>\n"
+ " <parameter>old value2</parameter>\n"
+ " </service>\n"
+ " <service name='b' id='2'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ " <service name='a' id='2'>\n"
+ " <parameter>new value</parameter>\n"
+ " </service>\n"
+ " </nested>\n"
+ "</config>";
assertThat(combineWithKeys(Lists.newArrayList("name", "id"), recessive, dominant)).and(result).areSimilar();
}
@Test
public void shouldAllowToSpecifyKeys() throws IOException, SAXException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <nested combine.keys='id'>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <nested>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " </nested>\n"
+ " </nested>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <nested>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <nested combine.keys='name'>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " </nested>\n"
+ " </nested>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <nested>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <nested>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ " </nested>\n"
+ " </nested>\n"
+ " <service id='1'/>\n"
+ " <service id='2'/>\n"
+ "</config>";
assertThat(combineWithKey("", recessive, dominant)).and(result).areSimilar();
}
@Test
public void shouldAllowToSpecifyArtificialKey() throws IOException, SAXException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service combine.id='1' name='a'/>\n"
+ " <service combine.id='2' name='b'/>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service combine.id='1' name='c'/>\n"
+ " <service combine.id='3' name='d'/>\n"
+ "</config>";
String result = "\n"
+ "<config>\n"
+ " <service name='c'/>\n"
+ " <service name='b'/>\n"
+ " <service name='d'/>\n"
+ "</config>";
assertThat(combineWithKey("", recessive, dominant)).and(result).areSimilar();
}
@Test
public void shouldSupportFilters() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "\n"
+ "<config>\n"
+ " <service combine.id='1' value='1'/>\n"
+ " <service combine.id='2' value='2'/>\n"
+ "</config>";
String dominant = "\n"
+ "<config>\n"
+ " <service combine.id='1' value='10'/>\n"
+ " <service combine.id='3' value='20'/>\n"
+ "</config>";
String result = "\n"
+ "<config processed='true'>\n"
+ " <service value='11' processed='true'/>\n"
+ " <service value='2' processed='true'/>\n"
+ " <service value='20' processed='true'/>\n"
+ "</config>";
XmlCombiner.Filter filter = (recessive1, dominant1, result1) -> {
result1.setAttribute("processed", "true");
if (recessive1 == null || dominant1 == null) {
return;
}
Attr recessiveNode = recessive1.getAttributeNode("value");
Attr dominantNode = dominant1.getAttributeNode("value");
if (recessiveNode == null || dominantNode == null) {
return;
}
int recessiveValue = Integer.parseInt(recessiveNode.getValue());
int dominantValue = Integer.parseInt(dominantNode.getValue());
result1.setAttribute("value", Integer.toString(recessiveValue + dominantValue));
};
assertThat(combineWithKeysAndFilter(emptyList(), filter, recessive, dominant)).and(result).areSimilar();
}
@Test
public void shouldSupportReadingAndStoringFiles() throws IOException, ParserConfigurationException, SAXException,
TransformerException {
// given
Path input = Paths.get("target/test.in");
Path output = Paths.get("target/test.out");
Files.asCharSink(input.toFile(), StandardCharsets.UTF_8).write("<config/>");
// when
XmlCombiner combiner = new XmlCombiner();
combiner.combine(input);
combiner.buildDocument(output);
List<String> lines = Files.asCharSource(output.toFile(), StandardCharsets.UTF_8).readLines();
// then
Assertions.assertThat(lines).hasSize(1);
Assertions.assertThat(lines.iterator().next()).contains("<config/>");
}
@Test
public void shouldRemoveRootElement() throws SAXException, IOException, ParserConfigurationException,
TransformerException {
String recessive = "<config/>";
String dominant = "<config combine.self=\"REMOVE\"/>";
String result = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>";
Assertions.assertThat(combineWithIdKey(recessive, dominant)).isEqualTo(result);
}
private static String combineWithIdKey(String... inputs) throws IOException,
ParserConfigurationException, SAXException, TransformerException {
return combineWithKey("id", inputs);
}
/**
 * Combines the given XML documents using a single merge-key attribute.
 *
 * @param keyAttributeName attribute name used to match elements across documents
 * @param inputs XML documents as strings, ordered from most recessive to most dominant
 * @return the combined document serialized to a string
 */
private static String combineWithKey(String keyAttributeName, String... inputs) throws IOException,
        ParserConfigurationException, SAXException, TransformerException {
    return combineWithKeys(Lists.newArrayList(keyAttributeName), inputs);
}
/**
 * Parses each input string into a DOM document, merges them in order with
 * {@link XmlCombiner}, and serializes the combined result back to a string.
 */
private static String combineWithKeys(List<String> keyAttributeNames, String... inputs) throws IOException,
        ParserConfigurationException, SAXException, TransformerException {
    XmlCombiner combiner = new XmlCombiner(keyAttributeNames);
    DocumentBuilder parser = DocumentBuilderFactory.newInstance().newDocumentBuilder();
    // Feed every document to the combiner; later documents dominate earlier ones.
    for (String xml : inputs) {
        combiner.combine(parser.parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))));
    }
    // Serialize the merged DOM to text using the default identity transform.
    StringWriter out = new StringWriter();
    Transformer identity = TransformerFactory.newInstance().newTransformer();
    identity.transform(new DOMSource(combiner.buildDocument()), new StreamResult(out));
    return out.toString();
}
/**
 * Same as {@code combineWithKeys}, but installs the supplied filter on the
 * combiner before merging the input documents.
 */
private static String combineWithKeysAndFilter(List<String> keyAttributeNames, XmlCombiner.Filter filter,
        String... inputs) throws IOException, ParserConfigurationException, SAXException, TransformerException {
    XmlCombiner combiner = new XmlCombiner(keyAttributeNames);
    combiner.setFilter(filter);
    DocumentBuilder parser = DocumentBuilderFactory.newInstance().newDocumentBuilder();
    // Merge documents in order; later documents dominate earlier ones.
    for (String xml : inputs) {
        combiner.combine(parser.parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))));
    }
    // Serialize the merged DOM using the default identity transform.
    StringWriter out = new StringWriter();
    Transformer identity = TransformerFactory.newInstance().newTransformer();
    identity.transform(new DOMSource(combiner.buildDocument()), new StreamResult(out));
    return out.toString();
}
}
| |
package org.pesc.cds.networkserver.controller;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.fluent.Form;
import org.apache.http.client.fluent.Request;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.pesc.cds.networkserver.domain.Transaction;
import org.pesc.cds.networkserver.service.DatasourceManagerUtil;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
@Controller
public class transferController {

    private static final Log log = LogFactory.getLog(transferController.class);

    /**
     * sendFile REST endpoint<p>
     * Records an outbound transfer transaction and forwards the uploaded file to
     * the receiving network server via a multipart POST. Success/failure is
     * reported to the UI through redirect attributes.
     *
     * @param recipientId     id of the receiving organization (the transfer target)
     * @param file            <code>MultipartFile (required)</code> — the file to transfer
     * @param networkServerId id of sending network server
     * @param senderId        id of sending organization; defaults to {@code networkServerId} when absent
     * @param fileFormat      compliant file format
     * @param fileSize        declared file size in bytes (<code>Long</code>, unused; actual size is taken from the upload)
     * @param webServiceUrl   receiving network server's endpoint (<code>String</code>)
     * @param redir           redirect attributes used to report transfer status
     * @return redirect to the transfer page
     */
    @RequestMapping(value="/sendFile", method=RequestMethod.POST)
    public ModelAndView sendFile(
            @RequestParam(value="recipientId", required=true) Integer recipientId,
            @RequestParam(value="file") MultipartFile file,
            @RequestParam(value="networkServerId", required=true) Integer networkServerId,
            @RequestParam(value="senderId") Integer senderId,
            @RequestParam(value="fileFormat", required=true) String fileFormat,
            @RequestParam(value="fileSize", defaultValue="0") Long fileSize,
            @RequestParam(value="webServiceUrl", required=true) String webServiceUrl,
            RedirectAttributes redir
    ) {
        ModelAndView mav = new ModelAndView("redirect:/transfer");

        // Guard clause: nothing to do without an uploaded file.
        if (file.isEmpty()) {
            redir.addAttribute("error", true);
            redir.addAttribute("status", "missing file");
            return mav;
        }

        try {
            // Persist the outbound transaction first so the generated id can be
            // sent along with the file to the remote server.
            Transaction tx = new Transaction();
            tx.setRecipientId(recipientId);
            tx.setNetworkServerId(networkServerId);
            // Default the sender to the network server when no explicit sender is given.
            tx.setSenderId(senderId == null ? networkServerId : senderId);
            tx.setFileFormat(fileFormat);
            tx.setFileSize(file.getSize());
            tx.setDirection("SEND");
            tx.setSent(new Timestamp(Calendar.getInstance().getTimeInMillis()));

            Transaction savedTx = DatasourceManagerUtil.getTransactions().save(tx);
            log.debug(String.format(
                "saved Transaction: {%n recipientId: %s,%n networkServerId: %s,%n senderId: %s,%n fileFormat: %s%n}",
                savedTx.getRecipientId(),
                savedTx.getNetworkServerId(),
                savedTx.getSenderId(),
                savedTx.getFileFormat()
            ));

            redir.addAttribute("error", false);
            redir.addAttribute("status", "upload successful");

            // Forward the file to the receiving network server.
            CloseableHttpClient client = HttpClients.createDefault();
            try {
                HttpPost post = new HttpPost(webServiceUrl);
                HttpEntity reqEntity = MultipartEntityBuilder.create()
                    .addPart("recipientId", new StringBody(DatasourceManagerUtil.getIdentification().getId().toString()))
                    .addPart("networkServerId", new StringBody(recipientId.toString()))
                    .addPart("fileFormat", new StringBody(fileFormat))
                    .addPart("transactionId", new StringBody(tx.getId().toString()))
                    .addPart("webServiceUrl", new StringBody(DatasourceManagerUtil.getIdentification().getWebServiceUrl()))
                    // BUG FIX: a Spring MultipartFile is not a java.io.File, so the
                    // previous `new FileBody((File)file)` always threw
                    // ClassCastException. Send the uploaded bytes directly instead.
                    .addBinaryBody("file", file.getBytes(), ContentType.DEFAULT_BINARY, file.getOriginalFilename())
                    .build();
                post.setEntity(reqEntity);
                client.execute(post);
            } finally {
                client.close();
            }
        } catch (Exception e) {
            redir.addAttribute("error", true);
            redir.addAttribute("status", String.format("upload failed: %s", e.getMessage()));
        }

        return mav;
    }

    /**
     * When another network server sends a file.
     * Records the inbound transaction, stores the file under the configured
     * directory, and posts an acknowledgement back to the sender.
     *
     * @param recipientId In this case this is the network server that we need to send the response to
     * @param networkServerId id of this (receiving) network server
     * @param file The transferred file
     * @param fileFormat The expected format of the file
     * @param transactionId This is the identifier of the transaction record from the sending network server, we send it back
     * @param webServiceUrl This is the url to the network server that we will send the response back to
     */
    @RequestMapping(value="/receiveFile", method=RequestMethod.POST)
    public void receiveFile(
            @RequestParam(value="recipientId", required=true) Integer recipientId,
            @RequestParam(value="networkServerId", required=true) Integer networkServerId,
            @RequestParam(value="file") MultipartFile file,
            @RequestParam(value="fileFormat", required=true) String fileFormat,
            @RequestParam(value="transactionId", required=true) Integer transactionId,
            @RequestParam(value="webServiceUrl", required=true) String webServiceUrl
    ) {
        // BUG FIX: the message was previously passed through String.format()
        // without any format specifiers; a stray '%' would have thrown
        // IllegalFormatException. Plain concatenation is correct here.
        log.debug("received file from network server " + recipientId);

        // Record the inbound transaction first so its generated id can be used
        // as the storage sub-directory.
        Transaction tx = new Transaction();
        tx.setRecipientId(networkServerId);
        tx.setNetworkServerId(recipientId);
        tx.setFileFormat(fileFormat);
        tx.setFileSize(file.getSize());
        tx.setDirection("RECEIVE");
        tx.setReceived(new Timestamp(Calendar.getInstance().getTimeInMillis()));
        tx.setStatus(true);
        DatasourceManagerUtil.getTransactions().save(tx);

        // Save file under <configured path>/<transaction id>/<original file name>.
        Path path = Paths.get(String.format("%s/%s/%s",
            DatasourceManagerUtil.getSystemProperties().getFilePath(), tx.getId(), file.getOriginalFilename()));

        try {
            File dest = new File(path.toString());
            File dir = dest.getParentFile();

            if (!dir.exists() && !dir.mkdirs()) {
                tx.setError("Could not create directory: " + dir);
            } else if (!dest.createNewFile()) {
                tx.setError(String.format("file %s already exists", file.getOriginalFilename()));
            } else {
                tx.setFilePath(dest.getPath());
                // FIX: try-with-resources — the stream previously leaked when
                // write() threw, since close() was not in a finally block.
                try (BufferedOutputStream stream = new BufferedOutputStream(new FileOutputStream(dest))) {
                    stream.write(file.getBytes());
                }
            }
        } catch (Exception ex) {
            // Any failure (I/O or otherwise) is recorded on the transaction.
            tx.setError(ex.getMessage());
        }

        DatasourceManagerUtil.getTransactions().save(tx);

        // Acknowledge receipt to the sending network server, echoing back its
        // transaction id. ClientProtocolException is an IOException, so one
        // catch suffices; log instead of printStackTrace().
        try {
            Request.Post(webServiceUrl)
                .bodyForm(Form.form().add("transactionId", transactionId.toString()).build())
                .execute().returnContent();
        } catch (IOException e) {
            log.error("Failed to send receive acknowledgement to " + webServiceUrl, e);
        }
    }

    /**
     * Acknowledgement endpoint: marks the previously-sent transaction
     * identified by {@code transactionId} as received. Unknown ids are ignored.
     *
     * @param transactionId id of the transaction to mark received
     */
    @RequestMapping(value="/response", method=RequestMethod.POST)
    public void acceptResponse(@RequestParam(value="transactionId", required=true) Integer transactionId) {
        Transaction tx = DatasourceManagerUtil.getTransactions().byId(transactionId);
        if (tx != null) {
            tx.setStatus(true);
            tx.setReceived(new Timestamp(Calendar.getInstance().getTimeInMillis()));
            DatasourceManagerUtil.getTransactions().save(tx);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.managers.eventstorage;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.events.DiscoveryEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.events.EventType;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.GridTopic;
import org.apache.ignite.internal.IgniteDeploymentCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.events.DiscoveryCustomEvent;
import org.apache.ignite.internal.managers.GridManagerAdapter;
import org.apache.ignite.internal.managers.communication.GridIoManager;
import org.apache.ignite.internal.managers.communication.GridMessageListener;
import org.apache.ignite.internal.managers.deployment.GridDeployment;
import org.apache.ignite.internal.processors.platform.PlatformEventFilterListener;
import org.apache.ignite.internal.util.GridConcurrentLinkedHashSet;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.GPR;
import org.apache.ignite.internal.util.typedef.internal.LT;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.marshaller.Marshaller;
import org.apache.ignite.plugin.security.SecurityPermission;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.eventstorage.EventStorageSpi;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static org.apache.ignite.events.EventType.EVTS_ALL;
import static org.apache.ignite.events.EventType.EVTS_DISCOVERY_ALL;
import static org.apache.ignite.events.EventType.EVT_NODE_FAILED;
import static org.apache.ignite.events.EventType.EVT_NODE_LEFT;
import static org.apache.ignite.events.EventType.EVT_NODE_METRICS_UPDATED;
import static org.apache.ignite.internal.GridTopic.TOPIC_EVENT;
import static org.apache.ignite.internal.managers.communication.GridIoPolicy.PUBLIC_POOL;
/**
* Grid event storage SPI manager.
*/
public class GridEventStorageManager extends GridManagerAdapter<EventStorageSpi> {
/** Local event listeners. */
private final ConcurrentMap<Integer, Set<GridLocalEventListener>> lsnrs = new ConcurrentHashMap8<>();
/** Busy lock to control activity of threads. */
private final ReadWriteLock busyLock = new ReentrantReadWriteLock();
/** Is local node daemon? */
private final boolean isDaemon;
/** Recordable events arrays length. */
private final int len;
/** Marshaller. */
private final Marshaller marsh;
/** Request listener. */
private RequestListener msgLsnr;
/** Events types enabled in configuration. */
private final int[] cfgInclEvtTypes;
/** Events of these types should be recorded. */
private volatile int[] inclEvtTypes;
/**
* Maps event type to boolean ({@code true} for recordable events).
* This array is used for listeners notification. It may be wider,
* than {@link #userRecordableEvts} since it always contain internal
* events which are required for system.
*/
private volatile boolean[] recordableEvts;
/**
* Maps user recordable event type to boolean ({@code true} for recordable events).
* This array is used for event recording with configured SPI. It may contain
* less elements, than {@link #recordableEvts}, since it contains only those
* events which are intended to be recorded with configured SPI.
*/
private volatile boolean[] userRecordableEvts;
/**
 * Builds the manager from the grid configuration: validates and compacts the
 * configured include-event types, then precomputes the recordable and
 * user-recordable boolean lookup arrays indexed directly by event type.
 *
 * @param ctx Kernal context.
 */
public GridEventStorageManager(GridKernalContext ctx) {
    super(ctx, ctx.config().getEventStorageSpi());
    marsh = ctx.config().getMarshaller();
    isDaemon = ctx.isDaemon();
    int[] cfgInclEvtTypes0 = ctx.config().getIncludeEventTypes();
    if (F.isEmpty(cfgInclEvtTypes0))
        cfgInclEvtTypes = U.EMPTY_INTS;
    else {
        // Defensive copy: sorting must not mutate the user-supplied configuration array.
        cfgInclEvtTypes0 = copy(cfgInclEvtTypes0);
        Arrays.sort(cfgInclEvtTypes0);
        // The array is sorted, so a negative type (if any) is at index 0.
        if (cfgInclEvtTypes0[0] < 0)
            throw new IllegalArgumentException("Invalid event type: " + cfgInclEvtTypes0[0]);
        cfgInclEvtTypes = compact(cfgInclEvtTypes0, cfgInclEvtTypes0.length);
    }
    // Initialize recordable events arrays.
    int maxIdx = 0;
    for (int type : EVTS_ALL) {
        if (type > maxIdx)
            maxIdx = type;
    }
    // Javadoc to GridEventType states that all types in range from 1 to 1000
    // are reserved for internal Ignite events.
    assert maxIdx <= 1000 : "Invalid max index: " + maxIdx;
    // We don't want to pre-process passed in types,
    // but use them directly as indexes.
    // So, we need to allocate bigger array.
    len = maxIdx + 1;
    boolean[] recordableEvts = new boolean[len];
    boolean[] userRecordableEvts = new boolean[len];
    Collection<Integer> inclEvtTypes0 = new HashSet<>(U.toIntList(cfgInclEvtTypes));
    // Internal events are always "recordable" for notification
    // purposes (regardless of whether they were enabled or disabled).
    // However, won't be sent down to SPI level if user specifically excluded them.
    for (int type : EVTS_ALL) {
        boolean userRecordable = inclEvtTypes0.remove(type);
        if (userRecordable)
            userRecordableEvts[type] = true;
        // Internal event or user recordable event.
        if (isInternalEvent(type) || userRecordable)
            recordableEvts[type] = true;
        if (log.isDebugEnabled())
            log.debug("Event recordable status [type=" + U.gridEventName(type) +
                ", recordable=" + recordableEvts[type] +
                ", userRecordable=" + userRecordableEvts[type] + ']');
    }
    this.recordableEvts = recordableEvts;
    this.userRecordableEvts = userRecordableEvts;
    // Whatever remains in the set are types outside the precomputed range;
    // keep them as a sorted array so they can be checked via binary search.
    int[] inclEvtTypes = U.toIntArray(inclEvtTypes0);
    Arrays.sort(inclEvtTypes);
    this.inclEvtTypes = inclEvtTypes;
}
/** {@inheritDoc} */
@Override public void printMemoryStats() {
    // Count listeners across all per-type sets.
    int total = 0;
    for (Set<GridLocalEventListener> set : lsnrs.values())
        total += set.size();
    X.println(">>>");
    X.println(">>> Event storage manager memory stats [grid=" + ctx.gridName() + ']');
    X.println(">>> Total listeners: " + total);
    X.println(">>> Recordable events size: " + recordableEvts.length);
    X.println(">>> User recordable events size: " + userRecordableEvts.length);
}
/**
 * Enters busy state in which manager cannot be stopped.
 *
 * @return {@code true} if entered to busy state.
 */
private boolean enterBusy() {
    // Read lock: many worker threads may be busy concurrently; stop acquires
    // the write lock, so tryLock() fails once shutdown has begun.
    return busyLock.readLock().tryLock();
}
/**
 * Leaves busy state. Must be called exactly once per successful
 * {@link #enterBusy()}, typically from a {@code finally} block.
 */
private void leaveBusy() {
    busyLock.readLock().unlock();
}
/** {@inheritDoc} */
@SuppressWarnings({"LockAcquiredButNotSafelyReleased"})
@Override public void onKernalStop0(boolean cancel) {
    // Acquire write lock so that any new thread could not be started.
    // NOTE: the lock is intentionally never released (see the suppression above) —
    // after kernal stop no thread may re-enter busy state.
    busyLock.writeLock().lock();
    if (msgLsnr != null)
        ctx.io().removeMessageListener(TOPIC_EVENT, msgLsnr);
    msgLsnr = null;
    lsnrs.clear();
}
/** {@inheritDoc} */
@Override public void stop(boolean cancel) throws IgniteCheckedException {
    // Stop the underlying event storage SPI; listeners were already cleared
    // in onKernalStop0().
    stopSpi();
    if (log.isDebugEnabled())
        log.debug(stopInfo());
}
/** {@inheritDoc} */
@Override public void start() throws IgniteCheckedException {
    // Register listeners supplied via configuration before starting the SPI.
    Map<IgnitePredicate<? extends Event>, int[]> evtLsnrs = ctx.config().getLocalEventListeners();
    if (evtLsnrs != null) {
        for (IgnitePredicate<? extends Event> lsnr : evtLsnrs.keySet())
            addLocalEventListener(lsnr, evtLsnrs.get(lsnr));
    }
    startSpi();
    // Serve remote event queries arriving on the event topic.
    msgLsnr = new RequestListener();
    ctx.io().addMessageListener(TOPIC_EVENT, msgLsnr);
    if (log.isDebugEnabled())
        log.debug(startInfo());
}
/**
 * Records event if it's recordable.
 * <p>
 * Local listeners are notified when the type is recordable; the event is passed
 * down to the SPI only when it is user-recordable (or this is a daemon node)
 * and not a hidden system event.
 *
 * @param evt Event to record.
 */
public void record(Event evt) {
    assert evt != null;
    if (!enterBusy())
        return;
    try {
        int type = evt.type();
        // FIX: snapshot the recordable flag once. The backing flag array is
        // volatile and may be swapped concurrently by enableEvents() /
        // disableEvents(); reading it twice could make the warning and the
        // listener notification disagree for the same event.
        boolean recordable = isRecordable(type);
        if (!recordable) {
            LT.warn(log, "Trying to record event without checking if it is recordable: " +
                U.gridEventName(type));
        }
        // Override user recordable settings for daemon node.
        if ((isDaemon || isUserRecordable(type)) && !isHiddenEvent(type))
            try {
                getSpi().record(evt);
            }
            catch (IgniteSpiException e) {
                U.error(log, "Failed to record event: " + evt, e);
            }
        if (recordable)
            notifyListeners(evt);
    }
    finally {
        leaveBusy();
    }
}
/**
 * Gets types of enabled user-recordable events.
 *
 * @return Array of types of enabled user-recordable events.
 */
public int[] enabledEvents() {
    // Single volatile read: snapshot the flag array.
    boolean[] flags = userRecordableEvts;
    int[] enabled = new int[len];
    int cnt = 0;
    for (int t = 0; t < len; t++) {
        if (flags[t])
            enabled[cnt++] = t;
    }
    // Merge the precomputed-range types with the sorted custom include list.
    return U.unique(enabled, cnt, inclEvtTypes, inclEvtTypes.length);
}
/**
 * Enables provided events.
 *
 * @param types Events to enable.
 */
public synchronized void enableEvents(int[] types) {
    assert types != null;
    ctx.security().authorize(null, SecurityPermission.EVENTS_ENABLE, null);
    boolean[] userRecordableEvts0 = userRecordableEvts;
    boolean[] recordableEvts0 = recordableEvts;
    int[] inclEvtTypes0 = inclEvtTypes;
    // Types outside the precomputed range are collected here separately.
    int[] userTypes = new int[types.length];
    int userTypesLen = 0;
    for (int type : types) {
        if (type < len) {
            // Predefined type: flip the flags in the lookup arrays.
            userRecordableEvts0[type] = true;
            recordableEvts0[type] = true;
        }
        else
            userTypes[userTypesLen++] = type;
    }
    if (userTypesLen > 0) {
        // Custom types must stay sorted and unique for binary search.
        Arrays.sort(userTypes, 0, userTypesLen);
        userTypes = compact(userTypes, userTypesLen);
        inclEvtTypes0 = U.unique(inclEvtTypes0, inclEvtTypes0.length, userTypes, userTypesLen);
    }
    // Volatile write.
    // The below lines are intentional to ensure a volatile write is
    // made to the arrays, since they are accessed via unsynchronized blocks.
    userRecordableEvts = userRecordableEvts0;
    recordableEvts = recordableEvts0;
    inclEvtTypes = inclEvtTypes0;
}
/**
 * Disables provided events.
 * <p>
 * Types that were explicitly enabled in the grid configuration cannot be
 * disabled at runtime and are skipped with a warning.
 *
 * @param types Events to disable.
 */
@SuppressWarnings("deprecation")
public synchronized void disableEvents(int[] types) {
    assert types != null;
    ctx.security().authorize(null, SecurityPermission.EVENTS_DISABLE, null);
    boolean[] userRecordableEvts0 = userRecordableEvts;
    boolean[] recordableEvts0 = recordableEvts;
    int[] inclEvtTypes0 = inclEvtTypes;
    // Types outside the precomputed range are collected here separately.
    int[] userTypes = new int[types.length];
    int userTypesLen = 0;
    for (int type : types) {
        // Configuration-enabled types are immutable at runtime.
        if (binarySearch(cfgInclEvtTypes, type)) {
            U.warn(log, "Can't disable event since it was enabled in configuration: " + U.gridEventName(type));
            continue;
        }
        if (type < len) {
            userRecordableEvts0[type] = false;
            // Internal events stay recordable for local notification purposes.
            if (!isInternalEvent(type))
                recordableEvts0[type] = false;
        }
        else
            userTypes[userTypesLen++] = type;
    }
    if (userTypesLen > 0) {
        // Custom types must stay sorted and unique before removal.
        Arrays.sort(userTypes, 0, userTypesLen);
        userTypes = compact(userTypes, userTypesLen);
        inclEvtTypes0 = U.difference(inclEvtTypes0, inclEvtTypes0.length, userTypes, userTypesLen);
    }
    // Volatile write.
    // The below lines are intentional to ensure a volatile write is
    // made to the arrays, since they are accessed via unsynchronized blocks.
    userRecordableEvts = userRecordableEvts0;
    recordableEvts = recordableEvts0;
    inclEvtTypes = inclEvtTypes0;
}
/**
* Removes duplicates in non-decreasing array.
*
* @param arr Array.
* @param len Prefix length.
* @return Arrays with removed duplicates.
*/
private int[] compact(int[] arr, int len) {
assert arr != null;
assert U.isNonDecreasingArray(arr, len);
if (arr.length <= 1)
return U.copyIfExceeded(arr, len);
int newLen = 1;
for (int i = 1; i < len; i++) {
if (arr[i] != arr[newLen - 1])
arr[newLen++] = arr[i];
}
return U.copyIfExceeded(arr, len);
}
/**
 * Checks whether or not this event is a hidden system event.
 * <p>
 * Hidden events are NEVER sent to SPI level. They serve purpose of local
 * notification for the local node.
 *
 * @param type Event type to check.
 * @return {@code true} if this is a system hidden event.
 */
private boolean isHiddenEvent(int type) {
    // Node metric updates are dispatched to local listeners only; they are
    // filtered out before reaching the SPI in record().
    return type == EVT_NODE_METRICS_UPDATED;
}
/**
 * Checks whether or not this event is an internal event.
 * <p>
 * Internal event types are always recordable for notification purposes
 * but may not be sent down to SPI level for storage and subsequent querying.
 *
 * @param type Event type.
 * @return {@code true} if this is an internal event.
 */
private boolean isInternalEvent(int type) {
    // All discovery events (including the custom discovery event, which is not
    // part of EVTS_DISCOVERY_ALL) count as internal.
    return type == DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT || F.contains(EVTS_DISCOVERY_ALL, type);
}
/**
 * Checks if the event type is user-recordable.
 *
 * @param type Event type to check.
 * @return {@code true} if passed event should be recorded, {@code false} - otherwise.
 */
public boolean isUserRecordable(int type) {
    assert type > 0 : "Invalid event type: " + type;
    // Predefined types use the precomputed flag array; custom (out-of-range)
    // types fall back to a search of the configured include list.
    return type < len ? userRecordableEvts[type] : isUserRecordable0(type);
}
/**
 * Checks whether this event type should be recorded. Note that internal event types are
 * always recordable for notification purposes but may not be sent down to SPI level for
 * storage and subsequent querying.
 *
 * @param type Event type to check.
 * @return Whether or not this event type should be recorded.
 */
public boolean isRecordable(int type) {
    assert type > 0 : "Invalid event type: " + type;
    // Predefined types use the precomputed flag array (which includes internal
    // events); custom types are recordable only if in the include list.
    return type < len ? recordableEvts[type] : isUserRecordable0(type);
}
/**
 * Checks whether all provided events are user-recordable.
 * <p>
 * Note that this method supports only predefined Ignite events.
 *
 * @param types Event types.
 * @return Whether all events are recordable.
 * @throws IllegalArgumentException If {@code types} contains user event type.
 */
public boolean isAllUserRecordable(int[] types) {
    assert types != null;
    // Snapshot the volatile flag array once for a consistent view.
    boolean[] flags = userRecordableEvts;
    for (int t : types) {
        // Only predefined (in-range) types are supported here.
        if (t < 0 || t >= len)
            throw new IllegalArgumentException("Invalid event type: " + t);
        if (!flags[t])
            return false;
    }
    return true;
}
/**
 * Checks if the event type is user-recordable against grid configuration.
 *
 * @param type Event type to check.
 * @return {@code true} if passed event should be recorded, {@code false} - otherwise.
 */
private boolean isUserRecordable0(int type) {
    // inclEvtTypes holds the sorted custom (out-of-range) enabled types.
    return binarySearch(inclEvtTypes, type);
}
/**
 * @param arr Sorted array to search in.
 * @param val Value.
 * @return {@code True} if value has been found.
 */
private boolean binarySearch(@Nullable int[] arr, int val) {
    if (arr == null || arr.length == 0)
        return false;
    // If length is relatively small, full iteration is faster.
    if (arr.length > 128)
        return Arrays.binarySearch(arr, val) >= 0;
    for (int x : arr) {
        if (x == val)
            return true;
    }
    return false;
}
/**
 * Adds local user event listener.
 *
 * @param lsnr User listener to add.
 * @param types Event types to subscribe listener for.
 * @throws IgniteException If grid resources could not be injected into the listener.
 */
public void addLocalEventListener(IgnitePredicate<? extends Event> lsnr, int[] types) {
    try {
        // Inject grid resources into the user-provided predicate.
        ctx.resource().injectGeneric(lsnr);
    }
    catch (IgniteCheckedException e) {
        throw new IgniteException("Failed to inject resources to event listener: " + lsnr, e);
    }
    // Wrap so user predicates and internal listeners share one registry.
    addLocalEventListener(new UserListenerWrapper(lsnr), types);
}
/**
 * Adds local event listener. Note that this method specifically disallow an empty
 * array of event type to prevent accidental subscription for all system event that
 * may lead to a drastic performance decrease.
 *
 * @param lsnr Listener to add.
 * @param types Event types to subscribe listener for (must be non-empty).
 */
public void addLocalEventListener(GridLocalEventListener lsnr, int[] types) {
    assert lsnr != null;
    assert types != null;
    assert types.length > 0;
    // Silently no-op if the manager is already stopping.
    if (!enterBusy())
        return;
    try {
        for (int t : types) {
            getOrCreate(t).add(lsnr);
            // Listening on a disabled type is legal but almost certainly a mistake.
            if (!isRecordable(t))
                U.warn(log, "Added listener for disabled event type: " + U.gridEventName(t));
        }
    }
    finally {
        leaveBusy();
    }
}
/**
 * Adds local event listener.
 *
 * @param lsnr Listener to add.
 * @param type Event type to subscribe listener for.
 * @param types Additional event types to subscribe listener for.
 */
public void addLocalEventListener(GridLocalEventListener lsnr, int type, @Nullable int... types) {
    assert lsnr != null;
    // Silently no-op if the manager is already stopping.
    if (!enterBusy())
        return;
    try {
        getOrCreate(type).add(lsnr);
        // Listening on a disabled type is legal but almost certainly a mistake.
        if (!isRecordable(type))
            U.warn(log, "Added listener for disabled event type: " + U.gridEventName(type));
        if (types != null) {
            for (int t : types) {
                getOrCreate(t).add(lsnr);
                if (!isRecordable(t))
                    U.warn(log, "Added listener for disabled event type: " + U.gridEventName(t));
            }
        }
    }
    finally {
        leaveBusy();
    }
}
/**
 * Returns the listener set for the given event type, creating and publishing
 * a fresh one when absent (thread-safe via putIfAbsent).
 *
 * @param type Event type.
 * @return Listeners for given event type.
 */
private Collection<GridLocalEventListener> getOrCreate(Integer type) {
    Set<GridLocalEventListener> cur = lsnrs.get(type);
    if (cur != null)
        return cur;
    // Not present: try to install a fresh set; defer to the winner on a race.
    Set<GridLocalEventListener> created = new GridConcurrentLinkedHashSet<>();
    Set<GridLocalEventListener> raced = lsnrs.putIfAbsent(type, created);
    return raced == null ? created : raced;
}
/**
 * Removes user listener for specified events, if any. If no event types provided - it
 * removes the listener for all its registered events.
 *
 * @param lsnr User listener predicate.
 * @param types Event types.
 * @return Returns {@code true} if removed.
 */
public boolean removeLocalEventListener(IgnitePredicate<? extends Event> lsnr, @Nullable int... types) {
    // UserListenerWrapper equality delegates to the wrapped predicate, so a new
    // wrapper matches the one registered in addLocalEventListener.
    // TODO confirm: wrapper must implement equals() over the wrapped listener.
    return removeLocalEventListener(new UserListenerWrapper(lsnr), types);
}
/**
 * Removes listener for specified events, if any. If no event types provided - it
 * remove the listener for all its registered events.
 *
 * @param lsnr Listener.
 * @param types Event types.
 * @return Returns {@code true} if removed.
 */
public boolean removeLocalEventListener(GridLocalEventListener lsnr, @Nullable int... types) {
    assert lsnr != null;
    boolean found = false;
    if (F.isEmpty(types)) {
        // No types given: remove the listener from every per-type set.
        for (Set<GridLocalEventListener> set : lsnrs.values())
            if (set.remove(lsnr))
                found = true;
    }
    else {
        assert types != null;
        for (int type : types) {
            Set<GridLocalEventListener> set = lsnrs.get(type);
            if (set != null && set.remove(lsnr))
                found = true;
        }
    }
    // Platform (interop) filters receive an explicit close callback on removal.
    if (lsnr instanceof UserListenerWrapper)
    {
        IgnitePredicate p = ((UserListenerWrapper)lsnr).listener();
        if (p instanceof PlatformEventFilterListener)
            ((PlatformEventFilterListener)p).onClose();
    }
    return found;
}
/**
 * Asynchronously waits for the first event matching the optional predicate.
 *
 * @param p Optional predicate.
 * @param types Event types to wait for; all event types when empty.
 * @return Event future.
 */
public <T extends Event> IgniteInternalFuture<T> waitForEvent(@Nullable final IgnitePredicate<T> p,
    @Nullable int... types) {
    final GridFutureAdapter<T> fut = new GridFutureAdapter<>();
    addLocalEventListener(new GridLocalEventListener() {
        @Override public void onEvent(Event evt) {
            // Unchecked cast: the caller is responsible for subscribing only to
            // types compatible with T — not enforced at runtime.
            if (p == null || p.apply((T)evt)) {
                fut.onDone((T)evt);
                // One-shot: unsubscribe after the first match.
                removeLocalEventListener(this);
            }
        }
    }, F.isEmpty(types) ? EventType.EVTS_ALL : types);
    return fut;
}
/**
 * Blocks until a matching event arrives or the timeout expires.
 *
 * @param timeout Timeout.
 * @param c Optional continuation.
 * @param p Optional predicate.
 * @param types Event types to wait for.
 * @return Event.
 * @throws IgniteCheckedException Thrown in case of any errors.
 */
public Event waitForEvent(long timeout, @Nullable Runnable c,
    @Nullable final IgnitePredicate<? super Event> p, int... types) throws IgniteCheckedException {
    assert timeout >= 0;
    final GridFutureAdapter<Event> fut = new GridFutureAdapter<>();
    // Register the one-shot listener BEFORE running the continuation so the
    // event it may trigger cannot be missed.
    addLocalEventListener(new GridLocalEventListener() {
        @Override public void onEvent(Event evt) {
            if (p == null || p.apply(evt)) {
                fut.onDone(evt);
                removeLocalEventListener(this);
            }
        }
    }, types);
    try {
        if (c != null)
            c.run();
    }
    catch (Exception e) {
        throw new IgniteCheckedException(e);
    }
    return fut.get(timeout);
}
/**
 * Notifies all listeners registered for the event's type.
 *
 * @param evt Event to notify about.
 */
private void notifyListeners(Event evt) {
    assert evt != null;
    // May pass null when no listener was ever registered for this type.
    notifyListeners(lsnrs.get(evt.type()), evt);
}
/**
 * Dispatches the event to every listener in the set, logging listener
 * failures; {@link Error}s are rethrown after logging.
 *
 * @param set Set of listeners (may be {@code null} or empty).
 * @param evt Grid event.
 */
private void notifyListeners(@Nullable Collection<GridLocalEventListener> set, Event evt) {
    assert evt != null;
    if (F.isEmpty(set))
        return;
    assert set != null;
    for (GridLocalEventListener lsnr : set) {
        try {
            lsnr.onEvent(evt);
        }
        catch (Throwable e) {
            U.error(log, "Unexpected exception in listener notification for event: " + evt, e);
            // Errors must not be swallowed: rethrow after logging.
            if (e instanceof Error)
                throw (Error)e;
        }
    }
}
/**
 * Queries locally stored events via the configured SPI. Platform (interop)
 * filters are initialized with the kernal context before the query and closed
 * afterwards.
 *
 * @param p Grid event predicate.
 * @return Collection of grid events.
 */
@SuppressWarnings("unchecked")
public <T extends Event> Collection<T> localEvents(IgnitePredicate<T> p) {
    assert p != null;
    if (!(p instanceof PlatformEventFilterListener))
        return getSpi().localEvents(p);
    PlatformEventFilterListener lsnr = (PlatformEventFilterListener)p;
    lsnr.initialize(ctx);
    try {
        return (Collection<T>)getSpi().localEvents(lsnr);
    }
    finally {
        // Always release the platform filter, even on failure.
        lsnr.onClose();
    }
}
/**
 * Asynchronously queries events from remote nodes.
 *
 * @param p Grid event predicate.
 * @param nodes Collection of nodes.
 * @param timeout Maximum time to wait for result, if {@code 0}, then wait until result is received.
 * @return Future completed with the collection of matching events.
 */
public <T extends Event> IgniteInternalFuture<List<T>> remoteEventsAsync(final IgnitePredicate<T> p,
    final Collection<? extends ClusterNode> nodes, final long timeout) {
    assert p != null;
    assert nodes != null;
    final GridFutureAdapter<List<T>> fut = new GridFutureAdapter<>();
    // Run the blocking distributed query on the closure pool and complete the
    // returned future with its outcome (result or error).
    ctx.closure().runLocalSafe(new GPR() {
        @Override public void run() {
            try {
                fut.onDone(query(p, nodes, timeout));
            }
            catch (IgniteCheckedException e) {
                fut.onDone(e);
            }
        }
    }, true);
    return fut;
}
    /**
     * Synchronously queries events on remote nodes: broadcasts the marshalled
     * filter, then waits (wait/notify on {@code qryMux}) until every targeted node
     * responded, an error arrived, or the timeout elapsed.
     *
     * @param p Grid event predicate.
     * @param nodes Collection of nodes.
     * @param timeout Maximum time to wait for result, if {@code 0}, then wait until result is received.
     * @return Collection of events.
     * @throws IgniteCheckedException Thrown in case of any errors.
     */
    @SuppressWarnings({"SynchronizationOnLocalVariableOrMethodParameter", "deprecation"})
    private <T extends Event> List<T> query(IgnitePredicate<T> p, Collection<? extends ClusterNode> nodes,
        long timeout) throws IgniteCheckedException {
        assert p != null;
        assert nodes != null;

        if (nodes.isEmpty()) {
            U.warn(log, "Failed to query events for empty nodes collection.");

            return Collections.emptyList();
        }

        GridIoManager ioMgr = ctx.io();

        // Aggregated events; accessed under 'qryMux'.
        final List<T> evts = new ArrayList<>();

        // First remote failure reported, if any.
        final AtomicReference<Throwable> err = new AtomicReference<>();

        // IDs of nodes we still expect a response from; accessed under 'qryMux'.
        final Set<UUID> uids = new HashSet<>();

        final Object qryMux = new Object();

        for (ClusterNode node : nodes)
            uids.add(node.id());

        // Stop waiting for nodes that leave or fail while the query is in flight.
        GridLocalEventListener evtLsnr = new GridLocalEventListener() {
            @Override public void onEvent(Event evt) {
                assert evt instanceof DiscoveryEvent;

                synchronized (qryMux) {
                    uids.remove(((DiscoveryEvent)evt).eventNode().id());

                    if (uids.isEmpty())
                        qryMux.notifyAll();
                }
            }
        };

        // Collects per-node responses and wakes the waiting thread when complete.
        GridMessageListener resLsnr = new GridMessageListener() {
            @SuppressWarnings("deprecation")
            @Override public void onMessage(UUID nodeId, Object msg) {
                assert nodeId != null;
                assert msg != null;

                if (!(msg instanceof GridEventStorageMessage)) {
                    U.error(log, "Received unknown message: " + msg);

                    return;
                }

                GridEventStorageMessage res = (GridEventStorageMessage)msg;

                try {
                    // Unmarshal payloads outside of the mutex.
                    if (res.eventsBytes() != null)
                        res.events(U.<Collection<Event>>unmarshal(marsh, res.eventsBytes(),
                            U.resolveClassLoader(ctx.config())));

                    if (res.exceptionBytes() != null)
                        res.exception(U.<Throwable>unmarshal(marsh, res.exceptionBytes(),
                            U.resolveClassLoader(ctx.config())));
                }
                catch (IgniteCheckedException e) {
                    U.error(log, "Failed to unmarshal events query response: " + msg, e);

                    return;
                }

                synchronized (qryMux) {
                    // Only the first response per node counts; repeats are ignored.
                    if (uids.remove(nodeId)) {
                        if (res.events() != null)
                            evts.addAll((Collection<T>)res.events());
                    }
                    else
                        U.warn(log, "Received duplicate response (ignoring) [nodeId=" + nodeId +
                            ", msg=" + res + ']');

                    if (res.exception() != null)
                        err.set(res.exception());

                    if (uids.isEmpty() || err.get() != null)
                        qryMux.notifyAll();
                }
            }
        };

        // Per-query response topic keyed by local node ID.
        Object resTopic = TOPIC_EVENT.topic(IgniteUuid.fromUuid(ctx.localNodeId()));

        try {
            addLocalEventListener(evtLsnr, new int[] {
                EVT_NODE_LEFT,
                EVT_NODE_FAILED
            });

            ioMgr.addMessageListener(resTopic, resLsnr);

            byte[] serFilter = U.marshal(marsh, p);

            // Deploy the filter class so remote nodes can peer-load it.
            GridDeployment dep = ctx.deploy().deploy(p.getClass(), U.detectClassLoader(p.getClass()));

            if (dep == null)
                throw new IgniteDeploymentCheckedException("Failed to deploy event filter: " + p);

            GridEventStorageMessage msg = new GridEventStorageMessage(
                resTopic,
                serFilter,
                p.getClass().getName(),
                dep.classLoaderId(),
                dep.deployMode(),
                dep.userVersion(),
                dep.participants());

            sendMessage(nodes, TOPIC_EVENT, msg, PUBLIC_POOL);

            if (timeout == 0)
                timeout = Long.MAX_VALUE;

            long now = U.currentTimeMillis();

            // Account for overflow of long value.
            long endTime = now + timeout <= 0 ? Long.MAX_VALUE : now + timeout;

            long delta = timeout;

            Collection<UUID> uidsCp = null;

            synchronized (qryMux) {
                try {
                    // Wait until all nodes answered, an error arrived, or we timed out.
                    while (!uids.isEmpty() && err.get() == null && delta > 0) {
                        qryMux.wait(delta);

                        delta = endTime - U.currentTimeMillis();
                    }
                }
                catch (InterruptedException e) {
                    // Restore the interrupt flag before reporting failure.
                    Thread.currentThread().interrupt();

                    throw new IgniteCheckedException("Got interrupted while waiting for event query responses.", e);
                }

                if (err.get() != null)
                    throw new IgniteCheckedException("Failed to query events due to exception on remote node.", err.get());

                // Snapshot unanswered nodes to inspect outside of the mutex.
                if (!uids.isEmpty())
                    uidsCp = new LinkedList<>(uids);
            }

            // Outside of synchronization.
            if (uidsCp != null) {
                for (Iterator<UUID> iter = uidsCp.iterator(); iter.hasNext();)
                    // Ignore nodes that have left the grid.
                    if (ctx.discovery().node(iter.next()) == null)
                        iter.remove();

                if (!uidsCp.isEmpty())
                    throw new IgniteCheckedException("Failed to receive event query response from following nodes: " +
                        uidsCp);
            }
        }
        finally {
            // Always unsubscribe, even on failure paths.
            ioMgr.removeMessageListener(resTopic, resLsnr);

            removeLocalEventListener(evtLsnr);
        }

        return evts;
    }
/**
* Sends message accounting for local and remote nodes.
*
* @param nodes Nodes to receive event.
* @param topic Topic to send the message to.
* @param msg Event to be sent.
* @param plc Type of processing.
* @throws IgniteCheckedException If sending failed.
*/
private void sendMessage(Collection<? extends ClusterNode> nodes, GridTopic topic,
GridEventStorageMessage msg, byte plc) throws IgniteCheckedException {
ClusterNode locNode = F.find(nodes, null, F.localNode(ctx.localNodeId()));
Collection<? extends ClusterNode> rmtNodes = F.view(nodes, F.remoteNodes(ctx.localNodeId()));
if (locNode != null)
ctx.io().send(locNode, topic, msg, plc);
if (!rmtNodes.isEmpty()) {
msg.responseTopicBytes(U.marshal(marsh, msg.responseTopic()));
ctx.io().send(rmtNodes, topic, msg, plc);
}
}
/**
* @param arr Array.
* @return Array copy.
*/
private int[] copy(int[] arr) {
assert arr != null;
return Arrays.copyOf(arr, arr.length);
}
/**
* @param arr Array.
* @return Array copy.
*/
private boolean[] copy(boolean[] arr) {
assert arr != null;
return Arrays.copyOf(arr, arr.length);
}
/**
*
*/
private class RequestListener implements GridMessageListener {
/** {@inheritDoc} */
@Override public void onMessage(UUID nodeId, Object msg) {
assert nodeId != null;
assert msg != null;
if (!enterBusy())
return;
try {
if (!(msg instanceof GridEventStorageMessage)) {
U.warn(log, "Received unknown message: " + msg);
return;
}
GridEventStorageMessage req = (GridEventStorageMessage)msg;
ClusterNode node = ctx.discovery().node(nodeId);
if (node == null) {
U.warn(log, "Failed to resolve sender node that does not exist: " + nodeId);
return;
}
if (log.isDebugEnabled())
log.debug("Received event query request: " + req);
Throwable ex = null;
IgnitePredicate<Event> filter = null;
Collection<Event> evts;
try {
if (req.responseTopicBytes() != null)
req.responseTopic(U.unmarshal(marsh, req.responseTopicBytes(), U.resolveClassLoader(ctx.config())));
GridDeployment dep = ctx.deploy().getGlobalDeployment(
req.deploymentMode(),
req.filterClassName(),
req.filterClassName(),
req.userVersion(),
nodeId,
req.classLoaderId(),
req.loaderParticipants(),
null);
if (dep == null)
throw new IgniteDeploymentCheckedException("Failed to obtain deployment for event filter " +
"(is peer class loading turned on?): " + req);
filter = U.unmarshal(marsh, req.filter(), U.resolveClassLoader(dep.classLoader(), ctx.config()));
// Resource injection.
ctx.resource().inject(dep, dep.deployedClass(req.filterClassName()), filter);
// Get local events.
evts = localEvents(filter);
}
catch (IgniteCheckedException e) {
U.error(log, "Failed to query events [nodeId=" + nodeId + ", filter=" + filter + ']', e);
evts = Collections.emptyList();
ex = e;
}
catch (Throwable e) {
U.error(log, "Failed to query events due to user exception [nodeId=" + nodeId +
", filter=" + filter + ']', e);
evts = Collections.emptyList();
ex = e;
if (e instanceof Error)
throw (Error)e;
}
// Response message.
GridEventStorageMessage res = new GridEventStorageMessage(evts, ex);
try {
if (log.isDebugEnabled())
log.debug("Sending event query response to node [nodeId=" + nodeId + "res=" + res + ']');
if (!ctx.localNodeId().equals(nodeId)) {
res.eventsBytes(U.marshal(marsh, res.events()));
res.exceptionBytes(U.marshal(marsh, res.exception()));
}
ctx.io().send(node, req.responseTopic(), res, PUBLIC_POOL);
}
catch (IgniteCheckedException e) {
U.error(log, "Failed to send event query response to node [node=" + nodeId + ", res=" +
res + ']', e);
}
}
finally {
leaveBusy();
}
}
}
/**
* Wraps user listener predicate provided via {@link org.apache.ignite.IgniteEvents#localListen(org.apache.ignite.lang.IgnitePredicate, int...)}.
*/
private class UserListenerWrapper implements GridLocalEventListener {
/** */
private final IgnitePredicate<Event> lsnr;
/**
* @param lsnr User listener predicate.
*/
private UserListenerWrapper(IgnitePredicate<? extends Event> lsnr) {
this.lsnr = (IgnitePredicate<Event>)lsnr;
}
/**
* @return User listener.
*/
private IgnitePredicate<? extends Event> listener() {
return lsnr;
}
/** {@inheritDoc} */
@Override public void onEvent(Event evt) {
if (!lsnr.apply(evt))
removeLocalEventListener(this);
}
/** {@inheritDoc} */
@Override public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
UserListenerWrapper that = (UserListenerWrapper)o;
return lsnr.equals(that.lsnr);
}
/** {@inheritDoc} */
@Override public int hashCode() {
return lsnr.hashCode();
}
}
}
| |
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.image.metadata;
import static com.lightcrafts.image.metadata.IPTCConstants.*;
/**
* An <code>IPTCTags</code> defines the constants used for IPTC metadata tags.
*
* @author Paul J. Lucas [paul@lightcrafts.com]
*/
public interface IPTCTags extends ImageMetaTags {
////////// IPTC Information Interchange Model (IIM) //////////////////////
/**
* Contains preferably the name of the person who created the content of
* this news object, a photographer for photos, a graphic artist for
* graphics, or a writer for textual news. If it is not appropriate to add
* the name of a person the name of a company or organisation could be
* applied as well.
* <p>
* Type: ASCII.
* @see #IPTC_CREATOR
*/
int IPTC_BY_LINE = IPTC_RECORD_APP << 8 | 0x50;
/**
* Contains the job title of the person who created the content of this
* news object. As this is sort of a qualifier the Creator element has to
* be filled in as mandatory prerequisite for using Creator's Jobtitle.
* <p>
* Type: ASCII.
* @see #IPTC_CREATOR_JOBTITLE
*/
int IPTC_BY_LINE_TITLE = IPTC_RECORD_APP << 8 | 0x55;
/**
* A textual description, including captions of the news object's content,
* particularly used where the object is not text.
* <p>
* Type: ASCII.
*/
int IPTC_CAPTION_ABSTRACT = IPTC_RECORD_APP << 8 | 0x78;
/**
* To denote the category of a story. Composition: one, two or three alpha
* characters, upper and lower case in any combination, e.g., "Fin" for
* financial or "Pol" for political, etc.
* <p>
* Type: ASCII.
*/
int IPTC_CATEGORY = IPTC_RECORD_APP << 8 | 0x0F;
/**
* Name of the city the content is focussing on -- either the place shown
* in visual media or referenced by text or audio media. This element is at
* the third level of a top-down geographical hierarchy.
* <p>
* Type: ASCII.
*/
int IPTC_CITY = IPTC_RECORD_APP << 8 | 0x5A;
/**
* Identifies the person or organisation that can provide further
* background information on the object data.
* <p>
* Type: ASCII.
*/
int IPTC_CONTACT = IPTC_RECORD_APP << 8 | 0x76;
/**
* Indicates the code of a country/geographical location referenced by the
* object. Where ISO has established an appropriate country code under ISO
* 3166, that code will be used. When ISO 3166 does not adequately provide
* for identification of a location or a country, e.g., ships at sea,
* space, IPTC will assign an appropriate three-character code under the
* provisions of ISO 3166 to avoid conflicts.
* <p>
* Type: ASCII.
*/
int IPTC_CONTENT_LOCATION_CODE = IPTC_RECORD_APP << 8 | 0x1A;
/**
* Provides a full, publishable name of a country/geographical location
* referenced by the content of the object, according to guidelines of the
* provider.
* <p>
* Type: ASCII.
*/
int IPTC_CONTENT_LOCATION_NAME = IPTC_RECORD_APP << 8 | 0x1B;
/**
     * Contains any necessary copyright notice for claiming the intellectual
     * property for this news object and should identify the current owner of
     * the copyright for the news object. Other entities like the creator of the
     * news object may be added. Notes on usage rights should be provided in
     * {@link #IPTC_RIGHTS_USAGE_TERMS}.
* <p>
* Type: ASCII.
*/
int IPTC_COPYRIGHT_NOTICE = IPTC_RECORD_APP << 8 | 0x74;
/**
* Indicates the code of the country/primary location where the
* intellectual property of the object data was created, e.g., a photo was
* taken, an event occurred.
* <p>
* Type: ASCII.
*/
int IPTC_COUNTRY_PRIMARY_LOCATION_CODE = IPTC_RECORD_APP << 8 | 0x64;
/**
* Provides full, publishable, name of the country/primary location where
* the intellectual property of the object data was created, according to
* guidelines of the provider.
* <p>
* Type: ASCII.
*/
int IPTC_COUNTRY_PRIMARY_LOCATION_NAME = IPTC_RECORD_APP << 8 | 0x65;
/**
* Identifies the provider of the news object, who is not necessarily the
* owner/creator.
* <p>
* Type: ASCII.
*/
int IPTC_CREDIT = IPTC_RECORD_APP << 8 | 0x6E;
/**
* Designates the date and optionally the time the intellectual content of
* the news object was created rather than the date of the creation of the
* physical representation. If no time is given the value should default to
* 00:00:00.
* <p>
* Type: ASCII.
*/
int IPTC_DATE_CREATED = IPTC_RECORD_APP << 8 | 0x37;
/**
* Uses the format CCYYMMDD (century, year, month, day) as defined in ISO
* 8601 to indicate year, month and day the service sent the material.
* <p>
* Type: ASCII.
*/
int IPTC_DATE_SENT = IPTC_RECORD_ENV << 8 | 0x46;
/**
* This is to accommodate some providers who require routing information
* above the appropriate OSI layers.
* <p>
* Type: ASCII.
*/
int IPTC_DESTINATION = IPTC_RECORD_ENV << 8 | 0x05;
/**
* Represented in the form CCYYMMDD to designate the date the digital
* representation of the objectdata was created. Follows ISO 8601
* standard. Thus a photo taken during the American Civil War would carry
* a Digital Creation Date within the past several years rather than the
* date where the image was captured on film, glass plate or other
* substrate during that epoch (1861-1865).
* <p>
* Type: ASCII.
*/
int IPTC_DIGITAL_CREATION_DATE = IPTC_RECORD_APP << 8 | 0x3E;
/**
* Represented in the form HHMMSS+/-HHMM to designate the time the digital
* representation of the objectdata was created. Follows ISO 8601
* standard.
* <p>
* Type: ASCII.
*/
int IPTC_DIGITAL_CREATION_TIME = IPTC_RECORD_APP << 8 | 0x3F;
/**
* Status of the object data, according to the practice of the provider.
* <p>
* Type: ASCII.
*/
int IPTC_EDIT_STATUS = IPTC_RECORD_APP << 8 | 0x07;
/**
* The characters form a number that will be unique.
* <p>
* Type: ASCII.
*/
int IPTC_ENVELOPE_NUMBER = IPTC_RECORD_ENV << 8 | 0x28;
/**
* Specifies the envelope handling priority and not the editorial urgency
     * (see {@link #IPTC_URGENCY}). '1' indicates the most urgent, '5' the normal
* urgency, and '8' the least urgent copy. The numeral '9' indicates a User
* Defined Priority. The numeral '0' is reserved for future use.
* <p>
* Type: ASCII
*/
int IPTC_ENVELOPE_PRIORITY = IPTC_RECORD_ENV << 8 | 0x3C;
/**
* Designates in the form CCYYMMDD the latest date the provider or owner
* intends the object data to be used. Follows ISO 8601 standard.
* <p>
* Type: ASCII.
*/
int IPTC_EXPIRATION_DATE = IPTC_RECORD_APP << 8 | 0x25;
/**
* Designates in the form HHMMSS+/-HHMM the latest time the provider or
* owner intends the object data to be used. Follows ISO 8601 standard.
* <p>
* Type: ASCII.
*/
int IPTC_EXPIRATION_TIME = IPTC_RECORD_APP << 8 | 0x26;
/**
* Identifies object data that recurs often and predictably. Enables users
* to immediately find or recall such an object.
* <p>
* Type: ASCII.
*/
int IPTC_FIXTURE_IDENTIFIER = IPTC_RECORD_APP << 8 | 0x16;
/**
* A publishable entry providing a synopsis of the contents of the news
* object. Headline is not the same as Title.
* <p>
* Type: ASCII.
*/
int IPTC_HEADLINE = IPTC_RECORD_APP << 8 | 0x69;
/**
* Keywords to express the subject of the content. Keywords may be free
* text and don't have to be taken from a controlled vocabulary. Values
* from the controlled vocabulary IPTC Subject Codes must go to the
* {@link #IPTC_SUBJECT_CODE} element.
* <p>
* Type: ASCII.
*/
int IPTC_KEYWORDS = IPTC_RECORD_APP << 8 | 0x19;
/**
* Describes the major national language of the object, according to the
* 2-letter codes of ISO 639:1988. Does not define or imply any coded
* character set, but is used for internal routing, e.g., to various
* editorial desks.
* <p>
* Type: ASCII.
*/
int IPTC_LANGUAGE_IDENTIFIER = IPTC_RECORD_APP << 8 | 0x83;
/**
* Defines the nature of the object independent of the subject.
* <p>
* Type: ASCII.
*/
int IPTC_OBJECT_ATTRIBUTE_REFERENCE = IPTC_RECORD_APP << 8 | 0x04;
/**
* News cycle:
* <blockquote>
* <table border="0" cellpadding="0">
* <tr><td><code>a</code> = </td><td>morning</td></tr>
* <tr><td><code>p</code> = </td><td>evening</td></tr>
* <tr><td><code>b</code> = </td><td>both</td></tr>
* </table>
* </blockquote>
* Type: ASCII.
*/
int IPTC_OBJECT_CYCLE = IPTC_RECORD_APP << 8 | 0x4B;
/**
     * A shorthand reference for the news object. While a technical identifier
* goes to an identifier element, Title holds a short verbal and human
* readable name. Title is not the same as {@link #IPTC_HEADLINE}.
* <p>
* Type: ASCII.
*/
int IPTC_OBJECT_NAME = IPTC_RECORD_APP << 8 | 0x05;
/**
* Number or identifier for the purpose of improved workflow handling. This
* ID should be added by the creator or provider for transmission and
* routing purposes only and should have no significance for archiving.
* <p>
* Type: ASCII.
*/
int IPTC_ORIGINAL_TRANSMISSION_REFERENCE = IPTC_RECORD_APP << 8 | 0x67;
/**
* Identifies the type of program used to originate the object data.
* <p>
* Type: ASCII.
*/
int IPTC_ORIGINATING_PROGRAM = IPTC_RECORD_APP << 8 | 0x41;
/**
* Allows a provider to identify subsets of its overall service. Used to
* provide receiving organisation data on which to select, route, or
* otherwise handle data.
* <p>
* Type: ASCII
*/
int IPTC_PRODUCT_ID = IPTC_RECORD_ENV << 8 | 0x32;
/**
* Used to identify the version of the program in
* {@link #IPTC_ORIGINATING_PROGRAM}.
* <p>
* Type: ASCII.
*/
int IPTC_PROGRAM_VERSION = IPTC_RECORD_APP << 8 | 0x42;
/**
* Name of the subregion of a country -- either called province or state or
* anything else -- the content is focussing on -- either the subregion
* shown in visual media or referenced by text or audio media. This element
* is at the second level of a top-down geographical hierarchy.
* <p>
* Type: ASCII.
*/
int IPTC_PROVINCE_STATE = IPTC_RECORD_APP << 8 | 0x5F;
/**
* The version of IPTC metadata.
* <p>
* Type: Unsigned short.
*/
int IPTC_RECORD_VERSION = IPTC_RECORD_APP << 8;
/**
     * The release date of the news object.
* <p>
* Type: ASCII.
*/
int IPTC_RELEASE_DATE = IPTC_RECORD_APP << 8 | 0x1E;
/**
* The release time of the news object.
* <p>
* Type: ASCII.
*/
int IPTC_RELEASE_TIME = IPTC_RECORD_APP << 8 | 0x23;
/**
* Identifies the provider and product.
* <p>
* Type: ASCII.
*/
int IPTC_SERVICE_IDENTIFIER = IPTC_RECORD_ENV << 8 | 0x1E;
/**
* Identifies the original owner of the copyright for the intellectual
* content of the news object. This could be an agency, a member of an
* agency or an individual. Source could be different from
* {@link #IPTC_CREATOR} and from the entities in the
* {@link #IPTC_COPYRIGHT_NOTICE}.
* <p>
* Type: ASCII.
*/
int IPTC_SOURCE = IPTC_RECORD_APP << 8 | 0x73;
/**
* Any of a number of instructions from the provider or creator to the
* receiver of the news object which might include any of the following:
* embargoes (NewsMagazines OUT) and other restrictions not covered by the
* {@link #IPTC_RIGHTS_USAGE_TERMS} field; information regarding the
* original means of capture (scanning notes, colourspace info) or other
* specific text information that the user may need for accurate
* reproduction; additional permissions or credits required when
* publishing.
* <p>
* Type: ASCII.
*/
int IPTC_SPECIAL_INSTRUCTIONS = IPTC_RECORD_APP << 8 | 0x28;
/**
* Identifies the location within a city from which the object data
* originates according to guidelines established by the provider.
* <p>
* Type: ASCII.
*/
int IPTC_SUBLOCATION = IPTC_RECORD_APP << 8 | 0x5C;
/**
* Categories supplemental to {@link #IPTC_CATEGORY}.
* <p>
* Type: ASCII.
*/
int IPTC_SUPPLEMENTAL_CATEGORIES = IPTC_RECORD_APP << 8 | 0x14;
/**
* The time the news object was created.
* <p>
* Type: ASCII.
*/
int IPTC_TIME_CREATED = IPTC_RECORD_APP << 8 | 0x3C;
/**
* Uses the format HHMMSS+/-HHMM where HHMMSS refers to local hour, minute
* and seconds and HHMM refers to hours and minutes ahead (+) or behind (-)
* Universal Coordinated Time as described in ISO 8601. This is the time
* the service sent the material.
* <p>
* Type: ASCII.
*/
int IPTC_TIME_SENT = IPTC_RECORD_ENV << 8 | 0x50;
/**
* UNO Unique Name of Object, providing eternal, globally unique
* identification for objects as specified in the IIM, independent of
* provider and for any media form. The provider must ensure the UNO is
* unique. Objects with the same UNO are identical.
* <p>
* Type: ASCII.
*/
int IPTC_UNO = IPTC_RECORD_ENV << 8 | 0x64;
/**
* To indicate the editorial urgency of a story. Composition: one numeral
* from a scale ranging from 1 for the most urgent, 5 for normal, and 8 for
* the least urgent.
* <p>
* Type: Unsigned byte.
*/
int IPTC_URGENCY = IPTC_RECORD_APP << 8 | 0x0A;
/**
* Identifier or the name of the person involved in writing, editing or
* correcting the description of the news object.
* <p>
* Type: ASCII.
*/
int IPTC_WRITER_EDITOR = IPTC_RECORD_APP << 8 | 0x7A;
////////// New for IPTC XMP Core //////////////////////////////////////////
/**
* The creator's contact information. This is actually comprised of all
* the <code>IPTC_CI_</code> tags and doesn't have a value itself.
*/
int IPTC_CREATOR_CONTACT_INFO = 0xF100;
/**
* The contact information address part. Comprises an optional company name
* and all required information to locate the building or postbox to which
* mail should be sent. To that end, the address is a multiline field.
* <p>
* Type: ASCII.
*/
int IPTC_CI_ADDRESS = 0xF101;
/**
* The contact information city part.
* <p>
* Type: ASCII.
*/
int IPTC_CI_CITY = 0xF102;
/**
* The contact information country part.
* <p>
* Type: ASCII.
*/
int IPTC_CI_COUNTRY = 0xF103;
/**
* The contact information email address part. Multiple email addresses can
* be given, separated by a comma.
* <p>
* Type: ASCII.
*/
int IPTC_CI_EMAILS = 0xF104;
/**
* The contact information phone number part. Multiple numbers can be
* given, separated by a comma.
* <p>
* Type: ASCII.
*/
int IPTC_CI_PHONES = 0xF105;
/**
* The contact information part denoting the local postal code.
* <p>
* Type: ASCII.
*/
int IPTC_CI_POSTAL_CODE = 0xF106;
/**
* The contact information part denoting regional information like state or
* province.
* <p>
* Type: ASCII.
*/
int IPTC_CI_STATE_PROVINCE = 0xF107;
/**
* The contact information web address part. Multiple addresses can be
* given, separated by a comma.
* <p>
* Type: ASCII.
*/
int IPTC_CI_WEB_URLS = 0xF108;
/**
* Free text instructions on how this news object can be legally used.
* <p>
* Type: ASCII.
*/
int IPTC_RIGHTS_USAGE_TERMS = 0xF204;
/**
* Describes the scene of a photo content. Specifies one ore more terms
* from the IPTC "Scene-NewsCodes". Each Scene is represented as a string
* of 6 digits in an unordered list.
* <p>
* Type: ASCII.
*/
int IPTC_SCENE = 0xF205;
/**
* Specifies one or more Subjects from the IPTC "Subject-NewsCodes"
* taxonomy to categorize the content. Each Subject is represented as a
* string of 8 digits in an unordered list.
* <p>
* Type: ASCII.
*/
int IPTC_SUBJECT_CODE = 0xF206;
////////// New IPTC XMP Core names for original IPTC headers //////////////
int IPTC_COUNTRY = IPTC_COUNTRY_PRIMARY_LOCATION_NAME;
int IPTC_COUNTRY_CODE = IPTC_COUNTRY_PRIMARY_LOCATION_CODE;
int IPTC_CREATOR = IPTC_BY_LINE;
int IPTC_CREATOR_JOBTITLE = IPTC_BY_LINE_TITLE;
int IPTC_DESCRIPTION = IPTC_CAPTION_ABSTRACT;
int IPTC_DESCRIPTION_WRITER = IPTC_WRITER_EDITOR;
int IPTC_INSTRUCTIONS = IPTC_SPECIAL_INSTRUCTIONS;
int IPTC_INTELLECTUAL_GENRE = IPTC_OBJECT_ATTRIBUTE_REFERENCE;
int IPTC_JOB_ID = IPTC_ORIGINAL_TRANSMISSION_REFERENCE;
int IPTC_LOCATION = IPTC_SUBLOCATION;
int IPTC_PROVIDER = IPTC_CREDIT;
int IPTC_TITLE = IPTC_OBJECT_NAME;
}
/* vim:set et sw=4 ts=4: */
| |
package eu.cloudopting.domain;
import eu.cloudopting.domain.util.DatabaseEncryptionConfiguration;
import eu.cloudopting.events.api.entity.BaseEntity;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.TypeDef;
import org.jasypt.hibernate4.type.EncryptedStringType;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import com.fasterxml.jackson.annotation.JsonIgnore;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
import java.util.List;
import java.util.Set;
/**
 * JPA entity mapped to the {@code public.cloud_accounts} table: a cloud
 * provider account (API credentials plus endpoint) owned by an organization.
 * <p>
 * Follows Spring Roo active-record conventions: static finders obtain an
 * {@link EntityManager} via a throw-away {@code @Configurable} instance, which
 * requires the Spring Aspects JAR (AspectJ weaving) for injection to work.
 */
@Entity
@Table(schema = "public",name = "cloud_accounts")
@Configurable
@TypeDef(
    name="encryptedString",
    typeClass=EncryptedStringType.class,
    parameters= {
        @Parameter(name="encryptorRegisteredName", value=DatabaseEncryptionConfiguration.STRING_ENCRYPTOR_NAME)
    }
)
public class CloudAccounts implements BaseEntity {

    // Injected via @Configurable + AspectJ weaving; transient so it is never
    // serialized with the entity.
    @PersistenceContext
    transient EntityManager entityManager;

    // Whitelist of field names accepted for ORDER BY in the sorted finders.
    // NOTE(review): currently contains only the empty string, so sorting is
    // effectively disabled — presumably a generator placeholder; confirm the
    // intended sortable fields.
    public static final List<String> fieldNames4OrderClauseFilter = java.util.Arrays.asList("");

    /**
     * @return the injected {@link EntityManager}, obtained from a fresh instance.
     * @throws IllegalStateException if injection did not happen (no weaving).
     */
    public static final EntityManager entityManager() {
        EntityManager em = new CloudAccounts().entityManager;
        if (em == null) throw new IllegalStateException("Entity manager has not been injected (is the Spring Aspects JAR configured as an AJC/AJDT aspects library?)");
        return em;
    }

    /** @return total number of {@code CloudAccounts} rows. */
    public static long countCloudAccountses() {
        return entityManager().createQuery("SELECT COUNT(o) FROM CloudAccounts o", Long.class).getSingleResult();
    }

    /** @return all {@code CloudAccounts} entities. */
    public static List<CloudAccounts> findAllCloudAccountses() {
        return entityManager().createQuery("SELECT o FROM CloudAccounts o", CloudAccounts.class).getResultList();
    }

    /**
     * @param sortFieldName field to order by; only applied when whitelisted in
     *        {@link #fieldNames4OrderClauseFilter} (guards the concatenated ORDER BY).
     * @param sortOrder "ASC" or "DESC" (case-insensitive); otherwise omitted.
     * @return all entities, optionally ordered.
     */
    public static List<CloudAccounts> findAllCloudAccountses(String sortFieldName, String sortOrder) {
        String jpaQuery = "SELECT o FROM CloudAccounts o";
        if (fieldNames4OrderClauseFilter.contains(sortFieldName)) {
            jpaQuery = jpaQuery + " ORDER BY " + sortFieldName;
            if ("ASC".equalsIgnoreCase(sortOrder) || "DESC".equalsIgnoreCase(sortOrder)) {
                jpaQuery = jpaQuery + " " + sortOrder;
            }
        }
        return entityManager().createQuery(jpaQuery, CloudAccounts.class).getResultList();
    }

    /**
     * @param id primary key; may be {@code null}.
     * @return the entity, or {@code null} when {@code id} is {@code null} or unknown.
     */
    public static CloudAccounts findCloudAccounts(Long id) {
        if (id == null) return null;
        return entityManager().find(CloudAccounts.class, id);
    }

    /** @return a page of entities starting at {@code firstResult}, at most {@code maxResults}. */
    public static List<CloudAccounts> findCloudAccountsEntries(int firstResult, int maxResults) {
        return entityManager().createQuery("SELECT o FROM CloudAccounts o", CloudAccounts.class).setFirstResult(firstResult).setMaxResults(maxResults).getResultList();
    }

    /** Paged finder with the same whitelist-guarded sorting as the sorted finder. */
    public static List<CloudAccounts> findCloudAccountsEntries(int firstResult, int maxResults, String sortFieldName, String sortOrder) {
        String jpaQuery = "SELECT o FROM CloudAccounts o";
        if (fieldNames4OrderClauseFilter.contains(sortFieldName)) {
            jpaQuery = jpaQuery + " ORDER BY " + sortFieldName;
            if ("ASC".equalsIgnoreCase(sortOrder) || "DESC".equalsIgnoreCase(sortOrder)) {
                jpaQuery = jpaQuery + " " + sortOrder;
            }
        }
        return entityManager().createQuery(jpaQuery, CloudAccounts.class).setFirstResult(firstResult).setMaxResults(maxResults).getResultList();
    }

    /** Persists this (new) entity. */
    @Transactional
    public void persist() {
        if (this.entityManager == null) this.entityManager = entityManager();
        this.entityManager.persist(this);
    }

    /** Removes this entity, re-attaching it first if it is detached. */
    @Transactional
    public void remove() {
        if (this.entityManager == null) this.entityManager = entityManager();
        if (this.entityManager.contains(this)) {
            this.entityManager.remove(this);
        } else {
            CloudAccounts attached = CloudAccounts.findCloudAccounts(this.id);
            this.entityManager.remove(attached);
        }
    }

    /** Flushes pending changes to the database. */
    @Transactional
    public void flush() {
        if (this.entityManager == null) this.entityManager = entityManager();
        this.entityManager.flush();
    }

    /** Clears the persistence context, detaching all managed entities. */
    @Transactional
    public void clear() {
        if (this.entityManager == null) this.entityManager = entityManager();
        this.entityManager.clear();
    }

    /** @return the managed copy of this entity after merge and flush. */
    @Transactional
    public CloudAccounts merge() {
        if (this.entityManager == null) this.entityManager = entityManager();
        CloudAccounts merged = this.entityManager.merge(this);
        this.entityManager.flush();
        return merged;
    }

    /** Auto-generated primary key. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Column(name = "id")
    private Long id;

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /** Owning organization (required). */
    @JsonIgnore
    @ManyToOne
    @JoinColumn(name = "organization_id", referencedColumnName = "id", nullable = false)
    private Organizations organizationId;

    /** Customizations that reference this cloud account. */
    @OneToMany(mappedBy = "cloudAccount")
    private Set<Customizations> customizationss;

    /** Cloud provider of this account (required). */
    @ManyToOne
    @JoinColumn(name = "provider_id", referencedColumnName = "id", nullable = false)
    private Providers providerId;

    /** Display name of the account. */
    @Column(name = "name", length = 50)
    @NotNull
    private String name;

    /** Provider API key. Encrypted-string mapping is currently commented out. */
    @Column(name = "api_key", length = 50)
    @NotNull
    //@Type(type="encryptedString")
    private String apiKey;

    /** Provider secret key. Encrypted-string mapping is currently commented out. */
    @Column(name = "secret_key", length = 50)
    @NotNull
    //@Type(type="encryptedString")
    private String secretKey;

    /** Provider API endpoint URL. */
    @Column(name = "endpoint", length = 100)
    @NotNull
    private String endpoint;

    /** Whether this account is a trial account. */
    @Column(name = "is_trial")
    private Boolean isTrial;

    public Organizations getOrganizationId() {
        return organizationId;
    }

    public void setOrganizationId(Organizations organizationId) {
        this.organizationId = organizationId;
    }

    public Providers getProviderId() {
        return providerId;
    }

    public void setProviderId(Providers providerId) {
        this.providerId = providerId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getApiKey() {
        return apiKey;
    }

    public void setApiKey(String apiKey) {
        this.apiKey = apiKey;
    }

    public String getSecretKey() {
        return secretKey;
    }

    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public String getEndpoint() {
        return endpoint;
    }

    public void setEndpoint(String endpoint) {
        this.endpoint = endpoint;
    }

    public Set<Customizations> getCustomizationss() {
        return customizationss;
    }

    public void setCustomizationss(Set<Customizations> customizationss) {
        this.customizationss = customizationss;
    }

    public Boolean getIsTrial() {
        return isTrial;
    }

    public void setIsTrial(Boolean isTrial) {
        this.isTrial = isTrial;
    }
}
| |
/**
* Copyright (C) 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dashbuilder.dataset.def;
import org.dashbuilder.dataprovider.DataSetProviderType;
import org.dashbuilder.dataset.sort.ColumnSort;
import org.dashbuilder.dataset.validation.groups.ElasticSearchDataSetDefValidation;
import org.jboss.errai.common.client.api.annotations.Portable;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
/**
* <p>DataSet definition class for ElasticSearch provider.</p>
*
* <p>This dataset provides these configuration parameters:</p>
* <ul>
* <li>
* <code>serverURL</code> - The URL for the ElasticSearch server instance (MANDATORY)
* </li>
* <li>
* <code>clusterName</code> - The name of the cluster in the ElasticSearch server.
* </li>
* <li>
* <code>index</code> - The name of the index. It can be a concrete index name, a collection of index names, comma separated, or the keyword <code>_all</code> for working with all available indexes in the ElasticSearch server (OPTIONAL - Defaults to <code>_all</code>)
* </li>
* <li>
* <code>type</code> - The type name. Only applicable if <code>index</code> parameter is set. It can be a concrete type name, a collection of type names, comma separated, or the keyword <code>_all</code> for working with all available type in the ElasticSearch server (OPTIONAL - Defaults to <code>_all</code>)
* </li>
* <li>
* <code>query</code> - You can perform your custom ElasticSearch DSL query for this data provider. If this parameter exist, the parameters <code>index</code>, <code>type</code> and <code>field</code> are skipped. (OPTIONAL)
* </li>
* <li>
* <code>relevance</code> - The relevance value for search results (OPTIONAL)
* </li>
* <li>
* <code>columns</code> - If not specified, the column definitions for the ElasticSearch dataset are automatically given by querying the index mappings. Otherwise, you can bind a column to another datatype in dashbuilder application using this parameters (OPTIONAL)
* </li>
* </ul>
*
* @since 0.3.0
*/
@Portable
public class ElasticSearchDataSetDef extends DataSetDef {

    // Constants.

    /**
     * Reserved ElasticSearch keywords understood by this definition.
     */
    public static enum ElasticSearchKeywords {
        ALL;

        private static final String KEYWORD_ALL = "_all";

        @Override
        public String toString() {
            // ALL renders as the ElasticSearch "_all" keyword.
            if (this.equals(ALL)) return KEYWORD_ALL;
            return super.toString();
        }
    }

    // Data Set user parameters.

    @NotNull(message = "{dataSetApi_elDataSetDef_serverURL_notNull}", groups = {ElasticSearchDataSetDefValidation.class})
    @Size(min = 1, message = "{dataSetApi_elDataSetDef_serverURL_notNull}", groups = {ElasticSearchDataSetDefValidation.class})
    protected String serverURL;

    protected String clusterName;

    /**
     * Index/es to query. Can handle multiple values, comma separated.
     */
    @NotNull(message = "{dataSetApi_elDataSetDef_index_notNull}", groups = {ElasticSearchDataSetDefValidation.class})
    @Size(min = 1, message = "{dataSetApi_elDataSetDef_index_notNull}", groups = {ElasticSearchDataSetDefValidation.class})
    protected String index;

    /**
     * Type/es to query. Can handle multiple values, comma separated. Not mandatory.
     */
    protected String type;

    protected String query;

    protected String relevance;

    protected ColumnSort columnSort;

    public ElasticSearchDataSetDef() {
        super.setProvider(DataSetProviderType.ELASTICSEARCH);
    }

    public String getServerURL() {
        return serverURL;
    }

    public void setServerURL(String serverURL) {
        this.serverURL = serverURL;
    }

    public String getClusterName() {
        return clusterName;
    }

    public void setClusterName(String clusterName) {
        this.clusterName = clusterName;
    }

    public String getIndex() {
        return index;
    }

    public void setIndex(String index) {
        this.index = index;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getQuery() {
        return query;
    }

    public void setQuery(String query) {
        this.query = query;
    }

    public String getRelevance() {
        return relevance;
    }

    public void setRelevance(String relevance) {
        this.relevance = relevance;
    }

    public boolean isCacheEnabled() {
        return cacheEnabled;
    }

    public void setCacheEnabled(boolean cacheEnabled) {
        this.cacheEnabled = cacheEnabled;
    }

    public Integer getCacheMaxRows() {
        return cacheMaxRows;
    }

    public void setCacheMaxRows(Integer cacheMaxRows) {
        this.cacheMaxRows = cacheMaxRows;
    }

    public ColumnSort getColumnSort() {
        return columnSort;
    }

    public void setColumnSort(ColumnSort columnSort) {
        this.columnSort = columnSort;
    }

    /**
     * Null-safe equality helper for two possibly-null values.
     */
    private static boolean safeEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /**
     * Two ElasticSearch definitions are equal when the base definition
     * matches and serverURL, clusterName and index match.
     *
     * <p>The previous implementation treated a null field on this side as
     * "always equal", which broke the symmetry requirement of the
     * equals contract; null fields are now compared null-safely.</p>
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ElasticSearchDataSetDef)) {
            return false;
        }
        ElasticSearchDataSetDef other = (ElasticSearchDataSetDef) obj;
        return super.equals(other)
                && safeEquals(serverURL, other.serverURL)
                && safeEquals(clusterName, other.clusterName)
                && safeEquals(index, other.index);
    }

    /**
     * Consistent with {@link #equals(Object)}: combines the superclass hash
     * with the fields compared there. Added because equals was overridden
     * without hashCode, which breaks hash-based collections.
     * NOTE(review): assumes DataSetDef overrides hashCode alongside its
     * equals — confirm against the superclass.
     */
    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (serverURL != null ? serverURL.hashCode() : 0);
        result = 31 * result + (clusterName != null ? clusterName.hashCode() : 0);
        result = 31 * result + (index != null ? index.hashCode() : 0);
        return result;
    }

    @Override
    public DataSetDef clone() {
        ElasticSearchDataSetDef def = new ElasticSearchDataSetDef();
        clone(def);
        def.setServerURL(getServerURL());
        def.setClusterName(getClusterName());
        def.setIndex(getIndex());
        def.setType(getType());
        // Previously lost on clone: query, relevance and column sort.
        def.setQuery(getQuery());
        def.setRelevance(getRelevance());
        def.setColumnSort(getColumnSort());
        return def;
    }

    @Override
    public String toString() {
        StringBuilder out = new StringBuilder();
        out.append("UUID=").append(UUID).append("\n");
        out.append("Provider=").append(provider).append("\n");
        out.append("Public=").append(isPublic).append("\n");
        out.append("Push enabled=").append(pushEnabled).append("\n");
        out.append("Push max size=").append(pushMaxSize).append(" Kb\n");
        out.append("Server URL=").append(serverURL).append("\n");
        out.append("Cluster name=").append(clusterName).append("\n");
        out.append("Index=").append(index).append("\n");
        out.append("Type=").append(type).append("\n");
        out.append("Query=").append(query).append("\n");
        out.append("Get all columns=").append(allColumnsEnabled).append("\n");
        out.append("Cache enabled=").append(cacheEnabled).append("\n");
        // Cache max rows is a row count, not kilobytes.
        out.append("Cache max rows=").append(cacheMaxRows).append("\n");
        return out.toString();
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2015 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.rq;
import com.google.common.base.Joiner;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.RestResponse;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.util.Arrays;
import java.util.HashSet;
import org.apache.commons.lang.StringUtils;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.takes.Request;
import org.takes.Response;
import org.takes.Take;
import org.takes.http.FtRemote;
import org.takes.rs.RsText;
/**
* Test case for {@link RqMultipart.Base}.
* @author Yegor Bugayenko (yegor@teamed.io)
* @version $Id$
* @since 0.9
* @checkstyle MultipleStringLiteralsCheck (500 lines)
* @checkstyle ClassDataAbstractionCouplingCheck (500 lines)
* @link <a href="http://www.w3.org/TR/html401/interact/forms.html">Forms in HTML</a>
*/
@SuppressWarnings("PMD.TooManyMethods")
public final class RqMultipartTest {

    /**
     * Carriage return + line feed sequence separating multipart lines.
     */
    private static final String CRLF = "\r\n";

    /**
     * Content disposition header name.
     */
    private static final String DISPOSITION = "Content-Disposition";

    /**
     * RqMultipart.Base can satisfy equals contract.
     * @throws IOException if some problem inside
     */
    @Test
    public void satisfiesEqualsContract() throws IOException {
        final Request req = new RqMultipart.Fake(
            new RqFake(),
            new RqWithHeader(
                new RqFake("", "", "449 N Wolfe Rd, Sunnyvale, CA 94085"),
                RqMultipartTest.DISPOSITION, "form-data; name=\"t-1\""
            ),
            new RqWithHeader(
                new RqFake("", "", ""),
                RqMultipartTest.DISPOSITION,
                "form-data; name=\"data\"; filename=\"a.bin\""
            )
        );
        MatcherAssert.assertThat(
            new RqMultipart.Base(req),
            Matchers.equalTo(new RqMultipart.Base(req))
        );
    }

    /**
     * RqMultipart.Base can throw exception on no closing boundary found.
     * @throws IOException if some problem inside
     */
    @Test(expected = IOException.class)
    public void throwsExceptionOnNoClosingBoundaryFound() throws IOException {
        new RqMultipart.Base(
            new RqFake(
                Arrays.asList(
                    "POST /h?a=4 HTTP/1.1",
                    "Host: rtw.example.com",
                    "Content-Type: multipart/form-data; boundary=AaB01x",
                    "Content-Length: 100007"
                ),
                Joiner.on(RqMultipartTest.CRLF).join(
                    "--AaB01x",
                    "Content-Disposition: form-data; fake=\"t2\"",
                    "",
                    "447 N Wolfe Rd, Sunnyvale, CA 94085",
                    "Content-Transfer-Encoding: uwf-8"
                )
            )
        );
    }

    /**
     * RqMultipart.Fake can throw exception on no name
     * at Content-Disposition header.
     * @throws IOException if some problem inside
     */
    @Test(expected = IOException.class)
    public void throwsExceptionOnNoNameAtContentDispositionHeader()
        throws IOException {
        new RqMultipart.Fake(
            new RqWithHeader(
                new RqFake("", "", "340 N Wolfe Rd, Sunnyvale, CA 94085"),
                RqMultipartTest.DISPOSITION, "form-data; fake=\"t-3\""
            )
        );
    }

    /**
     * RqMultipart.Base can throw exception on no boundary
     * at Content-Type header.
     * @throws IOException if some problem inside
     */
    @Test(expected = IOException.class)
    public void throwsExceptionOnNoBoundaryAtContentTypeHeader()
        throws IOException {
        new RqMultipart.Base(
            new RqFake(
                Arrays.asList(
                    "POST /h?s=3 HTTP/1.1",
                    "Host: wwo.example.com",
                    "Content-Type: multipart/form-data; boundaryAaB03x",
                    "Content-Length: 100005"
                ),
                ""
            )
        );
    }

    /**
     * RqMultipart.Base can throw exception on invalid Content-Type header.
     * @throws IOException if some problem inside
     */
    @Test(expected = IOException.class)
    public void throwsExceptionOnInvalidContentTypeHeader() throws IOException {
        new RqMultipart.Base(
            new RqFake(
                Arrays.asList(
                    "POST /h?r=3 HTTP/1.1",
                    "Host: www.example.com",
                    "Content-Type: multipart; boundary=AaB03x",
                    "Content-Length: 100004"
                ),
                ""
            )
        );
    }

    /**
     * RqMultipart.Base can parse http body.
     * @throws IOException If some problem inside
     */
    @Test
    public void parsesHttpBody() throws IOException {
        final RqMultipart multi = new RqMultipart.Fake(
            new RqFake(),
            new RqWithHeader(
                new RqFake("", "", "40 N Wolfe Rd, Sunnyvale, CA 94085"),
                RqMultipartTest.DISPOSITION, "form-data; name=\"t4\""
            ),
            new RqWithHeader(
                new RqFake("", "", ""),
                RqMultipartTest.DISPOSITION,
                "form-data; name=\"data\"; filename=\"a.bin\""
            )
        );
        MatcherAssert.assertThat(
            new RqHeaders.Base(
                multi.part("t4").iterator().next()
            ).header(RqMultipartTest.DISPOSITION),
            Matchers.hasItem("form-data; name=\"t4\"")
        );
        MatcherAssert.assertThat(
            new RqPrint(
                new RqHeaders.Base(
                    multi.part("t4").iterator().next()
                )
            ).printBody(),
            Matchers.allOf(
                Matchers.startsWith("40 N"),
                Matchers.endsWith("CA 94085")
            )
        );
    }

    /**
     * RqMultipart.Fake can return empty iterator on invalid part request.
     * @throws IOException If some problem inside
     */
    @Test
    public void returnsEmptyIteratorOnInvalidPartRequest() throws IOException {
        final RqMultipart multi = new RqMultipart.Fake(
            new RqFake(),
            new RqWithHeader(
                new RqFake("", "", "443 N Wolfe Rd, Sunnyvale, CA 94085"),
                RqMultipartTest.DISPOSITION, "form-data; name=\"t5\""
            ),
            new RqWithHeader(
                new RqFake("", "", ""),
                RqMultipartTest.DISPOSITION,
                "form-data; name=\"data\"; filename=\"a.zip\""
            )
        );
        MatcherAssert.assertThat(
            multi.part("fake").iterator().hasNext(),
            Matchers.is(false)
        );
    }

    /**
     * RqMultipart.Fake can return correct name set.
     * @throws IOException If some problem inside
     */
    @Test
    public void returnsCorrectNamesSet() throws IOException {
        final RqMultipart multi = new RqMultipart.Fake(
            new RqFake(),
            new RqWithHeader(
                new RqFake("", "", "441 N Wolfe Rd, Sunnyvale, CA 94085"),
                RqMultipartTest.DISPOSITION, "form-data; name=\"address\""
            ),
            new RqWithHeader(
                new RqFake("", "", ""),
                RqMultipartTest.DISPOSITION,
                "form-data; name=\"data\"; filename=\"a.bin\""
            )
        );
        MatcherAssert.assertThat(
            multi.names(),
            Matchers.<Iterable<String>>equalTo(
                new HashSet<String>(Arrays.asList("address", "data"))
            )
        );
    }

    /**
     * RqMultipart.Base can return correct part length.
     * @throws IOException If some problem inside
     */
    @Test
    public void returnsCorrectPartLength() throws IOException {
        final int length = 5000;
        final Request req = new RqFake(
            Arrays.asList(
                "POST /post?u=3 HTTP/1.1",
                "Host: www.example.com",
                "Content-Type: multipart/form-data; boundary=zzz"
            ),
            Joiner.on(RqMultipartTest.CRLF).join(
                "--zzz",
                "Content-Disposition: form-data; name=\"x-1\"",
                "",
                StringUtils.repeat("X", length),
                "--zzz--"
            )
        );
        MatcherAssert.assertThat(
            new RqMultipart.Smart(
                new RqMultipart.Base(req)
            ).single("x-1").body().available(),
            Matchers.equalTo(length)
        );
    }

    /**
     * RqMultipart.Base can work in integration mode.
     * @throws IOException if some problem inside
     */
    @Test
    public void consumesHttpRequest() throws IOException {
        final Take take = new Take() {
            @Override
            public Response act(final Request req) throws IOException {
                return new RsText(
                    new RqPrint(
                        new RqMultipart.Smart(
                            new RqMultipart.Base(req)
                        ).single("f-1")
                    ).printBody()
                );
            }
        };
        new FtRemote(take).exec(
            // @checkstyle AnonInnerLengthCheck (50 lines)
            new FtRemote.Script() {
                @Override
                public void exec(final URI home) throws IOException {
                    new JdkRequest(home)
                        .method("POST")
                        .header(
                            "Content-Type",
                            "multipart/form-data; boundary=AaB0zz"
                        )
                        .body()
                        .set(
                            Joiner.on(RqMultipartTest.CRLF).join(
                                "--AaB0zz",
                                "Content-Disposition: form-data; name=\"f-1\"",
                                "",
                                "my picture",
                                "--AaB0zz--"
                            )
                        )
                        .back()
                        .fetch()
                        .as(RestResponse.class)
                        .assertStatus(HttpURLConnection.HTTP_OK)
                        .assertBody(Matchers.containsString("pic"));
                }
            }
        );
    }

    /**
     * RqMultipart.Base can handle a big request in an acceptable time.
     *
     * <p>The temp file is removed and the writer closed in finally blocks
     * so a failed assertion no longer leaks resources on disk.</p>
     * @throws IOException If some problem inside
     */
    @Test
    public void handlesRequestInTime() throws IOException {
        final int length = 100000000;
        final File temp = File.createTempFile("handlesRequestInTime", ".tmp");
        try {
            final BufferedWriter bwr = new BufferedWriter(new FileWriter(temp));
            try {
                bwr.write(
                    Joiner.on(RqMultipartTest.CRLF).join(
                        "--zzz",
                        "Content-Disposition: form-data; name=\"test\"",
                        "",
                        ""
                    )
                );
                for (int ind = 0; ind < length; ++ind) {
                    bwr.write("X");
                }
                bwr.write(RqMultipartTest.CRLF);
                bwr.write("--zzz--");
                bwr.write(RqMultipartTest.CRLF);
            } finally {
                bwr.close();
            }
            final long start = System.currentTimeMillis();
            final Request req = new RqFake(
                Arrays.asList(
                    "POST /post?u=3 HTTP/1.1",
                    "Host: example.com",
                    "Content-Type: multipart/form-data; boundary=zzz"
                ),
                new FileInputStream(temp)
            );
            MatcherAssert.assertThat(
                new RqMultipart.Smart(
                    new RqMultipart.Base(req)
                ).single("test").body().available(),
                Matchers.equalTo(length)
            );
            MatcherAssert.assertThat(
                System.currentTimeMillis() - start,
                //@checkstyle MagicNumberCheck (1 line)
                Matchers.lessThan(3000L)
            );
        } finally {
            temp.delete();
        }
    }

    /**
     * RqMultipart.Base doesn't distort the content.
     *
     * <p>The temp file is removed and the writer closed in finally blocks
     * so a failed assertion no longer leaks resources on disk.</p>
     * @throws IOException If some problem inside
     */
    @Test
    public void notDistortContent() throws IOException {
        final int length = 1000000;
        final File temp = File.createTempFile("notDistortContent", ".tmp");
        try {
            final BufferedWriter bwr = new BufferedWriter(new FileWriter(temp));
            final int byt = 0x7f;
            try {
                bwr.write(
                    Joiner.on(RqMultipartTest.CRLF).join(
                        "--zzz1",
                        "Content-Disposition: form-data; name=\"test1\"",
                        "",
                        ""
                    )
                );
                for (int idx = 0; idx < length; ++idx) {
                    bwr.write(idx % byt);
                }
                bwr.write(RqMultipartTest.CRLF);
                bwr.write("--zzz1--");
                bwr.write(RqMultipartTest.CRLF);
            } finally {
                bwr.close();
            }
            final Request req = new RqFake(
                Arrays.asList(
                    "POST /post?u=3 HTTP/1.1",
                    "Host: exampl.com",
                    "Content-Type: multipart/form-data; boundary=zzz1"
                ),
                new FileInputStream(temp)
            );
            final InputStream stream = new RqMultipart.Smart(
                new RqMultipart.Base(req)
            ).single("test1").body();
            MatcherAssert.assertThat(
                stream.available(),
                Matchers.equalTo(length)
            );
            for (int idx = 0; idx < length; ++idx) {
                MatcherAssert.assertThat(
                    String.format("byte %d not matched", idx),
                    stream.read(),
                    Matchers.equalTo(idx % byt)
                );
            }
        } finally {
            temp.delete();
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.plugins;
import com.intellij.AbstractBundle;
import com.intellij.CommonBundle;
import com.intellij.diagnostic.PluginException;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.components.ComponentConfig;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionsArea;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.extensions.impl.ExtensionsAreaImpl;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.NullableLazyValue;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.containers.StringInterner;
import com.intellij.util.xmlb.JDOMXIncluder;
import com.intellij.util.xmlb.XmlSerializer;
import gnu.trove.THashMap;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.*;
/**
* @author mike
*/
public class IdeaPluginDescriptorImpl implements IdeaPluginDescriptor {
public static final IdeaPluginDescriptorImpl[] EMPTY_ARRAY = new IdeaPluginDescriptorImpl[0];
private static final Logger LOG = Logger.getInstance("#com.intellij.ide.plugins.PluginDescriptor");
private final NullableLazyValue<String> myDescription = new NullableLazyValue<String>() {
@Override
protected String compute() {
return computeDescription();
}
};
private String myName;
private PluginId myId;
private String myResourceBundleBaseName;
private String myChangeNotes;
private String myVersion;
private String myVendor;
private String myVendorEmail;
private String myVendorUrl;
private String myVendorLogoPath;
private String myCategory;
private String url;
private File myPath;
private PluginId[] myDependencies = PluginId.EMPTY_ARRAY;
private PluginId[] myOptionalDependencies = PluginId.EMPTY_ARRAY;
private Map<PluginId, String> myOptionalConfigs;
private Map<PluginId, IdeaPluginDescriptorImpl> myOptionalDescriptors;
@Nullable private List<Element> myActionsElements;
private ComponentConfig[] myAppComponents;
private ComponentConfig[] myProjectComponents;
private ComponentConfig[] myModuleComponents;
private boolean myDeleted;
private ClassLoader myLoader;
private HelpSetPath[] myHelpSets;
@Nullable private MultiMap<String, Element> myExtensions;
@Nullable private MultiMap<String, Element> myExtensionsPoints;
private String myDescriptionChildText;
private String myDownloadCounter;
private long myDate;
private boolean myUseIdeaClassLoader;
private boolean myUseCoreClassLoader;
private boolean myEnabled = true;
private String mySinceBuild;
private String myUntilBuild;
private Boolean mySkipped;
private List<String> myModules;
public IdeaPluginDescriptorImpl(@NotNull File pluginPath) {
myPath = pluginPath;
}
/**
* @deprecated
* use {@link com.intellij.util.containers.StringInterner#intern(Object)} directly instead
*/
@NotNull
@Deprecated
public static String intern(@NotNull String s) {
return s;
}
/**
* @deprecated
* use {@link com.intellij.openapi.util.JDOMUtil#internElement(org.jdom.Element, com.intellij.util.containers.StringInterner)}
*/
@SuppressWarnings("unused")
@Deprecated
public static void internJDOMElement(@NotNull Element rootElement) {
}
@Nullable
private static List<Element> copyElements(@Nullable Element[] elements, StringInterner interner) {
if (elements == null || elements.length == 0) {
return null;
}
List<Element> result = new SmartList<Element>();
for (Element extensionsRoot : elements) {
for (Element element : extensionsRoot.getChildren()) {
JDOMUtil.internElement(element, interner);
result.add(element);
}
}
return result;
}
@SuppressWarnings("HardCodedStringLiteral")
private static String createDescriptionKey(final PluginId id) {
return "plugin." + id + ".description";
}
private static ComponentConfig[] mergeComponents(ComponentConfig[] first, ComponentConfig[] second) {
if (first == null) {
return second;
}
if (second == null) {
return first;
}
return ArrayUtil.mergeArrays(first, second);
}
@Override
public File getPath() {
return myPath;
}
public void setPath(@NotNull File path) {
myPath = path;
}
public void readExternal(@NotNull Document document, @NotNull URL url) throws InvalidDataException, FileNotFoundException {
Application application = ApplicationManager.getApplication();
readExternal(document, url, application != null && application.isUnitTestMode());
}
public void readExternal(@NotNull Document document, @NotNull URL url, boolean ignoreMissingInclude) throws InvalidDataException, FileNotFoundException {
document = JDOMXIncluder.resolve(document, url.toExternalForm(), ignoreMissingInclude);
Element rootElement = document.getRootElement();
JDOMUtil.internElement(rootElement, new StringInterner());
readExternal(document.getRootElement());
}
public void readExternal(@NotNull URL url) throws InvalidDataException, FileNotFoundException {
try {
Document document = JDOMUtil.loadDocument(url);
readExternal(document, url);
}
catch (FileNotFoundException e) {
throw e;
}
catch (IOException e) {
throw new InvalidDataException(e);
}
catch (JDOMException e) {
throw new InvalidDataException(e);
}
}
// used in upsource
protected void readExternal(@NotNull Element element) {
final PluginBean pluginBean = XmlSerializer.deserialize(element, PluginBean.class);
url = pluginBean.url;
myName = pluginBean.name;
String idString = pluginBean.id;
if (idString == null || idString.isEmpty()) {
idString = myName;
}
myId = idString == null ? null : PluginId.getId(idString);
String internalVersionString = pluginBean.formatVersion;
if (internalVersionString != null) {
try {
//noinspection ResultOfMethodCallIgnored
Integer.parseInt(internalVersionString);
}
catch (NumberFormatException e) {
LOG.error(new PluginException("Invalid value in plugin.xml format version: '" + internalVersionString + "'", e, myId));
}
}
myUseIdeaClassLoader = pluginBean.useIdeaClassLoader;
if (pluginBean.ideaVersion != null) {
mySinceBuild = pluginBean.ideaVersion.sinceBuild;
myUntilBuild = pluginBean.ideaVersion.untilBuild;
}
myResourceBundleBaseName = pluginBean.resourceBundle;
myDescriptionChildText = pluginBean.description;
myChangeNotes = pluginBean.changeNotes;
myVersion = pluginBean.pluginVersion;
myCategory = pluginBean.category;
if (pluginBean.vendor != null) {
myVendor = pluginBean.vendor.name;
myVendorEmail = pluginBean.vendor.email;
myVendorUrl = pluginBean.vendor.url;
myVendorLogoPath = pluginBean.vendor.logo;
}
// preserve items order as specified in xml (filterBadPlugins will not fail if module comes first)
Set<PluginId> dependentPlugins = new LinkedHashSet<PluginId>();
Set<PluginId> optionalDependentPlugins = new LinkedHashSet<PluginId>();
if (pluginBean.dependencies != null) {
myOptionalConfigs = new THashMap<PluginId, String>();
for (PluginDependency dependency : pluginBean.dependencies) {
String text = dependency.pluginId;
if (!StringUtil.isEmpty(text)) {
PluginId id = PluginId.getId(text);
dependentPlugins.add(id);
if (dependency.optional) {
optionalDependentPlugins.add(id);
if (!StringUtil.isEmpty(dependency.configFile)) {
myOptionalConfigs.put(id, dependency.configFile);
}
}
}
}
}
myDependencies = dependentPlugins.isEmpty() ? PluginId.EMPTY_ARRAY : dependentPlugins.toArray(new PluginId[dependentPlugins.size()]);
myOptionalDependencies = optionalDependentPlugins.isEmpty() ? PluginId.EMPTY_ARRAY : optionalDependentPlugins.toArray(new PluginId[optionalDependentPlugins.size()]);
if (pluginBean.helpSets == null || pluginBean.helpSets.length == 0) {
myHelpSets = HelpSetPath.EMPTY;
}
else {
myHelpSets = new HelpSetPath[pluginBean.helpSets.length];
PluginHelpSet[] sets = pluginBean.helpSets;
for (int i = 0, n = sets.length; i < n; i++) {
PluginHelpSet pluginHelpSet = sets[i];
myHelpSets[i] = new HelpSetPath(pluginHelpSet.file, pluginHelpSet.path);
}
}
myAppComponents = pluginBean.applicationComponents;
myProjectComponents = pluginBean.projectComponents;
myModuleComponents = pluginBean.moduleComponents;
if (myAppComponents == null) myAppComponents = ComponentConfig.EMPTY_ARRAY;
if (myProjectComponents == null) myProjectComponents = ComponentConfig.EMPTY_ARRAY;
if (myModuleComponents == null) myModuleComponents = ComponentConfig.EMPTY_ARRAY;
StringInterner interner = new StringInterner();
List<Element> extensions = copyElements(pluginBean.extensions, interner);
if (extensions != null) {
myExtensions = MultiMap.createSmart();
for (Element extension : extensions) {
myExtensions.putValue(ExtensionsAreaImpl.extractEPName(extension), extension);
}
}
List<Element> extensionPoints = copyElements(pluginBean.extensionPoints, interner);
if (extensionPoints != null) {
myExtensionsPoints = MultiMap.createSmart();
for (Element extensionPoint : extensionPoints) {
myExtensionsPoints.putValue(StringUtil.notNullize(extensionPoint.getAttributeValue(ExtensionsAreaImpl.ATTRIBUTE_AREA)), extensionPoint);
}
}
myActionsElements = copyElements(pluginBean.actions, interner);
if (pluginBean.modules != null && !pluginBean.modules.isEmpty()) {
myModules = pluginBean.modules;
}
}
// made public for Upsource
public void registerExtensionPoints(@NotNull ExtensionsArea area) {
if (myExtensionsPoints != null) {
for (Element element : myExtensionsPoints.get(StringUtil.notNullize(area.getAreaClass()))) {
area.registerExtensionPoint(this, element);
}
}
}
// made public for Upsource
public void registerExtensions(@NotNull ExtensionsArea area, @NotNull String epName) {
if (myExtensions != null) {
for (Element element : myExtensions.get(epName)) {
area.registerExtension(this, element);
}
}
}
@Override
public String getDescription() {
return myDescription.getValue();
}
@Override
public String getChangeNotes() {
return myChangeNotes;
}
@Override
public String getName() {
return myName;
}
@Override
@NotNull
public PluginId[] getDependentPluginIds() {
return myDependencies;
}
@Override
@NotNull
public PluginId[] getOptionalDependentPluginIds() {
return myOptionalDependencies;
}
@Override
public String getVendor() {
return myVendor;
}
public void setVendor( final String val )
{
myVendor = val;
}
@Override
public String getVersion() {
return myVersion;
}
@Override
public String getResourceBundleBaseName() {
return myResourceBundleBaseName;
}
@Override
public String getCategory() {
return myCategory;
}
/*
This setter was explicitly defined to be able to set a category for a
descriptor outside its loading from the xml file.
Problem was that most commonly plugin authors do not publish the plugin's
category in its .xml file so to be consistent in plugins representation
(e.g. in the Plugins form) we have to set this value outside.
*/
public void setCategory( String category ){
myCategory = category;
}
@SuppressWarnings("UnusedDeclaration") // Used in Upsource
@Nullable
public MultiMap<String, Element> getExtensionsPoints() {
return myExtensionsPoints;
}
@SuppressWarnings("UnusedDeclaration") // Used in Upsource
@Nullable
public MultiMap<String, Element> getExtensions() {
return myExtensions;
}
@SuppressWarnings("HardCodedStringLiteral")
@NotNull
public List<File> getClassPath() {
if (myPath.isDirectory()) {
final List<File> result = new ArrayList<File>();
final File classesDir = new File(myPath, "classes");
if (classesDir.exists()) {
result.add(classesDir);
}
final File[] files = new File(myPath, "lib").listFiles();
if (files != null && files.length > 0) {
for (final File f : files) {
if (f.isFile()) {
final String name = f.getName();
if (StringUtil.endsWithIgnoreCase(name, ".jar") || StringUtil.endsWithIgnoreCase(name, ".zip")) {
result.add(f);
}
}
else {
result.add(f);
}
}
}
return result;
}
else {
return Collections.singletonList(myPath);
}
}
@Override
@Nullable
public List<Element> getActionsDescriptionElements() {
return myActionsElements;
}
@Override
@NotNull
public ComponentConfig[] getAppComponents() {
return myAppComponents;
}
@Override
@NotNull
public ComponentConfig[] getProjectComponents() {
return myProjectComponents;
}
@Override
@NotNull
public ComponentConfig[] getModuleComponents() {
return myModuleComponents;
}
@Override
public String getVendorEmail() {
return myVendorEmail;
}
public void setVendorEmail( final String val )
{
myVendorEmail = val;
}
@Override
public String getVendorUrl() {
return myVendorUrl;
}
public void setVendorUrl( final String val )
{
myVendorUrl = val;
}
@Override
public String getUrl() {
return url;
}
public void setUrl( final String val )
{
url = val;
}
@NonNls
public String toString() {
return "PluginDescriptor[name='" + myName + "', classpath='" + myPath + "']";
}
public boolean isDeleted() {
return myDeleted;
}
public void setDeleted(boolean deleted) {
myDeleted = deleted;
}
public void setLoader(ClassLoader loader) {
myLoader = loader;
}
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof IdeaPluginDescriptorImpl)) return false;
final IdeaPluginDescriptorImpl pluginDescriptor = (IdeaPluginDescriptorImpl)o;
return myName == null ? pluginDescriptor.myName == null : myName.equals(pluginDescriptor.myName);
}
public int hashCode() {
return myName != null ? myName.hashCode() : 0;
}
@Override
@NotNull
public HelpSetPath[] getHelpSets() {
return myHelpSets;
}
@Override
public PluginId getPluginId() {
return myId;
}
/*
This setter was explicitly defined to be able to set downloads count for a
descriptor outside its loading from the xml file since this information
is available only from the site.
*/
public void setDownloadsCount(String downloadsCount) {
myDownloadCounter = downloadsCount;
}
@Override
public String getDownloads(){
return myDownloadCounter;
}
public long getDate(){
return myDate;
}
/*
This setter was explicitly defined to be able to set date for a
descriptor outside its loading from the xml file since this information
is available only from the site.
*/
public void setDate( long date ){
myDate = date;
}
@Override
public ClassLoader getPluginClassLoader() {
return myLoader != null ? myLoader : getClass().getClassLoader();
}
@Override
public String getVendorLogoPath() {
return myVendorLogoPath;
}
public void setVendorLogoPath(final String vendorLogoPath) {
myVendorLogoPath = vendorLogoPath;
}
@Override
public boolean getUseIdeaClassLoader() {
return myUseIdeaClassLoader;
}
public boolean isUseCoreClassLoader() {
return myUseCoreClassLoader;
}
public void setUseCoreClassLoader(final boolean useCoreClassLoader) {
myUseCoreClassLoader = useCoreClassLoader;
}
private String computeDescription() {
ResourceBundle bundle = null;
if (myResourceBundleBaseName != null) {
try {
bundle = AbstractBundle.getResourceBundle(myResourceBundleBaseName, getPluginClassLoader());
}
catch (MissingResourceException e) {
LOG.info("Cannot find plugin " + myId + " resource-bundle: " + myResourceBundleBaseName);
}
}
if (bundle == null) {
return myDescriptionChildText;
}
return CommonBundle.messageOrDefault(bundle, createDescriptionKey(myId), myDescriptionChildText == null ? "" : myDescriptionChildText);
}
public void insertDependency(@NotNull IdeaPluginDescriptor d) {
PluginId[] deps = new PluginId[getDependentPluginIds().length + 1];
deps[0] = d.getPluginId();
System.arraycopy(myDependencies, 0, deps, 1, deps.length - 1);
myDependencies = deps;
}
@Override
public boolean isEnabled() {
return myEnabled;
}
@Override
public void setEnabled(final boolean enabled) {
myEnabled = enabled;
}
@Override
public String getSinceBuild() {
return mySinceBuild;
}
@Override
public String getUntilBuild() {
return myUntilBuild;
}
Map<PluginId, String> getOptionalConfigs() {
return myOptionalConfigs;
}
Map<PluginId, IdeaPluginDescriptorImpl> getOptionalDescriptors() {
return myOptionalDescriptors;
}
void setOptionalDescriptors(@NotNull Map<PluginId, IdeaPluginDescriptorImpl> optionalDescriptors) {
myOptionalDescriptors = optionalDescriptors;
}
void mergeOptionalConfig(final IdeaPluginDescriptorImpl descriptor) {
if (myExtensions == null) {
myExtensions = descriptor.myExtensions;
}
else if (descriptor.myExtensions != null) {
myExtensions.putAllValues(descriptor.myExtensions);
}
if (myExtensionsPoints == null) {
myExtensionsPoints = descriptor.myExtensionsPoints;
}
else if (descriptor.myExtensionsPoints != null) {
myExtensionsPoints.putAllValues(descriptor.myExtensionsPoints);
}
if (myActionsElements == null) {
myActionsElements = descriptor.myActionsElements;
}
else if (descriptor.myActionsElements != null) {
myActionsElements.addAll(descriptor.myActionsElements);
}
myAppComponents = mergeComponents(myAppComponents, descriptor.myAppComponents);
myProjectComponents = mergeComponents(myProjectComponents, descriptor.myProjectComponents);
myModuleComponents = mergeComponents(myModuleComponents, descriptor.myModuleComponents);
}
// Tri-state flag (Boolean, may be null).
// NOTE(review): the meaning of the null state is not visible here — confirm at call sites.
public Boolean getSkipped() {
    return mySkipped;
}
// Sets the tri-state "skipped" flag; null is accepted (resets to the undecided state).
public void setSkipped(final Boolean skipped) {
    mySkipped = skipped;
}
/**
 * Tells whether this plugin ships with the IDE rather than being user-installed.
 * The core plugin is always bundled; otherwise the decision is made by
 * comparing the plugin's on-disk location against known installation paths.
 */
@Override
public boolean isBundled() {
    if (PluginManagerCore.CORE_PLUGIN_ID.equals(myId.getIdString())) {
        return true;
    }
    String path;
    try {
        // Canonicalize to collapse segments like <home>/bin/../config/plugins/APlugin.
        path = getPath().getCanonicalPath();
    }
    catch (IOException e) {
        path = getPath().getAbsolutePath();
    }
    boolean internalMode = ApplicationManager.getApplication() != null
                           && ApplicationManager.getApplication().isInternal();
    if (internalMode) {
        // In internal (development) mode, plugins built into out/classes count as bundled.
        String devClassesRoot = PathManager.getHomePath() + File.separator + "out" + File.separator + "classes";
        if (path.startsWith(devClassesRoot)) {
            return true;
        }
    }
    return path.startsWith(PathManager.getPreInstalledPluginsPath());
}
/**
 * Returns the modules listed in this descriptor, or null when none were declared.
 */
@Nullable
public List<String> getModules() {
    return myModules;
}
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.inbandtelemetry.api;
import com.google.common.annotations.Beta;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.TpPort;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Network-level INT configuration.
 * <p>
 * Instances are immutable; use {@link #builder()} to create one.
 */
@Beta
public final class IntConfig {
    /**
     * Represents a type of telemetry spec to collect in the dataplane.
     */
    public enum TelemetrySpec {
        /**
         * Embeds telemetry metadata according to the INT specification.
         *
         * @see <a href="https://github.com/p4lang/p4-applications/blob/master/docs/INT.pdf">
         * INT specification</a>
         */
        INT,
        /**
         * Embeds telemetry metadata according to the OAM specification.
         *
         * @see <a href="https://tools.ietf.org/html/draft-ietf-ippm-ioam-data">
         * Data fields for In-situ OAM</a>
         */
        IOAM
    }

    private final IpAddress collectorIp;
    private final TpPort collectorPort;
    private final MacAddress collectorNextHopMac;
    private final IpAddress sinkIp;
    private final MacAddress sinkMac;
    private final TelemetrySpec spec;
    // Made final: the flag is only assigned in the constructor and there is no
    // setter, so the class is now fully immutable as its design implies.
    private final boolean enabled;

    private IntConfig(IpAddress collectorIp, TpPort collectorPort, MacAddress collectorNextHopMac,
                      IpAddress sinkIp, MacAddress sinkMac, TelemetrySpec spec, boolean enabled) {
        this.collectorIp = collectorIp;
        this.collectorPort = collectorPort;
        this.collectorNextHopMac = collectorNextHopMac;
        this.sinkIp = sinkIp;
        this.sinkMac = sinkMac;
        this.spec = spec;
        this.enabled = enabled;
    }

    /**
     * Returns IP address of the collector.
     * This is the destination IP address that will be used for all INT reports
     * generated by all sink devices.
     *
     * @return collector IP address
     */
    public IpAddress collectorIp() {
        return collectorIp;
    }

    /**
     * Returns UDP port number of the collector.
     * This is the destination UDP port number that will be used for all INT reports
     * generated by all sink devices.
     *
     * @return collector UDP port number
     */
    public TpPort collectorPort() {
        return collectorPort;
    }

    /**
     * Returns MAC address of next hop of INT report packets.
     * This can be either MAC address of the collector or a router.
     * This is an optional parameter, which means that the usage of this
     * parameter depends on IntProgrammable implementation.
     * (e.g., If a report packet needs to be routed to reach the collector,
     * IntProgrammable will ignore this value and choose next hop router's MAC address.
     * If a collector itself is the next hop of INT report packets, then
     * this value will be used as a destination MAC address for all INT report packets.)
     *
     * @return MAC address of next hop of INT report packets
     */
    public MacAddress collectorNextHopMac() {
        return collectorNextHopMac;
    }

    /**
     * Returns IP address of the sink device.
     * All sink devices share this address as the source IP address
     * for all INT reports.
     *
     * @return sink device's IP address
     */
    public IpAddress sinkIp() {
        return sinkIp;
    }

    /**
     * Returns MAC address of the sink device.
     * All sink devices share this address as the source MAC address
     * for all INT reports.
     *
     * @return sink device's MAC address
     */
    public MacAddress sinkMac() {
        return sinkMac;
    }

    /**
     * Returns the type of telemetry spec as per {@link TelemetrySpec}.
     *
     * @return telemetry spec
     */
    public TelemetrySpec spec() {
        return spec;
    }

    /**
     * Returns the status of INT functionality.
     *
     * @return true if INT is enabled; false otherwise.
     */
    public boolean enabled() {
        return enabled;
    }

    /**
     * Returns a new builder.
     *
     * @return new builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * An IntConfig object builder.
     * Collector and sink addresses are mandatory; the spec defaults to
     * {@link TelemetrySpec#INT} and INT starts disabled.
     */
    public static final class Builder {
        private IpAddress collectorIp;
        private TpPort collectorPort;
        private MacAddress collectorNextHopMac;
        private IpAddress sinkIp;
        private MacAddress sinkMac;
        private TelemetrySpec spec = TelemetrySpec.INT;
        private boolean enabled = false;

        /**
         * Assigns a collector IP address to the IntConfig object.
         *
         * @param collectorIp IP address of the collector
         * @return an IntConfig builder
         */
        public IntConfig.Builder withCollectorIp(IpAddress collectorIp) {
            this.collectorIp = collectorIp;
            return this;
        }

        /**
         * Assigns a collector UDP port to the IntConfig object.
         *
         * @param collectorPort UDP port number of the collector
         * @return an IntConfig builder
         */
        public IntConfig.Builder withCollectorPort(TpPort collectorPort) {
            this.collectorPort = collectorPort;
            return this;
        }

        /**
         * Assigns a MAC address of the next hop to the collector
         * to the IntConfig object.
         *
         * @param collectorNextHopMac MAC address of the collector
         * @return an IntConfig builder
         */
        public IntConfig.Builder withCollectorNextHopMac(MacAddress collectorNextHopMac) {
            this.collectorNextHopMac = collectorNextHopMac;
            return this;
        }

        /**
         * Assigns an IP address of the sink device to the IntConfig object.
         *
         * @param sinkIp sink device's IP address
         * @return an IntConfig builder
         */
        public IntConfig.Builder withSinkIp(IpAddress sinkIp) {
            this.sinkIp = sinkIp;
            return this;
        }

        /**
         * Assigns a MAC address of the sink device to the IntConfig object.
         *
         * @param sinkMac sink device's MAC address
         * @return an IntConfig builder
         */
        public IntConfig.Builder withSinkMac(MacAddress sinkMac) {
            this.sinkMac = sinkMac;
            return this;
        }

        /**
         * Assigns the type of telemetry spec to the IntConfig object.
         *
         * @param spec telemetry spec
         * @return an IntConfig builder
         */
        public IntConfig.Builder withTelemetrySpec(TelemetrySpec spec) {
            this.spec = spec;
            return this;
        }

        /**
         * Assigns the status of INT.
         * True to enable INT functionality, false otherwise.
         *
         * @param enabled the status of INT
         * @return an IntConfig builder
         */
        public IntConfig.Builder enabled(boolean enabled) {
            this.enabled = enabled;
            return this;
        }

        /**
         * Builds the IntConfig object.
         *
         * @return an IntConfig object
         * @throws NullPointerException if any mandatory field was not set
         */
        public IntConfig build() {
            checkNotNull(collectorIp, "Collector IP should be specified.");
            checkNotNull(collectorPort, "Collector port number should be specified.");
            checkNotNull(collectorNextHopMac, "Next hop MAC address for report packets should be provided.");
            checkNotNull(sinkIp, "Sink IP address for report packets should be specified.");
            checkNotNull(sinkMac, "Sink MAC address for report packets should be specified.");
            return new IntConfig(collectorIp, collectorPort, collectorNextHopMac,
                                 sinkIp, sinkMac, spec, enabled);
        }
    }
}
| |
package com.betfair.publicapi.types.exchange.v5;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for RacingSilkV2 complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="RacingSilkV2">
* <complexContent>
* <extension base="{http://www.betfair.com/publicapi/types/exchange/v5/}RacingSilk">
* <sequence>
* <element name="ownerName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="jockeyName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="colour" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="sex" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="bred" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="forecastPriceNumerator" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="forecastPriceDenominator" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="officialRating" type="{http://www.w3.org/2001/XMLSchema}int"/>
* <element name="sire" type="{http://www.betfair.com/publicapi/types/exchange/v5/}Breeding"/>
* <element name="dam" type="{http://www.betfair.com/publicapi/types/exchange/v5/}Breeding"/>
* <element name="damSire" type="{http://www.betfair.com/publicapi/types/exchange/v5/}Breeding"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): this class appears to be JAXB-generated from the v5 exchange
// schema (see the schema fragment in the class Javadoc above). Prefer
// regenerating from the schema over hand-editing.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "RacingSilkV2", propOrder = {
    "ownerName",
    "jockeyName",
    "colour",
    "sex",
    "bred",
    "forecastPriceNumerator",
    "forecastPriceDenominator",
    "officialRating",
    "sire",
    "dam",
    "damSire"
})
public class RacingSilkV2
    extends RacingSilk
{

    @XmlElement(required = true, nillable = true)
    protected String ownerName;
    @XmlElement(required = true, nillable = true)
    protected String jockeyName;
    @XmlElement(required = true, nillable = true)
    protected String colour;
    @XmlElement(required = true, nillable = true)
    protected String sex;
    @XmlElement(required = true, nillable = true)
    protected String bred;
    // Declared as xs:int in the schema, hence primitive and never null.
    protected int forecastPriceNumerator;
    protected int forecastPriceDenominator;
    protected int officialRating;
    @XmlElement(required = true, nillable = true)
    protected Breeding sire;
    @XmlElement(required = true, nillable = true)
    protected Breeding dam;
    @XmlElement(required = true, nillable = true)
    protected Breeding damSire;

    /**
     * Gets the value of the ownerName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOwnerName() {
        return ownerName;
    }

    /**
     * Sets the value of the ownerName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOwnerName(String value) {
        this.ownerName = value;
    }

    /**
     * Gets the value of the jockeyName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getJockeyName() {
        return jockeyName;
    }

    /**
     * Sets the value of the jockeyName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setJockeyName(String value) {
        this.jockeyName = value;
    }

    /**
     * Gets the value of the colour property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getColour() {
        return colour;
    }

    /**
     * Sets the value of the colour property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setColour(String value) {
        this.colour = value;
    }

    /**
     * Gets the value of the sex property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getSex() {
        return sex;
    }

    /**
     * Sets the value of the sex property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setSex(String value) {
        this.sex = value;
    }

    /**
     * Gets the value of the bred property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getBred() {
        return bred;
    }

    /**
     * Sets the value of the bred property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setBred(String value) {
        this.bred = value;
    }

    /**
     * Gets the value of the forecastPriceNumerator property.
     *
     */
    public int getForecastPriceNumerator() {
        return forecastPriceNumerator;
    }

    /**
     * Sets the value of the forecastPriceNumerator property.
     *
     */
    public void setForecastPriceNumerator(int value) {
        this.forecastPriceNumerator = value;
    }

    /**
     * Gets the value of the forecastPriceDenominator property.
     *
     */
    public int getForecastPriceDenominator() {
        return forecastPriceDenominator;
    }

    /**
     * Sets the value of the forecastPriceDenominator property.
     *
     */
    public void setForecastPriceDenominator(int value) {
        this.forecastPriceDenominator = value;
    }

    /**
     * Gets the value of the officialRating property.
     *
     */
    public int getOfficialRating() {
        return officialRating;
    }

    /**
     * Sets the value of the officialRating property.
     *
     */
    public void setOfficialRating(int value) {
        this.officialRating = value;
    }

    /**
     * Gets the value of the sire property.
     *
     * @return
     *     possible object is
     *     {@link Breeding }
     *
     */
    public Breeding getSire() {
        return sire;
    }

    /**
     * Sets the value of the sire property.
     *
     * @param value
     *     allowed object is
     *     {@link Breeding }
     *
     */
    public void setSire(Breeding value) {
        this.sire = value;
    }

    /**
     * Gets the value of the dam property.
     *
     * @return
     *     possible object is
     *     {@link Breeding }
     *
     */
    public Breeding getDam() {
        return dam;
    }

    /**
     * Sets the value of the dam property.
     *
     * @param value
     *     allowed object is
     *     {@link Breeding }
     *
     */
    public void setDam(Breeding value) {
        this.dam = value;
    }

    /**
     * Gets the value of the damSire property.
     *
     * @return
     *     possible object is
     *     {@link Breeding }
     *
     */
    public Breeding getDamSire() {
        return damSire;
    }

    /**
     * Sets the value of the damSire property.
     *
     * @param value
     *     allowed object is
     *     {@link Breeding }
     *
     */
    public void setDamSire(Breeding value) {
        this.damSire = value;
    }

}
| |
/*
* Copyright (c) 2009-2017 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.shader;
import com.jme3.math.*;
import com.jme3.util.BufferUtils;
import com.jme3.util.TempVars;
import java.nio.*;
/**
 * A shader variable that holds a value (scalar, vector, matrix or array
 * thereof) to be uploaded to the GL driver.  Array and matrix values are
 * additionally mirrored into a {@link FloatBuffer} ({@code multiData}) so
 * they can be sent to GL efficiently.
 */
public class Uniform extends ShaderVariable {

    private static final Integer ZERO_INT = 0;
    private static final Float ZERO_FLT = Float.valueOf(0);
    // Scratch buffer of zeros used to blank out multiData in chunks of 16 floats.
    private static final FloatBuffer ZERO_BUF = BufferUtils.createFloatBuffer(4*4);

    /**
     * Currently set value of the uniform.
     */
    protected Object value = null;

    /**
     * For arrays or matrices, efficient format
     * that can be sent to GL faster.
     */
    protected FloatBuffer multiData = null;

    /**
     * Type of uniform
     */
    protected VarType varType;

    /**
     * Binding to a renderer value, or null if user-defined uniform
     */
    protected UniformBinding binding;

    /**
     * Used to track which uniforms to clear to avoid
     * values leaking from other materials that use that shader.
     */
    protected boolean setByCurrentMaterial = false;

    @Override
    public int hashCode() {
        int hash = 5;
        hash = 31 * hash + (this.value != null ? this.value.hashCode() : 0);
        hash = 31 * hash + (this.varType != null ? this.varType.hashCode() : 0);
        hash = 31 * hash + (this.binding != null ? this.binding.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Guard against foreign types: the previous unconditional cast threw
        // ClassCastException instead of returning false as equals() requires.
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final Uniform other = (Uniform) obj;
        if (this.value != other.value && (this.value == null || !this.value.equals(other.value))) {
            return false;
        }
        return this.binding == other.binding && this.varType == other.varType;
    }

    @Override
    public String toString(){
        StringBuilder sb = new StringBuilder();
        sb.append("Uniform[name=");
        sb.append(name);
        if (varType != null){
            sb.append(", type=");
            sb.append(varType);
            sb.append(", value=");
            sb.append(value);
        }else{
            sb.append(", value=<not set>");
        }
        sb.append("]");
        return sb.toString();
    }

    public void setBinding(UniformBinding binding){
        this.binding = binding;
    }

    public UniformBinding getBinding(){
        return binding;
    }

    public VarType getVarType() {
        return varType;
    }

    public Object getValue(){
        return value;
    }

    public FloatBuffer getMultiData() {
        return multiData;
    }

    public boolean isSetByCurrentMaterial() {
        return setByCurrentMaterial;
    }

    public void clearSetByCurrentMaterial(){
        setByCurrentMaterial = false;
    }

    /**
     * Resets the stored value to a type-appropriate zero so that values set by
     * one material cannot leak into another material using the same shader.
     */
    public void clearValue(){
        updateNeeded = true;

        if (multiData != null){
            // Overwrite the whole buffer with zeros, 16 floats at a time.
            multiData.clear();
            while (multiData.remaining() > 0){
                ZERO_BUF.clear();
                ZERO_BUF.limit( Math.min(multiData.remaining(), 16) );
                multiData.put(ZERO_BUF);
            }
            multiData.clear();
            return;
        }

        if (varType == null) {
            return;
        }

        switch (varType){
            case Int:
                this.value = ZERO_INT;
                break;
            case Boolean:
                this.value = Boolean.FALSE;
                break;
            case Float:
                this.value = ZERO_FLT;
                break;
            case Vector2:
                if (this.value != null) {
                    ((Vector2f) this.value).set(Vector2f.ZERO);
                }
                break;
            case Vector3:
                if (this.value != null) {
                    ((Vector3f) this.value).set(Vector3f.ZERO);
                }
                break;
            case Vector4:
                if (this.value != null) {
                    if (this.value instanceof ColorRGBA) {
                        ((ColorRGBA) this.value).set(ColorRGBA.BlackNoAlpha);
                    } else if (this.value instanceof Vector4f) {
                        ((Vector4f) this.value).set(Vector4f.ZERO);
                    } else {
                        ((Quaternion) this.value).set(Quaternion.ZERO);
                    }
                }
                break;
            default:
                // won't happen because those are either textures
                // or multidata types
        }
    }

    /**
     * Stores a new value for this uniform, converting it to the internal
     * representation (a defensive copy for mutable math types, a FloatBuffer
     * for array/matrix types).
     *
     * @param type  the variable type of {@code value}; must match any
     *              previously set type
     * @param value the value to store (never null)
     * @throws IllegalArgumentException if {@code value} is null or
     *         {@code type} conflicts with the current type
     */
    public void setValue(VarType type, Object value){
        if (location == LOC_NOT_DEFINED) {
            return;
        }

        if (varType != null && varType != type) {
            throw new IllegalArgumentException("Expected a " + varType.name() + " value!");
        }

        if (value == null) {
            throw new IllegalArgumentException("for uniform " + name + ": value cannot be null");
        }

        setByCurrentMaterial = true;

        switch (type){
            case Matrix3:
                if (value.equals(this.value)) {
                    return;
                }
                Matrix3f m3 = (Matrix3f) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(9);
                }
                m3.fillFloatBuffer(multiData, true);
                multiData.clear();
                if (this.value == null) {
                    this.value = new Matrix3f(m3);
                } else {
                    ((Matrix3f)this.value).set(m3);
                }
                break;
            case Matrix4:
                if (value.equals(this.value)) {
                    return;
                }
                Matrix4f m4 = (Matrix4f) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(16);
                }
                m4.fillFloatBuffer(multiData, true);
                multiData.clear();
                if (this.value == null) {
                    this.value = new Matrix4f(m4);
                } else {
                    ((Matrix4f)this.value).copy(m4);
                }
                break;
            case IntArray:
                int[] ia = (int[]) value;
                if (this.value == null) {
                    this.value = BufferUtils.createIntBuffer(ia);
                } else {
                    // Fix: the reuse path previously resized the buffer without
                    // copying the incoming array, leaving stale data in place.
                    IntBuffer ib = BufferUtils.ensureLargeEnough((IntBuffer)this.value, ia.length);
                    ib.clear();
                    ib.put(ia);
                    this.value = ib;
                }
                ((IntBuffer)this.value).clear();
                break;
            case FloatArray:
                float[] fa = (float[]) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(fa);
                } else {
                    multiData = BufferUtils.ensureLargeEnough(multiData, fa.length);
                }
                multiData.put(fa);
                multiData.clear();
                break;
            case Vector2Array:
                Vector2f[] v2a = (Vector2f[]) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(v2a);
                } else {
                    multiData = BufferUtils.ensureLargeEnough(multiData, v2a.length * 2);
                }
                for (int i = 0; i < v2a.length; i++) {
                    BufferUtils.setInBuffer(v2a[i], multiData, i);
                }
                multiData.clear();
                break;
            case Vector3Array:
                Vector3f[] v3a = (Vector3f[]) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(v3a);
                } else {
                    multiData = BufferUtils.ensureLargeEnough(multiData, v3a.length * 3);
                }
                for (int i = 0; i < v3a.length; i++) {
                    BufferUtils.setInBuffer(v3a[i], multiData, i);
                }
                multiData.clear();
                break;
            case Vector4Array:
                Vector4f[] v4a = (Vector4f[]) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(v4a);
                } else {
                    multiData = BufferUtils.ensureLargeEnough(multiData, v4a.length * 4);
                }
                for (int i = 0; i < v4a.length; i++) {
                    BufferUtils.setInBuffer(v4a[i], multiData, i);
                }
                multiData.clear();
                break;
            case Matrix3Array:
                Matrix3f[] m3a = (Matrix3f[]) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(m3a.length * 9);
                } else {
                    multiData = BufferUtils.ensureLargeEnough(multiData, m3a.length * 9);
                }
                for (int i = 0; i < m3a.length; i++) {
                    m3a[i].fillFloatBuffer(multiData, true);
                }
                multiData.clear();
                break;
            case Matrix4Array:
                Matrix4f[] m4a = (Matrix4f[]) value;
                if (multiData == null) {
                    multiData = BufferUtils.createFloatBuffer(m4a.length * 16);
                } else {
                    multiData = BufferUtils.ensureLargeEnough(multiData, m4a.length * 16);
                }
                for (int i = 0; i < m4a.length; i++) {
                    m4a[i].fillFloatBuffer(multiData, true);
                }
                multiData.clear();
                break;
            case Vector2:
                if (value.equals(this.value)) {
                    return;
                }
                if (this.value == null) {
                    this.value = new Vector2f((Vector2f) value);
                } else {
                    ((Vector2f) this.value).set((Vector2f) value);
                }
                break;
            case Vector3:
                if (value.equals(this.value)) {
                    return;
                }
                if (this.value == null) {
                    this.value = new Vector3f((Vector3f) value);
                } else {
                    ((Vector3f) this.value).set((Vector3f) value);
                }
                break;
            case Vector4:
                if (value.equals(this.value)) {
                    return;
                }
                TempVars vars = TempVars.get();
                Vector4f vec4 = vars.vect4f1;
                //handle the null case
                if (this.value == null) {
                    try {
                        // Instantiate the same runtime type as the incoming value
                        // (ColorRGBA, Vector4f or Quaternion).
                        this.value = value.getClass().newInstance();
                    } catch (InstantiationException | IllegalAccessException e) {
                        // Preserve the original exception as the cause.
                        throw new IllegalArgumentException(
                                "Cannot instanciate param of class " + value.getClass().getCanonicalName(), e);
                    }
                }
                //feed the pivot vec 4 with the correct value
                if (value instanceof ColorRGBA) {
                    ColorRGBA c = (ColorRGBA) value;
                    vec4.set(c.r, c.g, c.b, c.a);
                } else if (value instanceof Vector4f) {
                    vec4.set((Vector4f) value);
                } else {
                    Quaternion q = (Quaternion) value;
                    vec4.set(q.getX(), q.getY(), q.getZ(), q.getW());
                }

                //feed this.value with the collected values.
                if (this.value instanceof ColorRGBA) {
                    ((ColorRGBA) this.value).set(vec4.x, vec4.y, vec4.z, vec4.w);
                } else if (this.value instanceof Vector4f) {
                    ((Vector4f) this.value).set(vec4);
                } else {
                    ((Quaternion) this.value).set(vec4.x, vec4.y, vec4.z, vec4.w);
                }
                vars.release();
                break;
                // Only use check if equals optimization for primitive values
            case Int:
            case Float:
            case Boolean:
                if (value.equals(this.value)) {
                    return;
                }
                this.value = value;
                break;
            default:
                this.value = value;
                break;
        }

        // Note: multiData is intentionally not mirrored into this.value here;
        // array/matrix consumers read it via getMultiData().
        varType = type;
        updateNeeded = true;
    }

    public void setVector4Length(int length){
        // NOTE(review): this checks the raw -1 sentinel while setValue() checks
        // LOC_NOT_DEFINED — confirm against ShaderVariable's constants.
        if (location == -1) {
            return;
        }
        multiData = BufferUtils.ensureLargeEnough(multiData, length * 4);
        value = multiData;
        varType = VarType.Vector4Array;
        updateNeeded = true;
        setByCurrentMaterial = true;
    }

    public void setVector4InArray(float x, float y, float z, float w, int index){
        if (location == -1) {
            return;
        }
        if (varType != null && varType != VarType.Vector4Array) {
            throw new IllegalArgumentException("Expected a " + varType.name() + " value!");
        }
        multiData.position(index * 4);
        multiData.put(x).put(y).put(z).put(w);
        multiData.rewind();
        updateNeeded = true;
        setByCurrentMaterial = true;
    }

    public boolean isUpdateNeeded(){
        return updateNeeded;
    }

    public void clearUpdateNeeded(){
        updateNeeded = false;
    }

    public void reset(){
        setByCurrentMaterial = false;
        // NOTE(review): -2 is presumably ShaderVariable.LOC_NOT_DEFINED — confirm.
        location = -2;
        updateNeeded = true;
    }

    public void deleteNativeBuffers() {
        if (value instanceof Buffer) {
            BufferUtils.destroyDirectBuffer((Buffer)value);
            // Drop the stale reference: the direct buffer's memory is gone.
            value = null;
        }
    }
}
| |
/*
* Copyright 2015-2016 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.eva.pipeline.jobs.steps.tasklets;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.opencb.biodata.models.variant.VariantSource;
import org.opencb.biodata.models.variant.stats.VariantSourceStats;
import org.opencb.biodata.models.variant.stats.VariantStats;
import org.opencb.datastore.core.ObjectMap;
import org.opencb.datastore.core.QueryOptions;
import org.opencb.datastore.core.QueryResult;
import org.opencb.datastore.core.config.DataStoreServerAddress;
import org.opencb.opencga.lib.auth.IllegalOpenCGACredentialsException;
import org.opencb.opencga.storage.core.variant.VariantStorageManager;
import org.opencb.opencga.storage.core.variant.adaptors.VariantDBAdaptor;
import org.opencb.opencga.storage.core.variant.io.json.VariantStatsJsonMixin;
import org.opencb.opencga.storage.core.variant.stats.VariantStatsWrapper;
import org.opencb.opencga.storage.mongodb.utils.MongoCredentials;
import org.opencb.opencga.storage.mongodb.variant.VariantMongoDBAdaptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import uk.ac.ebi.eva.pipeline.parameters.DatabaseParameters;
import uk.ac.ebi.eva.pipeline.parameters.InputParameters;
import uk.ac.ebi.eva.pipeline.parameters.MongoConnection;
import uk.ac.ebi.eva.pipeline.parameters.OutputParameters;
import uk.ac.ebi.eva.utils.URLHelper;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.UnknownHostException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;
/**
* Tasklet that loads statistics into mongoDB.
* <p>
* Input: file containing statistics (.variants.stats.json.gz)
* Output: stats loaded into mongodb
* <p>
* Example:
* {
* "chromosome":"20",
* "position":67765,
* "cohortStats":{
* "ALL":{
* "refAllele":"C",
* "altAllele":"T",
* "variantType":"SNV",
* "refAlleleCount":4996,
* "altAlleleCount":12,
* "genotypesCount":{"0|0":2492,"0|1":12},
* "missingAlleles":0,
* "missingGenotypes":0,
* "refAlleleFreq":0.99760383,
* "altAlleleFreq":0.002396166,
* "genotypesFreq":{"0/0":0.0,"0/1":0.0,"1/1":0.0,"0|0":0.99520767,"0|1":0.004792332},
* "maf":0.002396166,
* "mgf":0.0,
* "mafAllele":"T",
* "mgfGenotype":"0/0",
* "mendelianErrors":-1,
* "casesPercentDominant":-1.0,
* "controlsPercentDominant":-1.0,
* "casesPercentRecessive":-1.0,
* "controlsPercentRecessive":-1.0,
* "quality":100.0,
* "numSamples":2504
* }
* }
* }
*/
public class PopulationStatisticsLoaderStep implements Tasklet {
private static final Logger logger = LoggerFactory.getLogger(PopulationStatisticsLoaderStep.class);
@Autowired
private InputParameters inputParameters;
@Autowired
private OutputParameters outputParameters;
@Autowired
private DatabaseParameters dbParameters;
private JsonFactory jsonFactory;
private ObjectMapper jsonObjectMapper;
public PopulationStatisticsLoaderStep() {
jsonFactory = new JsonFactory();
jsonObjectMapper = new ObjectMapper(jsonFactory);
jsonObjectMapper.addMixIn(VariantStats.class, VariantStatsJsonMixin.class);
}
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
VariantDBAdaptor dbAdaptor = getDbAdaptor();
URI variantStatsOutputUri = URLHelper.getVariantsStatsUri(
outputParameters.getOutputDirStatistics(), inputParameters.getStudyId(), inputParameters.getVcfId());
URI sourceStatsOutputUri = URLHelper.getSourceStatsUri(
outputParameters.getOutputDirStatistics(), inputParameters.getStudyId(), inputParameters.getVcfId());
QueryOptions statsOptions = new QueryOptions(getVariantOptions());
// Load statistics for variants and the file
loadVariantStats(dbAdaptor, variantStatsOutputUri, statsOptions);
loadSourceStats(dbAdaptor, sourceStatsOutputUri);
return RepeatStatus.FINISHED;
}
private ObjectMap getVariantOptions() {
ObjectMap variantOptions = new ObjectMap();
variantOptions.put(VariantStorageManager.VARIANT_SOURCE, getVariantSource());
variantOptions.put(VariantStorageManager.OVERWRITE_STATS, outputParameters.getStatisticsOverwrite());
return variantOptions;
}
private VariantSource getVariantSource() {
return new VariantSource(
Paths.get(inputParameters.getVcf()).getFileName().toString(),
inputParameters.getVcfId(),
inputParameters.getStudyId(),
inputParameters.getStudyName(),
inputParameters.getStudyType(),
inputParameters.getVcfAggregation());
}
private VariantDBAdaptor getDbAdaptor() throws UnknownHostException, IllegalOpenCGACredentialsException {
MongoCredentials credentials = getMongoCredentials();
String variantsCollectionName = dbParameters.getCollectionVariantsName();
String filesCollectionName = dbParameters.getCollectionFilesName();
logger.debug("Getting DBAdaptor to database '{}'", credentials.getMongoDbName());
return new VariantMongoDBAdaptor(credentials, variantsCollectionName, filesCollectionName);
}
private MongoCredentials getMongoCredentials() throws IllegalOpenCGACredentialsException {
MongoConnection mongoConnection = dbParameters.getMongoConnection();
String hosts = mongoConnection.getHosts();
List<DataStoreServerAddress> dataStoreServerAddresses = MongoCredentials.parseDataStoreServerAddresses(hosts);
String dbName = dbParameters.getDatabaseName();
String user = mongoConnection.getUser();
String pass = mongoConnection.getPassword();
MongoCredentials mongoCredentials = new MongoCredentials(dataStoreServerAddresses, dbName, user, pass);
mongoCredentials.setAuthenticationDatabase(mongoConnection.getAuthenticationDatabase());
return mongoCredentials;
}
private void loadVariantStats(VariantDBAdaptor variantDBAdaptor, URI variantsStatsUri, QueryOptions options)
throws IOException {
// Open input stream
InputStream variantInputStream = new GZIPInputStream(new FileInputStream(variantsStatsUri.getPath()));
// Initialize JSON parser
JsonParser parser = jsonFactory.createParser(variantInputStream);
int batchSize = 1000;
int writes = 0;
int variantsNumber = 0;
List<VariantStatsWrapper> statsBatch = new ArrayList<>(batchSize);
// Store variant statistics in Mongo
while (parser.nextToken() != null) {
variantsNumber++;
statsBatch.add(parser.readValueAs(VariantStatsWrapper.class));
if (statsBatch.size() == batchSize) {
QueryResult<?> writeResult = variantDBAdaptor.updateStats(statsBatch, options);
writes += writeResult.getNumResults();
logger.info("stats loaded up to position {}:{}",
statsBatch.get(statsBatch.size() - 1).getChromosome(),
statsBatch.get(statsBatch.size() - 1).getPosition());
statsBatch.clear();
}
}
if (!statsBatch.isEmpty()) {
QueryResult<?> writeResult = variantDBAdaptor.updateStats(statsBatch, options);
writes += writeResult.getNumResults();
logger.info("stats loaded up to position {}:{}",
statsBatch.get(statsBatch.size() - 1).getChromosome(),
statsBatch.get(statsBatch.size() - 1).getPosition());
statsBatch.clear();
}
if (writes < variantsNumber) {
logger.warn("provided statistics of {} variants, but only {} were updated", variantsNumber, writes);
logger.info(
"note: maybe those variants didn't had the proper study? maybe the new and the old stats were the same?");
}
}
/**
 * Reads a single gzipped, JSON-serialized {@code VariantSourceStats} record
 * from {@code sourceStatsUri} and stores it through the source DB adaptor.
 *
 * Fix: the original leaked the GZIP stream and the JSON parser (neither was
 * ever closed); both are now managed with try-with-resources.
 *
 * @param variantDBAdaptor adaptor whose source-stats sub-adaptor persists the record
 * @param sourceStatsUri   URI of a local gzipped JSON file with the source stats
 * @throws IOException if the file cannot be read or parsed
 */
private void loadSourceStats(VariantDBAdaptor variantDBAdaptor, URI sourceStatsUri) throws IOException {
    VariantSourceStats variantSourceStats;
    // Read from JSON file; resources are closed before the DB write.
    try (InputStream sourceInputStream = new GZIPInputStream(new FileInputStream(sourceStatsUri.getPath()));
         JsonParser sourceParser = jsonFactory.createParser(sourceInputStream)) {
        variantSourceStats = sourceParser.readValueAs(VariantSourceStats.class);
    }
    // Store source statistics in Mongo (second argument: no query options).
    variantDBAdaptor.getVariantSourceDBAdaptor().updateSourceStats(variantSourceStats, null);
}
}
| |
/**
* Copyright (C) 2012 Joe Flynn - Licensing to be determined
*/
package com.heneryh.aquanotes.configure;
/**
* This is a hook into the controllers config database. The user can view, set or edit
* the credentials and timing settings for any controller.
*
* This same activity copied and slightly modified exists for the widget preferences.
* The only difference is that that one sets the widgetID. They can probably be combined
* with minor effort
*/
import java.util.ArrayList;
import java.util.List;
import com.heneryh.aquanotes.R;
import com.heneryh.aquanotes.provider.AquaNotesDbContract;
import com.heneryh.aquanotes.provider.AquaNotesDbContract.Controllers;
import com.heneryh.aquanotes.service.SyncService;
import android.app.Activity;
import android.app.AlertDialog;
import android.appwidget.AppWidgetManager;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.content.Intent;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteConstraintException;
import android.net.Uri;
import android.os.Bundle;
import android.provider.BaseColumns;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
/**
 * Activity for creating, viewing, editing or deleting the stored credentials
 * and polling settings of an aquarium controller. On creation it shows a
 * chooser dialog of existing controllers (plus "New"); selecting one
 * pre-populates the form from the content provider.
 */
public class ConfigurePrefs extends Activity implements View.OnClickListener {
// Enables extra logging in this activity when true.
private static final boolean LOGD = true;
private static final String LOG_TAG = "ConfigurePrefs";
/**
 * Graphical elements
 */
private Button mSave;
private Button mDelete;
private EditText mTitle;
private EditText mWanUrl;
private EditText mLanUrl;
private EditText mWiFiSid;
private EditText mUser;
private EditText mPassword;
private EditText mUpdateIntervalMins;
private EditText mPruneAge;
/**
 * These three are not carried over via text field so we need to save them off
 * (they are read back from the DB in the chooser dialog and written out on save).
 */
Long lastUpdated=(long) 0;
String type = null;
/**
 * While we are not using the widget id, lets make sure we set it to an invalid value.
 */
Integer widgetId = AppWidgetManager.INVALID_APPWIDGET_ID;
/**
 * Access to the database
 */
ContentResolver dbResolverConfigAct;
/**
 * Inflates the form, wires the save/delete buttons, then shows a dialog
 * listing all known controllers so the user can pick one to edit (or "New").
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
dbResolverConfigAct = getContentResolver();
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
// Connect this activity to the configuration layout
setContentView(R.layout.activity_prefs);
// then grab references to the graphical elements
mTitle = (EditText)findViewById(R.id.conf_title);
mWanUrl = (EditText)findViewById(R.id.conf_url);
mLanUrl = (EditText)findViewById(R.id.conf_wifi_url);
mWiFiSid = (EditText)findViewById(R.id.conf_wifi_sid);
mUser = (EditText)findViewById(R.id.conf_user);
mPassword = (EditText)findViewById(R.id.conf_password);
mUpdateIntervalMins = (EditText)findViewById(R.id.conf_update_interval_mins);
mPruneAge = (EditText)findViewById(R.id.conf_prune);
mSave = (Button)findViewById(R.id.conf_save);
mSave.setOnClickListener(this);
mDelete = (Button)findViewById(R.id.conf_delete);
mDelete.setOnClickListener(this);
/**
 * First lets get a list of all active controllers then present the list to the user in a dialog.
 * Should probably move this to the Fragment Dialog model.
 */
List<String> controllerURLs = new ArrayList<String>();
// Index 0 is always the "New" entry; the dialog handler below relies on this
// when it treats item > 0 as "existing controller".
controllerURLs.add("New");
Cursor cursor = null;
try {
Uri controllersQueryUri = Controllers.buildQueryControllersUri();
cursor = dbResolverConfigAct.query(controllersQueryUri, ControllersQuery.PROJECTION, null, null, null);
if (cursor != null && cursor.moveToFirst()) {
while (!cursor.isAfterLast()) {
// Controllers are identified in the dialog by their WAN URL.
String mURL = cursor.getString(ControllersQuery.WAN_URL);
controllerURLs.add(mURL);
cursor.moveToNext();
}
}
} catch (SQLException e) {
Log.e(LOG_TAG, "getting controller list", e);
// need a little more here!
} finally {
if (cursor != null) {
cursor.close();
}
}
final CharSequence[] items = controllerURLs.toArray(new String[controllerURLs.size()]);
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Select a new or existing controller:");
builder.setItems(items, new DialogInterface.OnClickListener() {
// Invoked when the user picks an entry; for an existing controller the
// form fields (and the non-form fields lastUpdated/type/widgetId) are
// loaded from the content provider.
public void onClick(DialogInterface dialog, int item) {
Toast.makeText(getApplicationContext(), items[item], Toast.LENGTH_SHORT).show();
// item 0 is "New" — leave the form blank in that case.
if(item>0) {
String Url = items[item].toString();
Uri controllerXUri = AquaNotesDbContract.Controllers.buildQueryControllerByUrlUri(Url);
Cursor cursor2 = null;
String username = null;
String password = null;
String apexBaseURL = null;
String apexWiFiURL = null;
String apexWiFiSid = null;
String title = null;
Integer interval=0;
Integer prune_age = 0;
widgetId = -1;
// Poll the database for facts about this controller
// If already set in the db, then pre-populate the fields.
try {
ContentResolver dbResolverDialogAct = getContentResolver();
cursor2 = dbResolverDialogAct.query(controllerXUri, ControllersQuery.PROJECTION, null, null, null);
if (cursor2 != null && cursor2.moveToFirst()) {
title = cursor2.getString(ControllersQuery.TITLE);
username = cursor2.getString(ControllersQuery.USER);
password = cursor2.getString(ControllersQuery.PW);
apexBaseURL = cursor2.getString(ControllersQuery.WAN_URL);
apexWiFiURL = cursor2.getString(ControllersQuery.WIFI_URL);
apexWiFiSid = cursor2.getString(ControllersQuery.WIFI_SSID);
interval = cursor2.getInt(ControllersQuery.UPDATE_INTERVAL);
prune_age = cursor2.getInt(ControllersQuery.DB_SAVE_DAYS);
// These three have no form field; stash them for the save path.
lastUpdated = cursor2.getLong(ControllersQuery.LAST_UPDATED);
type = cursor2.getString(ControllersQuery.MODEL);
widgetId = cursor2.getInt(ControllersQuery.WIDGET);
mTitle.setText(title);
mWanUrl.setText(apexBaseURL);
mLanUrl.setText(apexWiFiURL);
mWiFiSid.setText(apexWiFiSid);
mUser.setText(username);
mPassword.setText(password);
mUpdateIntervalMins.setText(interval.toString());
mPruneAge.setText(prune_age.toString());
}
} catch (SQLException e) {
Log.e(LOG_TAG, "onCreate: getting controller facts.", e);
} finally {
if (cursor2 != null) {
cursor2.close();
}
}
}
}
});
AlertDialog alert = builder.create();
alert.show();
// If restoring, read location and units from bundle
// I really need to learn more about this concept.
if (savedInstanceState != null) {
// mUnits = savedInstanceState.getInt(ControllersColumns.xxx);
// if (mUnits == ControllersColumns.) {
// ((RadioButton)findViewById(R.id.xxxx)).setSelected(true);
// } else if (mUnits == AppWidgetsColumns.UNITS_CELSIUS) {
// ((RadioButton)findViewById(R.id.conf_units_c)).setSelected(true);
// }
}
}
/**
 * Handles the save and delete buttons. Save writes the form to the content
 * provider (update first, insert if the update touched no rows) and kicks a
 * sync; delete removes the controller identified by the WAN URL field.
 * Both paths finish() the activity.
 */
public void onClick(View v) {
switch (v.getId()) {
case R.id.conf_save: {
// Picked save, so write values to database
ContentValues values = new ContentValues();
// Grab the values, note that they are all strings regardless of
// the format they will be stored in the database.
String title = mTitle.getText().toString();
String url = mWanUrl.getText().toString();
String wiFiUrl = mLanUrl.getText().toString();
String wiFiSid = mWiFiSid.getText().toString();
String user = mUser.getText().toString();
String pword = mPassword.getText().toString();
String updIntervalMins = mUpdateIntervalMins.getText().toString();
String pruneAge = mPruneAge.getText().toString();
// Strings are easily put into the database directly but numbers must
// be parsed. The parsing may throw an exception.
//values.put(BaseColumns._ID, mAppWidgetId);
values.put(AquaNotesDbContract.Controllers.TITLE, title);
values.put(AquaNotesDbContract.Controllers.WAN_URL, url);
values.put(AquaNotesDbContract.Controllers.LAN_URL, wiFiUrl);
values.put(AquaNotesDbContract.Controllers.WIFI_SSID, wiFiSid);
values.put(AquaNotesDbContract.Controllers.USER, user);
values.put(AquaNotesDbContract.Controllers.PW, pword);
// type is only known for controllers loaded from the DB; default it otherwise.
if(type==null) values.put(AquaNotesDbContract.Controllers.MODEL, "none set");
try {
Integer updIntervalMinsInt = Integer.parseInt(updIntervalMins.trim());
values.put(AquaNotesDbContract.Controllers.UPDATE_INTERVAL, updIntervalMinsInt);
Integer pruneAgeInt = Integer.parseInt(pruneAge.trim());
values.put(AquaNotesDbContract.Controllers.DB_SAVE_DAYS, pruneAgeInt);
} catch(NumberFormatException nfe) {
// NOTE(review): on bad numeric input the record is still saved, just
// without interval/prune values — confirm that is the intended behavior.
if (LOGD) Log.e(LOG_TAG, "ERROR: Non-number entered for update frequency or prune age.");
// How shall we handle this?????
// Maybe a toast message?
}
values.put(AquaNotesDbContract.Controllers.LAST_UPDATED, lastUpdated);
values.put(AquaNotesDbContract.Controllers.WIDGET, widgetId);
// I can't figure out why the insert constraint violation is not properly caught
// when there is an update needed rather than an insert.
// Well, the quick fix is to just try the update first then the insert if needed.
// This is only done rarely anyway so it doesn't matter much.
ContentResolver resolver = getContentResolver();
Uri controllerXUri = Controllers.buildUpdateControllerXUri(url);
int updateStatus = 0;
try {
updateStatus = resolver.update(controllerXUri, values, null, null);
} catch (SQLiteConstraintException e2 ) {
Log.e(LOG_TAG, "Inserting/updating controller data: ", e2);
}
// Update touched no rows: this is a new controller, insert it instead.
if(updateStatus==0) {
try {
controllerXUri = resolver.insert(Controllers.buildInsertControllerUri(), values);
} catch (SQLiteConstraintException e) {
Log.w(LOG_TAG, "Inserting controller data, maybe updating: ", e);
} catch (SQLException e) {
Log.e(LOG_TAG, "Inserting controller data, maybe updating: ", e);
}
}
// Trigger an update
if(controllerXUri!=null) {
String controllerId = Controllers.getControllerId(controllerXUri);
//SyncService.requestUpdate(new int[] {Integer.valueOf(controllerId)});
Intent updateIntent = new Intent(SyncService.ACTION_UPDATE_ALL);
updateIntent.setClass(this, SyncService.class);
// note that startService() will only really start it if not already running
startService(updateIntent);
}
finish();
break;
} // end of case save:
case R.id.conf_delete: {
// Delete keys off the WAN URL currently shown in the form.
String url = mWanUrl.getText().toString();
Uri deleteOne = Controllers.buildDeleteControllerUrlUri(url);
ContentResolver resolver = getContentResolver();
int ct = resolver.delete(deleteOne, null, null);
if(ct!=1)
Log.e(LOG_TAG, "error deleting controller");
finish();
// no break needed: this is the last case in the switch
}// end of case delete:
} // end of switch
}
/**
 * {@inheritDoc}
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
// //outState.putString(AppWidgetsColumns.TITLE, mTitle); // why not the title?
// outState.putString(ControllersColumns.URL, mWanUrl);
// outState.putString(ControllersColumns.USERNAME, mUser);
// outState.putString(ControllersColumns.PASSWORD, mPassword);
}
/**
 * Projection and column indices for controller queries.
 * The int constants below must stay in the same order as PROJECTION.
 */
private interface ControllersQuery {
String[] PROJECTION = {
// String CONTROLLER_ID = "_id";
// String TITLE = "title";
// String WAN_URL = "wan_url";
// String LAN_URL = "wifi_url";
// String WIFI_SSID = "wifi_ssid";
// String USER = "user";
// String PW = "pw";
// String LAST_UPDATED = "last_updated";
// String UPDATE_INTERVAL = "update_i";
// String DB_SAVE_DAYS = "db_save_days";
// String CONTROLLER_TYPE = "controller_type";
BaseColumns._ID,
AquaNotesDbContract.Controllers.TITLE,
AquaNotesDbContract.Controllers.WAN_URL,
AquaNotesDbContract.Controllers.LAN_URL,
AquaNotesDbContract.Controllers.WIFI_SSID,
AquaNotesDbContract.Controllers.USER,
AquaNotesDbContract.Controllers.PW,
AquaNotesDbContract.Controllers.LAST_UPDATED,
AquaNotesDbContract.Controllers.UPDATE_INTERVAL,
AquaNotesDbContract.Controllers.DB_SAVE_DAYS,
AquaNotesDbContract.Controllers.MODEL,
AquaNotesDbContract.Controllers.WIDGET,
};
int _ID = 0;
int TITLE = 1;
int WAN_URL = 2;
int WIFI_URL = 3;
int WIFI_SSID = 4;
int USER = 5;
int PW = 6;
int LAST_UPDATED = 7;
int UPDATE_INTERVAL = 8;
int DB_SAVE_DAYS = 9;
int MODEL = 10;
int WIDGET = 11;
}
} // end of ConfigurePrefs
| |
/*
Copyright (c) Microsoft Open Technologies, Inc.
All Rights Reserved
Apache 2.0 License
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
See the Apache Version 2.0 License for specific language governing permissions and limitations under the License.
*/
package com.microsoft.windowsazure.mobileservices.zumoe2etestapp.framework;
import android.annotation.TargetApi;
import android.os.Build;
import com.microsoft.windowsazure.mobileservices.MobileServiceClient;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
/**
 * A named group of {@link TestCase}s that can be run sequentially against a
 * {@link MobileServiceClient}, reporting progress through a
 * {@link TestExecutionCallback}. The group's status becomes Failed if setup
 * fails or any test in the run fails.
 *
 * Fix: the per-test worker threads were invoked with {@code thread.run()},
 * which executes the body synchronously on the calling thread and never
 * actually starts the {@link Thread}; they are now started with
 * {@code thread.start()} so the {@link CountDownLatch} synchronization is
 * meaningful.
 */
public abstract class TestGroup {
    public static final String AllTestsGroupName = "All tests";
    public static final String AllUnattendedTestsGroupName = AllTestsGroupName + " (unattended)";
    public static final String ClientVersionKey = "client-version";
    public static final String ServerVersionKey = "server-version";

    // Registered tests and the source (class name) each one came from.
    List<TestCase> mTestCases;
    Map<String, String> mSourceMap;
    String mName;
    TestStatus mStatus;
    // Queue of tests for the current run.
    ConcurrentLinkedQueue<TestCase> mTestRunQueue;
    // Set when a new run is requested; used to discard stale callbacks.
    boolean mNewTestRun;
    private int mFailedTestCount;
    private Date mStartTime;
    private Date mEndTime;

    /**
     * Creates an empty group with the given display name.
     */
    public TestGroup(String name) {
        mName = name;
        mStatus = TestStatus.NotRun;
        mTestCases = new ArrayList<TestCase>();
        mSourceMap = new HashMap<String, String>();
        mTestRunQueue = new ConcurrentLinkedQueue<TestCase>();
        mNewTestRun = false;
    }

    /** @return the aggregate status of the last run (NotRun until one completes). */
    public TestStatus getStatus() {
        return mStatus;
    }

    /** @return the live list of registered test cases. */
    public List<TestCase> getTestCases() {
        return mTestCases;
    }

    /** @return map from test-case name to the source (class name) that registered it. */
    public Map<String, String> getSourceMap() {
        return mSourceMap;
    }

    /** Registers a test, recording this group's class as its source. */
    protected void addTest(TestCase testCase) {
        addTest(testCase, this.getClass().getName());
    }

    /** Registers a test with an explicit source identifier. */
    protected void addTest(TestCase testCase, String source) {
        mTestCases.add(testCase);
        mSourceMap.put(testCase.getName(), source);
    }

    /**
     * Runs every enabled registered test. Does nothing if no test is enabled.
     */
    public void runTests(MobileServiceClient client, TestExecutionCallback callback) {
        List<TestCase> testsToRun = new ArrayList<TestCase>();
        for (int i = 0; i < mTestCases.size(); i++) {
            if (mTestCases.get(i).isEnabled()) {
                testsToRun.add(mTestCases.get(i));
            }
        }
        if (testsToRun.size() > 0) {
            runTests(testsToRun, client, callback);
        }
    }

    /**
     * Runs the given tests one at a time, waiting for each to complete (via a
     * latch counted down in the test's completion callback) before starting
     * the next, then runs an end-of-run pass that computes the group status.
     *
     * @param testsToRun tests to execute, in order
     * @param client     client passed to each test
     * @param callback   progress callback; may be null
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public void runTests(List<TestCase> testsToRun, final MobileServiceClient client, final TestExecutionCallback callback) {
        try {
            onPreExecute(client);
        } catch (Exception e) {
            // Setup failed: mark the whole group failed and report immediately.
            mStatus = TestStatus.Failed;
            if (callback != null)
                callback.onTestGroupComplete(this, null);
            return;
        }
        final TestRunStatus testRunStatus = new TestRunStatus();
        mNewTestRun = true;
        int oldQueueSize = mTestRunQueue.size();
        mTestRunQueue.clear();
        mTestRunQueue.addAll(testsToRun);
        cleanTestsState();
        testRunStatus.results.clear();
        mStatus = TestStatus.NotRun;
        // Only start executing when no previous run's queue was still pending.
        if (oldQueueSize == 0) {
            for (final TestCase test : mTestRunQueue) {
                final CountDownLatch latch = new CountDownLatch(1);
                Thread thread = new Thread() {
                    public void run() {
                        executeNextTest(test, client, callback, testRunStatus, latch);
                    }
                };
                // BUGFIX: was thread.run(), which never started the thread and
                // ran the body synchronously on this thread.
                thread.start();
                try {
                    latch.await();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                if (test.getStatus() == TestStatus.Failed) {
                    mFailedTestCount++;
                }
            }
            // End Run: a null test triggers the post-execute/aggregation path.
            final CountDownLatch latch = new CountDownLatch(1);
            Thread thread = new Thread() {
                public void run() {
                    executeNextTest(null, client, callback, testRunStatus, latch);
                }
            };
            // BUGFIX: was thread.run() (see above).
            thread.start();
            try {
                latch.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /** Resets every queued test to NotRun and clears its log. */
    private void cleanTestsState() {
        for (TestCase test : mTestRunQueue) {
            test.setStatus(TestStatus.NotRun);
            test.clearLog();
        }
    }

    /**
     * Executes one test (or, when {@code nextTest} is null, the end-of-run
     * pass: onPostExecute plus aggregation of the group status). The latch is
     * counted down when the test's completion callback fires so the caller can
     * block until the test is done.
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private void executeNextTest(final TestCase nextTest, final MobileServiceClient client, final TestExecutionCallback callback,
                                 final TestRunStatus testRunStatus, final CountDownLatch latch) {
        mNewTestRun = false;
        final TestGroup group = this;
        try {
            if (nextTest != null) {
                nextTest.run(client, new TestExecutionCallback() {
                    @Override
                    public void onTestStart(TestCase test) {
                        if (!mNewTestRun && callback != null)
                            callback.onTestStart(test);
                    }

                    @Override
                    public void onTestGroupComplete(TestGroup group, List<TestResult> results) {
                        if (!mNewTestRun && callback != null)
                            callback.onTestGroupComplete(group, results);
                    }

                    @Override
                    public void onTestComplete(TestCase test, TestResult result) {
                        if (mNewTestRun) {
                            // A new run started meanwhile: discard this result.
                            cleanTestsState();
                            testRunStatus.results.clear();
                            mStatus = TestStatus.NotRun;
                        } else {
                            // Tests that expect an exception pass only when the
                            // exact expected exception class was thrown.
                            if (test.getExpectedExceptionClass() != null) {
                                if (result.getException() != null && result.getException().getClass() == test.getExpectedExceptionClass()) {
                                    result.setStatus(TestStatus.Passed);
                                } else {
                                    result.setStatus(TestStatus.Failed);
                                }
                            }
                            test.setStatus(result.getStatus());
                            testRunStatus.results.add(result);
                            if (callback != null)
                                callback.onTestComplete(test, result);
                        }
                        latch.countDown();
                    }
                });
            } else {
                // End of run: tear down, then derive the group status.
                try {
                    group.onPostExecute(client);
                } catch (Exception e) {
                    mStatus = TestStatus.Failed;
                }
                // if at least one test failed, the test group failed
                if (mStatus != TestStatus.Failed) {
                    mStatus = TestStatus.Passed;
                    for (TestResult r : testRunStatus.results) {
                        if (r.getStatus() == TestStatus.Failed) {
                            mStatus = TestStatus.Failed;
                            break;
                        }
                    }
                }
                if (callback != null)
                    callback.onTestGroupComplete(group, testRunStatus.results);
                latch.countDown();
            }
        } catch (Exception e) {
            // Never leave the caller blocked on the latch.
            if (callback != null)
                callback.onTestGroupComplete(group, testRunStatus.results);
            latch.countDown();
        }
    }

    /** @return the group's display name. */
    public String getName() {
        return mName;
    }

    /** Sets the group's display name. */
    protected void setName(String name) {
        mName = name;
    }

    /** @return number of failed tests counted in the last run. */
    public int getFailedTestCount() {
        return mFailedTestCount;
    }

    /** @return wall-clock start of the last run (set in onPreExecute). */
    public Date getStartTime() {
        return mStartTime;
    }

    /** @return wall-clock end of the last run (set in onPostExecute). */
    public Date getEndTime() {
        return mEndTime;
    }

    @Override
    public String toString() {
        return getName();
    }

    /** Hook invoked before a run; resets counters and records the start time. */
    public void onPreExecute(MobileServiceClient client) {
        mFailedTestCount = 0;
        mStartTime = new Date();
    }

    /** Hook invoked after a run; records the end time. */
    public void onPostExecute(MobileServiceClient client) {
        mEndTime = new Date();
    }

    /** Mutable holder for the results accumulated during one run. */
    private class TestRunStatus {
        public List<TestResult> results;

        public TestRunStatus() {
            results = new ArrayList<TestResult>();
        }
    }
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hystrix;
import java.util.*;
import java.util.concurrent.*;
import org.slf4j.*;
import rx.*;
import rx.Observable;
import rx.functions.*;
import rx.schedulers.Schedulers;
import rx.subjects.ReplaySubject;
import com.netflix.hystrix.HystrixCollapser.CollapsedRequest;
import com.netflix.hystrix.HystrixCommandProperties.ExecutionIsolationStrategy;
import com.netflix.hystrix.collapser.*;
import com.netflix.hystrix.exception.HystrixRuntimeException;
import com.netflix.hystrix.strategy.HystrixPlugins;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestContext;
import com.netflix.hystrix.strategy.properties.HystrixPropertiesStrategy;
/**
* Collapse multiple requests into a single {@link HystrixCommand} execution based on a time window and optionally a max batch size.
* <p>
* This allows an object model to have multiple calls to the command that execute/queue many times in a short period (milliseconds) and have them all get batched into a single backend call.
* <p>
* Typically the time window is something like 10ms give or take.
* <p>
* NOTE: Do NOT retain any state within instances of this class.
* <p>
* It must be stateless or else it will be non-deterministic because most instances are discarded while some are retained and become the
* "collapsers" for all the ones that are discarded.
*
* @param <K>
* The key used to match BatchReturnType and RequestArgumentType
* @param <BatchReturnType>
* The type returned from the {@link HystrixCommand} that will be invoked on batch executions.
* @param <ResponseType>
* The type returned from this command.
* @param <RequestArgumentType>
* The type of the request argument. If multiple arguments are needed, wrap them in another object or a Tuple.
*/
public abstract class HystrixObservableCollapser<K, BatchReturnType, ResponseType, RequestArgumentType> implements HystrixExecutable<ResponseType> {
static final Logger logger = LoggerFactory.getLogger(HystrixObservableCollapser.class);
private final RequestCollapserFactory<BatchReturnType, ResponseType, RequestArgumentType> collapserFactory;
private final HystrixRequestCache requestCache;
private final HystrixCollapserBridge<BatchReturnType, ResponseType, RequestArgumentType> collapserInstanceWrapper;
/**
 * The scope of request collapsing.
 * <ul>
 * <li>REQUEST: Requests within the scope of a {@link HystrixRequestContext} will be collapsed.
 * <p>
 * Typically this means that requests within a single user-request (ie. HTTP request) are collapsed. No interaction with other user requests. 1 queue per user request.
 * </li>
 * <li>GLOBAL: Requests from any thread (ie. all HTTP requests) within the JVM will be collapsed. 1 queue for entire app.</li>
 * </ul>
 */
public static enum Scope implements RequestCollapserFactory.Scope {
REQUEST, GLOBAL
}
/**
 * Collapser with default {@link HystrixCollapserKey} derived from the implementing class name and scoped to {@link Scope#REQUEST} and default configuration.
 */
protected HystrixObservableCollapser() {
this(Setter.withCollapserKey(null).andScope(Scope.REQUEST));
}
/**
 * Collapser scoped to {@link Scope#REQUEST} and default configuration.
 *
 * @param collapserKey
 *            {@link HystrixCollapserKey} that identifies this collapser and provides the key used for retrieving properties, request caches, publishing metrics etc.
 */
protected HystrixObservableCollapser(HystrixCollapserKey collapserKey) {
this(Setter.withCollapserKey(collapserKey).andScope(Scope.REQUEST));
}
/**
 * Construct a {@link HystrixObservableCollapser} with defined {@link Setter} that allows
 * injecting property and strategy overrides and other optional arguments.
 * <p>
 * Null values will result in the default being used.
 *
 * @param setter
 *            Fluent interface for constructor arguments
 */
protected HystrixObservableCollapser(Setter setter) {
// Delegates to the package-private constructor with a real (wall-clock) collapser timer.
this(setter.collapserKey, setter.scope, new RealCollapserTimer(), setter.propertiesSetter);
}
/**
 * Package-private constructor used by tests: wires the collapser factory,
 * the request cache, and the bridge that exposes this class's protected
 * template methods to the collapser machinery in another package.
 * A null/blank collapser key falls back to a key derived from the class name.
 */
/* package for tests */HystrixObservableCollapser(HystrixCollapserKey collapserKey, Scope scope, CollapserTimer timer, HystrixCollapserProperties.Setter propertiesBuilder) {
if (collapserKey == null || collapserKey.name().trim().equals("")) {
String defaultKeyName = getDefaultNameFromClass(getClass());
collapserKey = HystrixCollapserKey.Factory.asKey(defaultKeyName);
}
this.collapserFactory = new RequestCollapserFactory<BatchReturnType, ResponseType, RequestArgumentType>(collapserKey, scope, timer, propertiesBuilder);
this.requestCache = HystrixRequestCache.getInstance(collapserKey, HystrixPlugins.getInstance().getConcurrencyStrategy());
// Captured so the anonymous bridge below can call back into this instance.
final HystrixObservableCollapser<K, BatchReturnType, ResponseType, RequestArgumentType> self = this;
/**
 * Used to pass public method invocation to the underlying implementation in a separate package while leaving the methods 'protected' in this class.
 */
collapserInstanceWrapper = new HystrixCollapserBridge<BatchReturnType, ResponseType, RequestArgumentType>() {
@Override
public Collection<Collection<CollapsedRequest<ResponseType, RequestArgumentType>>> shardRequests(Collection<CollapsedRequest<ResponseType, RequestArgumentType>> requests) {
// Delegates sharding to the (possibly overridden) template method.
return self.shardRequests(requests);
}
@Override
public Observable<BatchReturnType> createObservableCommand(Collection<CollapsedRequest<ResponseType, RequestArgumentType>> requests) {
HystrixObservableCommand<BatchReturnType> command = self.createCommand(requests);
// mark the number of requests being collapsed together
command.markAsCollapsedCommand(requests.size());
return command.toObservable();
}
@Override
public Observable<Void> mapResponseToRequests(Observable<BatchReturnType> batchResponse, Collection<CollapsedRequest<ResponseType, RequestArgumentType>> requests) {
Func1<RequestArgumentType, K> requestKeySelector = self.getRequestArgumentKeySelector();
final Func1<BatchReturnType, K> batchResponseKeySelector = self.getBatchReturnTypeKeySelector();
final Func1<BatchReturnType, ResponseType> mapBatchTypeToResponseType = self.getBatchReturnTypeToResponseTypeMapper();
// index the requests by key
final Map<K, CollapsedRequest<ResponseType, RequestArgumentType>> requestsByKey = new HashMap<K, CollapsedRequest<ResponseType, RequestArgumentType>>(requests.size());
for (CollapsedRequest<ResponseType, RequestArgumentType> cr : requests) {
requestsByKey.put(requestKeySelector.call(cr.getArgument()), cr);
}
// observe the responses and join with the requests by key
return batchResponse.flatMap(new Func1<BatchReturnType, Observable<Void>>() {
@Override
public Observable<Void> call(BatchReturnType r) {
K responseKey = batchResponseKeySelector.call(r);
CollapsedRequest<ResponseType, RequestArgumentType> requestForResponse = requestsByKey.get(responseKey);
requestForResponse.setResponse(mapBatchTypeToResponseType.call(r));
// now remove from map so we know what wasn't set at end
requestsByKey.remove(responseKey);
return Observable.empty();
}
}).doOnTerminate(new Action0() {
@Override
public void call() {
// Any request still in the map never got a matching response;
// give the implementation a chance to set a fallback or exception.
for (CollapsedRequest<ResponseType, RequestArgumentType> cr : requestsByKey.values()) {
onMissingResponse(cr);
}
}
});
}
@Override
public HystrixCollapserKey getCollapserKey() {
return self.getCollapserKey();
}
};
}
/**
 * Properties configured for this collapser, resolved via the collapser factory.
 *
 * @return the {@link HystrixCollapserProperties} backing this instance
 */
private HystrixCollapserProperties getProperties() {
    final HystrixCollapserProperties properties = collapserFactory.getProperties();
    return properties;
}
/**
 * Key of the {@link HystrixObservableCollapser} used for properties, metrics, caches, reporting etc.
 *
 * @return the {@link HystrixCollapserKey} that identifies this collapser instance
 */
public HystrixCollapserKey getCollapserKey() {
    // The factory is the single source of truth for this collapser's key.
    final HystrixCollapserKey key = collapserFactory.getCollapserKey();
    return key;
}
/**
 * Scope of collapsing.
 * <p>
 * <ul>
 * <li>REQUEST: Requests within the scope of a {@link HystrixRequestContext} will be collapsed.
 * <p>
 * Typically this means that requests within a single user-request (ie. HTTP request) are collapsed. No interaction with other user requests. 1 queue per user request.
 * </li>
 * <li>GLOBAL: Requests from any thread (ie. all HTTP requests) within the JVM will be collapsed. 1 queue for entire app.</li>
 * </ul>
 * <p>
 * Default: {@link Scope#REQUEST} (defined via constructor)
 *
 * @return {@link Scope} that collapsing should be performed within.
 */
public Scope getScope() {
    // The factory stores the scope as its own enum type; translate it back to
    // this class's Scope enum by name.
    final String scopeName = collapserFactory.getScope().name();
    return Scope.valueOf(scopeName);
}
/**
 * The request arguments to be passed to the {@link HystrixCommand}.
 * <p>
 * Typically this means to take the argument(s) provided to the constructor and return it here.
 * <p>
 * If there are multiple arguments that need to be bundled, create a single object to contain them, or use a Tuple.
 *
 * @return RequestArgumentType
 */
public abstract RequestArgumentType getRequestArgument();
/**
 * Factory method to create a new {@link HystrixObservableCommand}{@code <BatchReturnType>} command object each time a batch needs to be executed.
 * <p>
 * Do not return the same instance each time. Return a new instance on each invocation.
 * <p>
 * Process the 'requests' argument into the arguments the command object needs to perform its work.
 * <p>
 * If a batch of requests needs to be split (sharded) into multiple commands, see {@link #shardRequests} <p>
 * IMPLEMENTATION NOTE: Be fast (ie. <1ms) in this method otherwise it can block the Timer from executing subsequent batches. Do not do any processing beyond constructing the command and returning
 * it.
 *
 * @param requests
 *            {@code Collection<CollapsedRequest<ResponseType, RequestArgumentType>>} containing {@link CollapsedRequest} objects containing the arguments of each request collapsed in this batch.
 * @return {@link HystrixObservableCommand}{@code <BatchReturnType>} which when executed will retrieve results for the batch of arguments as found in the Collection of {@link CollapsedRequest}
 *         objects
 */
protected abstract HystrixObservableCommand<BatchReturnType> createCommand(Collection<CollapsedRequest<ResponseType, RequestArgumentType>> requests);
/**
 * Override to split (shard) a batch of requests into multiple batches that will each call <code>createCommand</code> separately.
 * <p>
 * The purpose of this is to allow collapsing to work for services that have sharded backends and batch executions that need to be shard-aware.
 * <p>
 * For example, a batch of 100 requests could be split into 4 different batches sharded on name (ie. a-g, h-n, o-t, u-z) that each result in a separate {@link HystrixCommand} being created and
 * executed for them.
 * <p>
 * By default this method does nothing to the Collection and is a pass-thru.
 *
 * @param requests
 *            {@code Collection<CollapsedRequest<ResponseType, RequestArgumentType>>} containing {@link CollapsedRequest} objects containing the arguments of each request collapsed in this batch.
 * @return Collection of {@code Collection<CollapsedRequest<ResponseType, RequestArgumentType>>} objects sharded according to business rules.
 *         <p>The CollapsedRequest instances should not be modified or wrapped as the CollapsedRequest instance object contains state information needed to complete the execution.
 */
protected Collection<Collection<CollapsedRequest<ResponseType, RequestArgumentType>>> shardRequests(Collection<CollapsedRequest<ResponseType, RequestArgumentType>> requests) {
    // Default: no sharding — the whole batch is executed as a single shard.
    final Collection<Collection<CollapsedRequest<ResponseType, RequestArgumentType>>> singleShard =
            Collections.singletonList(requests);
    return singleShard;
}
/**
 * Function that returns the key used for matching returned objects against request argument types.
 * <p>
 * The key returned from this function should match up with the key returned from {@link #getRequestArgumentKeySelector()};
 *
 * @return key selector function
 */
protected abstract Func1<BatchReturnType, K> getBatchReturnTypeKeySelector();
/**
 * Function that returns the key used for matching request arguments against returned objects.
 * <p>
 * The key returned from this function should match up with the key returned from {@link #getBatchReturnTypeKeySelector()};
 *
 * @return key selector function
 */
protected abstract Func1<RequestArgumentType, K> getRequestArgumentKeySelector();
/**
 * Invoked if a {@link CollapsedRequest} in the batch does not have a response set on it.
 * <p>
 * This allows setting an exception (via {@link CollapsedRequest#setException(Exception)}) or a fallback response (via {@link CollapsedRequest#setResponse(Object)}).
 *
 * @param r
 *            {@link CollapsedRequest} that needs a response or exception set on it.
 */
protected abstract void onMissingResponse(CollapsedRequest<ResponseType, RequestArgumentType> r);
/**
 * Function for mapping from BatchReturnType to ResponseType.
 * <p>
 * Often these two types are exactly the same so it's just a pass-thru.
 *
 * @return function for mapping from BatchReturnType to ResponseType
 */
protected abstract Func1<BatchReturnType, ResponseType> getBatchReturnTypeToResponseTypeMapper();
/**
 * Used for asynchronous execution with a callback by subscribing to the {@link Observable}.
 * <p>
 * This eagerly starts execution the same as {@link #queue()} and {@link #execute()}.
 * A lazy {@link Observable} can be obtained from {@link #toObservable()}.
 * <p>
 * <b>Callback Scheduling</b>
 * <p>
 * <ul>
 * <li>When using {@link ExecutionIsolationStrategy#THREAD} this defaults to using {@link Schedulers#threadPoolForComputation()} for callbacks.</li>
 * <li>When using {@link ExecutionIsolationStrategy#SEMAPHORE} this defaults to using {@link Schedulers#immediate()} for callbacks.</li>
 * </ul>
 * Use {@link #toObservable(rx.Scheduler)} to schedule the callback differently.
 * <p>
 * See https://github.com/Netflix/RxJava/wiki for more information.
 *
 * @return {@code Observable<R>} that executes and calls back with the result of {@link HystrixCommand}{@code <BatchReturnType>} execution after passing through {@link #mapResponseToRequests}
 *         to transform the {@code <BatchReturnType>} into {@code <ResponseType>}
 */
public Observable<ResponseType> observe() {
    // use a ReplaySubject to buffer the eagerly subscribed-to Observable
    ReplaySubject<ResponseType> subject = ReplaySubject.create();
    // eagerly kick off subscription
    toObservable().subscribe(subject);
    // return the subject that can be subscribed to later while the execution has already started
    return subject;
}
/**
 * A lazy {@link Observable} that will execute when subscribed to.
 * <p>
 * <b>Callback Scheduling</b>
 * <p>
 * <ul>
 * <li>When using {@link ExecutionIsolationStrategy#THREAD} this defaults to using {@link Schedulers#threadPoolForComputation()} for callbacks.</li>
 * <li>When using {@link ExecutionIsolationStrategy#SEMAPHORE} this defaults to using {@link Schedulers#immediate()} for callbacks.</li>
 * </ul>
 * <p>
 * See https://github.com/Netflix/RxJava/wiki for more information.
 *
 * @return {@code Observable<R>} that lazily executes and calls back with the result of {@link HystrixCommand}{@code <BatchReturnType>} execution after passing through
 *         {@link #mapResponseToRequests} to transform the {@code <BatchReturnType>} into {@code <ResponseType>}
 */
public Observable<ResponseType> toObservable() {
    // when we callback with the data we want to do the work
    // on a separate thread than the one giving us the callback
    return toObservable(Schedulers.computation());
}
/**
 * A lazy {@link Observable} that will execute when subscribed to.
 * <p>
 * See https://github.com/Netflix/RxJava/wiki for more information.
 *
 * @param observeOn
 *            The {@link Scheduler} to execute callbacks on.
 * @return {@code Observable<R>} that lazily executes and calls back with the result of {@link HystrixCommand}{@code <BatchReturnType>} execution after passing through
 *         {@link #mapResponseToRequests} to transform the {@code <BatchReturnType>} into {@code <ResponseType>}
 */
public Observable<ResponseType> toObservable(Scheduler observeOn) {
    // NOTE(review): observeOn is not referenced anywhere in this body — confirm whether
    // callback scheduling is applied elsewhere or whether this parameter is dead.
    /* try from cache first */
    if (getProperties().requestCachingEnabled().get()) {
        Observable<ResponseType> fromCache = requestCache.get(getCacheKey());
        if (fromCache != null) {
            /* mark that we received this response from cache */
            // TODO Add collapser metrics so we can capture this information
            // we can't add it to the command metrics because the command can change each time (dynamic key for example)
            // and we don't have access to it when responding from cache
            // collapserMetrics.markResponseFromCache();
            return fromCache;
        }
    }
    RequestCollapser<BatchReturnType, ResponseType, RequestArgumentType> requestCollapser = collapserFactory.getRequestCollapser(collapserInstanceWrapper);
    Observable<ResponseType> response = requestCollapser.submitRequest(getRequestArgument());
    if (getProperties().requestCachingEnabled().get()) {
        /*
         * A race can occur here with multiple threads queuing but only one will be cached.
         * This means we can have some duplication of requests in a thread-race but we're okay
         * with having some inefficiency in duplicate requests in the same batch
         * and then subsequent requests will retrieve a previously cached Observable.
         *
         * If this is an issue we can make a lazy-future that gets set in the cache
         * then only the winning 'put' will be invoked to actually call 'submitRequest'
         */
        // cache() buffers the result so later cache hits replay it instead of re-executing
        Observable<ResponseType> o = response.cache();
        Observable<ResponseType> fromCache = requestCache.putIfAbsent(getCacheKey(), o);
        if (fromCache == null) {
            response = o;
        } else {
            // lost the race: another thread cached first, so use its Observable
            response = fromCache;
        }
    }
    return response;
}
/**
 * Used for synchronous execution; blocks until the collapsed batch completes.
 * <p>
 * If {@link Scope#REQUEST} is being used then synchronous execution will only result in collapsing if other threads are running within the same scope.
 *
 * @return ResponseType
 *         Result of {@link HystrixCommand}{@code <BatchReturnType>} execution after passing through {@link #mapResponseToRequests} to transform the {@code <BatchReturnType>} into
 *         {@code <ResponseType>}
 * @throws HystrixRuntimeException
 *             if an error occurs and a fallback cannot be retrieved
 */
public ResponseType execute() {
    try {
        return queue().get();
    } catch (Throwable t) {
        // Re-throw a known HystrixRuntimeException directly, whether it was thrown
        // as-is or wrapped by the Future's threading exception.
        if (t instanceof HystrixRuntimeException) {
            throw (HystrixRuntimeException) t;
        }
        Throwable cause = t.getCause();
        if (cause instanceof HystrixRuntimeException) {
            throw (HystrixRuntimeException) cause;
        }
        // Unknown failure type: log at debug only, since we rethrow and a caller
        // higher up is expected to handle it.
        String message = getClass().getSimpleName() + " HystrixCollapser failed while executing.";
        logger.debug(message, t);
        //TODO should this be made a HystrixRuntimeException?
        throw new RuntimeException(message, t);
    }
}
/**
 * Used for asynchronous execution.
 * <p>
 * This will queue up the command and return a Future to get the result once it completes.
 *
 * @return ResponseType
 *         Result of {@link HystrixCommand}{@code <BatchReturnType>} execution after passing through {@link #mapResponseToRequests} to transform the {@code <BatchReturnType>} into
 *         {@code <ResponseType>}
 * @throws HystrixRuntimeException
 *             within an <code>ExecutionException.getCause()</code> (thrown by {@link Future#get}) if an error occurs and a fallback cannot be retrieved
 */
public Future<ResponseType> queue() {
    // Delegate to the lazy Observable and expose it as a blocking Future.
    return toObservable().toBlockingObservable().toFuture();
}
/**
 * Key to be used for request caching.
 * <p>
 * By default this returns null which means "do not cache".
 * <p>
 * To enable caching override this method and return a string key uniquely representing the state of a command instance.
 * <p>
 * If multiple command instances in the same request scope match keys then only the first will be executed and all others returned from cache.
 *
 * @return String cacheKey or null if not to cache
 */
protected String getCacheKey() {
    // null disables request caching for this collapser instance
    return null;
}
/**
 * Clears all state. If new requests come in instances will be recreated and metrics started from scratch.
 * <p>
 * Package-private: intended for test/lifecycle use, delegates to {@link RequestCollapserFactory#reset()}.
 */
/* package */static void reset() {
    RequestCollapserFactory.reset();
}
/**
 * Resolves the default collapser name for a class, caching the result to avoid
 * repeated string processing for the same class.
 *
 * @param cls collapser implementation class to derive a name from
 * @return the class's simple name, or the trailing segment of its fully-qualified
 *         name when the simple name is empty (anonymous inner classes)
 */
private static String getDefaultNameFromClass(@SuppressWarnings("rawtypes") Class<? extends HystrixObservableCollapser> cls) {
    String fromCache = defaultNameCache.get(cls);
    if (fromCache != null) {
        return fromCache;
    }
    // generate the default
    // default HystrixCommandKey to use if the method is not overridden
    String name = cls.getSimpleName();
    if (name.isEmpty()) {
        // we don't have a SimpleName (anonymous inner class) so use the full class name
        name = cls.getName();
        name = name.substring(name.lastIndexOf('.') + 1);
    }
    // benign race: concurrent callers compute the same value, so last-write-wins is fine
    defaultNameCache.put(cls, name);
    return name;
}
/**
 * Fluent interface for arguments to the {@link HystrixObservableCollapser} constructor.
 * <p>
 * The required arguments are set via the 'with' factory method and optional arguments via the 'and' chained methods.
 * <p>
 * Example:
 * <pre> {@code
 *  Setter.withCollapserKey(HystrixCollapserKey.Factory.asKey("CollapserName"))
          .andScope(Scope.REQUEST);
 * } </pre>
 *
 * @NotThreadSafe
 */
public static class Setter {
    // NOTE(review): these fields appear to be read directly by the enclosing class,
    // so they must keep their names and package/private visibility.
    private final HystrixCollapserKey collapserKey;
    private Scope scope = Scope.REQUEST; // default if nothing is set
    private HystrixCollapserProperties.Setter propertiesSetter;
    // private: instances are created only via withCollapserKey(...)
    private Setter(HystrixCollapserKey collapserKey) {
        this.collapserKey = collapserKey;
    }
    /**
     * Setter factory method containing required values.
     * <p>
     * All optional arguments can be set via the chained methods.
     *
     * @param collapserKey
     *            {@link HystrixCollapserKey} that identifies this collapser and provides the key used for retrieving properties, request caches, publishing metrics etc.
     * @return Setter for fluent interface via method chaining
     */
    public static Setter withCollapserKey(HystrixCollapserKey collapserKey) {
        return new Setter(collapserKey);
    }
    /**
     * {@link Scope} defining what scope the collapsing should occur within
     *
     * @param scope
     *            collapsing scope (e.g. request or global)
     * @return Setter for fluent interface via method chaining
     */
    public Setter andScope(Scope scope) {
        this.scope = scope;
        return this;
    }
    /**
     * @param propertiesSetter
     *            {@link HystrixCollapserProperties.Setter} that allows instance specific property overrides (which can then be overridden by dynamic properties, see
     *            {@link HystrixPropertiesStrategy} for
     *            information on order of precedence).
     *            <p>
     *            Will use defaults if left NULL.
     * @return Setter for fluent interface via method chaining
     */
    public Setter andCollapserPropertiesDefaults(HystrixCollapserProperties.Setter propertiesSetter) {
        this.propertiesSetter = propertiesSetter;
        return this;
    }
}
// Cache of class -> default collapser name, used by getDefaultNameFromClass(...).
// This is a micro-optimization but saves about 1-2 microseconds (on 2011 MacBook Pro)
// on the repetitive string processing that will occur on the same classes over and over again.
@SuppressWarnings("rawtypes")
private static ConcurrentHashMap<Class<? extends HystrixObservableCollapser>, String> defaultNameCache = new ConcurrentHashMap<Class<? extends HystrixObservableCollapser>, String>();
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.yms.app.ydt;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.onosproject.yms.app.ydt.exceptions.YdtException;
import static org.onosproject.yms.app.ydt.YdtTestConstants.A;
import static org.onosproject.yms.app.ydt.YdtTestConstants.B;
import static org.onosproject.yms.app.ydt.YdtTestConstants.C;
import static org.onosproject.yms.app.ydt.YdtTestConstants.E;
import static org.onosproject.yms.app.ydt.YdtTestConstants.F;
import static org.onosproject.yms.app.ydt.YdtTestConstants.G;
import static org.onosproject.yms.app.ydt.YdtTestConstants.H;
import static org.onosproject.yms.app.ydt.YdtTestConstants.MAXIWR;
import static org.onosproject.yms.app.ydt.YdtTestConstants.MIDIWR;
import static org.onosproject.yms.app.ydt.YdtTestConstants.MINIWR;
import static org.onosproject.yms.app.ydt.YdtTestConstants.MRV;
import static org.onosproject.yms.app.ydt.YdtTestConstants.NIWMF;
import static org.onosproject.yms.app.ydt.YdtTestConstants.NWF;
import static org.onosproject.yms.app.ydt.YdtTestConstants.PIWMF;
import static org.onosproject.yms.app.ydt.YdtTestConstants.PWF;
import static org.onosproject.yms.app.ydt.YdtTestConstants.TYPE;
import static org.onosproject.yms.app.ydt.YdtTestUtils.decimal64Ydt;
import static org.onosproject.yms.app.ydt.YdtTestUtils.validateLeafContents;
import static org.onosproject.yms.app.ydt.YdtTestUtils.validateNodeContents;
import static org.onosproject.yms.ydt.YdtContextOperationType.MERGE;
public class YdtDecimal64Test {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /*
    Positive scenario

    input at boundary for decimal64 with fraction 2
        i. min value
        ii. max value

    input at boundary for decimal64 with minimum fraction
        i. min value
        ii. mid value
        iii. max value

    input at boundary for decimal64 with maximum fraction
        i. min value
        ii. mid value
        iii. max value

    input within range
        if range is 10 to 100 for integer
            i.1. input 11
            i.2. min value 10
            i.3. max value 100

    input with multi interval range
        if range is 10..40 | 50..100 for decimal64
            i.1. input 11
            i.2. input 10
            i.3. input 40
            i.4. input 50
            i.5. input 55
            i.6. input 100

        if range is "min .. 3.14 | 10 | 20..max" for decimal64
            i.1. input min
            i.2. input 2.505
            i.3. input 3.14
            i.4. input 10
            i.5. input 20
            i.6. input 92233720368547757
            i.7. input 92233720368547758.07
    */

    /**
     * Creates and validates a decimal64 ydt covering the positive scenarios above.
     */
    @Test
    public void positiveTest() throws YdtException {
        YangRequestWorkBench ydtBuilder = decimal64Ydt();
        validateTree(ydtBuilder);
        //TODO need to be handled later
        // YangRequestWorkBench sbiYdt = validateYangObject(
        //         ydtBuilder, "builtInType", "ydt.decimal64");
        // validateTree(sbiYdt);
    }

    /**
     * Walks the built ydt in document order and checks every node and leaf
     * against the expected names and values.
     */
    private void validateTree(YangRequestWorkBench ydtBuilder) {
        // Start at the logical root; it carries no operation type.
        YdtNode node = (YdtNode) ydtBuilder.getRootNode();
        validateNodeContents(node, TYPE, null);

        node = node.getFirstChild();
        validateNodeContents(node, "decimal64", MERGE);

        // Leaves directly under the "decimal64" container.
        node = node.getFirstChild();
        node = checkLeafThenNext(node, "negInt", C);
        node = checkLeafThenNext(node, "posInt", A);
        node = checkLeafThenNext(node, NIWMF, F);
        node = checkLeafThenNext(node, PIWMF, G);
        node = checkLeafThenNext(node, NWF, H);
        node = checkLeafThenNext(node, PWF, E);
        node = checkLeafThenNext(node, MIDIWR, "11");
        node = checkLeafThenNext(node, MINIWR, "10");
        node = checkLeafThenNext(node, MAXIWR, "100");

        // Multi-range-value (MRV) list entries, each holding a single leaf.
        node = checkMrvEntry(node, "decimal", "11");
        node = checkMrvEntry(node, "decimal", "10");
        node = checkMrvEntry(node, "decimal", "40");
        node = checkMrvEntry(node, "decimal", "50");
        node = checkMrvEntry(node, "decimal", "55");
        node = checkMrvEntry(node, "decimal", "100");
        node = checkMrvEntry(node, "revDecimal", C);
        node = checkMrvEntry(node, "revDecimal", "2.505");
        node = checkMrvEntry(node, "revDecimal", "3.14");
        node = checkMrvEntry(node, "revDecimal", "10");
        node = checkMrvEntry(node, "revDecimal", "20");
        node = checkMrvEntry(node, "revDecimal", B);
        checkMrvEntry(node, "revDecimal", A);
    }

    /**
     * Validates a leaf node and returns its next sibling.
     */
    private static YdtNode checkLeafThenNext(YdtNode leaf, String name, String value) {
        validateLeafContents(leaf, name, value);
        return leaf.getNextSibling();
    }

    /**
     * Validates one MRV list entry (the entry node plus its single child leaf)
     * and returns the entry's next sibling.
     */
    private static YdtNode checkMrvEntry(YdtNode entry, String leafName, String value) {
        validateNodeContents(entry, MRV, MERGE);
        YdtNode leaf = entry.getFirstChild();
        validateLeafContents(leaf, leafName, value);
        return leaf.getParent().getNextSibling();
    }

    //TODO negative scenario will be handled later
    /*
    Negative scenario

    input with position 0
    input with position 1
    input with position 2
    */
    // /**
    //  * Tests all the negative scenario's for bit data type.
    //  */
    // @Test
    // public void negativeTest() {
    //     thrown.expect(IllegalArgumentException.class);
    //     thrown.expectMessage(E_D64);
    //     YangRequestWorkBench ydtBuilder;
    //     ydtBuilder = getYdtBuilder("builtInType", "decimal64", "ydt.decimal64",
    //             MERGE);
    //     ydtBuilder.addLeaf("l1", null, "-9.1999999999e17");
    //     ydtBuilder.traverseToParent();
    // }
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.core.ssomgt.model;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.model.ModelWrapper;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* <p>
* This class is a wrapper for {@link AppRole2Employee}.
* </p>
*
* @author trungdk
* @see AppRole2Employee
* @generated
*/
public class AppRole2EmployeeWrapper implements AppRole2Employee,
    ModelWrapper<AppRole2Employee> {
    public AppRole2EmployeeWrapper(AppRole2Employee appRole2Employee) {
        _appRole2Employee = appRole2Employee;
    }

    @Override
    public Class<?> getModelClass() {
        return AppRole2Employee.class;
    }

    @Override
    public String getModelClassName() {
        return AppRole2Employee.class.getName();
    }

    @Override
    public Map<String, Object> getModelAttributes() {
        Map<String, Object> attributes = new HashMap<String, Object>();

        attributes.put("appRole2EmployeeId", getAppRole2EmployeeId());
        attributes.put("userId", getUserId());
        attributes.put("groupId", getGroupId());
        attributes.put("companyId", getCompanyId());
        attributes.put("createDate", getCreateDate());
        attributes.put("appRoleId", getAppRoleId());
        attributes.put("employeeId", getEmployeeId());

        return attributes;
    }

    @Override
    public void setModelAttributes(Map<String, Object> attributes) {
        // Each attribute is applied only when present, so partial maps are supported.
        Long appRole2EmployeeId = (Long)attributes.get("appRole2EmployeeId");

        if (appRole2EmployeeId != null) {
            setAppRole2EmployeeId(appRole2EmployeeId);
        }

        Long userId = (Long)attributes.get("userId");

        if (userId != null) {
            setUserId(userId);
        }

        Long groupId = (Long)attributes.get("groupId");

        if (groupId != null) {
            setGroupId(groupId);
        }

        Long companyId = (Long)attributes.get("companyId");

        if (companyId != null) {
            setCompanyId(companyId);
        }

        Date createDate = (Date)attributes.get("createDate");

        if (createDate != null) {
            setCreateDate(createDate);
        }

        Long appRoleId = (Long)attributes.get("appRoleId");

        if (appRoleId != null) {
            setAppRoleId(appRoleId);
        }

        Long employeeId = (Long)attributes.get("employeeId");

        if (employeeId != null) {
            setEmployeeId(employeeId);
        }
    }

    /**
     * Returns the primary key of this app role2 employee.
     *
     * @return the primary key of this app role2 employee
     */
    @Override
    public long getPrimaryKey() {
        return _appRole2Employee.getPrimaryKey();
    }

    /**
     * Sets the primary key of this app role2 employee.
     *
     * @param primaryKey the primary key of this app role2 employee
     */
    @Override
    public void setPrimaryKey(long primaryKey) {
        _appRole2Employee.setPrimaryKey(primaryKey);
    }

    /**
     * Returns the app role2 employee ID of this app role2 employee.
     *
     * @return the app role2 employee ID of this app role2 employee
     */
    @Override
    public long getAppRole2EmployeeId() {
        return _appRole2Employee.getAppRole2EmployeeId();
    }

    /**
     * Sets the app role2 employee ID of this app role2 employee.
     *
     * @param appRole2EmployeeId the app role2 employee ID of this app role2 employee
     */
    @Override
    public void setAppRole2EmployeeId(long appRole2EmployeeId) {
        _appRole2Employee.setAppRole2EmployeeId(appRole2EmployeeId);
    }

    /**
     * Returns the user ID of this app role2 employee.
     *
     * @return the user ID of this app role2 employee
     */
    @Override
    public long getUserId() {
        return _appRole2Employee.getUserId();
    }

    /**
     * Sets the user ID of this app role2 employee.
     *
     * @param userId the user ID of this app role2 employee
     */
    @Override
    public void setUserId(long userId) {
        _appRole2Employee.setUserId(userId);
    }

    /**
     * Returns the user uuid of this app role2 employee.
     *
     * @return the user uuid of this app role2 employee
     * @throws SystemException if a system exception occurred
     */
    @Override
    public java.lang.String getUserUuid()
        throws com.liferay.portal.kernel.exception.SystemException {
        return _appRole2Employee.getUserUuid();
    }

    /**
     * Sets the user uuid of this app role2 employee.
     *
     * @param userUuid the user uuid of this app role2 employee
     */
    @Override
    public void setUserUuid(java.lang.String userUuid) {
        _appRole2Employee.setUserUuid(userUuid);
    }

    /**
     * Returns the group ID of this app role2 employee.
     *
     * @return the group ID of this app role2 employee
     */
    @Override
    public long getGroupId() {
        return _appRole2Employee.getGroupId();
    }

    /**
     * Sets the group ID of this app role2 employee.
     *
     * @param groupId the group ID of this app role2 employee
     */
    @Override
    public void setGroupId(long groupId) {
        _appRole2Employee.setGroupId(groupId);
    }

    /**
     * Returns the company ID of this app role2 employee.
     *
     * @return the company ID of this app role2 employee
     */
    @Override
    public long getCompanyId() {
        return _appRole2Employee.getCompanyId();
    }

    /**
     * Sets the company ID of this app role2 employee.
     *
     * @param companyId the company ID of this app role2 employee
     */
    @Override
    public void setCompanyId(long companyId) {
        _appRole2Employee.setCompanyId(companyId);
    }

    /**
     * Returns the create date of this app role2 employee.
     *
     * @return the create date of this app role2 employee
     */
    @Override
    public java.util.Date getCreateDate() {
        return _appRole2Employee.getCreateDate();
    }

    /**
     * Sets the create date of this app role2 employee.
     *
     * @param createDate the create date of this app role2 employee
     */
    @Override
    public void setCreateDate(java.util.Date createDate) {
        _appRole2Employee.setCreateDate(createDate);
    }

    /**
     * Returns the app role ID of this app role2 employee.
     *
     * @return the app role ID of this app role2 employee
     */
    @Override
    public long getAppRoleId() {
        return _appRole2Employee.getAppRoleId();
    }

    /**
     * Sets the app role ID of this app role2 employee.
     *
     * @param appRoleId the app role ID of this app role2 employee
     */
    @Override
    public void setAppRoleId(long appRoleId) {
        _appRole2Employee.setAppRoleId(appRoleId);
    }

    /**
     * Returns the employee ID of this app role2 employee.
     *
     * @return the employee ID of this app role2 employee
     */
    @Override
    public long getEmployeeId() {
        return _appRole2Employee.getEmployeeId();
    }

    /**
     * Sets the employee ID of this app role2 employee.
     *
     * @param employeeId the employee ID of this app role2 employee
     */
    @Override
    public void setEmployeeId(long employeeId) {
        _appRole2Employee.setEmployeeId(employeeId);
    }

    @Override
    public boolean isNew() {
        return _appRole2Employee.isNew();
    }

    @Override
    public void setNew(boolean n) {
        _appRole2Employee.setNew(n);
    }

    @Override
    public boolean isCachedModel() {
        return _appRole2Employee.isCachedModel();
    }

    @Override
    public void setCachedModel(boolean cachedModel) {
        _appRole2Employee.setCachedModel(cachedModel);
    }

    @Override
    public boolean isEscapedModel() {
        return _appRole2Employee.isEscapedModel();
    }

    @Override
    public java.io.Serializable getPrimaryKeyObj() {
        return _appRole2Employee.getPrimaryKeyObj();
    }

    @Override
    public void setPrimaryKeyObj(java.io.Serializable primaryKeyObj) {
        _appRole2Employee.setPrimaryKeyObj(primaryKeyObj);
    }

    @Override
    public com.liferay.portlet.expando.model.ExpandoBridge getExpandoBridge() {
        return _appRole2Employee.getExpandoBridge();
    }

    @Override
    public void setExpandoBridgeAttributes(
        com.liferay.portal.model.BaseModel<?> baseModel) {
        _appRole2Employee.setExpandoBridgeAttributes(baseModel);
    }

    @Override
    public void setExpandoBridgeAttributes(
        com.liferay.portlet.expando.model.ExpandoBridge expandoBridge) {
        _appRole2Employee.setExpandoBridgeAttributes(expandoBridge);
    }

    @Override
    public void setExpandoBridgeAttributes(
        com.liferay.portal.service.ServiceContext serviceContext) {
        _appRole2Employee.setExpandoBridgeAttributes(serviceContext);
    }

    @Override
    public java.lang.Object clone() {
        // Clone the wrapped model and re-wrap it so callers never share the delegate.
        return new AppRole2EmployeeWrapper((AppRole2Employee)_appRole2Employee.clone());
    }

    @Override
    public int compareTo(
        org.oep.core.ssomgt.model.AppRole2Employee appRole2Employee) {
        return _appRole2Employee.compareTo(appRole2Employee);
    }

    @Override
    public int hashCode() {
        return _appRole2Employee.hashCode();
    }

    @Override
    public com.liferay.portal.model.CacheModel<org.oep.core.ssomgt.model.AppRole2Employee> toCacheModel() {
        return _appRole2Employee.toCacheModel();
    }

    @Override
    public org.oep.core.ssomgt.model.AppRole2Employee toEscapedModel() {
        return new AppRole2EmployeeWrapper(_appRole2Employee.toEscapedModel());
    }

    @Override
    public org.oep.core.ssomgt.model.AppRole2Employee toUnescapedModel() {
        return new AppRole2EmployeeWrapper(_appRole2Employee.toUnescapedModel());
    }

    @Override
    public java.lang.String toString() {
        return _appRole2Employee.toString();
    }

    @Override
    public java.lang.String toXmlString() {
        return _appRole2Employee.toXmlString();
    }

    @Override
    public void persist()
        throws com.liferay.portal.kernel.exception.SystemException {
        _appRole2Employee.persist();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (!(obj instanceof AppRole2EmployeeWrapper)) {
            return false;
        }

        AppRole2EmployeeWrapper appRole2EmployeeWrapper = (AppRole2EmployeeWrapper)obj;

        // Null-safe comparison of the wrapped models.
        if (Validator.equals(_appRole2Employee,
                appRole2EmployeeWrapper._appRole2Employee)) {
            return true;
        }

        return false;
    }

    /**
     * @deprecated As of 6.1.0, replaced by {@link #getWrappedModel}
     */
    @Deprecated
    public AppRole2Employee getWrappedAppRole2Employee() {
        return _appRole2Employee;
    }

    @Override
    public AppRole2Employee getWrappedModel() {
        return _appRole2Employee;
    }

    @Override
    public void resetOriginalValues() {
        _appRole2Employee.resetOriginalValues();
    }

    // The delegate is set once in the constructor; final documents and enforces that.
    private final AppRole2Employee _appRole2Employee;
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.applicationhistoryservice;
import java.io.IOException;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.metrics.AppAttemptMetricsConstants;
import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
import org.apache.hadoop.yarn.server.metrics.ContainerMetricsConstants;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.timeline.NameValuePair;
import org.apache.hadoop.yarn.server.timeline.TimelineDataManager;
import org.apache.hadoop.yarn.server.timeline.TimelineReader.Field;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ApplicationHistoryManagerOnTimelineStore extends AbstractService
implements
ApplicationHistoryManager {
private static final Logger LOG = LoggerFactory
.getLogger(ApplicationHistoryManagerOnTimelineStore.class);
@VisibleForTesting
static final String UNAVAILABLE = "N/A";
private TimelineDataManager timelineDataManager;
private ApplicationACLsManager aclsManager;
private String serverHttpAddress;
private long maxLoadedApplications;
/**
 * Creates the history manager backed by a timeline store.
 *
 * @param timelineDataManager source of timeline entities for apps/attempts/containers
 * @param aclsManager used to check view access on application data
 */
public ApplicationHistoryManagerOnTimelineStore(
    TimelineDataManager timelineDataManager,
    ApplicationACLsManager aclsManager) {
    super(ApplicationHistoryManagerOnTimelineStore.class.getName());
    this.timelineDataManager = timelineDataManager;
    this.aclsManager = aclsManager;
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
    // Precompute the AHS web address (scheme + host:port) used when building report links.
    serverHttpAddress = WebAppUtils.getHttpSchemePrefix(conf) +
        WebAppUtils.getAHSWebAppURLWithoutScheme(conf);
    // Cap on how many applications a single getApplications() call will load.
    maxLoadedApplications =
        conf.getLong(YarnConfiguration.APPLICATION_HISTORY_MAX_APPS,
            YarnConfiguration.DEFAULT_APPLICATION_HISTORY_MAX_APPS);
    super.serviceInit(conf);
}
@Override
public ApplicationReport getApplication(ApplicationId appId)
    throws YarnException, IOException {
    // Fetch with all fields populated and unwrap the report from the internal holder.
    return getApplication(appId, ApplicationReportField.ALL).appReport;
}
@Override
public Map<ApplicationId, ApplicationReport> getApplications(long appsNum,
    long appStartedTimeBegin, long appStartedTimeEnd) throws YarnException,
    IOException {
    // Query application entities within the start-time window; Long.MAX_VALUE means
    // "no limit requested", in which case the configured cap is applied instead.
    TimelineEntities entities =
        timelineDataManager.getEntities(
            ApplicationMetricsConstants.ENTITY_TYPE, null, null,
            appStartedTimeBegin, appStartedTimeEnd, null, null,
            appsNum == Long.MAX_VALUE ? this.maxLoadedApplications : appsNum,
            EnumSet.allOf(Field.class), UserGroupInformation.getLoginUser());
    // LinkedHashMap preserves the order the store returned the applications in.
    Map<ApplicationId, ApplicationReport> apps =
        new LinkedHashMap<ApplicationId, ApplicationReport>();
    if (entities != null && entities.getEntities() != null) {
        for (TimelineEntity entity : entities.getEntities()) {
            try {
                ApplicationReportExt app =
                    generateApplicationReport(entity, ApplicationReportField.ALL);
                apps.put(app.appReport.getApplicationId(), app.appReport);
            } catch (Exception e) {
                // Best-effort listing: a single malformed entity is logged and skipped
                // rather than failing the whole call.
                LOG.error("Error on generating application report for " +
                    entity.getEntityId(), e);
            }
        }
    }
    return apps;
}
@Override
public Map<ApplicationAttemptId, ApplicationAttemptReport>
getApplicationAttempts(ApplicationId appId)
throws YarnException, IOException {
ApplicationReportExt app = getApplication(
appId, ApplicationReportField.USER_AND_ACLS);
checkAccess(app);
TimelineEntities entities = timelineDataManager.getEntities(
AppAttemptMetricsConstants.ENTITY_TYPE,
new NameValuePair(
AppAttemptMetricsConstants.PARENT_PRIMARY_FILTER, appId
.toString()), null, null, null, null, null,
Long.MAX_VALUE, EnumSet.allOf(Field.class),
UserGroupInformation.getLoginUser());
Map<ApplicationAttemptId, ApplicationAttemptReport> appAttempts =
new LinkedHashMap<ApplicationAttemptId, ApplicationAttemptReport>();
for (TimelineEntity entity : entities.getEntities()) {
ApplicationAttemptReport appAttempt =
convertToApplicationAttemptReport(entity);
appAttempts.put(appAttempt.getApplicationAttemptId(), appAttempt);
}
return appAttempts;
}
@Override
public ApplicationAttemptReport getApplicationAttempt(
ApplicationAttemptId appAttemptId) throws YarnException, IOException {
return getApplicationAttempt(appAttemptId, true);
}
private ApplicationAttemptReport getApplicationAttempt(
ApplicationAttemptId appAttemptId, boolean checkACLs)
throws YarnException, IOException {
if (checkACLs) {
ApplicationReportExt app = getApplication(
appAttemptId.getApplicationId(),
ApplicationReportField.USER_AND_ACLS);
checkAccess(app);
}
TimelineEntity entity = timelineDataManager.getEntity(
AppAttemptMetricsConstants.ENTITY_TYPE,
appAttemptId.toString(), EnumSet.allOf(Field.class),
UserGroupInformation.getLoginUser());
if (entity == null) {
throw new ApplicationAttemptNotFoundException(
"The entity for application attempt " + appAttemptId +
" doesn't exist in the timeline store");
} else {
return convertToApplicationAttemptReport(entity);
}
}
@Override
public ContainerReport getContainer(ContainerId containerId)
throws YarnException, IOException {
ApplicationReportExt app = getApplication(
containerId.getApplicationAttemptId().getApplicationId(),
ApplicationReportField.USER_AND_ACLS);
checkAccess(app);
TimelineEntity entity = timelineDataManager.getEntity(
ContainerMetricsConstants.ENTITY_TYPE,
containerId.toString(), EnumSet.allOf(Field.class),
UserGroupInformation.getLoginUser());
if (entity == null) {
throw new ContainerNotFoundException(
"The entity for container " + containerId +
" doesn't exist in the timeline store");
} else {
return convertToContainerReport(
entity, serverHttpAddress, app.appReport.getUser());
}
}
@Override
public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId)
throws YarnException, IOException {
ApplicationAttemptReport appAttempt =
getApplicationAttempt(appAttemptId, false);
return getContainer(appAttempt.getAMContainerId());
}
@Override
public Map<ContainerId, ContainerReport> getContainers(
ApplicationAttemptId appAttemptId) throws YarnException, IOException {
ApplicationReportExt app = getApplication(
appAttemptId.getApplicationId(), ApplicationReportField.USER_AND_ACLS);
checkAccess(app);
TimelineEntities entities = timelineDataManager.getEntities(
ContainerMetricsConstants.ENTITY_TYPE,
new NameValuePair(
ContainerMetricsConstants.PARENT_PRIMARIY_FILTER,
appAttemptId.toString()), null, null, null,
null, null, Long.MAX_VALUE, EnumSet.allOf(Field.class),
UserGroupInformation.getLoginUser());
Map<ContainerId, ContainerReport> containers =
new LinkedHashMap<ContainerId, ContainerReport>();
if (entities != null && entities.getEntities() != null) {
for (TimelineEntity entity : entities.getEntities()) {
ContainerReport container = convertToContainerReport(
entity, serverHttpAddress, app.appReport.getUser());
containers.put(container.getContainerId(), container);
}
}
return containers;
}
private static ApplicationReportExt convertToApplicationReport(
TimelineEntity entity, ApplicationReportField field) {
String user = null;
String queue = null;
String name = null;
String type = null;
boolean unmanagedApplication = false;
long createdTime = 0;
long finishedTime = 0;
float progress = 0.0f;
int applicationPriority = 0;
ApplicationAttemptId latestApplicationAttemptId = null;
String diagnosticsInfo = null;
FinalApplicationStatus finalStatus = FinalApplicationStatus.UNDEFINED;
YarnApplicationState state = YarnApplicationState.ACCEPTED;
ApplicationResourceUsageReport appResources = null;
Set<String> appTags = null;
Map<ApplicationAccessType, String> appViewACLs =
new HashMap<ApplicationAccessType, String>();
String appNodeLabelExpression = null;
String amNodeLabelExpression = null;
Map<String, Object> entityInfo = entity.getOtherInfo();
if (entityInfo != null) {
if (entityInfo.containsKey(ApplicationMetricsConstants.USER_ENTITY_INFO)) {
user =
entityInfo.get(ApplicationMetricsConstants.USER_ENTITY_INFO)
.toString();
}
if (entityInfo.containsKey(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO)) {
String appViewACLsStr = entityInfo.get(
ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO).toString();
if (appViewACLsStr.length() > 0) {
appViewACLs.put(ApplicationAccessType.VIEW_APP, appViewACLsStr);
}
}
if (field == ApplicationReportField.USER_AND_ACLS) {
return new ApplicationReportExt(ApplicationReport.newInstance(
ApplicationId.fromString(entity.getEntityId()),
latestApplicationAttemptId, user, queue, name, null, -1, null,
state, diagnosticsInfo, null, createdTime, finishedTime,
finalStatus, null, null, progress, type, null, appTags,
unmanagedApplication, Priority.newInstance(applicationPriority),
appNodeLabelExpression, amNodeLabelExpression), appViewACLs);
}
if (entityInfo.containsKey(ApplicationMetricsConstants.QUEUE_ENTITY_INFO)) {
queue =
entityInfo.get(ApplicationMetricsConstants.QUEUE_ENTITY_INFO)
.toString();
}
if (entityInfo.containsKey(ApplicationMetricsConstants.NAME_ENTITY_INFO)) {
name =
entityInfo.get(ApplicationMetricsConstants.NAME_ENTITY_INFO)
.toString();
}
if (entityInfo.containsKey(ApplicationMetricsConstants.TYPE_ENTITY_INFO)) {
type =
entityInfo.get(ApplicationMetricsConstants.TYPE_ENTITY_INFO)
.toString();
}
if (entityInfo.containsKey(ApplicationMetricsConstants.TYPE_ENTITY_INFO)) {
type =
entityInfo.get(ApplicationMetricsConstants.TYPE_ENTITY_INFO)
.toString();
}
if (entityInfo
.containsKey(ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO)) {
unmanagedApplication =
Boolean.parseBoolean(entityInfo.get(
ApplicationMetricsConstants.UNMANAGED_APPLICATION_ENTITY_INFO)
.toString());
}
if (entityInfo
.containsKey(ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO)) {
applicationPriority = Integer.parseInt(entityInfo.get(
ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO).toString());
}
if (entityInfo
.containsKey(ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION)) {
appNodeLabelExpression = entityInfo
.get(ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION).toString();
}
if (entityInfo
.containsKey(ApplicationMetricsConstants.AM_NODE_LABEL_EXPRESSION)) {
amNodeLabelExpression =
entityInfo.get(ApplicationMetricsConstants.AM_NODE_LABEL_EXPRESSION)
.toString();
}
if (entityInfo.containsKey(ApplicationMetricsConstants.APP_CPU_METRICS)) {
long vcoreSeconds = parseLong(entityInfo,
ApplicationMetricsConstants.APP_CPU_METRICS);
long memorySeconds = parseLong(entityInfo,
ApplicationMetricsConstants.APP_MEM_METRICS);
long preemptedMemorySeconds = parseLong(entityInfo,
ApplicationMetricsConstants.APP_MEM_PREEMPT_METRICS);
long preemptedVcoreSeconds = parseLong(entityInfo,
ApplicationMetricsConstants.APP_CPU_PREEMPT_METRICS);
appResources = ApplicationResourceUsageReport.newInstance(0, 0, null,
null, null, memorySeconds, vcoreSeconds, 0, 0,
preemptedMemorySeconds, preemptedVcoreSeconds);
}
if (entityInfo.containsKey(ApplicationMetricsConstants.APP_TAGS_INFO)) {
appTags = new HashSet<String>();
Object obj = entityInfo.get(ApplicationMetricsConstants.APP_TAGS_INFO);
if (obj != null && obj instanceof Collection<?>) {
for(Object o : (Collection<?>)obj) {
if (o != null) {
appTags.add(o.toString());
}
}
}
}
}
List<TimelineEvent> events = entity.getEvents();
long updatedTimeStamp = 0L;
if (events != null) {
for (TimelineEvent event : events) {
if (event.getEventType().equals(
ApplicationMetricsConstants.CREATED_EVENT_TYPE)) {
createdTime = event.getTimestamp();
} else if (event.getEventType().equals(
ApplicationMetricsConstants.UPDATED_EVENT_TYPE)) {
// This type of events are parsed in time-stamp descending order
// which means the previous event could override the information
// from the later same type of event. Hence compare timestamp
// before over writing.
if (event.getTimestamp() > updatedTimeStamp) {
updatedTimeStamp = event.getTimestamp();
} else {
continue;
}
Map<String, Object> eventInfo = event.getEventInfo();
if (eventInfo == null) {
continue;
}
applicationPriority = Integer
.parseInt(eventInfo.get(
ApplicationMetricsConstants.APPLICATION_PRIORITY_INFO)
.toString());
queue = eventInfo.get(ApplicationMetricsConstants.QUEUE_ENTITY_INFO)
.toString();
} else if (event.getEventType().equals(
ApplicationMetricsConstants.STATE_UPDATED_EVENT_TYPE)) {
Map<String, Object> eventInfo = event.getEventInfo();
if (eventInfo == null) {
continue;
}
if (eventInfo.containsKey(
ApplicationMetricsConstants.STATE_EVENT_INFO)) {
if (!isFinalState(state)) {
state = YarnApplicationState.valueOf(eventInfo.get(
ApplicationMetricsConstants.STATE_EVENT_INFO).toString());
}
}
} else if (event.getEventType().equals(
ApplicationMetricsConstants.FINISHED_EVENT_TYPE)) {
progress=1.0F;
finishedTime = event.getTimestamp();
Map<String, Object> eventInfo = event.getEventInfo();
if (eventInfo == null) {
continue;
}
if (eventInfo
.containsKey(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) {
latestApplicationAttemptId = ApplicationAttemptId.fromString(
eventInfo.get(
ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)
.toString());
}
if (eventInfo
.containsKey(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) {
diagnosticsInfo =
eventInfo.get(
ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)
.toString();
}
if (eventInfo
.containsKey(ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO)) {
finalStatus =
FinalApplicationStatus.valueOf(eventInfo.get(
ApplicationMetricsConstants.FINAL_STATUS_EVENT_INFO)
.toString());
}
if (eventInfo
.containsKey(ApplicationMetricsConstants.STATE_EVENT_INFO)) {
state =
YarnApplicationState.valueOf(eventInfo.get(
ApplicationMetricsConstants.STATE_EVENT_INFO).toString());
}
}
}
}
return new ApplicationReportExt(ApplicationReport.newInstance(
ApplicationId.fromString(entity.getEntityId()),
latestApplicationAttemptId, user, queue, name, null, -1, null, state,
diagnosticsInfo, null, createdTime, finishedTime, finalStatus,
appResources, null, progress, type, null, appTags, unmanagedApplication,
Priority.newInstance(applicationPriority), appNodeLabelExpression,
amNodeLabelExpression), appViewACLs);
}
private static long parseLong(Map<String, Object> entityInfo,
String infoKey) {
long result = 0;
Object infoValue = entityInfo.get(infoKey);
if (infoValue != null) {
result = Long.parseLong(infoValue.toString());
}
return result;
}
private static boolean isFinalState(YarnApplicationState state) {
return state == YarnApplicationState.FINISHED
|| state == YarnApplicationState.FAILED
|| state == YarnApplicationState.KILLED;
}
private static ApplicationAttemptReport convertToApplicationAttemptReport(
TimelineEntity entity) {
String host = null;
int rpcPort = -1;
ContainerId amContainerId = null;
String trackingUrl = null;
String originalTrackingUrl = null;
String diagnosticsInfo = null;
YarnApplicationAttemptState state = null;
List<TimelineEvent> events = entity.getEvents();
if (events != null) {
for (TimelineEvent event : events) {
if (event.getEventType().equals(
AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE)) {
Map<String, Object> eventInfo = event.getEventInfo();
if (eventInfo == null) {
continue;
}
if (eventInfo.containsKey(AppAttemptMetricsConstants.HOST_INFO)) {
host =
eventInfo.get(AppAttemptMetricsConstants.HOST_INFO)
.toString();
}
if (eventInfo
.containsKey(AppAttemptMetricsConstants.RPC_PORT_INFO)) {
rpcPort = (Integer) eventInfo.get(
AppAttemptMetricsConstants.RPC_PORT_INFO);
}
if (eventInfo
.containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO)) {
amContainerId =
ContainerId.fromString(eventInfo.get(
AppAttemptMetricsConstants.MASTER_CONTAINER_INFO)
.toString());
}
} else if (event.getEventType().equals(
AppAttemptMetricsConstants.FINISHED_EVENT_TYPE)) {
Map<String, Object> eventInfo = event.getEventInfo();
if (eventInfo == null) {
continue;
}
if (eventInfo
.containsKey(AppAttemptMetricsConstants.TRACKING_URL_INFO)) {
trackingUrl =
eventInfo.get(
AppAttemptMetricsConstants.TRACKING_URL_INFO)
.toString();
}
if (eventInfo
.containsKey(
AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO)) {
originalTrackingUrl =
eventInfo
.get(
AppAttemptMetricsConstants.ORIGINAL_TRACKING_URL_INFO)
.toString();
}
if (eventInfo
.containsKey(AppAttemptMetricsConstants.DIAGNOSTICS_INFO)) {
diagnosticsInfo =
eventInfo.get(
AppAttemptMetricsConstants.DIAGNOSTICS_INFO)
.toString();
}
if (eventInfo
.containsKey(AppAttemptMetricsConstants.STATE_INFO)) {
state =
YarnApplicationAttemptState.valueOf(eventInfo.get(
AppAttemptMetricsConstants.STATE_INFO)
.toString());
}
if (eventInfo
.containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_INFO)) {
amContainerId =
ContainerId.fromString(eventInfo.get(
AppAttemptMetricsConstants.MASTER_CONTAINER_INFO)
.toString());
}
}
}
}
return ApplicationAttemptReport.newInstance(
ApplicationAttemptId.fromString(entity.getEntityId()),
host, rpcPort, trackingUrl, originalTrackingUrl, diagnosticsInfo,
state, amContainerId);
}
private static ContainerReport convertToContainerReport(
TimelineEntity entity, String serverHttpAddress, String user) {
int allocatedMem = 0;
int allocatedVcore = 0;
String allocatedHost = null;
int allocatedPort = -1;
int allocatedPriority = 0;
long createdTime = 0;
long finishedTime = 0;
String diagnosticsInfo = null;
int exitStatus = ContainerExitStatus.INVALID;
ContainerState state = null;
String nodeHttpAddress = null;
Map<String, Object> entityInfo = entity.getOtherInfo();
if (entityInfo != null) {
if (entityInfo
.containsKey(ContainerMetricsConstants.ALLOCATED_MEMORY_INFO)) {
allocatedMem = (Integer) entityInfo.get(
ContainerMetricsConstants.ALLOCATED_MEMORY_INFO);
}
if (entityInfo
.containsKey(ContainerMetricsConstants.ALLOCATED_VCORE_INFO)) {
allocatedVcore = (Integer) entityInfo.get(
ContainerMetricsConstants.ALLOCATED_VCORE_INFO);
}
if (entityInfo
.containsKey(ContainerMetricsConstants.ALLOCATED_HOST_INFO)) {
allocatedHost =
entityInfo
.get(ContainerMetricsConstants.ALLOCATED_HOST_INFO)
.toString();
}
if (entityInfo
.containsKey(ContainerMetricsConstants.ALLOCATED_PORT_INFO)) {
allocatedPort = (Integer) entityInfo.get(
ContainerMetricsConstants.ALLOCATED_PORT_INFO);
}
if (entityInfo
.containsKey(ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO)) {
allocatedPriority = (Integer) entityInfo.get(
ContainerMetricsConstants.ALLOCATED_PRIORITY_INFO);
}
if (entityInfo.containsKey(
ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO)) {
nodeHttpAddress =
(String) entityInfo
.get(ContainerMetricsConstants.ALLOCATED_HOST_HTTP_ADDRESS_INFO);
}
}
List<TimelineEvent> events = entity.getEvents();
if (events != null) {
for (TimelineEvent event : events) {
if (event.getEventType().equals(
ContainerMetricsConstants.CREATED_EVENT_TYPE)) {
createdTime = event.getTimestamp();
} else if (event.getEventType().equals(
ContainerMetricsConstants.FINISHED_EVENT_TYPE)) {
finishedTime = event.getTimestamp();
Map<String, Object> eventInfo = event.getEventInfo();
if (eventInfo == null) {
continue;
}
if (eventInfo
.containsKey(ContainerMetricsConstants.DIAGNOSTICS_INFO)) {
diagnosticsInfo =
eventInfo.get(
ContainerMetricsConstants.DIAGNOSTICS_INFO)
.toString();
}
if (eventInfo
.containsKey(ContainerMetricsConstants.EXIT_STATUS_INFO)) {
exitStatus = (Integer) eventInfo.get(
ContainerMetricsConstants.EXIT_STATUS_INFO);
}
if (eventInfo
.containsKey(ContainerMetricsConstants.STATE_INFO)) {
state =
ContainerState.valueOf(eventInfo.get(
ContainerMetricsConstants.STATE_INFO).toString());
}
}
}
}
ContainerId containerId =
ContainerId.fromString(entity.getEntityId());
String logUrl = null;
NodeId allocatedNode = null;
if (allocatedHost != null) {
allocatedNode = NodeId.newInstance(allocatedHost, allocatedPort);
logUrl = WebAppUtils.getAggregatedLogURL(
serverHttpAddress,
allocatedNode.toString(),
containerId.toString(),
containerId.toString(),
user);
}
return ContainerReport.newInstance(
ContainerId.fromString(entity.getEntityId()),
Resource.newInstance(allocatedMem, allocatedVcore), allocatedNode,
Priority.newInstance(allocatedPriority),
createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state,
nodeHttpAddress);
}
private ApplicationReportExt generateApplicationReport(TimelineEntity entity,
ApplicationReportField field) throws YarnException, IOException {
ApplicationReportExt app = convertToApplicationReport(entity, field);
// If only user and acls are pulled to check attempt(s)/container(s) access
// control, we can return immediately
if (field == ApplicationReportField.USER_AND_ACLS) {
return app;
}
try {
checkAccess(app);
if (app.appReport.getCurrentApplicationAttemptId() != null) {
ApplicationAttemptReport appAttempt = getApplicationAttempt(
app.appReport.getCurrentApplicationAttemptId(), false);
app.appReport.setHost(appAttempt.getHost());
app.appReport.setRpcPort(appAttempt.getRpcPort());
app.appReport.setTrackingUrl(appAttempt.getTrackingUrl());
app.appReport.setOriginalTrackingUrl(appAttempt.getOriginalTrackingUrl());
}
} catch (AuthorizationException | ApplicationAttemptNotFoundException e) {
// AuthorizationException is thrown because the user doesn't have access
if (e instanceof AuthorizationException) {
LOG.warn("Failed to authorize when generating application report for "
+ app.appReport.getApplicationId()
+ ". Use a placeholder for its latest attempt id. ", e);
} else { // Attempt not found
LOG.info("No application attempt found for "
+ app.appReport.getApplicationId()
+ ". Use a placeholder for its latest attempt id. ", e);
}
// It's possible that the app is finished before the first attempt is created.
app.appReport.setDiagnostics(null);
app.appReport.setCurrentApplicationAttemptId(null);
}
if (app.appReport.getCurrentApplicationAttemptId() == null) {
app.appReport.setCurrentApplicationAttemptId(
ApplicationAttemptId.newInstance(app.appReport.getApplicationId(), -1));
}
if (app.appReport.getHost() == null) {
app.appReport.setHost(UNAVAILABLE);
}
if (app.appReport.getRpcPort() < 0) {
app.appReport.setRpcPort(-1);
}
if (app.appReport.getTrackingUrl() == null) {
app.appReport.setTrackingUrl(UNAVAILABLE);
}
if (app.appReport.getOriginalTrackingUrl() == null) {
app.appReport.setOriginalTrackingUrl(UNAVAILABLE);
}
if (app.appReport.getDiagnostics() == null) {
app.appReport.setDiagnostics("");
}
return app;
}
private ApplicationReportExt getApplication(ApplicationId appId,
ApplicationReportField field) throws YarnException, IOException {
TimelineEntity entity = timelineDataManager.getEntity(
ApplicationMetricsConstants.ENTITY_TYPE,
appId.toString(), EnumSet.allOf(Field.class),
UserGroupInformation.getLoginUser());
if (entity == null) {
throw new ApplicationNotFoundException("The entity for application " +
appId + " doesn't exist in the timeline store");
} else {
return generateApplicationReport(entity, field);
}
}
private void checkAccess(ApplicationReportExt app)
throws YarnException, IOException {
if (app.appViewACLs != null) {
aclsManager.addApplication(
app.appReport.getApplicationId(), app.appViewACLs);
try {
if (!aclsManager.checkAccess(UserGroupInformation.getCurrentUser(),
ApplicationAccessType.VIEW_APP, app.appReport.getUser(),
app.appReport.getApplicationId())) {
throw new AuthorizationException("User "
+ UserGroupInformation.getCurrentUser().getShortUserName()
+ " does not have privilege to see this application "
+ app.appReport.getApplicationId());
}
} finally {
aclsManager.removeApplication(app.appReport.getApplicationId());
}
}
}
private enum ApplicationReportField {
ALL, // retrieve all the fields
USER_AND_ACLS // retrieve user and ACLs info only
}
private static class ApplicationReportExt {
private ApplicationReport appReport;
private Map<ApplicationAccessType, String> appViewACLs;
public ApplicationReportExt(
ApplicationReport appReport,
Map<ApplicationAccessType, String> appViewACLs) {
this.appReport = appReport;
this.appViewACLs = appViewACLs;
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.*;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.MinimumScoreCollector;
import org.elasticsearch.common.lucene.MultiCollector;
import org.elasticsearch.common.lucene.search.FilteredCollector;
import org.elasticsearch.common.lucene.search.XCollector;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Context-aware extension of {@link IndexSearcher}.
*/
/**
 * Context-aware extension of {@link IndexSearcher}. Ties searching to a
 * {@link SearchContext}: reuses the already-rewritten top-level query, uses
 * cached dfs data when available, and during the main query phase layers
 * timeout, post-filter, extra collectors and minimum-score handling around
 * the caller-supplied collector.
 */
public class ContextIndexSearcher extends IndexSearcher implements Releasable {

    /** Phase of the search request currently being executed. */
    public static enum Stage {
        NA,
        MAIN_QUERY
    }

    /** The wrapped {@link IndexSearcher}. The reason why we sometimes prefer delegating to this searcher instead of <tt>super</tt> is that
     * this instance may have more assertions, for example if it comes from MockInternalEngine which wraps the IndexSearcher into an
     * AssertingIndexSearcher. */
    private final IndexSearcher in;

    private final SearchContext searchContext;

    private CachedDfSource dfSource;

    // Extra collectors that run alongside the main collector in MAIN_QUERY.
    private List<Collector> queryCollectors;

    private Stage currentState = Stage.NA;

    private boolean enableMainDocIdSetCollector;
    private DocIdSetCollector mainDocIdSetCollector;

    public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
        super(searcher.reader());
        in = searcher.searcher();
        this.searchContext = searchContext;
        setSimilarity(searcher.searcher().getSimilarity());
    }

    @Override
    public void close() {
        Releasables.close(mainDocIdSetCollector);
    }

    /** Sets the cached dfs source used to normalize the main query's weight. */
    public void dfSource(CachedDfSource dfSource) {
        this.dfSource = dfSource;
    }

    /**
     * Adds a query level collector that runs at {@link Stage#MAIN_QUERY}. Note, supports
     * {@link org.elasticsearch.common.lucene.search.XCollector} allowing for a callback
     * when collection is done.
     */
    public void addMainQueryCollector(Collector collector) {
        if (queryCollectors == null) {
            queryCollectors = new ArrayList<>();
        }
        queryCollectors.add(collector);
    }

    public DocIdSetCollector mainDocIdSetCollector() {
        return this.mainDocIdSetCollector;
    }

    public void enableMainDocIdSetCollector() {
        this.enableMainDocIdSetCollector = true;
    }

    public void inStage(Stage stage) {
        this.currentState = stage;
    }

    public void finishStage(Stage stage) {
        assert currentState == stage : "Expected stage " + stage + " but was stage " + currentState;
        this.currentState = Stage.NA;
    }

    @Override
    public Query rewrite(Query original) throws IOException {
        if (original == searchContext.query() || original == searchContext.parsedQuery().query()) {
            // optimize in case its the top level search query and we already rewrote it...
            if (searchContext.queryRewritten()) {
                return searchContext.query();
            }
            Query rewriteQuery = in.rewrite(original);
            searchContext.updateRewriteQuery(rewriteQuery);
            return rewriteQuery;
        } else {
            return in.rewrite(original);
        }
    }

    @Override
    public Weight createNormalizedWeight(Query query) throws IOException {
        try {
            // if its the main query, use we have dfs data, only then do it
            if (dfSource != null && (query == searchContext.query() || query == searchContext.parsedQuery().query())) {
                return dfSource.createNormalizedWeight(query);
            }
            return in.createNormalizedWeight(query);
        } catch (Throwable t) {
            searchContext.clearReleasables(Lifetime.COLLECTION);
            // Rethrow the exception types the signature already declares
            // unchanged, instead of losing them inside a RuntimeException.
            if (t instanceof IOException) {
                throw (IOException) t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException) t;
            }
            throw new RuntimeException(t);
        }
    }

    @Override
    public void search(List<AtomicReaderContext> leaves, Weight weight, Collector collector) throws IOException {
        if (searchContext.timeoutInMillis() != -1) {
            // TODO: change to use our own counter that uses the scheduler in ThreadPool
            collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), searchContext.timeoutInMillis());
        }
        if (currentState == Stage.MAIN_QUERY) {
            if (enableMainDocIdSetCollector) {
                // TODO should we create a cache of segment->docIdSets so we won't create one each time?
                collector = this.mainDocIdSetCollector = new DocIdSetCollector(searchContext.docSetCache(), collector);
            }
            if (searchContext.parsedPostFilter() != null) {
                // this will only get applied to the actual search collector and not
                // to any scoped collectors, also, it will only be applied to the main collector
                // since that is where the filter should only work
                collector = new FilteredCollector(collector, searchContext.parsedPostFilter().filter());
            }
            if (queryCollectors != null && !queryCollectors.isEmpty()) {
                collector = new MultiCollector(collector, queryCollectors.toArray(new Collector[queryCollectors.size()]));
            }
            // apply the minimum score after multi collector so we filter facets as well
            if (searchContext.minimumScore() != null) {
                collector = new MinimumScoreCollector(collector, searchContext.minimumScore());
            }
        }
        // we only compute the doc id set once since within a context, we execute the same query always...
        try {
            if (searchContext.timeoutInMillis() != -1) {
                try {
                    super.search(leaves, weight, collector);
                } catch (TimeLimitingCollector.TimeExceededException e) {
                    // a timeout is reported on the result, not propagated
                    searchContext.queryResult().searchTimedOut(true);
                }
            } else {
                super.search(leaves, weight, collector);
            }
            if (currentState == Stage.MAIN_QUERY) {
                if (enableMainDocIdSetCollector) {
                    enableMainDocIdSetCollector = false;
                    mainDocIdSetCollector.postCollection();
                }
                if (queryCollectors != null && !queryCollectors.isEmpty()) {
                    for (Collector queryCollector : queryCollectors) {
                        if (queryCollector instanceof XCollector) {
                            ((XCollector) queryCollector).postCollection();
                        }
                    }
                }
            }
        } finally {
            searchContext.clearReleasables(Lifetime.COLLECTION);
        }
    }

    @Override
    public Explanation explain(Query query, int doc) throws IOException {
        try {
            if (searchContext.aliasFilter() == null) {
                return super.explain(query, doc);
            }
            // constrain the explanation to documents matching the alias filter
            XFilteredQuery filteredQuery = new XFilteredQuery(query, searchContext.aliasFilter());
            return super.explain(filteredQuery, doc);
        } finally {
            searchContext.clearReleasables(Lifetime.COLLECTION);
        }
    }
}
| |
/*
* Copyright 2012-2014 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENSE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package nl.esciencecenter.xnattool.ui;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextArea;
import javax.swing.SwingUtilities;
import javax.swing.border.BevelBorder;
import nl.esciencecenter.ptk.ui.widgets.LocationSelectionField;
import nl.esciencecenter.ptk.ui.widgets.LocationSelectionField.LocationType;
import nl.esciencecenter.xnattool.ui.UIUtil.UISettings;
import nl.esciencecenter.xnattool.ui.UIUtil.UISettings.UIType;
import com.jgoodies.forms.factories.FormFactory;
import com.jgoodies.forms.layout.ColumnSpec;
import com.jgoodies.forms.layout.FormLayout;
import com.jgoodies.forms.layout.RowSpec;
public class ConfigDialog extends javax.swing.JDialog
{
private static final long serialVersionUID = -5558298493342571756L;
/**
 * Factory for a {@link ConfigDialog} bound to the given parent frame and
 * controller. The dialog is only constructed here; packing, positioning and
 * showing it are left to the caller.
 *
 * @param frame parent frame the dialog is attached to
 * @param mainController main tool controller backing this dialog
 * @param firstRun whether this is the tool's first run (extra intro text)
 * @return the newly created, not-yet-visible dialog
 */
public static ConfigDialog createConfigDialog(JFrame frame, XnatToolPanelController mainController, boolean firstRun)
{
    return new ConfigDialog(frame, mainController, firstRun);
}
// ===
public String firstRunText = "Important: Before you start the tool please check the settings below.\n\n";
public String configDialogText =
"Specify DataSet Configuration directory and Image Cache Dir.\n"
+ "The DataSet Configuration directory stores your DataSet profiles and identity mappings.\n"
+ "The Image Cache directory holds temporary processed dicom images, which after uploading can be deleted.\n"
+ "If the directories below do not exist yet, please create them by pressing the 'Create' button.\n";
// ===
private JPanel MainPanel;
private JPanel topPanel;
private JLabel dataSetConfigLbl;
protected JButton createImageCacheDirBut;
protected JButton createDatasetsConfigDirBut;
private JButton okBut;
protected JButton clearCacheBut;
private LocationSelectionField imageCacheDirTF;
private JLabel imageCacheDirectoryLbl;
private JButton cancelBut;
private JPanel buttonPanel;
private LocationSelectionField datasetConfigDirTF;
private JTextArea helpTextArea;
private ConfigDialogController controller;
private XnatToolPanelController mainController;
private boolean firstRun = false;
protected JCheckBox keepProcessedDicomCB;
protected JCheckBox autoCreateIdMappingsCB;
protected JCheckBox autoExtractMetaDataCB;
/**
 * Auto-generated entry point that shows this dialog standalone for testing.
 * The dialog is created and made visible on the Swing event dispatch thread.
 */
public static void main(String[] args)
{
    Runnable showDialog = new Runnable()
    {
        public void run()
        {
            JFrame frame = new JFrame();
            new ConfigDialog(frame).setVisible(true);
        }
    };
    SwingUtilities.invokeLater(showDialog);
}
    /**
     * Minimal constructor used by the GUI builder and {@link #main}; no main
     * controller is attached and {@code firstRun} keeps its default (false).
     *
     * @wbp.parser.constructor
     */
    public ConfigDialog(JFrame frame)
    {
        super(frame);
        initGUI();
    }
    /**
     * Creates the dialog bound to the application's main controller.
     *
     * @param frame          parent frame for the dialog
     * @param mainController application-level controller this dialog reports to
     * @param firstRun       when true the help text starts with the first-run warning
     */
    public ConfigDialog(JFrame frame, XnatToolPanelController mainController, boolean firstRun)
    {
        super(frame);
        this.mainController = mainController;
        this.firstRun = firstRun;
        initGUI();
    }
public XnatToolPanelController getMasterController()
{
return mainController;
}
    /**
     * Builds the dialog: a header panel (NORTH), a JGoodies FormLayout main
     * panel holding the help text, directory fields, option check boxes and
     * "Create"/"Clear Cache" buttons (CENTER), and the Ok/Cancel button bar
     * (SOUTH). Also instantiates the controller and wires it to every
     * actionable component via string action commands derived from UIAction.
     */
    private void initGUI()
    {
        this.controller = new ConfigDialogController(this);
        UISettings uiSettings = UIUtil.getUISettings();
        try
        {
            // Header panel at the top of the dialog.
            {
                topPanel = new JPanel();
                getContentPane().add(topPanel, BorderLayout.NORTH);
                topPanel.setBorder(BorderFactory.createEtchedBorder(BevelBorder.LOWERED));
                topPanel.setLayout(new FlowLayout(FlowLayout.CENTER, 5, 5));
                {
                    JLabel topPanelLbl = new JLabel("Tool Configuration Options.");
                    topPanelLbl.setFont(new Font("DejaVu Sans Mono", Font.BOLD, 14));
                    topPanel.add(topPanelLbl);
                }
            }
            // Central panel: 9-column / 21-row FormLayout grid. Cell constraint
            // strings below are "col, row[, colSpan, rowSpan]".
            {
                MainPanel = new JPanel();
                getContentPane().add(MainPanel, BorderLayout.CENTER);
                FormLayout MainPanelLayout = new FormLayout(new ColumnSpec[] {
                        ColumnSpec.decode("8dlu"),
                        ColumnSpec.decode("12dlu"),
                        ColumnSpec.decode("42dlu"),
                        ColumnSpec.decode("17dlu:grow"),
                        ColumnSpec.decode("74dlu"),
                        ColumnSpec.decode("17dlu:grow"),
                        FormFactory.UNRELATED_GAP_COLSPEC,
                        ColumnSpec.decode("55dlu"),
                        ColumnSpec.decode("8dlu"),
                },
                        new RowSpec[] {
                        RowSpec.decode("5dlu"),
                        RowSpec.decode("6dlu"),
                        RowSpec.decode("max(32dlu;default):grow"),
                        FormFactory.RELATED_GAP_ROWSPEC,
                        RowSpec.decode("16dlu"),
                        RowSpec.decode("5dlu"),
                        RowSpec.decode("max(15dlu;pref)"),
                        RowSpec.decode("5dlu"),
                        FormFactory.DEFAULT_ROWSPEC,
                        FormFactory.RELATED_GAP_ROWSPEC,
                        FormFactory.DEFAULT_ROWSPEC,
                        FormFactory.RELATED_GAP_ROWSPEC,
                        RowSpec.decode("16dlu"),
                        RowSpec.decode("5dlu"),
                        RowSpec.decode("14dlu"),
                        FormFactory.RELATED_GAP_ROWSPEC,
                        FormFactory.DEFAULT_ROWSPEC,
                        FormFactory.RELATED_GAP_ROWSPEC,
                        RowSpec.decode("13dlu"),
                        RowSpec.decode("16dlu"),
                        RowSpec.decode("5dlu:grow"),
                });
                MainPanel.setLayout(MainPanelLayout);
                MainPanel.setBorder(BorderFactory.createEtchedBorder(BevelBorder.LOWERED));
                // topSubBorderPnl.setPreferredSize(new java.awt.Dimension(504, 77));
                // Read-only help text (content depends on firstRun).
                {
                    helpTextArea = new JTextArea();
                    MainPanel.add(helpTextArea, "2, 3, 7, 1");
                    helpTextArea.setText(getHelpText(this.firstRun));
                    // helpTextArea.setPreferredSize(new java.awt.Dimension(244, 100));
                    helpTextArea.setEditable(false);
                    helpTextArea.setLineWrap(true);
                    helpTextArea.setBorder(BorderFactory.createEtchedBorder(BevelBorder.LOWERED));
                    uiSettings.applySetting(helpTextArea, UIType.INFO_TEXTFIELD);
                }
                // MainPanel.setPreferredSize(new java.awt.Dimension(518, 205));
                // Section label for the dataset-configuration block.
                {
                    dataSetConfigLbl = new JLabel();
                    MainPanel.add(dataSetConfigLbl, "2, 5, 7, 1");
                    dataSetConfigLbl.setText("Dataset Configuration Directory and Options:");
                    dataSetConfigLbl.setBorder(BorderFactory.createEtchedBorder(BevelBorder.LOWERED));
                }
                // Dataset configuration directory chooser; controller is notified
                // both on explicit action and on focus loss (manual edits).
                {
                    datasetConfigDirTF = new LocationSelectionField(LocationType.DirType);
                    MainPanel.add(datasetConfigDirTF, "3, 7, 4, 1");
                    datasetConfigDirTF.setLocationText("<Dataset Config Directory>");
                    datasetConfigDirTF.addLocationActionListener(controller);
                    datasetConfigDirTF.addFocusListener(new FocusListener() {
                        public void focusGained(FocusEvent e)
                        {
                        }
                        public void focusLost(FocusEvent e)
                        {
                            getController().updateDatasetConfigDirChanged();
                        }
                    });
                    datasetConfigDirTF.setLocationActionCommand("" + UIAction.FIELD_CONFIGDIR_CHANGED);
                }
                // Dataset options.
                {
                    autoCreateIdMappingsCB = new JCheckBox("Auto create ID Mappings file.");
                    MainPanel.add(autoCreateIdMappingsCB, "3, 9, 3, 1");
                    autoCreateIdMappingsCB.setActionCommand("" + UIAction.OPTION_AUTO_CREATE_ID_MAPPINGS);
                    autoCreateIdMappingsCB.addActionListener(controller);
                }
                {
                    autoExtractMetaDataCB = new JCheckBox("Auto extract meta-data from dicom files.");
                    MainPanel.add(autoExtractMetaDataCB, "3, 11, 3, 1");
                    autoExtractMetaDataCB.setActionCommand("" + UIAction.OPTION_AUTO_EXTRACT_META_DATA);
                    autoExtractMetaDataCB.addActionListener(controller);
                }
                // Section label for the image-cache block.
                {
                    imageCacheDirectoryLbl = new JLabel();
                    MainPanel.add(imageCacheDirectoryLbl, "2, 13, 7, 1");
                    imageCacheDirectoryLbl.setText("Image Cache Directory and Caching Options");
                    imageCacheDirectoryLbl.setBorder(BorderFactory.createEtchedBorder(BevelBorder.LOWERED));
                }
                // Image cache directory chooser; same dual notification pattern
                // as the dataset directory field above.
                {
                    imageCacheDirTF = new LocationSelectionField(LocationType.DirType);
                    MainPanel.add(imageCacheDirTF, "3, 15, 4, 1");
                    imageCacheDirTF.setLocationText("<Image Cache Directory>");
                    imageCacheDirTF.addLocationActionListener(controller);
                    imageCacheDirTF.addFocusListener(new FocusListener() {
                        public void focusGained(FocusEvent e)
                        {
                        }
                        public void focusLost(FocusEvent e)
                        {
                            getController().updateImageCacheDirChanged();
                        }
                    });
                    imageCacheDirTF.setLocationActionCommand("" + UIAction.FIELD_IMAGECACHEDIR_CHANGED);
                }
                // Cache options and action buttons.
                {
                    keepProcessedDicomCB = new JCheckBox("Keep Processed Dicom files.");
                    MainPanel.add(keepProcessedDicomCB, "3, 17, 3, 1");
                    keepProcessedDicomCB.setActionCommand("" + UIAction.OPTION_CACHE_KEEP_PROCESSED_DICOM);
                    keepProcessedDicomCB.addActionListener(controller);
                }
                {
                    clearCacheBut = new JButton();
                    MainPanel.add(clearCacheBut, "5, 20");
                    clearCacheBut.setText("Clear Cache");
                    clearCacheBut.setActionCommand("" + UIAction.CLEAR_IMAGECACHEDIR);
                    clearCacheBut.addActionListener(controller);
                }
                {
                    createDatasetsConfigDirBut = new JButton();
                    MainPanel.add(createDatasetsConfigDirBut, "8, 7");
                    createDatasetsConfigDirBut.setText("Create");
                    createDatasetsConfigDirBut.setActionCommand("" + UIAction.CONFIG_CREATE_CONFIGSDIR);
                    createDatasetsConfigDirBut.addActionListener(controller);
                }
                {
                    createImageCacheDirBut = new JButton();
                    MainPanel.add(createImageCacheDirBut, "8, 15");
                    createImageCacheDirBut.setText("Create");
                    createImageCacheDirBut.setActionCommand("" + UIAction.CONFIG_CREATE_IMAGECACHEDIR);
                    createImageCacheDirBut.addActionListener(controller);
                }
            }
            // Ok/Cancel button bar at the bottom; these two use inline
            // listeners instead of the shared controller action commands.
            {
                buttonPanel = new JPanel();
                getContentPane().add(buttonPanel, BorderLayout.SOUTH);
                // NOTE(review): disabling the panel does not disable the child
                // buttons in Swing — confirm this call is intentional.
                buttonPanel.setEnabled(false);
                buttonPanel.setBorder(BorderFactory.createEtchedBorder(BevelBorder.LOWERED));
                {
                    cancelBut = new JButton();
                    buttonPanel.add(cancelBut);
                    cancelBut.setText("Cancel");
                    cancelBut.addActionListener(new ActionListener() {
                        @Override
                        public void actionPerformed(ActionEvent e)
                        {
                            controller.doCancel();
                        }
                    });
                }
                {
                    okBut = new JButton();
                    buttonPanel.add(okBut);
                    okBut.setText("Ok");
                    okBut.addActionListener(new ActionListener() {
                        @Override
                        public void actionPerformed(ActionEvent e)
                        {
                            controller.doOk();
                        }
                    });
                }
            }
            // Fix the width and add some vertical slack over the preferred size.
            this.validate();
            Dimension size = this.getPreferredSize();
            size.width = 800;
            size.height += 64;
            this.setSize(size);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }
private String getHelpText(boolean firstRun)
{
String txt = "";
if (firstRun)
{
txt += txt + firstRunText;
}
txt += this.configDialogText;
return txt;
}
public String getImageCacheDirText()
{
return this.imageCacheDirTF.getLocationText();
}
public String getDataSetConfigDirText()
{
return datasetConfigDirTF.getLocationText();
}
public ConfigDialogController getController()
{
return this.controller;
}
public void setDatasetsConfigDir(String location)
{
datasetConfigDirTF.setLocationText(location);
}
public void setImageCacheDir(String location)
{
this.imageCacheDirTF.setLocationText(location);
}
public boolean getAutoCreateDirs()
{
return true;
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
package fixtures.http;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in HttpRetrys.
*/
public interface HttpRetrys {
    /**
     * Return 408 status code, then 200 after retry.
     *
     */
    void head408();
    /**
     * Return 408 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> head408Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 408 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> head408Async();
    /**
     * Return 408 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> head408WithServiceResponseAsync();
    /**
     * Return 500 status code, then 200 after retry.
     *
     */
    void put500();
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> put500Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> put500Async();
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> put500WithServiceResponseAsync();
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     */
    void put500(Boolean booleanValue);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> put500Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> put500Async(Boolean booleanValue);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> put500WithServiceResponseAsync(Boolean booleanValue);
    /**
     * Return 500 status code, then 200 after retry.
     *
     */
    void patch500();
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> patch500Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> patch500Async();
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> patch500WithServiceResponseAsync();
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     */
    void patch500(Boolean booleanValue);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> patch500Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> patch500Async(Boolean booleanValue);
    /**
     * Return 500 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> patch500WithServiceResponseAsync(Boolean booleanValue);
    /**
     * Return 502 status code, then 200 after retry.
     *
     */
    void get502();
    /**
     * Return 502 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> get502Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 502 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> get502Async();
    /**
     * Return 502 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> get502WithServiceResponseAsync();
    /**
     * Return 503 status code, then 200 after retry.
     *
     */
    void post503();
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> post503Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> post503Async();
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> post503WithServiceResponseAsync();
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     */
    void post503(Boolean booleanValue);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> post503Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> post503Async(Boolean booleanValue);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> post503WithServiceResponseAsync(Boolean booleanValue);
    /**
     * Return 503 status code, then 200 after retry.
     *
     */
    void delete503();
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> delete503Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> delete503Async();
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> delete503WithServiceResponseAsync();
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     */
    void delete503(Boolean booleanValue);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> delete503Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> delete503Async(Boolean booleanValue);
    /**
     * Return 503 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> delete503WithServiceResponseAsync(Boolean booleanValue);
    /**
     * Return 504 status code, then 200 after retry.
     *
     */
    void put504();
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> put504Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> put504Async();
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> put504WithServiceResponseAsync();
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     */
    void put504(Boolean booleanValue);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> put504Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> put504Async(Boolean booleanValue);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> put504WithServiceResponseAsync(Boolean booleanValue);
    /**
     * Return 504 status code, then 200 after retry.
     *
     */
    void patch504();
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> patch504Async(final ServiceCallback<Void> serviceCallback);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> patch504Async();
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> patch504WithServiceResponseAsync();
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     */
    void patch504(Boolean booleanValue);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @return the {@link ServiceFuture} object
     */
    ServiceFuture<Void> patch504Async(Boolean booleanValue, final ServiceCallback<Void> serviceCallback);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} object if successful.
     */
    Observable<Void> patch504Async(Boolean booleanValue);
    /**
     * Return 504 status code, then 200 after retry.
     *
     * @param booleanValue Simple boolean value true
     * @return the {@link Observable} to the {@link ServiceResponse} object if successful.
     */
    Observable<ServiceResponse<Void>> patch504WithServiceResponseAsync(Boolean booleanValue);
}
| |
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.AbstractIntegrationTest;
import org.junit.Test;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.percolator.PercolatorTests.convertFromTextArray;
import static org.elasticsearch.test.hamcrest.ElasticSearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.*;
/**
*
*/
public class ConcurrentPercolatorTests extends AbstractIntegrationTest {
@Test
public void testSimpleConcurrentPercolator() throws Exception {
client().admin().indices().prepareCreate("index").setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
.build()
).execute().actionGet();
ensureGreen();
final BytesReference onlyField1 = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field1", 1)
.endObject().endObject().bytes();
final BytesReference onlyField2 = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field2", "value")
.endObject().endObject().bytes();
final BytesReference bothFields = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field1", 1)
.field("field2", "value")
.endObject().endObject().bytes();
// We need to index a document / define mapping, otherwise field1 doesn't get reconized as number field.
// If we don't do this, then 'test2' percolate query gets parsed as a TermQuery and not a RangeQuery.
// The percolate api doesn't parse the doc if no queries have registered, so it can't lazily create a mapping
client().prepareIndex("index", "type", "1").setSource(XContentFactory.jsonBuilder().startObject()
.field("field1", 1)
.field("field2", "value")
.endObject()).execute().actionGet();
client().prepareIndex("index", "_percolator", "test1")
.setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field2", "value")).endObject())
.execute().actionGet();
client().prepareIndex("index", "_percolator", "test2")
.setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field1", 1)).endObject())
.execute().actionGet();
final CountDownLatch start = new CountDownLatch(1);
final AtomicBoolean stop = new AtomicBoolean(false);
final AtomicInteger counts = new AtomicInteger(0);
final AtomicBoolean assertionFailure = new AtomicBoolean(false);
Thread[] threads = new Thread[5];
for (int i = 0; i < threads.length; i++) {
Runnable r = new Runnable() {
@Override
public void run() {
try {
start.await();
while (!stop.get()) {
int count = counts.incrementAndGet();
if ((count > 10000)) {
stop.set(true);
}
PercolateResponse percolate;
if (count % 3 == 0) {
percolate = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(bothFields)
.execute().actionGet();
assertThat(percolate.getMatches(), arrayWithSize(2));
assertThat(convertFromTextArray(percolate.getMatches(), "index"), arrayContainingInAnyOrder("test1", "test2"));
} else if (count % 3 == 1) {
percolate = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(onlyField2)
.execute().actionGet();
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "index"), arrayContaining("test1"));
} else {
percolate = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(onlyField1)
.execute().actionGet();
assertThat(percolate.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(percolate.getMatches(), "index"), arrayContaining("test2"));
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (AssertionError e) {
assertionFailure.set(true);
Thread.currentThread().interrupt();
}
}
};
threads[i] = new Thread(r);
threads[i].start();
}
start.countDown();
for (Thread thread : threads) {
thread.join();
}
assertThat(assertionFailure.get(), equalTo(false));
}
@Test
public void testConcurrentAddingAndPercolating() throws Exception {
client().admin().indices().prepareCreate("index").setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.build()
).execute().actionGet();
ensureGreen();
final int numIndexThreads = 3;
final int numPercolateThreads = 6;
final int numPercolatorOperationsPerThread = 1000;
final AtomicBoolean assertionFailure = new AtomicBoolean(false);
final CountDownLatch start = new CountDownLatch(1);
final AtomicInteger runningPercolateThreads = new AtomicInteger(numPercolateThreads);
final AtomicInteger type1 = new AtomicInteger();
final AtomicInteger type2 = new AtomicInteger();
final AtomicInteger type3 = new AtomicInteger();
final AtomicInteger idGen = new AtomicInteger();
Thread[] indexThreads = new Thread[numIndexThreads];
for (int i = 0; i < numIndexThreads; i++) {
final Random rand = new Random(getRandom().nextLong());
Runnable r = new Runnable() {
@Override
public void run() {
try {
XContentBuilder onlyField1 = XContentFactory.jsonBuilder().startObject()
.field("query", termQuery("field1", "value")).endObject();
XContentBuilder onlyField2 = XContentFactory.jsonBuilder().startObject()
.field("query", termQuery("field2", "value")).endObject();
XContentBuilder field1And2 = XContentFactory.jsonBuilder().startObject()
.field("query", boolQuery().must(termQuery("field1", "value")).must(termQuery("field2", "value"))).endObject();
start.await();
while (runningPercolateThreads.get() > 0) {
Thread.sleep(100);
int x = rand.nextInt(3);
String id = Integer.toString(idGen.incrementAndGet());
IndexResponse response;
switch (x) {
case 0:
response = client().prepareIndex("index", "_percolator", id)
.setSource(onlyField1)
.execute().actionGet();
type1.incrementAndGet();
break;
case 1:
response = client().prepareIndex("index", "_percolator", id)
.setSource(onlyField2)
.execute().actionGet();
type2.incrementAndGet();
break;
case 2:
response = client().prepareIndex("index", "_percolator", id)
.setSource(field1And2)
.execute().actionGet();
type3.incrementAndGet();
break;
default:
throw new IllegalStateException("Illegal x=" + x);
}
assertThat(response.getId(), equalTo(id));
assertThat(response.getVersion(), equalTo(1l));
}
} catch (Throwable t) {
assertionFailure.set(true);
logger.error("Error in indexing thread...", t);
}
}
};
indexThreads[i] = new Thread(r);
indexThreads[i].start();
}
Thread[] percolateThreads = new Thread[numPercolateThreads];
for (int i = 0; i < numPercolateThreads; i++) {
Runnable r = new Runnable() {
@Override
public void run() {
try {
XContentBuilder onlyField1Doc = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field1", "value")
.endObject().endObject();
XContentBuilder onlyField2Doc = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field2", "value")
.endObject().endObject();
XContentBuilder field1AndField2Doc = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field1", "value")
.field("field2", "value")
.endObject().endObject();
Random random = getRandom();
start.await();
for (int counter = 0; counter < numPercolatorOperationsPerThread; counter++) {
int x = random.nextInt(3);
int atLeastExpected;
PercolateResponse response;
switch (x) {
case 0:
atLeastExpected = type1.get();
response = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(onlyField1Doc).execute().actionGet();
assertNoFailures(response);
assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards()));
assertThat(response.getMatches().length, greaterThanOrEqualTo(atLeastExpected));
break;
case 1:
atLeastExpected = type2.get();
response = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(onlyField2Doc).execute().actionGet();
assertNoFailures(response);
assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards()));
assertThat(response.getMatches().length, greaterThanOrEqualTo(atLeastExpected));
break;
case 2:
atLeastExpected = type3.get();
response = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(field1AndField2Doc).execute().actionGet();
assertNoFailures(response);
assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards()));
assertThat(response.getMatches().length, greaterThanOrEqualTo(atLeastExpected));
break;
}
}
} catch (Throwable t) {
assertionFailure.set(true);
logger.error("Error in percolate thread...", t);
} finally {
runningPercolateThreads.decrementAndGet();
}
}
};
percolateThreads[i] = new Thread(r);
percolateThreads[i].start();
}
start.countDown();
for (Thread thread : indexThreads) {
thread.join();
}
for (Thread thread : percolateThreads) {
thread.join();
}
assertThat(assertionFailure.get(), equalTo(false));
}
@Test
public void testConcurrentAddingAndRemovingWhilePercolating() throws Exception {
client().admin().indices().prepareCreate("index").setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.build()
).execute().actionGet();
ensureGreen();
final int numIndexThreads = 3;
final int numberPercolateOperation = 100;
final AtomicReference<Throwable> exceptionHolder = new AtomicReference<Throwable>(null);
final AtomicInteger idGen = new AtomicInteger(0);
final Set<String> liveIds = ConcurrentCollections.newConcurrentSet();
final AtomicBoolean run = new AtomicBoolean(true);
Thread[] indexThreads = new Thread[numIndexThreads];
final Semaphore semaphore = new Semaphore(numIndexThreads, true);
for (int i = 0; i < indexThreads.length; i++) {
Runnable r = new Runnable() {
@Override
public void run() {
try {
XContentBuilder doc = XContentFactory.jsonBuilder().startObject()
.field("query", termQuery("field1", "value")).endObject();
outer: while (run.get()) {
semaphore.acquire();
try {
if (!liveIds.isEmpty() && getRandom().nextInt(100) < 19) {
String id;
do {
if (liveIds.isEmpty()) {
continue outer;
}
id = Integer.toString(randomInt(idGen.get()));
} while (!liveIds.remove(id));
DeleteResponse response = client().prepareDelete("index", "_percolator", id)
.execute().actionGet();
assertThat(response.getId(), equalTo(id));
assertThat("doc[" + id + "] should have been deleted, but isn't", response.isNotFound(), equalTo(false));
} else {
String id = Integer.toString(idGen.getAndIncrement());
IndexResponse response = client().prepareIndex("index", "_percolator", id)
.setSource(doc)
.execute().actionGet();
liveIds.add(id);
assertThat(response.isCreated(), equalTo(true)); // We only add new docs
assertThat(response.getId(), equalTo(id));
}
} finally {
semaphore.release();
}
}
} catch (InterruptedException iex) {
logger.error("indexing thread was interrupted...", iex);
run.set(false);
} catch (Throwable t) {
run.set(false);
exceptionHolder.set(t);
logger.error("Error in indexing thread...", t);
}
}
};
indexThreads[i] = new Thread(r);
indexThreads[i].start();
}
XContentBuilder percolateDoc = XContentFactory.jsonBuilder().startObject().startObject("doc")
.field("field1", "value")
.endObject().endObject();
for (int counter = 0; counter < numberPercolateOperation; counter++) {
Thread.sleep(5);
semaphore.acquire(numIndexThreads);
try {
if (!run.get()) {
break;
}
int atLeastExpected = liveIds.size();
PercolateResponse response = client().preparePercolate().setIndices("index").setDocumentType("type")
.setSource(percolateDoc).execute().actionGet();
assertThat(response.getShardFailures(), emptyArray());
assertThat(response.getSuccessfulShards(), equalTo(response.getTotalShards()));
assertThat(response.getMatches().length, equalTo(atLeastExpected));
} finally {
semaphore.release(numIndexThreads);
}
}
run.set(false);
for (Thread thread : indexThreads) {
thread.join();
}
assertThat("exceptionHolder should have been empty, but holds: " + exceptionHolder.toString(), exceptionHolder.get(), nullValue());
}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app.data;
import android.annotation.TargetApi;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
/**
 * ContentProvider for the Sunshine weather app. Routes content URIs to the
 * underlying SQLite database (weather and location tables) through a
 * {@link UriMatcher}, and notifies registered observers on data changes.
 */
public class WeatherProvider extends ContentProvider {

    // The URI Matcher used by this content provider.
    private static final UriMatcher sUriMatcher = buildUriMatcher();
    private WeatherDbHelper mOpenHelper;

    // URI match codes returned by sUriMatcher for the supported URI shapes.
    static final int WEATHER = 100;
    static final int WEATHER_WITH_LOCATION = 101;
    static final int WEATHER_WITH_LOCATION_AND_DATE = 102;
    static final int LOCATION = 300;

    // Query builder for weather rows joined with their owning location row.
    private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder;

    static {
        sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder();

        // This is an inner join which looks like
        // weather INNER JOIN location ON weather.location_id = location._id
        sWeatherByLocationSettingQueryBuilder.setTables(
                WeatherContract.WeatherEntry.TABLE_NAME + " INNER JOIN " +
                        WeatherContract.LocationEntry.TABLE_NAME +
                        " ON " + WeatherContract.WeatherEntry.TABLE_NAME +
                        "." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY +
                        " = " + WeatherContract.LocationEntry.TABLE_NAME +
                        "." + WeatherContract.LocationEntry._ID);
    }

    //location.location_setting = ?
    private static final String sLocationSettingSelection =
            WeatherContract.LocationEntry.TABLE_NAME+
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? ";

    //location.location_setting = ? AND date >= ?
    private static final String sLocationSettingWithStartDateSelection =
            WeatherContract.LocationEntry.TABLE_NAME+
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherContract.WeatherEntry.COLUMN_DATE + " >= ? ";

    //location.location_setting = ? AND date = ?
    private static final String sLocationSettingAndDaySelection =
            WeatherContract.LocationEntry.TABLE_NAME +
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherContract.WeatherEntry.COLUMN_DATE + " = ? ";

    /**
     * Queries weather rows for the location setting embedded in the URI.
     * If the URI also carries a start date, only rows on or after it are returned.
     */
    private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) {
        String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
        long startDate = WeatherContract.WeatherEntry.getStartDateFromUri(uri);

        String[] selectionArgs;
        String selection;

        // A start date of 0 means "no start date supplied" (see getStartDateFromUri).
        if (startDate == 0) {
            selection = sLocationSettingSelection;
            selectionArgs = new String[]{locationSetting};
        } else {
            selectionArgs = new String[]{locationSetting, Long.toString(startDate)};
            selection = sLocationSettingWithStartDateSelection;
        }

        return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
                projection,
                selection,
                selectionArgs,
                null,
                null,
                sortOrder
        );
    }

    /** Queries the single weather row matching the URI's location setting and exact date. */
    private Cursor getWeatherByLocationSettingAndDate(
            Uri uri, String[] projection, String sortOrder) {
        String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
        long date = WeatherContract.WeatherEntry.getDateFromUri(uri);

        return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
                projection,
                sLocationSettingAndDaySelection,
                new String[]{locationSetting, Long.toString(date)},
                null,
                null,
                sortOrder
        );
    }

    /**
     * Builds the UriMatcher that maps content URIs to the WEATHER,
     * WEATHER_WITH_LOCATION, WEATHER_WITH_LOCATION_AND_DATE, and LOCATION codes.
     */
    static UriMatcher buildUriMatcher() {
        // NO_MATCH is the code returned for the root URI (no match).
        final UriMatcher uriMatcher = new UriMatcher(UriMatcher.NO_MATCH);

        // "weather", "weather/*", "weather/*/#", and "location" respectively.
        uriMatcher.addURI(WeatherContract.CONTENT_AUTHORITY, WeatherContract.PATH_WEATHER, WEATHER);
        uriMatcher.addURI(WeatherContract.CONTENT_AUTHORITY,
                WeatherContract.PATH_WEATHER+"/*", WEATHER_WITH_LOCATION);
        uriMatcher.addURI(WeatherContract.CONTENT_AUTHORITY,
                WeatherContract.PATH_WEATHER+"/*/#", WEATHER_WITH_LOCATION_AND_DATE);
        uriMatcher.addURI(WeatherContract.CONTENT_AUTHORITY, WeatherContract.PATH_LOCATION, LOCATION);
        return uriMatcher;
    }

    /** Creates the database helper for later use. */
    @Override
    public boolean onCreate() {
        mOpenHelper = new WeatherDbHelper(getContext());
        return true;
    }

    /** Returns the MIME type (dir or item) for the given content URI. */
    @Override
    public String getType(Uri uri) {

        // Use the Uri Matcher to determine what kind of URI this is.
        final int match = sUriMatcher.match(uri);

        switch (match) {
            case WEATHER:
                return WeatherContract.WeatherEntry.CONTENT_TYPE;
            case LOCATION:
                return WeatherContract.LocationEntry.CONTENT_TYPE;
            case WEATHER_WITH_LOCATION:
                // Location-filtered weather is still a list of rows -> dir type.
                return WeatherContract.WeatherEntry.CONTENT_TYPE;
            case WEATHER_WITH_LOCATION_AND_DATE:
                // Location + exact date identifies a single row -> item type.
                return WeatherContract.WeatherEntry.CONTENT_ITEM_TYPE;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
    }

    /** Dispatches a query to the table (or join) selected by the URI. */
    @Override
    public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
                        String sortOrder) {
        Cursor retCursor;
        switch (sUriMatcher.match(uri)) {
            // "weather/*/*"
            case WEATHER_WITH_LOCATION_AND_DATE:
            {
                retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder);
                break;
            }
            // "weather/*"
            case WEATHER_WITH_LOCATION: {
                retCursor = getWeatherByLocationSetting(uri, projection, sortOrder);
                break;
            }
            // "weather"
            case WEATHER: {
                retCursor = mOpenHelper.getReadableDatabase().query(
                        WeatherContract.WeatherEntry.TABLE_NAME,
                        projection,
                        selection,
                        selectionArgs,
                        null,
                        null,
                        sortOrder
                );
                break;
            }
            // "location"
            case LOCATION: {
                retCursor = mOpenHelper.getReadableDatabase().query(
                        WeatherContract.LocationEntry.TABLE_NAME,
                        projection,
                        selection,
                        selectionArgs,
                        null,
                        null,
                        sortOrder
                );
                break;
            }

            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        // Watch the URI so cursor consumers are notified when the data changes.
        retCursor.setNotificationUri(getContext().getContentResolver(), uri);
        return retCursor;
    }

    /**
     * Inserts a single weather or location row.
     *
     * @throws android.database.SQLException if the insert fails
     * @throws UnsupportedOperationException for any other URI
     */
    @Override
    public Uri insert(Uri uri, ContentValues values) {
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        Uri returnUri;

        switch (match) {
            case WEATHER: {
                normalizeDate(values);
                long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, values);
                if ( _id > 0 )
                    returnUri = WeatherContract.WeatherEntry.buildWeatherUri(_id);
                else
                    throw new android.database.SQLException("Failed to insert row into " + uri);
                break;
            }
            case LOCATION: {
                long _id = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, values);
                if ( _id > 0 ){
                    returnUri = WeatherContract.LocationEntry.buildLocationUri(_id);
                }
                else
                    // Fixed typo in error message ("Failed o insert" -> "Failed to insert").
                    throw new android.database.SQLException("Failed to insert row into " + uri);
                break;
            }
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return returnUri;
    }

    /**
     * Deletes rows matching the selection; a null selection deletes all rows.
     * Observers are notified only when at least one row was deleted.
     *
     * @return the number of rows deleted
     */
    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        int del;
        // "1" makes delete() return the number of rows removed when deleting all rows.
        if (selection == null) { selection = "1"; }
        switch (match){
            case WEATHER: {
                del = db.delete(WeatherContract.WeatherEntry.TABLE_NAME, selection, selectionArgs);
                break;
            }
            case LOCATION:
                del = db.delete(WeatherContract.LocationEntry.TABLE_NAME, selection, selectionArgs);
                break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        if (del != 0) {
            getContext().getContentResolver().notifyChange(uri, null);
        }
        return del;
    }

    /** Normalizes the date value in-place to the canonical day granularity, if present. */
    private void normalizeDate(ContentValues values) {
        if (values.containsKey(WeatherContract.WeatherEntry.COLUMN_DATE)) {
            long dateValue = values.getAsLong(WeatherContract.WeatherEntry.COLUMN_DATE);
            values.put(WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue));
        }
    }

    /**
     * Updates rows matching the selection. Observers are notified only when at
     * least one row was changed.
     *
     * @return the number of rows updated
     */
    @Override
    public int update(
            Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        int ups;
        switch (match){
            case WEATHER: {
                ups = db.update(WeatherContract.WeatherEntry.TABLE_NAME, values, selection, selectionArgs);
                break;
            }
            case LOCATION:
                ups = db.update(WeatherContract.LocationEntry.TABLE_NAME, values, selection, selectionArgs);
                break;
            default:
                throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
        if (ups != 0) {
            getContext().getContentResolver().notifyChange(uri, null);
        }
        return ups;
    }

    /**
     * Inserts many weather rows in a single transaction for efficiency.
     * Non-WEATHER URIs fall back to the default row-by-row implementation.
     *
     * @return the number of rows successfully inserted
     */
    @Override
    public int bulkInsert(Uri uri, ContentValues[] values) {
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        switch (match) {
            case WEATHER:
                db.beginTransaction();
                int returnCount = 0;
                try {
                    for (ContentValues value : values) {
                        normalizeDate(value);
                        long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, value);
                        if (_id != -1) {
                            returnCount++;
                        }
                    }
                    db.setTransactionSuccessful();
                } finally {
                    db.endTransaction();
                }
                getContext().getContentResolver().notifyChange(uri, null);
                return returnCount;
            default:
                return super.bulkInsert(uri, values);
        }
    }

    // You do not need to call this method. This is a method specifically to assist the testing
    // framework in running smoothly. You can read more at:
    // http://developer.android.com/reference/android/content/ContentProvider.html#shutdown()
    @Override
    @TargetApi(11)
    public void shutdown() {
        mOpenHelper.close();
        super.shutdown();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.api;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.sysml.api.jmlc.JMLCUtils;
import org.apache.sysml.api.mlcontext.MLContextUtil;
import org.apache.sysml.api.mlcontext.ScriptType;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;
import org.apache.sysml.hops.codegen.SpoofCompiler;
import org.apache.sysml.hops.rewrite.ProgramRewriter;
import org.apache.sysml.hops.rewrite.RewriteRemovePersistentReadWrite;
import org.apache.sysml.parser.DMLProgram;
import org.apache.sysml.parser.DMLTranslator;
import org.apache.sysml.parser.LanguageException;
import org.apache.sysml.parser.ParseException;
import org.apache.sysml.parser.ParserFactory;
import org.apache.sysml.parser.ParserWrapper;
import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
import org.apache.sysml.runtime.controlprogram.Program;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContextFactory;
import org.apache.sysml.runtime.instructions.cp.Data;
import org.apache.sysml.runtime.instructions.gpu.context.GPUContext;
import org.apache.sysml.runtime.instructions.gpu.context.GPUObject;
import org.apache.sysml.runtime.util.UtilFunctions;
import org.apache.sysml.utils.Explain;
import org.apache.sysml.utils.Statistics;
import org.apache.sysml.utils.Explain.ExplainCounts;
import org.apache.sysml.utils.Explain.ExplainType;
import org.apache.sysml.yarn.DMLAppMasterUtils;
import org.apache.sysml.yarn.DMLYarnClientProxy;
import org.apache.sysml.runtime.DMLRuntimeException;
/**
 * Utility entry points shared by the DMLScript, MLContext and JMLC APIs for
 * compiling a DML/PyDML script into a runtime {@link Program} and for executing
 * a runtime program (including GPU context handling and statistics reporting).
 */
public class ScriptExecutorUtils {

	/** True if the JCuda classes are on the classpath (required by the GPU backend). */
	public static final boolean IS_JCUDA_AVAILABLE;
	static {
		// Early detection of JCuda libraries avoids synchronization overhead for common JMLC scenario:
		// i.e. CPU-only multi-threaded execution
		boolean isJCudaAvailable = false;
		try {
			Class.forName("jcuda.Pointer");
			isJCudaAvailable = true;
		}
		catch (ClassNotFoundException e) { } // JCuda absent -> GPU backend unusable; flag stays false
		IS_JCUDA_AVAILABLE = isJCudaAvailable;
	}

	/** API through which a script is compiled/executed; several steps below branch on this. */
	public static enum SystemMLAPI {
		DMLScript,
		MLContext,
		JMLC
	}

	/** Convenience overload used by JMLC-style callers: registered inputs/outputs, no commandline args. */
	public static Program compileRuntimeProgram(String script, Map<String,String> nsscripts, Map<String, String> args,
			String[] inputs, String[] outputs, ScriptType scriptType, DMLConfig dmlconf, SystemMLAPI api) {
		return compileRuntimeProgram(script, nsscripts, args, null, null, inputs, outputs,
				scriptType, dmlconf, api, true, false, false);
	}

	/** Convenience overload used by DMLScript-style callers: commandline args, no registered inputs/outputs. */
	public static Program compileRuntimeProgram(String script, Map<String, String> args, String[] allArgs,
			ScriptType scriptType, DMLConfig dmlconf, SystemMLAPI api) {
		return compileRuntimeProgram(script, Collections.emptyMap(), args, allArgs, null, null, null,
				scriptType, dmlconf, api, true, false, false);
	}

	/**
	 * Compile a runtime program
	 *
	 * @param script string representing of the DML or PyDML script
	 * @param nsscripts map (name, script) of the DML or PyDML namespace scripts
	 * @param args map of input parameters ($) and their values
	 * @param allArgs commandline arguments
	 * @param symbolTable symbol table associated with MLContext
	 * @param inputs string array of input variables to register
	 * @param outputs string array of output variables to register
	 * @param scriptType is this script DML or PyDML
	 * @param dmlconf configuration provided by the user
	 * @param api API used to execute the runtime program
	 * @param performHOPRewrites should perform hop rewrites
	 * @param maintainSymbolTable whether or not all values should be maintained in the symbol table after execution.
	 * @param init whether to (re-)initialize hadoop execution for MLContext
	 * @return compiled runtime program (or null when a YARN appmaster was launched successfully)
	 */
	public static Program compileRuntimeProgram(String script, Map<String,String> nsscripts, Map<String, String> args, String[] allArgs,
			// Input/Outputs registered in MLContext and JMLC. These are set to null by DMLScript
			LocalVariableMap symbolTable, String[] inputs, String[] outputs,
			ScriptType scriptType, DMLConfig dmlconf, SystemMLAPI api,
			// MLContext-specific flags
			boolean performHOPRewrites, boolean maintainSymbolTable,
			boolean init) {
		DMLScript.SCRIPT_TYPE = scriptType;
		Program rtprog;
		// Fail fast on inconsistent GPU configuration before any compilation work.
		if (ConfigurationManager.isGPU() && !IS_JCUDA_AVAILABLE)
			throw new RuntimeException("Incorrect usage: Cannot use the GPU backend without JCuda libraries. Hint: Include systemml-*-extra.jar (compiled using mvn package -P distribution) into the classpath.");
		else if (!ConfigurationManager.isGPU() && ConfigurationManager.isForcedGPU())
			throw new RuntimeException("Incorrect usage: Cannot force a GPU-execution without enabling GPU");
		if(api == SystemMLAPI.JMLC) {
			//check for valid names of passed arguments
			String[] invalidArgs = args.keySet().stream()
				.filter(k -> k==null || !k.startsWith("$")).toArray(String[]::new);
			if( invalidArgs.length > 0 )
				throw new LanguageException("Invalid argument names: "+Arrays.toString(invalidArgs));
			//check for valid names of input and output variables
			String[] invalidVars = UtilFunctions.asSet(inputs, outputs).stream()
				.filter(k -> k==null || k.startsWith("$")).toArray(String[]::new);
			if( invalidVars.length > 0 )
				throw new LanguageException("Invalid variable names: "+Arrays.toString(invalidVars));
		}
		// JMLC compiles in-memory scripts only, hence no parser file path.
		String dmlParserFilePath = (api == SystemMLAPI.JMLC) ? null : DMLScript.DML_FILE_PATH_ANTLR_PARSER;
		try {
			//Step 1: set local/remote memory if requested (for compile in AM context)
			if(api == SystemMLAPI.DMLScript && dmlconf.getBooleanValue(DMLConfig.YARN_APPMASTER) ){
				DMLAppMasterUtils.setupConfigRemoteMaxMemory(dmlconf);
			}

			// Start timer (disabled for JMLC)
			if(api != SystemMLAPI.JMLC)
				Statistics.startCompileTimer();

			//Step 2: parse dml script
			ParserWrapper parser = ParserFactory.createParser(scriptType, nsscripts);
			DMLProgram prog = parser.parse(dmlParserFilePath, script, args);

			//Step 3: construct HOP DAGs (incl LVA, validate, and setup)
			DMLTranslator dmlt = new DMLTranslator(prog);
			dmlt.liveVariableAnalysis(prog);
			dmlt.validateParseTree(prog);
			dmlt.constructHops(prog);

			//init working directories (before usage by following compilation steps)
			if(api != SystemMLAPI.JMLC)
				if (api != SystemMLAPI.MLContext || init)
					DMLScript.initHadoopExecution( dmlconf );

			//Step 4: rewrite HOP DAGs (incl IPA and memory estimates)
			if(performHOPRewrites)
				dmlt.rewriteHopsDAG(prog);

			//Step 5: Remove Persistent Read/Writes
			// (registered inputs/outputs are fed/fetched in-memory instead)
			if(api == SystemMLAPI.JMLC) {
				//rewrite persistent reads/writes
				RewriteRemovePersistentReadWrite rewrite = new RewriteRemovePersistentReadWrite(inputs, outputs);
				ProgramRewriter rewriter2 = new ProgramRewriter(rewrite);
				rewriter2.rewriteProgramHopDAGs(prog);
			}
			else if(api == SystemMLAPI.MLContext) {
				//rewrite persistent reads/writes
				RewriteRemovePersistentReadWrite rewrite = new RewriteRemovePersistentReadWrite(inputs, outputs, symbolTable);
				ProgramRewriter rewriter2 = new ProgramRewriter(rewrite);
				rewriter2.rewriteProgramHopDAGs(prog);
			}

			//Step 6: construct lops (incl exec type and op selection)
			dmlt.constructLops(prog);

			if(DMLScript.LOG.isDebugEnabled()) {
				DMLScript.LOG.debug("\n********************** LOPS DAG *******************");
				dmlt.printLops(prog);
				dmlt.resetLopsDAGVisitStatus(prog);
			}

			//Step 7: generate runtime program, incl codegen
			rtprog = dmlt.getRuntimeProgram(prog, dmlconf);

			// Step 8: Cleanup/post-processing
			if(api == SystemMLAPI.JMLC) {
				JMLCUtils.cleanupRuntimeProgram(rtprog, outputs);
			}
			else if(api == SystemMLAPI.DMLScript) {
				//launch SystemML appmaster (if requested and not already in launched AM)
				if( dmlconf.getBooleanValue(DMLConfig.YARN_APPMASTER) ){
					if( !DMLScript.isActiveAM() && DMLYarnClientProxy.launchDMLYarnAppmaster(script, dmlconf, allArgs, rtprog) )
						return null; //if AM launch unsuccessful, fall back to normal execute
					if( DMLScript.isActiveAM() ) //in AM context (not failed AM launch)
						DMLAppMasterUtils.setupProgramMappingRemoteMaxMemory(rtprog);
				}
			}
			else if(api == SystemMLAPI.MLContext) {
				if (maintainSymbolTable) {
					MLContextUtil.deleteRemoveVariableInstructions(rtprog);
				} else {
					JMLCUtils.cleanupRuntimeProgram(rtprog, outputs);
				}
			}

			//Step 9: prepare statistics [and optional explain output]
			//count number compiled MR jobs / SP instructions
			if(api != SystemMLAPI.JMLC) {
				ExplainCounts counts = Explain.countDistributedOperations(rtprog);
				Statistics.resetNoOfCompiledJobs( counts.numJobs );

				//explain plan of program (hops or runtime)
				if( ConfigurationManager.getDMLOptions().explainType != ExplainType.NONE )
					System.out.println(
						Explain.display(prog, rtprog, ConfigurationManager.getDMLOptions().explainType, counts));

				Statistics.stopCompileTimer();
			}
		}
		catch(ParseException pe) {
			// don't chain ParseException (for cleaner error output)
			throw pe;
		}
		catch(Exception ex) {
			throw new DMLException(ex);
		}
		return rtprog;
	}

	/**
	 * Execute the runtime program. This involves execution of the program
	 * blocks that make up the runtime program and may involve dynamic
	 * recompilation.
	 *
	 * @param rtprog
	 *            runtime program
	 * @param statisticsMaxHeavyHitters
	 *            maximum number of statistics to print
	 * @param symbolTable
	 *            symbol table (that were registered as input as part of MLContext)
	 * @param outputVariables
	 *            output variables (that were registered as output as part of MLContext)
	 * @param api
	 *            API used to execute the runtime program
	 * @param gCtxs
	 *            list of GPU contexts
	 * @return execution context
	 */
	public static ExecutionContext executeRuntimeProgram(Program rtprog, int statisticsMaxHeavyHitters,
			LocalVariableMap symbolTable, HashSet<String> outputVariables,
			SystemMLAPI api, List<GPUContext> gCtxs) {
		// Tracks whether execution failed, so statistics go to stderr instead of stdout.
		boolean exceptionThrown = false;

		// Start timer
		Statistics.startRunTimer();

		// Create execution context and attach registered outputs
		ExecutionContext ec = ExecutionContextFactory.createContext(symbolTable, rtprog);
		if(outputVariables != null)
			ec.getVariables().setRegisteredOutputs(outputVariables);

		// Assign GPUContext to the current ExecutionContext
		if(gCtxs != null) {
			gCtxs.get(0).initializeThread();
			ec.setGPUContexts(gCtxs);
		}
		// GPU-cleanup failures are deferred until after the finally block (see below).
		Exception finalizeException = null;
		try {
			// run execute (w/ exception handling to ensure proper shutdown)
			rtprog.execute(ec);
		} catch (Throwable e) {
			exceptionThrown = true;
			throw e;
		} finally { // ensure cleanup/shutdown
			if (ConfigurationManager.isGPU() && !ec.getGPUContexts().isEmpty()) {
				try {
					HashSet<MatrixObject> outputMatrixObjects = new HashSet<>();
					// -----------------------------------------------------------------
					// The below code pulls the output variables on the GPU to the host. This is required especially when:
					// The output variable was generated as part of a MLContext session with GPU enabled
					// and was passed to another MLContext with GPU disabled
					// The above scenario occurs in our gpu test suite (eg: BatchNormTest).
					if(outputVariables != null) {
						for(String outVar : outputVariables) {
							Data data = ec.getVariable(outVar);
							if(data instanceof MatrixObject) {
								for(GPUContext gCtx : ec.getGPUContexts()) {
									GPUObject gpuObj = ((MatrixObject)data).getGPUObject(gCtx);
									if(gpuObj != null && gpuObj.isDirty()) {
										gpuObj.acquireHostRead(null);
									}
								}
								outputMatrixObjects.add(((MatrixObject)data));
							}
						}
					}
					// -----------------------------------------------------------------
					for(GPUContext gCtx : ec.getGPUContexts()) {
						gCtx.clearTemporaryMemory(outputMatrixObjects);
					}
				} catch (Exception e1) {
					exceptionThrown = true;
					finalizeException = e1; // do not throw exception while cleanup
				}
			}
			if( ConfigurationManager.isCodegenEnabled() )
				SpoofCompiler.cleanupCodeGenerator();

			//cleanup unnecessary outputs
			if (outputVariables != null)
				symbolTable.removeAllNotIn(outputVariables);

			// Display statistics (disabled for JMLC)
			Statistics.stopRunTimer();
			if(api != SystemMLAPI.JMLC) {
				(exceptionThrown ? System.err : System.out)
						.println(Statistics.display(statisticsMaxHeavyHitters > 0 ?
								statisticsMaxHeavyHitters :
								ConfigurationManager.getDMLOptions().getStatisticsMaxHeavyHitters()));
			}
		}
		// Re-surface a deferred GPU-cleanup failure (only reached when execute() itself succeeded).
		if(finalizeException != null) {
			throw new DMLRuntimeException("Error occured while GPU memory cleanup.", finalizeException);
		}
		return ec;
	}
}
| |
package jp.co.flect.javascript.jqgrid;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import java.text.SimpleDateFormat;
import jp.co.flect.util.DateFormatHolder;
import jp.co.flect.log.Logger;
import jp.co.flect.log.LoggerFactory;
/**
 * Executes a paged SQL query (optionally with a separate count query) and
 * converts the results into jqGrid {@link GridData}. Paging is done either via
 * SQL LIMIT/OFFSET (when useOffset is set) or by skipping rows client-side.
 */
public class RdbQuery {

	private static final String DEFAULT_DATETIME_FORMAT = "yyyy/MM/dd HH:mm:ss.SSS";
	private static final String DEFAULT_DATE_FORMAT     = "yyyy/MM/dd";
	private static final String DEFAULT_TIME_FORMAT     = "HH:mm:ss.SSS";

	private static final Logger log = LoggerFactory.getLogger(RdbQuery.class);

	// Statement returning the page rows; has trailing "LIMIT ? OFFSET ?" when useOffset.
	private PreparedStatement queryStmt;
	// Optional statement returning the total row count; null when not supplied.
	private PreparedStatement countStmt;
	private boolean useOffset;
	private DateFormatHolder formats = new DateFormatHolder();

	public RdbQuery(Connection con, String query) throws SQLException {
		this(con, query, null, false);
	}

	public RdbQuery(Connection con, String query, String countQuery) throws SQLException {
		this(con, query, countQuery, false);
	}

	/**
	 * Prepares both statements. If preparing the count query fails, the already
	 * prepared query statement is closed before rethrowing.
	 *
	 * @param con        open JDBC connection (not closed by this class)
	 * @param query      SQL for the page rows
	 * @param countQuery optional SQL returning the total count in column 1, or null
	 * @param useOffset  append " LIMIT ? OFFSET ?" and let the database do the paging
	 */
	public RdbQuery(Connection con, String query, String countQuery, boolean useOffset) throws SQLException {
		log.debug("RdbQuery: query: {0}", query);
		log.debug("RdbQuery: count: {0}", countQuery);
		log.debug("RdbQuery: useOffset: " + useOffset);
		this.useOffset = useOffset;
		if (useOffset) {
			query += " LIMIT ? OFFSET ?";
		}
		this.queryStmt = con.prepareStatement(query);
		if (countQuery != null) {
			try {
				this.countStmt = con.prepareStatement(countQuery);
			} catch (SQLException e) {
				try {
					this.queryStmt.close();
				} catch (SQLException e2) {
					// keep the root cause, but do not lose the close failure
					e.addSuppressed(e2);
				}
				throw e;
			}
		}
	}

	public DateFormatHolder getDateFormatHolder() { return this.formats;}
	public void setDateFormatHolder(DateFormatHolder h) { this.formats = h;}

	public GridData getGridData(int page, int rowCount) throws SQLException {
		return getGridData(page, rowCount, null);
	}

	/**
	 * Fetches one page of rows.
	 *
	 * @param page     1-based page number
	 * @param rowCount rows per page
	 * @param params   bind parameters applied to both the count and page query, or null
	 * @return grid data with rows for the requested page and the total record count
	 */
	public GridData getGridData(int page, int rowCount, List<?> params) throws SQLException {
		int offset = (page - 1) * rowCount;
		int cnt = 0;
		if (this.countStmt != null) {
			if (params != null) {
				countStmt.clearParameters();
				int idx = 1;
				for (Object o : params) {
					setParameter(countStmt, idx++, o);
				}
			}
			ResultSet rs = countStmt.executeQuery();
			try {
				if (rs.next()) {
					cnt = rs.getInt(1);
				}
			} finally {
				rs.close();
			}
		}
		if (params != null || this.useOffset) {
			queryStmt.clearParameters();
			int idx = 1;
			if (params != null) {
				for (Object o : params) {
					setParameter(queryStmt, idx++, o);
				}
			}
			if (this.useOffset) {
				// LIMIT/OFFSET placeholders come after the user parameters.
				queryStmt.setInt(idx++, rowCount);
				queryStmt.setInt(idx++, offset);
			}
		}
		ResultSet rs = queryStmt.executeQuery();
		try {
			ResultSetMetaData meta = rs.getMetaData();
			int len = meta.getColumnCount();
			GridData result = new GridData(rowCount, page);
			while (rs.next()) {
				// Client-side paging: skip rows before the requested page,
				// counting them when no dedicated count query exists.
				if (offset > 0 && !this.useOffset) {
					offset--;
					if (this.countStmt == null) {
						cnt++;
					}
					continue;
				}
				List<Object> list = new ArrayList<Object>();
				for (int i=0; i<len; i++) {
					list.add(getObject(rs, i+1, meta.getColumnType(i+1)));
				}
				result.addRow(list);
				if (this.countStmt == null) {
					cnt++;
				}
				if (result.getRows().size() >= rowCount) {
					break;
				}
			}
			// Without a count query, drain the remaining rows to learn the total.
			if (this.countStmt == null) {
				while (rs.next()) {
					cnt++;
				}
			}
			result.setRecordCount(cnt);
			return result;
		} finally {
			rs.close();
		}
	}

	/**
	 * Closes both prepared statements. If both closes fail, the query
	 * statement's exception is thrown with the count statement's exception
	 * attached as suppressed (previously the first exception was lost).
	 */
	public void close() throws SQLException {
		SQLException ex = null;
		if (this.countStmt != null) {
			try {
				countStmt.close();
			} catch (SQLException e) {
				ex = e;
			}
		}
		try {
			this.queryStmt.close();
		} catch (SQLException e) {
			if (ex != null) {
				e.addSuppressed(ex);
			}
			throw e;
		}
		if (ex != null) {
			throw ex;
		}
	}

	/**
	 * Binds a single parameter using the setter matching its runtime type.
	 *
	 * @throws IllegalArgumentException for null or unsupported types
	 */
	public static void setParameter(PreparedStatement stmt, int idx, Object o) throws SQLException {
		if (o == null) {
			throw new IllegalArgumentException("Not supported null");
		} else if (o instanceof String) {
			stmt.setString(idx, (String)o);
		} else if (o instanceof Integer) {
			stmt.setInt(idx, (Integer)o);
		} else if (o instanceof Boolean) {
			stmt.setBoolean(idx, (Boolean)o);
		} else if (o instanceof Timestamp) {
			// Timestamp/Time checks must precede Date: both extend java.sql.Date's
			// sibling java.util.Date hierarchy and would otherwise be mis-bound.
			stmt.setTimestamp(idx, (Timestamp)o);
		} else if (o instanceof Date) {
			stmt.setDate(idx, (Date)o);
		} else if (o instanceof byte[]) {
			stmt.setBytes(idx, (byte[])o);
		} else if (o instanceof Double) {
			stmt.setDouble(idx, (Double)o);
		} else if (o instanceof Long) {
			stmt.setLong(idx, (Long)o);
		} else if (o instanceof BigDecimal) {
			stmt.setBigDecimal(idx, (BigDecimal)o);
		} else if (o instanceof Float) {
			stmt.setFloat(idx, (Float)o);
		} else if (o instanceof Byte) {
			stmt.setByte(idx, (Byte)o);
		} else if (o instanceof Short) {
			stmt.setShort(idx, (Short)o);
		} else if (o instanceof Time) {
			stmt.setTime(idx, (Time)o);
		} else {
			throw new IllegalArgumentException("Not supported: " + o.getClass());
		}
	}

	/**
	 * Reads column {@code idx} as a Java object appropriate for its SQL type.
	 * Temporal types are shifted by the holder's time difference and formatted
	 * to strings; SQL NULL always yields Java null.
	 */
	public Object getObject(ResultSet rs, int idx, int type) throws SQLException {
		Object ret = null;
		switch (type) {
			case Types.ARRAY:
				ret = rs.getArray(idx);
				break;
			case Types.BIGINT:
			case Types.DECIMAL:
			case Types.NUMERIC:
				ret = rs.getBigDecimal(idx);
				break;
			case Types.BINARY:
			case Types.LONGVARBINARY:
			case Types.VARBINARY:
				ret = rs.getBytes(idx);
				break;
			case Types.BIT:
			case Types.BOOLEAN:
				ret = rs.getBoolean(idx);
				break;
			case Types.BLOB:
				ret = rs.getBlob(idx);
				break;
			case Types.CHAR:
			case Types.LONGNVARCHAR:
			case Types.LONGVARCHAR:
			case Types.NCHAR:
			case Types.NVARCHAR:
			case Types.VARCHAR:
			case Types.OTHER:
				ret = rs.getString(idx);
				break;
			case Types.CLOB:
				ret = rs.getClob(idx);
				break;
			case Types.NCLOB:
				ret = rs.getNClob(idx);
				break;
			case Types.DOUBLE:
			case Types.REAL:
				ret = rs.getDouble(idx);
				break;
			case Types.FLOAT:
				ret = rs.getFloat(idx);
				break;
			case Types.INTEGER:
			case Types.SMALLINT:
			case Types.TINYINT:
				ret = rs.getInt(idx);
				break;
			case Types.JAVA_OBJECT:
				ret = rs.getObject(idx);
				break;
			case Types.NULL:
				ret = null;
				break;
			case Types.REF:
				ret = rs.getRef(idx);
				break;
			case Types.ROWID:
				ret = rs.getRowId(idx);
				break;
			case Types.SQLXML:
				ret = rs.getSQLXML(idx);
				break;
			case Types.DATE:
				Date d = rs.getDate(idx);
				if (d != null) {
					if (this.formats.getTimeDifference() != 0) {
						d = new Date(d.getTime() + this.formats.getTimeDifference());
					}
					ret = this.formats.getDateFormat().format(d);
				}
				break;
			case Types.TIME:
				Time t = rs.getTime(idx);
				if (t != null) {
					if (this.formats.getTimeDifference() != 0) {
						t = new Time(t.getTime() + this.formats.getTimeDifference());
					}
					ret = this.formats.getTimeFormat().format(t);
				}
				break;
			case Types.TIMESTAMP:
				Timestamp ts = rs.getTimestamp(idx);
				if (ts != null) {
					if (this.formats.getTimeDifference() != 0) {
						ts = new Timestamp(ts.getTime() + this.formats.getTimeDifference());
					}
					ret = this.formats.getDatetimeFormat().format(ts);
				}
				break;
			case Types.DATALINK:
			case Types.DISTINCT:
			case Types.STRUCT:
			default:
				// Unknown/unsupported type: log and fall back to the string representation.
				log.warn("Not supported: column=" + idx + ", type=" + type);
				ret = rs.getString(idx);
				break;
		}
		if (rs.wasNull()) {
			ret = null;
		}
		return ret;
	}
}
| |
package org.onetwo.common.spring;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.onetwo.common.date.DateUtils;
import org.onetwo.common.spring.entity.RoleEntity;
import org.onetwo.common.spring.entity.UserEntity;
import org.onetwo.common.utils.LangUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
/**
 * Tests Spring {@link BeanWrapper} based property access, including the project's
 * {@code JFishBeanWrapper} and {@code SpringUtils} map-wrapper helpers: map-key paths
 * ({@code attrs[id]}), auto-grown nested paths ({@code roles[0].name}), array elements
 * and list elements holding both beans and simple values.
 */
public class BeanWrapperTest {

    /** Minimal bean exposing only a map property, used to exercise map-path access. */
    private static class UserData {
        private Map<String, String> attrs = LangUtils.newHashMap();

        public Map<String, String> getAttrs() {
            return attrs;
        }
    }

    private BeanWrapper bw;

    @Test
    public void testMap(){
        UserData u = new UserData();
        bw = PropertyAccessorFactory.forBeanPropertyAccess(u);
        bw.setAutoGrowNestedPaths(true);
        // map values on a bean property are addressed with the attrs[key] syntax
        bw.setPropertyValue("attrs[id]", "11");
        Assert.assertEquals("11", u.getAttrs().get("id"));
        try {
            // dotted access into a Map is not a valid bean path and must fail
            bw.setPropertyValue("attrs.id2", "11");
            Assert.fail("must be fail!");
        } catch (Exception e) {
            Assert.assertNotNull(e);
        }
        Map<String, String> attrs = LangUtils.newHashMap();
        // a map wrapper allows direct key access without the [key] syntax
        bw = SpringUtils.newBeanMapWrapper(attrs);
        bw.setPropertyValue("id", "11");
        Assert.assertEquals("11", attrs.get("id"));
    }

    @Test
    public void testBw(){
        UserEntity user = new UserEntity();
        bw = new JFishBeanWrapper(user);
        bw.setAutoGrowNestedPaths(true);
        bw.setPropertyValue("birthDay", DateUtils.parseByPatterns("2018/07/01", "yyyy/MM/dd"));
        // string values are converted to the target property type (String -> Long)
        bw.setPropertyValue("id", "11");
        Assert.assertTrue(user.getId()==11L);
        // array elements are settable by index
        bw.setPropertyValue("bust[0]", 99);
        Assert.assertEquals(99, user.getBust()[0]);
        bw.setPropertyValue("bust[1]", 90);
        Assert.assertEquals(90, user.getBust()[1]);
        // nested list elements are auto-grown on demand
        bw.setPropertyValue("roles[0].id", 12L);
        bw.setPropertyValue("roles[0].name", "way");
        RoleEntity role = user.getRoles().get(0);
        Assert.assertNotNull(role);
        Assert.assertTrue(role.getId()==12L);
        Assert.assertEquals("way", role.getName());
        // roles[0][name] is an alternative syntax for roles[0].name
        bw.setPropertyValue("roles[0][name]", "test");
        Assert.assertEquals("test", role.getName());
    }

    @Test
    public void testBw2(){
        UserEntity user = new UserEntity();
        bw = new JFishBeanWrapper(user);
        bw.setAutoGrowNestedPaths(true);
        bw.setPropertyValue("id", "11");
        Assert.assertTrue(user.getId()==11L);
        // bracketed nested access must auto-grow the list element as well
        bw.setPropertyValue("roles[0][name]", "test");
        RoleEntity role = user.getRoles().get(0);
        Assert.assertEquals("test", role.getName());
    }

    @Test
    public void testBwMap(){
        Map<String, Object> map = LangUtils.newHashMap();
        UserEntity user = new UserEntity();
        user.setUserName("userName1");
        map.put("user", user);
        // a map wrapper writes top-level property names straight into the map
        bw = SpringUtils.newBeanMapWrapper(map);
        bw.setAutoGrowNestedPaths(true);
        bw.setPropertyValue("id", 11L);
        Assert.assertEquals(11L, map.get("id"));
        bw.setPropertyValue("name", "test");
        Assert.assertEquals("test", map.get("name"));
    }

    @Test
    public void testBwMap2(){
        Map<String, Object> map = LangUtils.newHashMap();
        UserEntity user = new UserEntity();
        user.setUserName("userName1");
        map.put("user", user);
        // self-referencing entry: the wrapper must traverse map -> map -> bean
        map.put("userMap", map);
        bw = SpringUtils.newBeanMapWrapper(map);
        bw.setAutoGrowNestedPaths(true);
        Object userName = bw.getPropertyValue("user.userName");
        System.out.println("userName:" + userName);
        Assert.assertEquals("userName1", userName);
        userName = bw.getPropertyValue("userMap.user.userName");
        System.out.println("userName:" + userName);
        Assert.assertEquals("userName1", userName);
    }

    @Test
    public void testBwList(){
        List<RoleEntity> list = LangUtils.newArrayList();
        UserEntity user = new UserEntity();
        user.setUserName("userName1");
        RoleEntity role = new RoleEntity();
        role.setName("roleName1");
        list.add(role);
        user.setRoles(list);
        bw = SpringUtils.newBeanWrapper(user);
        bw.setAutoGrowNestedPaths(true);
        // read an existing list element through an indexed nested path
        Object userName = bw.getPropertyValue("roles[0].name");
        System.out.println("roleName1:" + userName);
        Assert.assertEquals("roleName1", userName);
    }

    @Test
    public void testBwList2(){
        List<UserEntity> userList = LangUtils.newArrayList();
        UserEntity user = new UserEntity();
        user.setUserName("userName1");
        userList.add(user);
        Map<String, Object> map = LangUtils.newHashMap();
        map.put("users", userList);
        bw = SpringUtils.newBeanMapWrapper(map);
        bw.setAutoGrowNestedPaths(true);
        // read a bean property through a list stored as a map value
        Object userName = bw.getPropertyValue("users[0].userName");
        System.out.println("userName:" + userName);
        Assert.assertEquals("userName1", userName);
    }

    @Test
    public void testBwList3(){
        List<UserEntity> userList = LangUtils.newArrayList();
        Map<String, Object> map = LangUtils.newHashMap();
        map.put("users", userList);
        // declare the element type so auto-grow can instantiate UserEntity for users[0]
        bw = SpringUtils.newBeanMapWrapper(map, "users", UserEntity.class);
        bw.setAutoGrowNestedPaths(true);
        bw.setPropertyValue("users[0].userName", "userName1");
        bw.setPropertyValue("users[0].age", 17);
        Object userName = bw.getPropertyValue("users[0].userName");
        System.out.println("userName:" + userName);
        Assert.assertEquals("userName1", userName);
        Assert.assertEquals(17, bw.getPropertyValue("users[0].age"));
        // both writes must land on the same auto-created element
        Assert.assertEquals(1, userList.size());
    }

    @Test
    public void testBwListWithSimple(){
        List<Long> userList = LangUtils.newArrayList();
        Map<String, Object> map = LangUtils.newHashMap();
        map.put("numbs", userList);
        bw = SpringUtils.newBeanMapWrapper(map);
        bw.setAutoGrowNestedPaths(true);
        // simple (non-bean) list elements are settable by index too
        bw.setPropertyValue("numbs[0]", 1L);
        Long number = (Long)bw.getPropertyValue("numbs[0]");
        System.out.println("number:" + number);
        Assert.assertTrue(number.equals(1L));
        userList = null;
        map.put("numbs", userList);
        bw = SpringUtils.newBeanMapWrapper(map);
        bw.setAutoGrowNestedPaths(true);
        // with a null list, writing index 1 auto-creates the list and pads index 0
        bw.setPropertyValue("numbs[1]", 1L);
        number = (Long)bw.getPropertyValue("numbs[1]");
        System.out.println("number:" + number);
        Assert.assertTrue(number.equals(1L));
        List<?> list = (List<?>)bw.getPropertyValue("numbs");
        Assert.assertEquals(2, list.size());
    }
}
| |
/*
* Copyright 2015-2020 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin2.v1;
import org.junit.Test;
import zipkin2.Endpoint;
import zipkin2.Span;
import zipkin2.Span.Kind;
import static org.assertj.core.api.Assertions.assertThat;
import static zipkin2.TestObjects.BACKEND;
import static zipkin2.TestObjects.FRONTEND;
import static zipkin2.TestObjects.TODAY;
/**
 * Round-trip tests between the v2 {@link Span} model and the legacy v1 model:
 * {@code V2SpanConverter} maps a v2 span onto v1 core annotations ("cs"/"sr"/"ms"...)
 * and binary annotations ("ca"/"sa"/"ma", tags), while {@code V1SpanConverter}
 * reconstructs one or more v2 spans from a v1 span (a shared RPC v1 span yields a
 * client span and a shared server span).
 */
public class SpanConverterTest {
  /** Remote endpoint used by the messaging tests (the "ma" message address). */
  Endpoint kafka = Endpoint.newBuilder().serviceName("kafka").build();

  V2SpanConverter v2SpanConverter = new V2SpanConverter();
  V1SpanConverter v1SpanConverter = new V1SpanConverter();

  @Test
  public void client() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .remoteEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996238000L, "ws")
        .addAnnotation(1472470996403000L, "wr")
        .putTag("http.path", "/api")
        .putTag("clnt/finagle.version", "6.45.0")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996238000L, "ws", FRONTEND) // ts order retained
        .addAnnotation(1472470996403000L, "wr", FRONTEND)
        .addAnnotation(1472470996406000L, "cr", FRONTEND)
        .addBinaryAnnotation("http.path", "/api", FRONTEND)
        .addBinaryAnnotation("clnt/finagle.version", "6.45.0", FRONTEND)
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void client_unfinished() {
    // no duration: conversion must emit "cs" but no terminating "cr"
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996238000L, "ws")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996238000L, "ws", FRONTEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void client_kindInferredFromAnnotation() {
    // the v2 span carries an explicit "cs" annotation instead of Kind.CLIENT
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(1472470996238000L - 1472470996199000L)
        .addAnnotation(1472470996199000L, "cs")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(1472470996238000L - 1472470996199000L)
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996238000L, "cr", FRONTEND)
        .build();
    // v1 -> v2 is intentionally not asserted: the round-trip would infer Kind.CLIENT
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
  }

  @Test
  public void lateRemoteEndpoint_cr() {
    // late-flushed client data: only "cr" present, no timestamp/duration
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .remoteEndpoint(BACKEND)
        .addAnnotation(1472470996199000L, "cr")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .addAnnotation(1472470996199000L, "cr", FRONTEND)
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void lateRemoteEndpoint_sa() {
    // only the remote address is known: maps to a lone "sa" binary annotation
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .remoteEndpoint(BACKEND)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void noAnnotationsExceptAddresses() {
    // a kind-less local span with a remote peer: "lc" carries the local endpoint
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .localEndpoint(FRONTEND)
        .remoteEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addBinaryAnnotation("lc", "", FRONTEND)
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void server() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .kind(Kind.SERVER)
        .localEndpoint(BACKEND)
        .remoteEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .putTag("http.path", "/api")
        .putTag("finagle.version", "6.45.0")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .id(2L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "sr", BACKEND)
        .addAnnotation(1472470996406000L, "ss", BACKEND)
        .addBinaryAnnotation("http.path", "/api", BACKEND)
        .addBinaryAnnotation("finagle.version", "6.45.0", BACKEND)
        .addBinaryAnnotation("ca", FRONTEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** This shows a historical finagle span, which has client-side socket info. */
  @Test
  public void server_clientAddress() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .kind(Kind.SERVER)
        .localEndpoint(BACKEND)
        .remoteEndpoint(FRONTEND.toBuilder().port(63840).build())
        .timestamp(TODAY)
        .duration(207000L)
        .addAnnotation(TODAY + 500L,
            "Gc(9,0.PSScavenge,2015-09-17 12:37:02 +0000,304.milliseconds+762.microseconds)")
        .putTag("srv/finagle.version", "6.28.0")
        .shared(true)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .addAnnotation(v2.timestampAsLong(), "sr", v2.localEndpoint())
        .addAnnotation(
            v2.timestampAsLong() + 500L,
            "Gc(9,0.PSScavenge,2015-09-17 12:37:02 +0000,304.milliseconds+762.microseconds)",
            v2.localEndpoint())
        .addAnnotation(v2.timestampAsLong() + v2.durationAsLong(), "ss", v2.localEndpoint())
        // Sometimes, finagle does not add port info on binary annotations/tags, but does elsewhere
        .addBinaryAnnotation("srv/finagle.version", "6.28.0",
            v2.localEndpoint().toBuilder().port(0).build())
        .addBinaryAnnotation("sa", v2.localEndpoint())
        .addBinaryAnnotation("ca", v2.remoteEndpoint())
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** Buggy instrumentation can send data with missing endpoints. Make sure we can record it. */
  @Test
  public void missingEndpoints() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("1")
        .id("2")
        .name("foo")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(1L)
        .id(2L)
        .name("foo")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** No special treatment for invalid core annotations: missing endpoint */
  @Test
  public void missingEndpoints_coreAnnotation() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("1")
        .id("2")
        .name("foo")
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996199000L, "sr")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(1L)
        .id(2L)
        .name("foo")
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996199000L, "sr", null)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void server_shared_v1_no_timestamp_duration() {
    // fix: previously passed the char literal '2', which widened to long 50 and
    // silently recorded parent id 50 instead of the intended 2
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .addAnnotation(1472470996199000L, "sr", BACKEND)
        .addAnnotation(1472470996406000L, "ss", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void server_incomplete_shared() {
    // fix: previously passed the char literal '2' (see server_shared_v1_no_timestamp_duration)
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .addAnnotation(1472470996199000L, "sr", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** Late flushed data on a v2 span */
  @Test
  public void lateRemoteEndpoint_ss() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .kind(Kind.SERVER)
        .localEndpoint(BACKEND)
        .remoteEndpoint(FRONTEND)
        .addAnnotation(1472470996199000L, "ss")
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .id(2L)
        .name("get")
        .addAnnotation(1472470996199000L, "ss", BACKEND)
        .addBinaryAnnotation("ca", FRONTEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** Late flushed data on a v1 span */
  @Test
  public void lateRemoteEndpoint_ca() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .kind(Kind.SERVER)
        .remoteEndpoint(FRONTEND)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .id(2L)
        .addBinaryAnnotation("ca", FRONTEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void localSpan_emptyComponent() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("local")
        .localEndpoint(Endpoint.newBuilder().serviceName("frontend").build())
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .id(2L)
        .name("local")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addBinaryAnnotation("lc", "", Endpoint.newBuilder().serviceName("frontend").build())
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void producer_remote() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("send")
        .kind(Kind.PRODUCER)
        .localEndpoint(FRONTEND)
        .remoteEndpoint(kafka)
        .timestamp(1472470996199000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("send")
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996199000L, "ms", FRONTEND)
        .addBinaryAnnotation("ma", kafka)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void producer_duration() {
    // a producer with a duration maps its end to a "ws" (wire send) annotation
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("send")
        .kind(Kind.PRODUCER)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(51000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("send")
        .timestamp(1472470996199000L)
        .duration(51000L)
        .addAnnotation(1472470996199000L, "ms", FRONTEND)
        .addAnnotation(1472470996250000L, "ws", FRONTEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void consumer() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("next-message")
        .kind(Kind.CONSUMER)
        .localEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("next-message")
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996199000L, "mr", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void consumer_remote() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("next-message")
        .kind(Kind.CONSUMER)
        .localEndpoint(BACKEND)
        .remoteEndpoint(kafka)
        .timestamp(1472470996199000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("next-message")
        .timestamp(1472470996199000L)
        .addAnnotation(1472470996199000L, "mr", BACKEND)
        .addBinaryAnnotation("ma", kafka)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void consumer_duration() {
    // a consumer with a duration maps its start to a "wr" (wire receive) annotation
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("next-message")
        .kind(Kind.CONSUMER)
        .localEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .duration(51000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("next-message")
        .timestamp(1472470996199000L)
        .duration(51000L)
        .addAnnotation(1472470996199000L, "wr", BACKEND)
        .addAnnotation(1472470996250000L, "mr", BACKEND)
        .build();
    assertThat(v2SpanConverter.convert(v2)).usingRecursiveComparison().isEqualTo(v1);
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void clientAndServer() {
    // a shared v1 RPC span splits into one client span and one shared server span
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996238000L, "ws", FRONTEND)
        .addAnnotation(1472470996250000L, "sr", BACKEND)
        .addAnnotation(1472470996350000L, "ss", BACKEND)
        .addAnnotation(1472470996403000L, "wr", FRONTEND)
        .addAnnotation(1472470996406000L, "cr", FRONTEND)
        .addBinaryAnnotation("http.path", "/api", FRONTEND)
        .addBinaryAnnotation("http.path", "/BACKEND", BACKEND)
        .addBinaryAnnotation("clnt/finagle.version", "6.45.0", FRONTEND)
        .addBinaryAnnotation("srv/finagle.version", "6.44.0", BACKEND)
        .addBinaryAnnotation("ca", FRONTEND)
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    Span.Builder newBuilder = Span.newBuilder().traceId("1").parentId("2").id("3").name("get");
    // the client side owns timestamp and duration
    Span clientV2 = newBuilder
        .clone()
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .remoteEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996238000L, "ws")
        .addAnnotation(1472470996403000L, "wr")
        .putTag("http.path", "/api")
        .putTag("clnt/finagle.version", "6.45.0")
        .build();
    // notice the server span's tags differ from the client's, and the client's
    // annotations aren't here
    Span serverV2 = newBuilder
        .clone()
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(BACKEND)
        .remoteEndpoint(FRONTEND)
        .timestamp(1472470996250000L)
        .duration(100000L)
        .putTag("http.path", "/BACKEND")
        .putTag("srv/finagle.version", "6.44.0")
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(clientV2, serverV2);
  }

  /**
   * The old v1 format had no means of saying it is shared or not. This uses lack of timestamp as a
   * signal
   */
  @Test
  public void assumesServerWithoutTimestampIsShared() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .addAnnotation(1472470996250000L, "sr", BACKEND)
        .addAnnotation(1472470996350000L, "ss", BACKEND)
        .build();
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(BACKEND)
        .timestamp(1472470996250000L)
        .duration(100000L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void clientAndServer_loopback() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996250000L, "sr", FRONTEND)
        .addAnnotation(1472470996350000L, "ss", FRONTEND)
        .addAnnotation(1472470996406000L, "cr", FRONTEND)
        .build();
    Span.Builder newBuilder = Span.newBuilder().traceId("1").parentId("2").id("3").name("get");
    Span clientV2 = newBuilder
        .clone()
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    Span serverV2 = newBuilder
        .clone()
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996250000L)
        .duration(100000L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(clientV2, serverV2);
  }

  @Test
  public void oneway_loopback() {
    // one-way RPC: "cs"/"sr" only, so neither half has a duration
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996250000L, "sr", FRONTEND)
        .build();
    Span.Builder newBuilder = Span.newBuilder().traceId("1").parentId("2").id("3").name("get");
    Span clientV2 = newBuilder
        .clone()
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .build();
    Span serverV2 = newBuilder
        .clone()
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996250000L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(clientV2, serverV2);
  }

  @Test
  public void producer() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("send")
        .addAnnotation(1472470996199000L, "ms", FRONTEND)
        .build();
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("send")
        .kind(Kind.PRODUCER)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** Fix a v1 reported half in new style and half in old style, ex via a bridge */
  @Test
  public void client_missingCs() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .kind(Kind.CLIENT)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996406000L, "cs", FRONTEND)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void server_missingSr() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .kind(Kind.SERVER)
        .localEndpoint(BACKEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId("1")
        .id("2")
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996406000L, "ss", BACKEND)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /**
   * Intentionally create service loopback endpoints as dependency linker can correct it later if
   * incorrect, provided the server is instrumented.
   */
  @Test public void redundantAddressAnnotations_client() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .kind(Kind.CLIENT)
        .name("get")
        .localEndpoint(FRONTEND)
        .remoteEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "cs", FRONTEND)
        .addAnnotation(1472470996406000L, "cr", FRONTEND)
        .addBinaryAnnotation("ca", FRONTEND)
        .addBinaryAnnotation("sa", FRONTEND)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /**
   * On server spans, ignore service name on remote address binary annotation that appear loopback
   * based on the service name. This could happen when finagle service labels are used incorrectly,
   * which was common in early instrumentation.
   *
   * <p>This prevents an uncorrectable scenario which results in extra (loopback) links on server
   * spans.
   */
  @Test
  public void redundantServiceNameOnAddressAnnotations_server() {
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .kind(Kind.SERVER)
        .name("get")
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "sr", FRONTEND)
        .addAnnotation(1472470996406000L, "ss", FRONTEND)
        .addBinaryAnnotation("ca", FRONTEND)
        .addBinaryAnnotation("sa", FRONTEND)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void redundantServiceNameOnAddressAnnotations_serverRetainsClientSocket() {
    // only the redundant service name is dropped; the client socket (ip/port) is kept
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .kind(Kind.SERVER)
        .name("get")
        .localEndpoint(BACKEND)
        .remoteEndpoint(FRONTEND.toBuilder().serviceName(null).build())
        .timestamp(1472470996199000L)
        .duration(207000L)
        .build();
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(1472470996199000L)
        .duration(207000L)
        .addAnnotation(1472470996199000L, "sr", BACKEND)
        .addAnnotation(1472470996406000L, "ss", BACKEND)
        .addBinaryAnnotation("ca", FRONTEND.toBuilder().serviceName("backend").build())
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  /** shared v1 IDs for messaging spans isn't supported, but shouldn't break */
  @Test
  public void producerAndConsumer() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("whatev")
        .addAnnotation(1472470996199000L, "ms", FRONTEND)
        .addAnnotation(1472470996238000L, "ws", FRONTEND)
        .addAnnotation(1472470996403000L, "wr", BACKEND)
        .addAnnotation(1472470996406000L, "mr", BACKEND)
        .addBinaryAnnotation("ma", kafka)
        .build();
    Span.Builder newBuilder = Span.newBuilder().traceId("1").parentId("2").id("3").name("whatev");
    Span producer = newBuilder
        .clone()
        .kind(Kind.PRODUCER)
        .localEndpoint(FRONTEND)
        .remoteEndpoint(kafka)
        .timestamp(1472470996199000L)
        .duration(1472470996238000L - 1472470996199000L)
        .build();
    Span consumer = newBuilder
        .clone()
        .kind(Kind.CONSUMER)
        .shared(true)
        .localEndpoint(BACKEND)
        .remoteEndpoint(kafka)
        .timestamp(1472470996403000L)
        .duration(1472470996406000L - 1472470996403000L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(producer, consumer);
  }

  /** shared v1 IDs for messaging spans isn't supported, but shouldn't break */
  @Test
  public void producerAndConsumer_loopback_shared() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1)
        .parentId(2)
        .id(3)
        .name("message")
        .addAnnotation(1472470996199000L, "ms", FRONTEND)
        .addAnnotation(1472470996238000L, "ws", FRONTEND)
        .addAnnotation(1472470996403000L, "wr", FRONTEND)
        .addAnnotation(1472470996406000L, "mr", FRONTEND)
        .build();
    Span.Builder newBuilder = Span.newBuilder().traceId("1").parentId("2").id("3").name("message");
    Span producer = newBuilder
        .clone()
        .kind(Kind.PRODUCER)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996199000L)
        .duration(1472470996238000L - 1472470996199000L)
        .build();
    Span consumer = newBuilder
        .clone()
        .kind(Kind.CONSUMER)
        .shared(true)
        .localEndpoint(FRONTEND)
        .timestamp(1472470996403000L)
        .duration(1472470996406000L - 1472470996403000L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(producer, consumer);
  }

  @Test
  public void onlyAddressAnnotations() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1)
        .parentId(2)
        .id(3)
        .name("rpc")
        .addBinaryAnnotation("ca", FRONTEND)
        .addBinaryAnnotation("sa", BACKEND)
        .build();
    Span v2 = Span.newBuilder().traceId("1").parentId("2").id("3").name("rpc")
        .localEndpoint(FRONTEND)
        .remoteEndpoint(BACKEND)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }

  @Test
  public void dataMissingEndpointGoesOnFirstSpan() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1)
        .id(2)
        .name("missing")
        .addAnnotation(1472470996199000L, "foo", FRONTEND)
        .addAnnotation(1472470996238000L, "bar", FRONTEND)
        .addAnnotation(1472470996250000L, "baz", BACKEND)
        .addAnnotation(1472470996350000L, "qux", BACKEND)
        .addAnnotation(1472470996403000L, "missing", null)
        .addBinaryAnnotation("foo", "bar", FRONTEND)
        .addBinaryAnnotation("baz", "qux", BACKEND)
        .addBinaryAnnotation("missing", "", null)
        .build();
    Span.Builder newBuilder = Span.newBuilder().traceId("1").id("2").name("missing");
    Span first = newBuilder
        .clone()
        .localEndpoint(FRONTEND)
        .addAnnotation(1472470996199000L, "foo")
        .addAnnotation(1472470996238000L, "bar")
        .addAnnotation(1472470996403000L, "missing")
        .putTag("foo", "bar")
        .putTag("missing", "")
        .build();
    Span second = newBuilder
        .clone()
        .localEndpoint(BACKEND)
        .addAnnotation(1472470996250000L, "baz")
        .addAnnotation(1472470996350000L, "qux")
        .putTag("baz", "qux")
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(first, second);
  }

  /**
   * This emulates a situation in mysql where the row representing a span has the client's timestamp
   */
  @Test
  public void parsesSharedFlagFromRPCSpan() {
    V1Span v1 = V1Span.newBuilder()
        .traceId(1L)
        .parentId(2L)
        .id(3L)
        .name("get")
        .timestamp(10)
        .addAnnotation(20, "sr", BACKEND)
        .addAnnotation(30, "ss", BACKEND)
        .build();
    Span v2 = Span.newBuilder()
        .traceId("1")
        .parentId("2")
        .id("3")
        .name("get")
        .kind(Kind.SERVER)
        .shared(true)
        .localEndpoint(BACKEND)
        .timestamp(20)
        .duration(10L)
        .build();
    assertThat(v1SpanConverter.convert(v1)).containsExactly(v2);
  }
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.nullToEmpty;
import com.google.javascript.rhino.InputId;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
/**
* NodeTraversal allows an iteration through the nodes in the parse tree,
* and facilitates the optimizations on the parse tree.
*
*/
public class NodeTraversal {
  /** The compiler this traversal reports to and reads inputs from. */
  private final AbstractCompiler compiler;
  /** The client callback invoked for every traversed node. */
  private final Callback callback;
  /** Contains the current node. */
  private Node curNode;
  /** The change scope for the current node being visited. */
  private Node currentChangeScope;
  /**
   * Stack containing the Scopes that have been created. The Scope objects
   * are lazily created; so the {@code scopeRoots} stack contains the
   * Nodes for all Scopes that have not been created yet.
   */
  private final Deque<Scope> scopes = new ArrayDeque<>();
  /**
   * A stack of scope roots. All scopes that have not been created
   * are represented in this Deque.
   */
  private final ArrayList<Node> scopeRoots = new ArrayList<>();
  /**
   * Stack containing the control flow graphs (CFG) that have been created. There are fewer CFGs
   * than scopes, since block-level scopes are not valid CFG roots. The CFG objects are lazily
   * populated: elements are simply the CFG root node until requested by {@link
   * #getControlFlowGraph()}.
   */
  private final ArrayDeque<Object> cfgs = new ArrayDeque<>();
  /** The current source file name */
  private String sourceName;
  /** The current input */
  private InputId inputId;
  /** Lazily-resolved input for {@link #inputId}; cleared whenever the input changes. */
  private CompilerInput compilerInput;
  /** The scope creator */
  private final ScopeCreator scopeCreator;
  /** Whether the scope creator produces block scopes (ES6) in addition to function scopes. */
  private final boolean useBlockScope;
  /** Possible callback for scope entry and exit. */
  private ScopedCallback scopeCallback;
  /** Callback for passes that iterate over a list of change scope roots (FUNCTIONs and SCRIPTs) */
  public interface ChangeScopeRootCallback {
    /** Called once for each change scope root ({@code root} is a FUNCTION or SCRIPT node). */
    void enterChangeScopeRoot(AbstractCompiler compiler, Node root);
  }
  /**
   * Callback for tree-based traversals. A node's {@link #visit} is skipped whenever
   * {@link #shouldTraverse} returned false for it or for any of its ancestors.
   */
  public interface Callback {
    /**
     * <p>Visits a node in pre order (before visiting its children) and decides
     * whether this node's children should be traversed. If children are
     * traversed, they will be visited by
     * {@link #visit(NodeTraversal, Node, Node)} in postorder.</p>
     * <p>Implementations can have side effects (e.g. modifying the parse
     * tree).</p>
     * @return whether the children of this node should be visited
     */
    boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent);
    /**
     * <p>Visits a node in postorder (after its children have been visited).
     * A node is visited only if all its parents should be traversed
     * ({@link #shouldTraverse(NodeTraversal, Node, Node)}).</p>
     * <p>Implementations can have side effects (e.g. modifying the parse
     * tree).</p>
     */
    void visit(NodeTraversal t, Node n, Node parent);
  }
  /**
   * Callback that also knows about scope changes. Scope notifications fire for every scope
   * pushed/popped by the traversal (except "quiet" pushes used for pre-existing parent scopes).
   */
  public interface ScopedCallback extends Callback {
    /**
     * Called immediately after entering a new scope. The new scope can
     * be accessed through t.getScope()
     */
    void enterScope(NodeTraversal t);
    /**
     * Called immediately before exiting a scope. The ending scope can
     * be accessed through t.getScope()
     */
    void exitScope(NodeTraversal t);
  }
  /**
   * Abstract callback to visit all nodes in postorder. Always traverses children;
   * subclasses only implement {@link #visit}.
   */
  public abstract static class AbstractPostOrderCallback implements Callback {
    @Override
    public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      return true;
    }
  }
  /** Abstract callback to visit all nodes in preorder; {@link #visit} is a final no-op. */
  public abstract static class AbstractPreOrderCallback implements Callback {
    @Override
    public final void visit(NodeTraversal t, Node n, Node parent) {}
  }
  /**
   * Abstract scoped callback to visit all nodes in postorder. The scope hooks default to no-ops
   * so subclasses may override only the ones they need.
   */
  public abstract static class AbstractScopedCallback implements ScopedCallback {
    @Override
    public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      return true;
    }
    @Override
    public void enterScope(NodeTraversal t) {}
    @Override
    public void exitScope(NodeTraversal t) {}
  }
/**
* Abstract callback to visit all nodes but not traverse into function
* bodies.
*/
public abstract static class AbstractShallowCallback implements Callback {
@Override
public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
// We do want to traverse the name of a named function, but we don't
// want to traverse the arguments or body.
return parent == null || !parent.isFunction() || n == parent.getFirstChild();
}
}
/**
* Abstract callback to visit all structure and statement nodes but doesn't traverse into
* functions or expressions.
*/
public abstract static class AbstractShallowStatementCallback implements Callback {
@Override
public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
return parent == null
|| NodeUtil.isControlStructure(parent)
|| NodeUtil.isStatementBlock(parent);
}
}
/**
* Abstract callback that knows when goog.modules (and in the future ES6 modules) are entered
* and exited. This includes both whole file modules and bundled modules.
*/
public abstract static class AbstractModuleCallback implements ScopedCallback {
/**
* Called immediately after entering a module.
*/
public abstract void enterModule(NodeTraversal t, Node scopeRoot);
/**
* Called immediately before exiting a module.
*/
public abstract void exitModule(NodeTraversal t, Node scopeRoot);
@Override
public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
return true;
}
@Override
public final void enterScope(NodeTraversal t) {
Node scopeRoot = t.getScopeRoot();
if (NodeUtil.isModuleScopeRoot(scopeRoot)) {
enterModule(t, scopeRoot);
}
}
@Override
public final void exitScope(NodeTraversal t) {
Node scopeRoot = t.getScopeRoot();
if (NodeUtil.isModuleScopeRoot(scopeRoot)) {
exitModule(t, scopeRoot);
}
}
}
/**
* Abstract callback to visit a pruned set of nodes.
*/
public abstract static class AbstractNodeTypePruningCallback
implements Callback {
private final Set<Token> nodeTypes;
private final boolean include;
/**
* Creates an abstract pruned callback.
* @param nodeTypes the nodes to include in the traversal
*/
public AbstractNodeTypePruningCallback(Set<Token> nodeTypes) {
this(nodeTypes, true);
}
/**
* Creates an abstract pruned callback.
* @param nodeTypes the nodes to include/exclude in the traversal
* @param include whether to include or exclude the nodes in the traversal
*/
public AbstractNodeTypePruningCallback(Set<Token> nodeTypes, boolean include) {
this.nodeTypes = nodeTypes;
this.include = include;
}
@Override
public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
return include == nodeTypes.contains(n.getToken());
}
}
/**
* Creates a node traversal using the specified callback interface
* and the scope creator.
*/
public NodeTraversal(AbstractCompiler compiler, Callback cb, ScopeCreator scopeCreator) {
this.callback = cb;
if (cb instanceof ScopedCallback) {
this.scopeCallback = (ScopedCallback) cb;
}
this.compiler = compiler;
this.scopeCreator = scopeCreator;
this.useBlockScope = scopeCreator.hasBlockScope();
}
private void throwUnexpectedException(Throwable unexpectedException) {
// If there's an unexpected exception, try to get the
// line number of the code that caused it.
String message = unexpectedException.getMessage();
// TODO(user): It is possible to get more information if curNode or
// its parent is missing. We still have the scope stack in which it is still
// very useful to find out at least which function caused the exception.
if (inputId != null) {
message =
unexpectedException.getMessage() + "\n"
+ formatNodeContext("Node", curNode)
+ (curNode == null ? "" : formatNodeContext("Parent", curNode.getParent()));
}
compiler.throwInternalError(message, unexpectedException);
}
  /**
   * Formats a one-line description of {@code n} for error messages, e.g.
   * {@code "  Node(<printed node>): file:line:col"}; prints NULL for a missing node.
   */
  private String formatNodeContext(String label, Node n) {
    if (n == null) {
      return "  " + label + ": NULL";
    }
    return "  " + label + "(" + n.toString(false, false, false) + "): "
        + formatNodePosition(n);
  }
  /**
   * Traverses a parse tree recursively. Pushes {@code root} as the outermost scope,
   * traverses its subtree, then pops the scope; any throwable is converted into a
   * compiler internal error with node context.
   */
  public void traverse(Node root) {
    try {
      initTraversal(root);
      curNode = root;
      pushScope(root);
      // null parent ensures that the shallow callbacks will traverse root
      traverseBranch(root, null);
      popScope();
    } catch (Error | Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }
  /**
   * Traverses both the externs and the source roots, which must share the same
   * parent node; that parent becomes the single outer scope for both subtrees.
   */
  void traverseRoots(Node externs, Node root) {
    try {
      Node scopeRoot = externs.getParent();
      checkNotNull(scopeRoot);
      initTraversal(scopeRoot);
      curNode = scopeRoot;
      pushScope(scopeRoot);
      traverseBranch(externs, scopeRoot);
      // Both roots must hang off the same parent so they share one scope.
      checkState(root.getParent() == scopeRoot);
      traverseBranch(root, scopeRoot);
      popScope();
    } catch (Error | Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }
private static final String MISSING_SOURCE = "[source unknown]";
private String formatNodePosition(Node n) {
String sourceFileName = getBestSourceFileName(n);
if (sourceFileName == null) {
return MISSING_SOURCE + "\n";
}
int lineNumber = n.getLineno();
int columnNumber = n.getCharno();
String src = compiler.getSourceLine(sourceFileName, lineNumber);
if (src == null) {
src = MISSING_SOURCE;
}
return sourceFileName + ":" + lineNumber + ":" + columnNumber + "\n"
+ src + "\n";
}
  /**
   * Traverses a parse tree recursively with a scope, starting with the given
   * root. This should only be used in the global scope or module scopes. Otherwise, use
   * {@link #traverseAtScope}.
   */
  void traverseWithScope(Node root, Scope s) {
    // Only global and module scopes are valid entry points here.
    checkState(s.isGlobal() || s.isModuleScope(), s);
    try {
      initTraversal(root);
      curNode = root;
      pushScope(s);
      traverseBranch(root, null);
      popScope();
    } catch (Error | Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }
  /**
   * Traverses a parse tree recursively with a scope, starting at that scope's
   * root. Parent scopes are pushed quietly (no enterScope callbacks) so callback
   * state matches a traversal that had descended from the top.
   */
  void traverseAtScope(Scope s) {
    Node n = s.getRootNode();
    initTraversal(n);
    curNode = n;
    // Collect ancestors so they can be pushed outermost-first.
    Deque<Scope> parentScopes = new ArrayDeque<>();
    Scope temp = s.getParent();
    while (temp != null) {
      parentScopes.push(temp);
      temp = temp.getParent();
    }
    while (!parentScopes.isEmpty()) {
      // Quiet push: no enterScope events for pre-existing parents.
      pushScope(parentScopes.pop(), true);
    }
    if (n.isFunction()) {
      pushScope(s);
      Node args = n.getSecondChild();
      Node body = args.getNext();
      traverseBranch(args, n);
      traverseBranch(body, n);
      popScope();
    } else if (n.isNormalBlock()) {
      pushScope(s);
      // traverseBranch is not called here to avoid re-creating the block scope.
      traverseChildren(n);
      popScope();
    } else if (NodeUtil.isAnyFor(n)) {
      // ES6 Creates a separate for scope and for-body scope
      checkState(scopeCreator.hasBlockScope());
      pushScope(s);
      Node forAssignmentParam = n.getFirstChild();
      Node forIterableParam = forAssignmentParam.getNext();
      Node forBodyScope = forIterableParam.getNext();
      traverseBranch(forAssignmentParam, n);
      traverseBranch(forIterableParam, n);
      traverseBranch(forBodyScope, n);
      popScope();
    } else if (n.isSwitch()) {
      // ES6 creates a separate switch scope with cases
      checkState(scopeCreator.hasBlockScope());
      pushScope(s);
      traverseChildren(n);
      popScope();
    } else {
      checkState(s.isGlobal() || s.isModuleScope(), "Expected global or module scope. Got:", s);
      traverseWithScope(n, s);
    }
  }
  /**
   * Traverses just the subtree rooted at {@code scopeRoot}, pre-filling the scope
   * stack from its ancestors so scope lookups behave as in a full traversal.
   */
  private void traverseScopeRoot(Node scopeRoot) {
    try {
      initTraversal(scopeRoot);
      curNode = scopeRoot;
      // Seed the scope-root stack with all enclosing scopes above this root.
      initScopeRoots(scopeRoot.getParent());
      traverseBranch(scopeRoot, scopeRoot.getParent());
    } catch (Error | Exception unexpectedException) {
      throwUnexpectedException(unexpectedException);
    }
  }
  /**
   * Traverses *just* the contents of provided scope nodes (and optionally scopes nested within
   * them) but will fall back on traversing the entire AST from root if a null scope nodes list is
   * provided.
   * @param root If scopeNodes is null, this method will just traverse 'root' instead. If scopeNodes
   *     is not null, this parameter is ignored.
   */
  public static void traverseEs6ScopeRoots(
      AbstractCompiler compiler,
      @Nullable Node root,
      @Nullable List<Node> scopeNodes,
      final Callback cb,
      final boolean traverseNested) {
    traverseEs6ScopeRoots(compiler, root, scopeNodes, cb, null, traverseNested);
  }
  /**
   * Traverses *just* the contents of provided scope nodes (and optionally scopes nested within
   * them) but will fall back on traversing the entire AST from root if a null scope nodes list is
   * provided. Also allows for a callback to notify when starting on one of the provided scope
   * nodes.
   * @param root If scopeNodes is null, this method will just traverse 'root' instead. If scopeNodes
   *     is not null, this parameter is ignored.
   */
  public static void traverseEs6ScopeRoots(
      AbstractCompiler compiler,
      @Nullable Node root,
      @Nullable List<Node> scopeNodes,
      final Callback cb,
      @Nullable final ChangeScopeRootCallback changeCallback,
      final boolean traverseNested) {
    if (scopeNodes == null) {
      NodeTraversal.traverseEs6(compiler, root, cb);
    } else {
      // One memoized scope creator is shared across all roots so scopes are built once.
      MemoizedScopeCreator scopeCreator =
          new MemoizedScopeCreator(new Es6SyntacticScopeCreator(compiler));
      for (final Node scopeNode : scopeNodes) {
        traverseSingleEs6ScopeRoot(
            compiler, cb, changeCallback, traverseNested, scopeCreator, scopeNode);
      }
    }
  }
  /**
   * Traverses one scope root on behalf of {@link #traverseEs6ScopeRoots}. Wraps the
   * client callback so that (a) nested change-scope roots are skipped unless
   * {@code traverseNested}, and (b) scope events only fire while inside the root.
   */
  private static void traverseSingleEs6ScopeRoot(
      AbstractCompiler compiler,
      final Callback cb,
      @Nullable ChangeScopeRootCallback changeCallback,
      final boolean traverseNested,
      MemoizedScopeCreator scopeCreator,
      final Node scopeNode) {
    if (changeCallback != null) {
      changeCallback.enterChangeScopeRoot(compiler, scopeNode);
    }
    ScopedCallback scb = new ScopedCallback() {
      // True between entering and leaving scopeNode; gates the scope events below.
      boolean insideScopeNode = false;
      @Override
      public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
        if (scopeNode == n) {
          insideScopeNode = true;
        }
        // Skip nested FUNCTION/SCRIPT roots unless nested traversal was requested.
        return (traverseNested || scopeNode == n || !NodeUtil.isChangeScopeRoot(n))
            && cb.shouldTraverse(t, n, parent);
      }
      @Override
      public void visit(NodeTraversal t, Node n, Node parent) {
        if (scopeNode == n) {
          insideScopeNode = false;
        }
        cb.visit(t, n, parent);
      }
      @Override
      public void enterScope(NodeTraversal t) {
        if (insideScopeNode && cb instanceof ScopedCallback) {
          ((ScopedCallback) cb).enterScope(t);
        }
      }
      @Override
      public void exitScope(NodeTraversal t) {
        if (insideScopeNode && cb instanceof ScopedCallback) {
          ((ScopedCallback) cb).exitScope(t);
        }
      }
    };
    NodeTraversal.traverseEs6ScopeRoot(compiler, scopeNode, scb, scopeCreator);
  }
  /**
   * Traverse a function out-of-band of normal traversal.
   *
   * @param node The function node.
   * @param scope The scope the function is contained in. Does not fire enter/exit
   *     callback events for this scope.
   */
  public void traverseFunctionOutOfBand(Node node, Scope scope) {
    checkNotNull(scope);
    checkState(node.isFunction(), node);
    checkNotNull(scope.getRootNode());
    initTraversal(node);
    curNode = node.getParent();
    // Quiet push/pop: the containing scope gets no enter/exit callbacks.
    pushScope(scope, true /* quietly */);
    traverseBranch(node, curNode);
    popScope(true /* quietly */);
  }
  /**
   * Traverses an inner node recursively with a refined scope. An inner node may
   * be any node with a non {@code null} parent (i.e. all nodes except the
   * root).
   *
   * @param node the node to traverse
   * @param parent the node's parent, it may not be {@code null}
   * @param refinedScope the refined scope of the scope currently at the top of
   *     the scope stack or in trivial cases that very scope or {@code null}
   */
  void traverseInnerNode(Node node, Node parent, Scope refinedScope) {
    checkNotNull(parent);
    initTraversal(node);
    // Only push the refined scope when it actually differs from the current one.
    if (refinedScope != null && getScope() != refinedScope) {
      curNode = node;
      pushScope(refinedScope);
      traverseBranch(node, parent);
      popScope();
    } else {
      traverseBranch(node, parent);
    }
  }
  /** Returns the compiler this traversal was created for. */
  public AbstractCompiler getCompiler() {
    return compiler;
  }
/**
* Gets the current line number, or zero if it cannot be determined. The line
* number is retrieved lazily as a running time optimization.
*/
public int getLineNumber() {
Node cur = curNode;
while (cur != null) {
int line = cur.getLineno();
if (line >= 0) {
return line;
}
cur = cur.getParent();
}
return 0;
}
/**
* Gets the current char number, or zero if it cannot be determined. The line
* number is retrieved lazily as a running time optimization.
*/
public int getCharno() {
Node cur = curNode;
while (cur != null) {
int line = cur.getCharno();
if (line >= 0) {
return line;
}
cur = cur.getParent();
}
return 0;
}
/**
* Gets the current input source name.
*
* @return A string that may be empty, but not null
*/
public String getSourceName() {
return sourceName;
}
/**
* Gets the current input source.
*/
public CompilerInput getInput() {
if (compilerInput == null && inputId != null) {
compilerInput = compiler.getInput(inputId);
}
return compilerInput;
}
/**
* Gets the current input module.
*/
public JSModule getModule() {
CompilerInput input = getInput();
return input == null ? null : input.getModule();
}
/** Returns the node currently being traversed. */
public Node getCurrentNode() {
return curNode;
}
  /**
   * Traversal for passes that work only on changed functions.
   * Suppose a loopable pass P1 uses this traversal.
   * Then, if a function doesn't change between two runs of P1, it won't look at
   * the function the second time.
   * (We're assuming that P1 runs to a fixpoint, o/w we may miss optimizations.)
   *
   * <p>Most changes are reported with calls to Compiler.reportCodeChange(), which
   * doesn't know which scope changed. We keep track of the current scope by
   * calling Compiler.setScope inside pushScope and popScope.
   * The automatic tracking can be wrong in rare cases when a pass changes scope
   * w/out causing a call to pushScope or popScope.
   *
   * <p>Passes that do cross-scope modifications call
   * Compiler.reportChangeToEnclosingScope(Node n).
   */
  public static void traverseChangedFunctions(
      final AbstractCompiler compiler, final ChangeScopeRootCallback callback) {
    final Node jsRoot = compiler.getJsRoot();
    NodeTraversal.traverseEs6(compiler, jsRoot,
        new AbstractPreOrderCallback() {
          @Override
          public final boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
            // Only notify for change-scope roots the compiler has marked as changed.
            if (NodeUtil.isChangeScopeRoot(n) && compiler.hasScopeChanged(n)) {
              callback.enterChangeScopeRoot(compiler, n);
            }
            return true;
          }
        });
  }
  /**
   * Traverses using the ES6SyntacticScopeCreator
   */
  // TODO (stephshi): rename to "traverse" when the old traverse method is no longer used
  public static void traverseEs6(AbstractCompiler compiler, Node root, Callback cb) {
    NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler));
    t.traverse(root);
  }
  /** Traverses from a particular scope node using the ES6SyntacticScopeCreator */
  private static void traverseEs6ScopeRoot(
      AbstractCompiler compiler, Node scopeNode, Callback cb, MemoizedScopeCreator scopeCreator) {
    NodeTraversal t = new NodeTraversal(compiler, cb, scopeCreator);
    t.traverseScopeRoot(scopeNode);
  }
  /**
   * Traverses using a typed syntactic scope creator.
   *
   * @deprecated Use the ES6SyntacticScopeCreator instead.
   */
  @Deprecated
  public static void traverseTyped(AbstractCompiler compiler, Node root, Callback cb) {
    NodeTraversal t = new NodeTraversal(compiler, cb, SyntacticScopeCreator.makeTyped(compiler));
    t.traverse(root);
  }
  /** Traverses the externs and source roots using the ES6SyntacticScopeCreator. */
  public static void traverseRootsEs6(
      AbstractCompiler compiler, Callback cb, Node externs, Node root) {
    NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler));
    t.traverseRoots(externs, root);
  }
  /**
   * Traverses the externs and source roots with a typed scope creator.
   *
   * @deprecated Use the ES6SyntacticScopeCreator instead.
   */
  @Deprecated
  public static void traverseRootsTyped(
      AbstractCompiler compiler, Callback cb, Node externs, Node root) {
    NodeTraversal t = new NodeTraversal(compiler, cb, SyntacticScopeCreator.makeTyped(compiler));
    t.traverseRoots(externs, root);
  }
  /**
   * Handles a SCRIPT node: switches the change scope and input to this script for
   * the duration of its subtree, then clears the change scope.
   */
  private void handleScript(Node n, Node parent) {
    setChangeScope(n);
    setInputId(n.getInputId(), getSourceName(n));
    curNode = n;
    if (callback.shouldTraverse(this, n, parent)) {
      traverseChildren(n);
      curNode = n;
      callback.visit(this, n, parent);
    }
    setChangeScope(null);
  }
  /**
   * Handles a FUNCTION node: makes the function the current change scope for its
   * subtree, restoring the previous change scope afterwards.
   */
  private void handleFunction(Node n, Node parent) {
    Node changeScope = this.currentChangeScope;
    setChangeScope(n);
    curNode = n;
    if (callback.shouldTraverse(this, n, parent)) {
      traverseFunction(n, parent);
      curNode = n;
      callback.visit(this, n, parent);
    }
    setChangeScope(changeScope);
  }
/**
* Traverses a branch.
*/
private void traverseBranch(Node n, Node parent) {
Token type = n.getToken();
if (type == Token.SCRIPT) {
handleScript(n, parent);
return;
} else if (type == Token.FUNCTION) {
handleFunction(n, parent);
return;
}
curNode = n;
if (!callback.shouldTraverse(this, n, parent)) {
return;
}
if (type == Token.CLASS) {
traverseClass(n);
} else if (type == Token.MODULE_BODY) {
traverseModule(n);
} else if (useBlockScope && NodeUtil.createsBlockScope(n)) {
traverseBlockScope(n);
} else {
traverseChildren(n);
}
curNode = n;
callback.visit(this, n, parent);
}
  /**
   * Traverses a function. A declaration's name lives in the enclosing scope, while a
   * function expression's name is visible only inside the function's own scope.
   */
  private void traverseFunction(Node n, Node parent) {
    final Node fnName = n.getFirstChild();
    boolean isFunctionExpression = parent != null && NodeUtil.isFunctionExpression(n);
    if (!isFunctionExpression) {
      // Function declarations are in the scope containing the declaration.
      traverseBranch(fnName, n);
    }
    curNode = n;
    pushScope(n);
    if (isFunctionExpression) {
      // Function expression names are only accessible within the function
      // scope.
      traverseBranch(fnName, n);
    }
    final Node args = fnName.getNext();
    final Node body = args.getNext();
    // Args
    traverseBranch(args, n);
    // Body
    // ES6 "arrow" function may not have a block as a body.
    traverseBranch(body, n);
    popScope();
  }
  /**
   * Traverses a class. Mirrors {@link #traverseFunction}: a declaration's name lives
   * in the enclosing scope, an expression's name only inside the class scope.
   */
  private void traverseClass(Node n) {
    final Node className = n.getFirstChild();
    boolean isClassExpression = NodeUtil.isClassExpression(n);
    if (!isClassExpression) {
      // Class declarations are in the scope containing the declaration.
      traverseBranch(className, n);
    }
    curNode = n;
    pushScope(n);
    if (isClassExpression) {
      // Class expression names are only accessible within the function
      // scope.
      traverseBranch(className, n);
    }
    final Node extendsClause = n.getSecondChild();
    final Node body = extendsClause.getNext();
    // Extends
    traverseBranch(extendsClause, n);
    // Body
    traverseBranch(body, n);
    popScope();
  }
  /** Traverses each child of {@code n}, tolerating callbacks that replace the child. */
  private void traverseChildren(Node n) {
    for (Node child = n.getFirstChild(); child != null; ) {
      // child could be replaced, in which case our child node
      // would no longer point to the true next
      Node next = child.getNext();
      traverseBranch(child, n);
      child = next;
    }
  }
  /** Traverses a module. */
  private void traverseModule(Node n) {
    pushScope(n);
    traverseChildren(n);
    popScope();
  }
  /** Traverses a non-function block. */
  private void traverseBlockScope(Node n) {
    pushScope(n);
    traverseChildren(n);
    popScope();
  }
/** Examines the functions stack for the last instance of a function node. When possible, prefer
* this method over NodeUtil.getEnclosingFunction() because this in general looks at less nodes.
*/
public Node getEnclosingFunction() {
Node root = getCfgRoot();
return root.isFunction() ? root : null;
}
  /** Pushes a CFG frame for {@code node} if it is a valid CFG root (function/script/root). */
  private void recordScopeRoot(Node node) {
    if (NodeUtil.isValidCfgRoot(node)) {
      cfgs.push(node);
    }
  }
  /**
   * Creates a new scope lazily (e.g. when entering a function): only the root node is
   * recorded; the Scope object is built on demand by {@link #getScope()}.
   */
  private void pushScope(Node node) {
    checkNotNull(curNode);
    checkNotNull(node);
    scopeRoots.add(node);
    recordScopeRoot(node);
    if (scopeCallback != null) {
      scopeCallback.enterScope(this);
    }
  }
  /** Pushes an already-created scope, firing the enterScope callback. */
  private void pushScope(Scope s) {
    pushScope(s, false);
  }
  /**
   * Pushes an already-created scope.
   * @param quietly Don't fire an enterScope callback.
   */
  private void pushScope(Scope s, boolean quietly) {
    checkNotNull(curNode);
    scopes.push(s);
    recordScopeRoot(s.getRootNode());
    if (!quietly && scopeCallback != null) {
      scopeCallback.enterScope(this);
    }
  }
  /** Pops back to the previous scope, firing the exitScope callback. */
  private void popScope() {
    popScope(false);
  }
  /**
   * Pops back to the previous scope (e.g. when leaving a function).
   * @param quietly Don't fire the exitScope callback.
   */
  private void popScope(boolean quietly) {
    if (!quietly && scopeCallback != null) {
      scopeCallback.exitScope(this);
    }
    Node scopeRoot;
    int roots = scopeRoots.size();
    // The newest scope is either a lazy root (scopeRoots) or a built Scope (scopes).
    if (roots > 0) {
      scopeRoot = scopeRoots.remove(roots - 1);
    } else {
      scopeRoot = scopes.pop().getRootNode();
    }
    // Keep the CFG stack in sync with recordScopeRoot().
    if (NodeUtil.isValidCfgRoot(scopeRoot)) {
      cfgs.pop();
    }
  }
  /**
   * Gets the current scope, materializing any lazily-pushed scope roots
   * (oldest first) onto the {@code scopes} stack.
   */
  public Scope getScope() {
    Scope scope = scopes.peek();
    for (int i = 0; i < scopeRoots.size(); i++) {
      scope = scopeCreator.createScope(scopeRoots.get(i), scope);
      scopes.push(scope);
    }
    scopeRoots.clear();
    // No need to call compiler.setScope; the top scopeRoot is now the top scope
    return scope;
  }
  /** Whether the current scope root is a hoist scope (function/script/module/root). */
  public boolean isHoistScope() {
    return Scope.isHoistScopeRootNode(getScopeRoot());
  }
  /**
   * Returns the root node of the nearest enclosing hoist scope, checking lazy
   * scope roots (newest first) before the built-scope stack.
   */
  public Node getClosestHoistScopeRoot() {
    int roots = scopeRoots.size();
    for (int i = roots; i > 0; i--) {
      Node rootNode = scopeRoots.get(i - 1);
      if (Scope.isHoistScopeRootNode(rootNode)) {
        return rootNode;
      }
    }
    return scopes.peek().getClosestHoistScope().getRootNode();
  }
  /** Returns the nearest enclosing hoist scope, forcing scope materialization. */
  public Scope getClosestHoistScope() {
    // TODO(moz): This should not call getScope(). We should find the root of the closest hoist
    // scope and effectively getScope() from there, which avoids scanning inner scopes that might
    // not be needed.
    return getScope().getClosestHoistScope();
  }
  /** Returns the current scope as a TypedScope; only valid for typed traversals. */
  public TypedScope getTypedScope() {
    Scope s = getScope();
    checkState(s instanceof TypedScope, "getTypedScope called for untyped traversal");
    return (TypedScope) s;
  }
  /**
   * Gets the control flow graph for the current JS scope. Lazily built: while the top
   * of the {@code cfgs} stack is still a raw root Node, the CFG is computed and the
   * stack entry is replaced with the built graph (memoization).
   */
  public ControlFlowGraph<Node> getControlFlowGraph() {
    ControlFlowGraph<Node> result;
    Object o = cfgs.peek();
    if (o instanceof Node) {
      Node cfgRoot = (Node) o;
      ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false, true);
      cfa.process(null, cfgRoot);
      result = cfa.getCfg();
      // Replace the raw root with the built graph so later calls reuse it.
      cfgs.pop();
      cfgs.push(result);
    } else {
      result = (ControlFlowGraph<Node>) o;
    }
    return result;
  }
/** Returns the current scope's root. */
public Node getScopeRoot() {
int roots = scopeRoots.size();
if (roots > 0) {
return scopeRoots.get(roots - 1);
} else {
Scope s = scopes.peek();
return s != null ? s.getRootNode() : null;
}
}
private Node getCfgRoot() {
Node result;
Object o = cfgs.peek();
if (o instanceof Node) {
result = (Node) o;
} else {
result = ((ControlFlowGraph<Node>) o).getEntry().getValue();
}
return result;
}
public ScopeCreator getScopeCreator() {
return scopeCreator;
}
  /**
   * Determines whether the traversal is currently in the global scope. Note that this returns false
   * in a global block scope.
   */
  public boolean inGlobalScope() {
    return getScopeDepth() == 0;
  }
  /** Determines whether the traversal is currently in the scope of the block of a function. */
  public boolean inFunctionBlockScope() {
    Node scopeRoot = getScopeRoot();
    return scopeRoot.isNormalBlock() && scopeRoot.getParent().isFunction();
  }
  /**
   * Determines whether the hoist scope of the current traversal is global.
   */
  public boolean inGlobalHoistScope() {
    Node cfgRoot = getCfgRoot();
    checkState(
        cfgRoot.isScript()
            || cfgRoot.isRoot()
            || cfgRoot.isNormalBlock()
            || cfgRoot.isFunction()
            || cfgRoot.isModuleBody(),
        cfgRoot);
    return cfgRoot.isScript() || cfgRoot.isRoot() || cfgRoot.isNormalBlock();
  }
  /**
   * Determines whether the traversal is currently in a module scope.
   */
  public boolean inModuleScope() {
    return NodeUtil.isModuleScopeRoot(getScopeRoot());
  }
  /**
   * Determines whether the hoist scope of the current traversal is a module.
   */
  public boolean inModuleHoistScope() {
    Node moduleRoot = getCfgRoot();
    if (moduleRoot.isFunction()) {
      // For wrapped modules, the function block is the module scope root.
      moduleRoot = moduleRoot.getLastChild();
    }
    return NodeUtil.isModuleScopeRoot(moduleRoot);
  }
  /** Returns the 0-based depth of the current scope (0 = global). */
  int getScopeDepth() {
    int sum = scopes.size() + scopeRoots.size();
    checkState(sum > 0);
    return sum - 1; // Use 0-based scope depth to be consistent within the compiler
  }
/** Reports a diagnostic (error or warning) */
public void report(Node n, DiagnosticType diagnosticType,
String... arguments) {
JSError error = JSError.make(n, diagnosticType, arguments);
compiler.report(error);
}
public void reportCodeChange() {
Node changeScope = this.currentChangeScope;
checkNotNull(changeScope);
checkState(NodeUtil.isChangeScopeRoot(changeScope), changeScope);
compiler.reportChangeToChangeScope(changeScope);
}
public void reportCodeChange(Node n) {
compiler.reportChangeToEnclosingScope(n);
}
private static String getSourceName(Node n) {
String name = n.getSourceFileName();
return nullToEmpty(name);
}
/**
* @param n The current change scope, should be null when the traversal is complete.
*/
private void setChangeScope(Node n) {
this.currentChangeScope = n;
compiler.setChangeScope(n);
}
private Node getEnclosingScript(Node n) {
while (n != null && !n.isScript()) {
n = n.getParent();
}
return n;
}
  /**
   * Initializes per-traversal state from {@code traversalRoot}: the enclosing
   * change scope and, when available, the enclosing script's input/source name.
   */
  private void initTraversal(Node traversalRoot) {
    Node changeScope = NodeUtil.getEnclosingChangeScopeRoot(traversalRoot);
    setChangeScope(changeScope);
    Node script = getEnclosingScript(changeScope);
    if (script != null) {
      setInputId(script.getInputId(), script.getSourceFileName());
    } else {
      setInputId(null, "");
    }
  }
  /**
   * Prefills the scopeRoots stack up to a given spot in the AST. Allows for starting traversal at
   * any spot while still having correct scope state.
   */
  private void initScopeRoots(Node n) {
    // Collect ancestors bottom-up, then push them outermost-first.
    Deque<Node> queuedScopeRoots = new ArrayDeque<>();
    while (n != null) {
      if (isScopeRoot(n)) {
        queuedScopeRoots.addFirst(n);
      }
      n = n.getParent();
    }
    for (Node queuedScopeRoot : queuedScopeRoots) {
      pushScope(queuedScopeRoot);
    }
  }
private boolean isScopeRoot(Node n) {
if (n.isRoot() && n.getParent() == null) {
return true;
} else if (n.isFunction()) {
return true;
} else if (useBlockScope && NodeUtil.createsBlockScope(n)) {
return true;
}
return false;
}
  /** Records the current input and source name, invalidating the cached CompilerInput. */
  private void setInputId(InputId id, String sourceName) {
    inputId = id;
    this.sourceName = sourceName;
    compilerInput = null;
  }
  /** Returns the id of the current input, or null when outside any script. */
  InputId getInputId() {
    return inputId;
  }
  /**
   * Creates a JSError during NodeTraversal.
   *
   * @param n Determines the line and char position within the source file name
   * @param level The level (error/warning) to report the diagnostic at
   * @param type The DiagnosticType
   * @param arguments Arguments to be incorporated into the message
   */
  public JSError makeError(Node n, CheckLevel level, DiagnosticType type,
      String... arguments) {
    return JSError.make(n, level, type, arguments);
  }
  /**
   * Creates a JSError during NodeTraversal.
   *
   * @param n Determines the line and char position within the source file name
   * @param type The DiagnosticType
   * @param arguments Arguments to be incorporated into the message
   */
  public JSError makeError(Node n, DiagnosticType type, String... arguments) {
    return JSError.make(n, type, arguments);
  }
private String getBestSourceFileName(Node n) {
return n == null ? sourceName : n.getSourceFileName();
}
}
| |
package name.falgout.jeffrey.throwing.stream.adapter;
import java.util.LongSummaryStatistics;
import java.util.OptionalDouble;
import java.util.OptionalLong;
import java.util.function.Function;
import java.util.function.LongFunction;
import java.util.stream.LongStream;
import name.falgout.jeffrey.throwing.RethrowChain;
import name.falgout.jeffrey.throwing.ThrowingBaseSpliterator;
import name.falgout.jeffrey.throwing.ThrowingBiConsumer;
import name.falgout.jeffrey.throwing.ThrowingIterator.OfLong;
import name.falgout.jeffrey.throwing.ThrowingLongBinaryOperator;
import name.falgout.jeffrey.throwing.ThrowingLongConsumer;
import name.falgout.jeffrey.throwing.ThrowingLongFunction;
import name.falgout.jeffrey.throwing.ThrowingLongPredicate;
import name.falgout.jeffrey.throwing.ThrowingLongToDoubleFunction;
import name.falgout.jeffrey.throwing.ThrowingLongToIntFunction;
import name.falgout.jeffrey.throwing.ThrowingLongUnaryOperator;
import name.falgout.jeffrey.throwing.ThrowingObjLongConsumer;
import name.falgout.jeffrey.throwing.ThrowingSupplier;
import name.falgout.jeffrey.throwing.adapter.ExceptionMasker;
import name.falgout.jeffrey.throwing.stream.ThrowingDoubleStream;
import name.falgout.jeffrey.throwing.stream.ThrowingIntStream;
import name.falgout.jeffrey.throwing.stream.ThrowingLongStream;
import name.falgout.jeffrey.throwing.stream.ThrowingStream;
/**
 * Adapts a standard {@link LongStream} to the {@link ThrowingLongStream} interface.
 *
 * <p>Functional arguments that may throw a checked exception of type {@code X} are
 * "masked" (wrapped so the exception can travel through the non-throwing delegate
 * stream), and terminal operations "unmask" it again, rethrowing the original
 * checked exception to the caller.
 *
 * @param <X> the checked exception type surfaced by terminal operations
 */
class CheckedLongStream<X extends Throwable> extends
    CheckedBaseStream<Long, X, LongStream, ThrowingLongStream<X>> implements ThrowingLongStream<X> {
  CheckedLongStream(LongStream delegate, ExceptionMasker<X> exceptionMasker) {
    super(delegate, exceptionMasker);
  }

  CheckedLongStream(LongStream delegate, ExceptionMasker<X> exceptionMasker,
      RethrowChain<Throwable, X> chain) {
    super(delegate, exceptionMasker, chain);
  }

  @Override
  public ThrowingLongStream<X> getSelf() {
    return this;
  }

  @Override
  public ThrowingLongStream<X> createNewAdapter(LongStream delegate) {
    return new CheckedLongStream<>(delegate, getExceptionMasker(), getChain());
  }

  @Override
  public OfLong<X> iterator() {
    return ThrowingBridge.of(getDelegate().iterator(), getExceptionMasker());
  }

  @Override
  public ThrowingBaseSpliterator.OfLong<X> spliterator() {
    return ThrowingBridge.of(getDelegate().spliterator(), getExceptionMasker());
  }

  // --- Intermediate operations: mask the throwing argument, chain on the delegate. ---

  @Override
  public ThrowingLongStream<X> filter(ThrowingLongPredicate<? extends X> predicate) {
    return chain(LongStream::filter, getExceptionMasker().mask(predicate));
  }

  @Override
  public ThrowingLongStream<X> map(ThrowingLongUnaryOperator<? extends X> mapper) {
    return chain(LongStream::map, getExceptionMasker().mask(mapper));
  }

  @Override
  public <U> ThrowingStream<U, X> mapToObj(ThrowingLongFunction<? extends U, ? extends X> mapper) {
    LongFunction<? extends U> f = getExceptionMasker().mask(mapper);
    return ThrowingBridge.of(getDelegate().mapToObj(f), getExceptionMasker());
  }

  @Override
  public ThrowingIntStream<X> mapToInt(ThrowingLongToIntFunction<? extends X> mapper) {
    return ThrowingBridge.of(getDelegate().mapToInt(getExceptionMasker().mask(mapper)),
        getExceptionMasker());
  }

  @Override
  public ThrowingDoubleStream<X> mapToDouble(ThrowingLongToDoubleFunction<? extends X> mapper) {
    return ThrowingBridge.of(getDelegate().mapToDouble(getExceptionMasker().mask(mapper)),
        getExceptionMasker());
  }

  @Override
  public ThrowingLongStream<X> flatMap(
      ThrowingLongFunction<? extends ThrowingLongStream<? extends X>, ? extends X> mapper) {
    // Unchecked cast narrows the wildcard to X so the bridge can unwrap the
    // throwing stream back to a plain LongStream; inherited from the original
    // implementation and assumed safe because X only widens the exception view.
    @SuppressWarnings("unchecked")
    Function<? super ThrowingLongStream<? extends X>, ? extends LongStream> c =
        s -> ThrowingBridge.of((ThrowingLongStream<X>) s, getExceptionClass());
    return chain(LongStream::flatMap, getExceptionMasker().mask(mapper.andThen(c::apply)));
  }

  @Override
  public ThrowingLongStream<X> distinct() {
    return chain(LongStream::distinct);
  }

  @Override
  public ThrowingLongStream<X> sorted() {
    return chain(LongStream::sorted);
  }

  @Override
  public ThrowingLongStream<X> peek(ThrowingLongConsumer<? extends X> action) {
    return chain(LongStream::peek, getExceptionMasker().mask(action));
  }

  @Override
  public ThrowingLongStream<X> limit(long maxSize) {
    return chain(LongStream::limit, maxSize);
  }

  @Override
  public ThrowingLongStream<X> skip(long n) {
    return chain(LongStream::skip, n);
  }

  // --- Terminal operations: run on the delegate, unmasking X before returning. ---

  @Override
  public void forEach(ThrowingLongConsumer<? extends X> action) throws X {
    unmaskException(() -> getDelegate().forEach(getExceptionMasker().mask(action)));
  }

  @Override
  public void forEachOrdered(ThrowingLongConsumer<? extends X> action) throws X {
    unmaskException(() -> getDelegate().forEachOrdered(getExceptionMasker().mask(action)));
  }

  @Override
  public long[] toArray() throws X {
    return unmaskException(getDelegate()::toArray);
  }

  @Override
  public long reduce(long identity, ThrowingLongBinaryOperator<? extends X> op) throws X {
    return unmaskException(() -> getDelegate().reduce(identity, getExceptionMasker().mask(op)));
  }

  @Override
  public OptionalLong reduce(ThrowingLongBinaryOperator<? extends X> op) throws X {
    return unmaskException(() -> getDelegate().reduce(getExceptionMasker().mask(op)));
  }

  @Override
  public <R> R collect(ThrowingSupplier<R, ? extends X> supplier,
      ThrowingObjLongConsumer<R, ? extends X> accumulator,
      ThrowingBiConsumer<R, R, ? extends X> combiner) throws X {
    return unmaskException(() -> getDelegate().collect(getExceptionMasker().mask(supplier),
        getExceptionMasker().mask(accumulator), getExceptionMasker().mask(combiner)));
  }

  @Override
  public long sum() throws X {
    return unmaskException(getDelegate()::sum);
  }

  @Override
  public OptionalLong min() throws X {
    return unmaskException(getDelegate()::min);
  }

  @Override
  public OptionalLong max() throws X {
    return unmaskException(getDelegate()::max);
  }

  @Override
  public long count() throws X {
    return unmaskException(getDelegate()::count);
  }

  @Override
  public OptionalDouble average() throws X {
    return unmaskException(getDelegate()::average);
  }

  @Override
  public LongSummaryStatistics summaryStatistics() throws X {
    return unmaskException(getDelegate()::summaryStatistics);
  }

  @Override
  public boolean anyMatch(ThrowingLongPredicate<? extends X> predicate) throws X {
    return unmaskException(() -> getDelegate().anyMatch(getExceptionMasker().mask(predicate)));
  }

  @Override
  public boolean allMatch(ThrowingLongPredicate<? extends X> predicate) throws X {
    return unmaskException(() -> getDelegate().allMatch(getExceptionMasker().mask(predicate)));
  }

  @Override
  public boolean noneMatch(ThrowingLongPredicate<? extends X> predicate) throws X {
    return unmaskException(() -> getDelegate().noneMatch(getExceptionMasker().mask(predicate)));
  }

  @Override
  public OptionalLong findFirst() throws X {
    return unmaskException(getDelegate()::findFirst);
  }

  @Override
  public OptionalLong findAny() throws X {
    return unmaskException(getDelegate()::findAny);
  }

  @Override
  public ThrowingDoubleStream<X> asDoubleStream() {
    return ThrowingBridge.of(getDelegate().asDoubleStream(), getExceptionMasker());
  }

  @Override
  public ThrowingStream<Long, X> boxed() {
    return ThrowingBridge.of(getDelegate().boxed(), getExceptionMasker());
  }

  @Override
  public <Y extends Throwable> ThrowingLongStream<Y> rethrow(Class<Y> e,
      Function<? super X, ? extends Y> mapper) {
    // Extend the rethrow chain so X is translated to Y at unmask time.
    RethrowChain<Throwable, Y> c = getChain().rethrow(mapper);
    return new CheckedLongStream<>(getDelegate(), new ExceptionMasker<>(e), c);
  }
}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.databasejoin;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;
/**
 * Step meta-data for the "Database Join" step: for each input row a SQL query is
 * executed, with selected input fields supplying the values for the '?' markers
 * in the query.
 */
public class DatabaseJoinMeta extends BaseStepMeta implements StepMetaInterface
{
    private static Class<?> PKG = DatabaseJoinMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    /** database connection */
    private DatabaseMeta databaseMeta;

    /** SQL Statement */
    private String sql;

    /** Number of rows to return (0=ALL) */
    private int rowLimit;

    /** false: don't return rows where nothing is found
        true: at least return one source row, the rest is NULL */
    private boolean outerJoin;

    /** Fields to use as parameters (fill in the ? markers) */
    private String parameterField[];

    /** Type of the parameters */
    private int parameterType[];

    /** false: don't replace variables in the script
        true: replace variables in the script */
    private boolean replacevars;

    public DatabaseJoinMeta()
    {
        super(); // allocate BaseStepMeta
    }

    /**
     * @return Returns the database.
     */
    public DatabaseMeta getDatabaseMeta()
    {
        return databaseMeta;
    }

    /**
     * @param database The database to set.
     */
    public void setDatabaseMeta(DatabaseMeta database)
    {
        this.databaseMeta = database;
    }

    /**
     * @return Returns the outerJoin.
     */
    public boolean isOuterJoin()
    {
        return outerJoin;
    }

    /**
     * @param outerJoin The outerJoin to set.
     */
    public void setOuterJoin(boolean outerJoin)
    {
        this.outerJoin = outerJoin;
    }

    /**
     * @return Returns the replacevars.
     */
    public boolean isVariableReplace()
    {
        return replacevars;
    }

    /**
     * @param replacevars The replacevars to set.
     */
    public void setVariableReplace(boolean replacevars)
    {
        this.replacevars = replacevars;
    }

    /**
     * @return Returns the parameterField.
     */
    public String[] getParameterField()
    {
        return parameterField;
    }

    /**
     * @param parameterField The parameterField to set.
     */
    public void setParameterField(String[] parameterField)
    {
        this.parameterField = parameterField;
    }

    /**
     * @return Returns the parameterType.
     */
    public int[] getParameterType()
    {
        return parameterType;
    }

    /**
     * @param parameterType The parameterType to set.
     */
    public void setParameterType(int[] parameterType)
    {
        this.parameterType = parameterType;
    }

    /**
     * @return Returns the rowLimit.
     */
    public int getRowLimit()
    {
        return rowLimit;
    }

    /**
     * @param rowLimit The rowLimit to set.
     */
    public void setRowLimit(int rowLimit)
    {
        this.rowLimit = rowLimit;
    }

    /**
     * @return Returns the sql.
     */
    public String getSql()
    {
        return sql;
    }

    /**
     * @param sql The sql to set.
     */
    public void setSql(String sql)
    {
        this.sql = sql;
    }

    /**
     * Loads the step settings from a transformation XML node.
     */
    public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException {
        // Reset state before reading; readData() re-allocates the parameter arrays.
        parameterField = null;
        parameterType = null;
        outerJoin = false;
        replacevars = false;
        readData(stepnode, databases);
    }

    /**
     * Allocates the parameter arrays for the given number of parameters.
     */
    public void allocate(int nrparam)
    {
        parameterField = new String[nrparam];
        parameterType = new int[nrparam];
    }

    public Object clone()
    {
        DatabaseJoinMeta retval = (DatabaseJoinMeta) super.clone();
        // Deep-copy the parameter arrays; guard against a not-yet-loaded state
        // where the arrays are still null (see loadXML).
        int nrparam = parameterField == null ? 0 : parameterField.length;
        retval.allocate(nrparam);
        for (int i = 0; i < nrparam; i++)
        {
            retval.parameterField[i] = parameterField[i];
            retval.parameterType[i] = parameterType[i];
        }
        return retval;
    }

    private void readData(Node stepnode, List<DatabaseMeta> databases)
        throws KettleXMLException
    {
        try
        {
            String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
            databaseMeta = DatabaseMeta.findDatabase(databases, con);
            sql = XMLHandler.getTagValue(stepnode, "sql"); //$NON-NLS-1$
            outerJoin = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "outer_join")); //$NON-NLS-1$ //$NON-NLS-2$
            replacevars = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "replace_vars"));
            rowLimit = Const.toInt(XMLHandler.getTagValue(stepnode, "rowlimit"), 0); //$NON-NLS-1$

            Node param = XMLHandler.getSubNode(stepnode, "parameter"); //$NON-NLS-1$
            int nrparam = XMLHandler.countNodes(param, "field"); //$NON-NLS-1$
            allocate(nrparam);
            for (int i = 0; i < nrparam; i++)
            {
                Node pnode = XMLHandler.getSubNodeByNr(param, "field", i); //$NON-NLS-1$
                parameterField[i] = XMLHandler.getTagValue(pnode, "name"); //$NON-NLS-1$
                String ptype = XMLHandler.getTagValue(pnode, "type"); //$NON-NLS-1$
                parameterType[i] = ValueMeta.getType(ptype);
            }
        }
        catch (Exception e)
        {
            throw new KettleXMLException(BaseMessages.getString(PKG, "DatabaseJoinMeta.Exception.UnableToLoadStepInfo"), e); //$NON-NLS-1$
        }
    }

    public void setDefault()
    {
        databaseMeta = null;
        rowLimit = 0;
        sql = ""; //$NON-NLS-1$
        outerJoin = false;
        replacevars = false;
        parameterField = null;
        parameterType = null;
        // No parameters by default.
        allocate(0);
    }

    /**
     * Builds a row of parameter value meta-data by looking up the configured
     * parameter field names in the supplied input fields.
     */
    public RowMetaInterface getParameterRow(RowMetaInterface fields)
    {
        RowMetaInterface param = new RowMeta();
        if (fields != null)
        {
            for (int i = 0; i < parameterField.length; i++)
            {
                ValueMetaInterface v = fields.searchValueMeta(parameterField[i]);
                if (v != null) param.addValueMeta(v);
            }
        }
        return param;
    }

    @Override
    public void getFields(RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException {
        if (databaseMeta == null) return;

        Database db = new Database(loggingObject, databaseMeta);
        databases = new Database[] { db }; // Keep track of this one for cancelQuery

        // Which fields are parameters?
        // info[0] comes from the database connection.
        //
        RowMetaInterface param = getParameterRow(row);

        // First try without connecting to the database... (can be S L O W)
        // See if it's in the cache...
        //
        RowMetaInterface add = null;
        try
        {
            add = db.getQueryFields(space.environmentSubstitute(sql), true, param, new Object[param.size()]);
        }
        catch (KettleDatabaseException dbe)
        {
            throw new KettleStepException(BaseMessages.getString(PKG, "DatabaseJoinMeta.Exception.UnableToDetermineQueryFields") + Const.CR + sql, dbe); //$NON-NLS-1$
        }

        if (add != null) // Cache hit: use the fields without connecting.
        {
            for (int i = 0; i < add.size(); i++)
            {
                ValueMetaInterface v = add.getValueMeta(i);
                v.setOrigin(name);
            }
            row.addRowMeta(add);
        }
        else
        {
            // No cache hit, connect to the database, do it the hard way...
            //
            try
            {
                db.connect();
                add = db.getQueryFields(space.environmentSubstitute(sql), true, param, new Object[param.size()]);
                for (int i = 0; i < add.size(); i++)
                {
                    ValueMetaInterface v = add.getValueMeta(i);
                    v.setOrigin(name);
                }
                row.addRowMeta(add);
                db.disconnect();
            }
            catch (KettleDatabaseException dbe)
            {
                throw new KettleStepException(BaseMessages.getString(PKG, "DatabaseJoinMeta.Exception.ErrorObtainingFields"), dbe); //$NON-NLS-1$
            }
        }
    }

    public String getXML()
    {
        // StringBuilder: this buffer is method-local, no synchronization needed.
        StringBuilder retval = new StringBuilder(300);

        retval.append(" ").append(XMLHandler.addTagValue("connection", databaseMeta == null ? "" : databaseMeta.getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        retval.append(" ").append(XMLHandler.addTagValue("rowlimit", rowLimit)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("sql", sql)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("outer_join", outerJoin)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("replace_vars", replacevars));
        retval.append(" <parameter>").append(Const.CR); //$NON-NLS-1$
        for (int i = 0; i < parameterField.length; i++)
        {
            retval.append(" <field>").append(Const.CR); //$NON-NLS-1$
            retval.append(" ").append(XMLHandler.addTagValue("name", parameterField[i])); //$NON-NLS-1$ //$NON-NLS-2$
            retval.append(" ").append(XMLHandler.addTagValue("type", ValueMeta.getTypeDesc(parameterType[i]))); //$NON-NLS-1$ //$NON-NLS-2$
            retval.append(" </field>").append(Const.CR); //$NON-NLS-1$
        }
        retval.append(" </parameter>").append(Const.CR); //$NON-NLS-1$

        return retval.toString();
    }

    public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleException {
        try
        {
            databaseMeta = rep.loadDatabaseMetaFromStepAttribute(id_step, "id_connection", databases);
            rowLimit = (int) rep.getStepAttributeInteger(id_step, "rowlimit"); //$NON-NLS-1$
            sql = rep.getStepAttributeString(id_step, "sql"); //$NON-NLS-1$
            outerJoin = rep.getStepAttributeBoolean(id_step, "outer_join"); //$NON-NLS-1$
            replacevars = rep.getStepAttributeBoolean(id_step, "replace_vars");

            int nrparam = rep.countNrStepAttributes(id_step, "parameter_field"); //$NON-NLS-1$
            allocate(nrparam);
            for (int i = 0; i < nrparam; i++)
            {
                parameterField[i] = rep.getStepAttributeString(id_step, i, "parameter_field"); //$NON-NLS-1$
                String stype = rep.getStepAttributeString(id_step, i, "parameter_type"); //$NON-NLS-1$
                parameterType[i] = ValueMeta.getType(stype);
            }
        }
        catch (Exception e)
        {
            throw new KettleException(BaseMessages.getString(PKG, "DatabaseJoinMeta.Exception.UnexpectedErrorReadingStepInfo"), e); //$NON-NLS-1$
        }
    }

    public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step)
        throws KettleException
    {
        try
        {
            rep.saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", databaseMeta);
            rep.saveStepAttribute(id_transformation, id_step, "rowlimit", rowLimit); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "sql", sql); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "outer_join", outerJoin); //$NON-NLS-1$
            rep.saveStepAttribute(id_transformation, id_step, "replace_vars", replacevars);

            for (int i = 0; i < parameterField.length; i++)
            {
                rep.saveStepAttribute(id_transformation, id_step, i, "parameter_field", parameterField[i]); //$NON-NLS-1$
                rep.saveStepAttribute(id_transformation, id_step, i, "parameter_type", ValueMeta.getTypeDesc(parameterType[i])); //$NON-NLS-1$
            }

            // Also, save the step-database relationship!
            if (databaseMeta != null) rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getObjectId());
        }
        catch (Exception e)
        {
            throw new KettleException(BaseMessages.getString(PKG, "DatabaseJoinMeta.Exception.UnableToSaveStepInfo") + id_step, e); //$NON-NLS-1$
        }
    }

    public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info) {
        CheckResult cr;
        String error_message = ""; //$NON-NLS-1$

        if (databaseMeta != null)
        {
            Database db = new Database(loggingObject, databaseMeta);
            databases = new Database[] { db }; // Keep track of this one for cancelQuery

            try
            {
                db.connect();

                if (sql != null && sql.length() != 0)
                {
                    RowMetaInterface param = getParameterRow(prev);

                    error_message = ""; //$NON-NLS-1$

                    RowMetaInterface r = db.getQueryFields(transMeta.environmentSubstitute(sql), true, param, new Object[param.size()]);
                    if (r != null)
                    {
                        cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.QueryOK"), stepMeta); //$NON-NLS-1$
                        remarks.add(cr);
                    }
                    else
                    {
                        error_message = BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.InvalidDBQuery"); //$NON-NLS-1$
                        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
                        remarks.add(cr);
                    }

                    // The number of '?' markers must match the number of configured parameters.
                    int q = db.countParameters(transMeta.environmentSubstitute(sql));
                    if (q != parameterField.length)
                    {
                        error_message = BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.DismatchBetweenParametersAndQuestion") + Const.CR; //$NON-NLS-1$
                        error_message += BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.DismatchBetweenParametersAndQuestion2") + q + Const.CR; //$NON-NLS-1$
                        error_message += BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.DismatchBetweenParametersAndQuestion3") + parameterField.length; //$NON-NLS-1$

                        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
                        remarks.add(cr);
                    }
                    else
                    {
                        cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.NumberOfParamCorrect") + q + ")", stepMeta); //$NON-NLS-1$ //$NON-NLS-2$
                        remarks.add(cr);
                    }
                }

                // Look up fields in the input stream <prev>
                if (prev != null && prev.size() > 0)
                {
                    boolean first = true;
                    error_message = ""; //$NON-NLS-1$
                    boolean error_found = false;

                    for (int i = 0; i < parameterField.length; i++)
                    {
                        ValueMetaInterface v = prev.searchValueMeta(parameterField[i]);
                        if (v == null)
                        {
                            if (first)
                            {
                                first = false;
                                error_message += BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.MissingFields") + Const.CR; //$NON-NLS-1$
                            }
                            error_found = true;
                            error_message += "\t\t" + parameterField[i] + Const.CR; //$NON-NLS-1$
                        }
                    }
                    if (error_found)
                    {
                        cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
                    }
                    else
                    {
                        cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.AllFieldsFound"), stepMeta); //$NON-NLS-1$
                    }
                    remarks.add(cr);
                }
                else
                {
                    error_message = BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.CounldNotReadFields") + Const.CR; //$NON-NLS-1$
                    cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
                    remarks.add(cr);
                }
            }
            catch (KettleException e)
            {
                error_message = BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.ErrorOccurred") + e.getMessage(); //$NON-NLS-1$
                cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
                remarks.add(cr);
            }
            finally
            {
                db.disconnect();
            }
        }
        else
        {
            error_message = BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.InvalidConnection"); //$NON-NLS-1$
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, error_message, stepMeta);
            remarks.add(cr);
        }

        // See if we have input streams leading to this step!
        if (input.length > 0)
        {
            cr = new CheckResult(CheckResult.TYPE_RESULT_OK, BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.ReceivingInfo"), stepMeta); //$NON-NLS-1$
            remarks.add(cr);
        }
        else
        {
            cr = new CheckResult(CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "DatabaseJoinMeta.CheckResult.NoInputReceived"), stepMeta); //$NON-NLS-1$
            remarks.add(cr);
        }
    }

    public RowMetaInterface getTableFields()
    {
        // Build a dummy parameter row...
        //
        RowMetaInterface param = new RowMeta();
        for (int i = 0; i < parameterField.length; i++)
        {
            param.addValueMeta(new ValueMeta(parameterField[i], parameterType[i]));
        }

        RowMetaInterface fields = null;
        if (databaseMeta != null)
        {
            Database db = new Database(loggingObject, databaseMeta);
            databases = new Database[] { db }; // Keep track of this one for cancelQuery

            try
            {
                db.connect();
                fields = db.getQueryFields(databaseMeta.environmentSubstitute(sql), true, param, new Object[param.size()]);
            }
            catch (KettleDatabaseException dbe)
            {
                logError(BaseMessages.getString(PKG, "DatabaseJoinMeta.Log.DatabaseErrorOccurred") + dbe.getMessage()); //$NON-NLS-1$
            }
            finally
            {
                db.disconnect();
            }
        }
        return fields;
    }

    public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans)
    {
        return new DatabaseJoin(stepMeta, stepDataInterface, cnr, tr, trans);
    }

    public StepDataInterface getStepData()
    {
        return new DatabaseJoinData();
    }

    @Override
    public void analyseImpact(List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
        String[] input, String[] output, RowMetaInterface info) throws KettleStepException {
        // Find the lookup fields...
        //
        RowMetaInterface out = prev.clone();
        getFields(out, stepMeta.getName(), new RowMetaInterface[] { info, }, null, transMeta);

        if (out != null)
        {
            for (int i = 0; i < out.size(); i++)
            {
                ValueMetaInterface outvalue = out.getValueMeta(i);
                DatabaseImpact di = new DatabaseImpact(DatabaseImpact.TYPE_IMPACT_READ,
                    transMeta.getName(),
                    stepMeta.getName(),
                    databaseMeta.getDatabaseName(),
                    "", //$NON-NLS-1$
                    outvalue.getName(),
                    outvalue.getName(),
                    stepMeta.getName(),
                    transMeta.environmentSubstitute(sql),
                    BaseMessages.getString(PKG, "DatabaseJoinMeta.DatabaseImpact.Title") //$NON-NLS-1$
                );
                impact.add(di);
            }
        }
    }

    public DatabaseMeta[] getUsedDatabaseConnections()
    {
        if (databaseMeta != null)
        {
            return new DatabaseMeta[] { databaseMeta };
        }
        else
        {
            return super.getUsedDatabaseConnections();
        }
    }

    public boolean supportsErrorHandling()
    {
        return true;
    }
}
| |
/*
* Copyright 2019 The GraphicsFuzz Project Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphicsfuzz.generator.transformation;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.graphicsfuzz.common.ast.TranslationUnit;
import com.graphicsfuzz.common.ast.stmt.BreakStmt;
import com.graphicsfuzz.common.ast.stmt.ContinueStmt;
import com.graphicsfuzz.common.ast.stmt.DefaultCaseLabel;
import com.graphicsfuzz.common.ast.stmt.ExprCaseLabel;
import com.graphicsfuzz.common.ast.stmt.ForStmt;
import com.graphicsfuzz.common.ast.stmt.IfStmt;
import com.graphicsfuzz.common.ast.stmt.Stmt;
import com.graphicsfuzz.common.ast.stmt.SwitchStmt;
import com.graphicsfuzz.common.ast.visitors.CheckPredicateVisitor;
import com.graphicsfuzz.common.ast.visitors.StandardVisitor;
import com.graphicsfuzz.common.glslversion.ShadingLanguageVersion;
import com.graphicsfuzz.common.transformreduce.GlslShaderJob;
import com.graphicsfuzz.common.transformreduce.ShaderJob;
import com.graphicsfuzz.common.util.IRandom;
import com.graphicsfuzz.common.util.ParseHelper;
import com.graphicsfuzz.common.util.PipelineInfo;
import com.graphicsfuzz.common.util.RandomWrapper;
import com.graphicsfuzz.common.util.ShaderJobFileOperations;
import com.graphicsfuzz.common.util.ShaderKind;
import com.graphicsfuzz.generator.transformation.donation.DonationContext;
import com.graphicsfuzz.generator.transformation.injection.IInjectionPoint;
import com.graphicsfuzz.generator.transformation.injection.InjectionPoints;
import com.graphicsfuzz.generator.util.GenerationParams;
import com.graphicsfuzz.generator.util.TransformationProbabilities;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Optional;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
public class DonateDeadCodeTransformationTest {
// Per-test scratch directory used as the transformation's donor folder.
@Rule
public TemporaryFolder testFolder = new TemporaryFolder();
// Builds a transformation configured with default fragment-shader generation
// parameters; the first argument is an IRandom method reference, presumably a
// probability/decision source — confirm against the constructor's contract.
private DonateDeadCodeTransformation getDummyTransformationObject() {
return new DonateDeadCodeTransformation(IRandom::nextBoolean, testFolder.getRoot(),
GenerationParams.normal(ShaderKind.FRAGMENT, false, true));
}
@Test
public void prepareStatementToDonateTopLevelBreakRemovedWhenNecessary() throws Exception {
  // A 'break' at the top level of a donated statement must be stripped,
  // except when the injection point is already inside a loop or a switch.
  final DonateDeadCodeTransformation transformation = getDummyTransformationObject();
  final TranslationUnit donor = ParseHelper.parse("#version 310 es\n"
      + "void main() {\n"
      + " for (int i = 0; i < 10; i ++)\n"
      + " if (i > 5) break;\n"
      + "}\n");
  final TranslationUnit reference = ParseHelper.parse("#version 310 es\n"
      + "void main() {\n"
      + " ;\n"
      + " for(int i = 0; i < 100; i++) {\n"
      + " switch (i) {\n"
      + " case 0:\n"
      + " i++;\n"
      + " default:\n"
      + " i++;\n"
      + " }\n"
      + " }"
      + "}\n");
  for (IInjectionPoint injectionPoint : new InjectionPoints(reference, new RandomWrapper(0),
      item -> true).getAllInjectionPoints()) {
    // Donate the 'if (i > 5) break;' body of the donor's for-loop.
    final Stmt statementToDonate =
        ((ForStmt) donor.getMainFunction().getBody().getStmt(0)).getBody().clone();
    assert statementToDonate instanceof IfStmt;
    final DonationContext donationContext = new DonationContext(statementToDonate,
        new HashMap<>(), new ArrayList<>(), donor.getMainFunction());
    final Stmt donated = transformation.prepareStatementToDonate(injectionPoint, donationContext,
        TransformationProbabilities.DEFAULT_PROBABILITIES, new RandomWrapper(0),
        ShadingLanguageVersion.ESSL_310);
    final boolean breakSurvived = new CheckPredicateVisitor() {
      @Override
      public void visitBreakStmt(BreakStmt breakStmt) {
        predicateHolds();
      }
    }.test(donated);
    // The 'break' survives exactly when the injection point can legally contain one.
    assertEquals(breakSurvived, injectionPoint.inLoop() || injectionPoint.inSwitch());
  }
}
@Test
public void prepareStatementToDonateTopLevelContinueRemovedWhenNecessary() throws Exception {
  // A 'continue' at the top level of a donated statement must be stripped,
  // except when the injection point is already inside a loop.
  final DonateDeadCodeTransformation transformation = getDummyTransformationObject();
  final TranslationUnit donor = ParseHelper.parse("#version 100\n"
      + "void main() {\n"
      + " for (int i = 0; i < 10; i ++)\n"
      + " if (i > 5) continue;\n"
      + "\n"
      + "}\n");
  final TranslationUnit reference = ParseHelper.parse("#version 100\n"
      + "void main() {\n"
      + " ;\n"
      + " switch(0) {\n"
      + " case 1:\n"
      + " break;\n"
      + " default:\n"
      + " 1;\n"
      + " }\n"
      + " for(int i = 0; i < 100; i++) {\n"
      + " ;\n"
      + " }\n"
      + "}\n");
  for (IInjectionPoint injectionPoint : new InjectionPoints(reference, new RandomWrapper(0),
      item -> true).getAllInjectionPoints()) {
    // Donate the 'if (i > 5) continue;' body of the donor's for-loop.
    final Stmt statementToDonate =
        ((ForStmt) donor.getMainFunction().getBody().getStmt(0)).getBody().clone();
    assert statementToDonate instanceof IfStmt;
    final DonationContext donationContext = new DonationContext(statementToDonate,
        new HashMap<>(), new ArrayList<>(), donor.getMainFunction());
    final Stmt donated = transformation.prepareStatementToDonate(injectionPoint, donationContext,
        TransformationProbabilities.DEFAULT_PROBABILITIES, new RandomWrapper(0),
        ShadingLanguageVersion.ESSL_100);
    final boolean continueSurvived = new CheckPredicateVisitor() {
      @Override
      public void visitContinueStmt(ContinueStmt continueStmt) {
        predicateHolds();
      }
    }.test(donated);
    // Unlike 'break', 'continue' is only legal inside a loop (not a switch).
    assertEquals(continueSurvived, injectionPoint.inLoop());
  }
}
@Test
public void prepareStatementToDonateTopLevelCaseAndDefaultRemoved() throws Exception {
  // Top-level 'case' and 'default' labels must always be removed from a donated
  // statement, even when the injection point lies inside a switch.
  final DonateDeadCodeTransformation transformation = getDummyTransformationObject();
  final TranslationUnit donor = ParseHelper.parse("#version 310 es\n"
      + "void main() {\n"
      + " int x = 3;\n"
      + " switch (x) {\n"
      + " case 0:\n"
      + " x++;\n"
      + " default:\n"
      + " x++;\n"
      + " }\n"
      + "}\n");
  final TranslationUnit reference = ParseHelper.parse("#version 310 es\n"
      + "void main() {\n"
      + " switch (0) {\n"
      + " case 1:\n"
      + " 1;\n"
      + " default:\n"
      + " 2;\n"
      + " }\n"
      + "}\n");
  for (IInjectionPoint injectionPoint : new InjectionPoints(reference, new RandomWrapper(0),
      item -> true).getAllInjectionPoints()) {
    // Donate only the switch *body* (statement index 1 in the donor's main),
    // so its labels sit at the donated statement's top level.
    final Stmt statementToDonate =
        ((SwitchStmt) donor.getMainFunction().getBody().getStmt(1)).getBody().clone();
    final DonationContext donationContext = new DonationContext(statementToDonate,
        new HashMap<>(), new ArrayList<>(), donor.getMainFunction());
    final Stmt donated = transformation.prepareStatementToDonate(injectionPoint, donationContext,
        TransformationProbabilities.DEFAULT_PROBABILITIES, new RandomWrapper(0),
        ShadingLanguageVersion.ESSL_310);
    new StandardVisitor() {
      @Override
      public void visitDefaultCaseLabel(DefaultCaseLabel defaultCaseLabel) {
        // 'default' labels should have been removed.
        fail();
      }
      @Override
      public void visitExprCaseLabel(ExprCaseLabel exprCaseLabel) {
        // 'case' labels should have been removed.
        fail();
      }
    }.visit(donated);
  }
}
@Test
public void prepareStatementToDonateBreakFromLoopKept() throws Exception {
  // When an entire loop is donated, a 'break' nested inside it is still legal
  // and must be preserved.
  final DonateDeadCodeTransformation transformation = getDummyTransformationObject();
  final TranslationUnit donor = ParseHelper.parse("#version 100\n"
      + "void main() {\n"
      + " for (int i = 0; i < 10; i ++)\n"
      + " if (i > 5) break;\n"
      + "\n"
      + "}\n");
  final TranslationUnit reference = ParseHelper.parse("#version 100\n"
      + "void main() {\n"
      + " ;\n"
      + " for(int i = 0; i < 100; i++) {\n"
      + " }\n"
      + "}\n");
  for (IInjectionPoint injectionPoint : new InjectionPoints(reference, new RandomWrapper(0),
      item -> true).getAllInjectionPoints()) {
    // Donate the whole for-loop, not just its body.
    final Stmt statementToDonate = donor.getMainFunction().getBody().getStmt(0).clone();
    assert statementToDonate instanceof ForStmt;
    final DonationContext donationContext = new DonationContext(statementToDonate,
        new HashMap<>(), new ArrayList<>(), donor.getMainFunction());
    final Stmt donated = transformation.prepareStatementToDonate(injectionPoint, donationContext,
        TransformationProbabilities.DEFAULT_PROBABILITIES, new RandomWrapper(0),
        ShadingLanguageVersion.ESSL_100);
    assertTrue(new CheckPredicateVisitor() {
      @Override
      public void visitBreakStmt(BreakStmt breakStmt) {
        predicateHolds();
      }
    }.test(donated));
  }
}
@Test
public void prepareStatementToDonateSwitchWithBreakAndDefaultKept() throws Exception {
// Checks that 'case', 'default' and 'break' occurring in a switch are kept if the whole
// switch statement is donated.
final DonateDeadCodeTransformation dlc = getDummyTransformationObject();
final TranslationUnit donor = ParseHelper.parse("#version 310 es\n"
+ "void main() {\n"
+ " switch (0) {\n"
+ " case 0:\n"
+ " 1;\n"
+ " break;\n"
+ " default:\n"
+ " 2;\n"
+ " }\n"
+ "}\n");
final TranslationUnit reference = ParseHelper.parse("#version 310 es\n"
+ "void main() {\n"
+ " ;\n"
+ " switch (0) {\n"
+ " case 1:\n"
+ " 1;\n"
+ " default:\n"
+ " 2;\n"
+ " }\n"
+ "}\n");
for (IInjectionPoint injectionPoint : new InjectionPoints(reference, new RandomWrapper(0),
item -> true).getAllInjectionPoints()) {
final Stmt toDonate = donor.getMainFunction().getBody().getStmt(0).clone();
assert toDonate instanceof SwitchStmt;
final DonationContext dc = new DonationContext(toDonate, new HashMap<>(),
new ArrayList<>(), donor.getMainFunction());
final Stmt donated = dlc.prepareStatementToDonate(injectionPoint, dc,
TransformationProbabilities.DEFAULT_PROBABILITIES, new RandomWrapper(0),
ShadingLanguageVersion.ESSL_310);
// Check that the donated statement contains exactly one each of 'break', 'case' and
// 'default'.
new StandardVisitor() {
private boolean foundBreak = false;
private boolean foundCase = false;
private boolean foundDefault = false;
@Override
public void visitBreakStmt(BreakStmt breakStmt) {
assertFalse(foundBreak);
foundBreak = true;
}
@Override
public void visitExprCaseLabel(ExprCaseLabel exprCaseLabel) {
assertFalse(foundCase);
foundCase = true;
}
@Override
public void visitDefaultCaseLabel(DefaultCaseLabel defaultCaseLabel) {
assertFalse(foundDefault);
foundDefault = true;
}
private void check(Stmt stmt) {
visit(stmt);
assertTrue(foundBreak);
assertTrue(foundCase);
assertTrue(foundDefault);
}
};
}
}
@Test
public void prepareStatementToDonateContinueInLoopKept() throws Exception {
// Checks that a 'continue' in a loop gets kept if the whole loop is donated.
final DonateDeadCodeTransformation dlc = getDummyTransformationObject();
final TranslationUnit donor = ParseHelper.parse("#version 100\n"
+ "void main() {\n"
+ " for (int i = 0; i < 10; i ++)\n"
+ " if (i > 5) continue;\n"
+ "\n"
+ "}\n");
final TranslationUnit reference = ParseHelper.parse("#version 100\n"
+ "void main() {\n"
+ " ;\n"
+ " for(int i = 0; i < 100; i++) {\n"
+ " }\n"
+ "}\n");
for (IInjectionPoint injectionPoint : new InjectionPoints(reference, new RandomWrapper(0),
item -> true).getAllInjectionPoints()) {
final Stmt toDonate = donor.getMainFunction().getBody().getStmt(0).clone();
assert toDonate instanceof ForStmt;
final DonationContext dc = new DonationContext(toDonate, new HashMap<>(),
new ArrayList<>(), donor.getMainFunction());
final Stmt donated = dlc.prepareStatementToDonate(injectionPoint, dc,
TransformationProbabilities.DEFAULT_PROBABILITIES, new RandomWrapper(0),
ShadingLanguageVersion.ESSL_100);
// Check that the 'continue' statement is retained.
assertTrue(new CheckPredicateVisitor() {
@Override
public void visitContinueStmt(ContinueStmt continueStmt) {
predicateHolds();
}
}.test(donated));
}
}
  @Test
  public void testInAndOutParametersDonatedOk() throws Exception {
    // This checks that donation of code that uses 'in' and 'out' parameters of functions works.
    final ShaderJobFileOperations fileOps = new ShaderJobFileOperations();
    final File donors = testFolder.newFolder("donors");
    final File referenceFile = testFolder.newFile("reference.json");
    {
      // This donor is designed to have a high chance of leading to an in, out or inout parameter
      // being used by a donated statement, making it a free variable for which a local variable
      // will need to be declared.
      final String donorSource =
          "#version 300 es\n"
              + "void foo(in int a, out int b, inout int c) {\n"
              + " {\n"
              + "  {\n"
              + "   {\n"
              + "     b = a;\n"
              + "     c = c + a;\n"
              + "   }\n"
              + "  }\n"
              + " }\n"
              + "}\n";
      fileOps.writeShaderJobFile(
          new GlslShaderJob(
              Optional.empty(),
              new PipelineInfo(),
              ParseHelper.parse(donorSource)),
          new File(donors, "donor.json")
      );
    }
    {
      // Minimal reference shader: the transformation injects donated code into it.
      final String referenceSource = "#version 300 es\n"
          + "void main() {\n"
          + "}\n";
      fileOps.writeShaderJobFile(
          new GlslShaderJob(
              Optional.empty(),
              new PipelineInfo(),
              ParseHelper.parse(referenceSource)),
          referenceFile
      );
    }
    int noCodeDonatedCount = 0;
    // Try the following a few times, so that there is a good chance of triggering the issue
    // this test was used to catch, should it return:
    for (int seed = 0; seed < 15; seed++) {
      final ShaderJob referenceShaderJob = fileOps.readShaderJobFile(referenceFile);
      // Apply dead code donation (the class under test is DonateDeadCodeTransformation).
      // NOTE(review): the probabilities helper is named onlyLiveCodeAlwaysSubstitute — confirm
      // this is the intended configuration for a dead-code donation test.
      final DonateDeadCodeTransformation transformation =
          new DonateDeadCodeTransformation(IRandom::nextBoolean, donors,
              GenerationParams.normal(ShaderKind.FRAGMENT, false, false));
      assert referenceShaderJob.getFragmentShader().isPresent();
      final boolean result = transformation.apply(
          referenceShaderJob.getFragmentShader().get(),
          TransformationProbabilities.onlyLiveCodeAlwaysSubstitute(),
          new RandomWrapper(seed),
          GenerationParams.normal(ShaderKind.FRAGMENT, false, false)
      );
      if (result) {
        final File tempFile = testFolder.newFile("shader" + seed + ".json");
        fileOps.writeShaderJobFile(referenceShaderJob, tempFile);
        // This will fail if the shader job turns out to be invalid.
        fileOps.areShadersValid(tempFile, true);
      } else {
        ++noCodeDonatedCount;
      }
    }
    // The above code tests donation of dead code, but there is still a chance that no code will
    // be donated. We assert that this happens < 10 times to ensure that we get some test
    // coverage, but this could fail due to bad luck.
    Assert.assertTrue(
        "Donation failure count should be < 10, " + noCodeDonatedCount,
        noCodeDonatedCount < 10
    );
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.infinispan;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.infinispan.util.Condition;
import org.infinispan.Cache;
import org.infinispan.commons.util.concurrent.NotifyingFuture;
import org.infinispan.stats.Stats;
import org.junit.Test;
import static org.apache.camel.component.infinispan.util.Wait.waitFor;
public class InfinispanProducerTest extends InfinispanTestSupport {
private static final String COMMAND_VALUE = "commandValue";
private static final String COMMAND_KEY = "commandKey1";
private static final long LIFESPAN_TIME = 100;
private static final long LIFESPAN_FOR_MAX_IDLE = -1;
private static final long MAX_IDLE_TIME = 200;
@Test
public void keyAndValueArePublishedWithDefaultOperation() throws Exception {
template.send("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
}
});
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
}
@Test
public void cacheSizeTest() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
currentCache().put(KEY_TWO, VALUE_TWO);
Exchange exchange = template.request("direct:size", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.SIZE);
}
});
Integer cacheSize = exchange.getIn().getBody(Integer.class);
assertEquals(cacheSize, new Integer(2));
}
@Test
public void publishKeyAndValueByExplicitlySpecifyingTheOperation() throws Exception {
template.send("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUT);
}
});
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
}
@Test
public void publishKeyAndValueAsync() throws Exception {
final Exchange exchange = template.send("direct:putasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
NotifyingFuture resultPutAsync = exchange.getIn().getBody(NotifyingFuture.class);
Object value = currentCache().get(KEY_ONE);
return resultPutAsync.isDone() && value.toString().equals(VALUE_ONE);
}
}, 5000);
}
@Test
public void publishKeyAndValueAsyncWithLifespan() throws Exception {
final Exchange exchange = template.send("direct:putasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
NotifyingFuture resultPutAsync = exchange.getIn().getBody(NotifyingFuture.class);
Object value = currentCache().get(KEY_ONE);
return resultPutAsync.isDone() && value.equals(VALUE_ONE);
}
}, 1000);
waitForNullValue(KEY_ONE);
}
@Test
public void publishKeyAndValueAsyncWithLifespanAndMaxIdle() throws Exception {
final Exchange exchange = template.send("direct:putasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
NotifyingFuture resultPutAsync = exchange.getIn().getBody(NotifyingFuture.class);
return resultPutAsync.isDone() && currentCache().get(KEY_ONE).toString().equals(VALUE_ONE);
}
}, 1000);
Thread.sleep(300);
waitForNullValue(KEY_ONE);
}
@Test
public void publishMapNormal() throws Exception {
template.send("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(KEY_ONE, VALUE_ONE);
map.put(KEY_TWO, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.MAP, map);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTALL);
}
});
assertEquals(2, currentCache().size());
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
value = currentCache().get(KEY_TWO);
assertEquals(VALUE_TWO, value.toString());
}
@Test
public void publishMapWithLifespan() throws Exception {
template.send("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(KEY_ONE, VALUE_ONE);
map.put(KEY_TWO, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.MAP, map);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTALL);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
assertEquals(2, currentCache().size());
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
value = currentCache().get(KEY_TWO);
assertEquals(VALUE_TWO, value.toString());
waitForNullValue(KEY_ONE);
}
@Test
public void publishMapWithLifespanAndMaxIdleTime() throws Exception {
template.send("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(KEY_ONE, VALUE_ONE);
map.put(KEY_TWO, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.MAP, map);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTALL);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
assertEquals(2, currentCache().size());
Thread.sleep(300);
waitForNullValue(KEY_TWO);
}
@Test
public void publishMapNormalAsync() throws Exception {
template.send("direct:putallasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(KEY_ONE, VALUE_ONE);
map.put(KEY_TWO, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.MAP, map);
}
});
Thread.sleep(100);
assertEquals(2, currentCache().size());
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
value = currentCache().get(KEY_TWO);
assertEquals(VALUE_TWO, value.toString());
}
@Test
public void publishMapWithLifespanAsync() throws Exception {
template.send("direct:putallasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(KEY_ONE, VALUE_ONE);
map.put(KEY_TWO, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.MAP, map);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTALL);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
Object valueOne = currentCache().get(KEY_ONE);
Object valueTwo = currentCache().get(KEY_TWO);
return valueOne.equals(VALUE_ONE) && valueTwo.equals(VALUE_TWO) && currentCache().size() == 2;
}
}, 100);
waitForNullValue(KEY_ONE);
}
@Test
public void publishMapWithLifespanAndMaxIdleTimeAsync() throws Exception {
template.send("direct:putallasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
Map<String, String> map = new HashMap<String, String>();
map.put(KEY_ONE, VALUE_ONE);
map.put(KEY_TWO, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.MAP, map);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTALL);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
return currentCache().size() == 2;
}
}, 100);
Thread.sleep(300);
waitForNullValue(KEY_ONE);
waitForNullValue(KEY_TWO);
}
@Test
public void putIfAbsentAlreadyExists() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
template.send("direct:putifabsent", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTIFABSENT);
}
});
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
assertEquals(1, currentCache().size());
}
@Test
public void putIfAbsentNotExists() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
template.send("direct:putifabsent", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_TWO);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUTIFABSENT);
}
});
Object value = currentCache().get(KEY_TWO);
assertEquals(VALUE_TWO, value.toString());
assertEquals(2, currentCache().size());
}
@Test
public void putIfAbsentKeyAndValueAsync() throws Exception {
final Exchange exchange = template.send("direct:putifabsentasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
NotifyingFuture resultPutAsync = exchange.getIn().getBody(NotifyingFuture.class);
return resultPutAsync.isDone() && currentCache().get(KEY_ONE).equals(VALUE_ONE);
}
}, 2000);
}
@Test
public void putIfAbsentKeyAndValueAsyncWithLifespan() throws Exception {
final Exchange exchange = template.send("direct:putifabsentasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
NotifyingFuture resultPutAsync = exchange.getIn().getBody(NotifyingFuture.class);
return resultPutAsync.isDone() && currentCache().get(KEY_ONE).equals(VALUE_ONE);
}
}, 100);
waitForNullValue(KEY_ONE);
}
@Test
public void putIfAbsentKeyAndValueAsyncWithLifespanAndMaxIdle() throws Exception {
final Exchange exchange = template.send("direct:putifabsentasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
waitFor(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
NotifyingFuture resultPutAsync = exchange.getIn().getBody(NotifyingFuture.class);
return resultPutAsync.isDone() && currentCache().get(KEY_ONE).equals(VALUE_ONE);
}
}, 500);
Thread.sleep(300);
waitForNullValue(KEY_ONE);
}
@Test
public void notContainsKeyTest() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:containskey", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_TWO);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.CONTAINSKEY);
}
});
Boolean cacheContainsKey = exchange.getIn().getBody(Boolean.class);
assertFalse(cacheContainsKey);
}
@Test
public void containsKeyTest() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:containskey", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.CONTAINSKEY);
}
});
Boolean cacheContainsKey = exchange.getIn().getBody(Boolean.class);
assertTrue(cacheContainsKey);
}
@Test
public void notContainsValueTest() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:containsvalue", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.CONTAINSVALUE);
}
});
Boolean cacheContainsValue = exchange.getIn().getBody(Boolean.class);
assertFalse(cacheContainsValue);
}
@Test
public void containsValueTest() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:containsvalue", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.CONTAINSVALUE);
}
});
Boolean cacheContainsValue = exchange.getIn().getBody(Boolean.class);
assertTrue(cacheContainsValue);
}
@Test
public void publishKeyAndValueWithLifespan() throws Exception {
template.send("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUT);
}
});
Object value = currentCache().get(KEY_ONE);
assertEquals(VALUE_ONE, value.toString());
Exchange exchange;
exchange = template.send("direct:get", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
}
});
String resultGet = exchange.getIn().getBody(String.class);
assertEquals(VALUE_ONE, resultGet);
waitForNullValue(KEY_ONE);
}
@Test
public void putOperationReturnsThePreviousValue() throws Exception {
currentCache().put(KEY_ONE, "existing value");
Exchange exchange = template.request("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUT);
}
});
String result = exchange.getIn().getBody(String.class);
assertEquals("existing value", result);
}
@Test
public void retrievesAValueByKey() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:start", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.GET);
}
});
assertEquals(VALUE_ONE, exchange.getIn().getBody(String.class));
}
@Test
public void replaceAValueByKey() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replace", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REPLACE);
}
});
assertEquals(VALUE_ONE, exchange.getIn().getBody(String.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
}
@Test
public void replaceAValueByKeyWithLifespan() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replace", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REPLACE);
}
});
assertEquals(VALUE_ONE, exchange.getIn().getBody(String.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
waitForNullValue(KEY_ONE);
}
@Test
public void replaceAValueByKeyWithLifespanAndMaxIdleTime() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replace", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REPLACE);
}
});
assertEquals(VALUE_ONE, exchange.getIn().getBody(String.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
Thread.sleep(300);
waitForNullValue(KEY_ONE);
}
@Test
public void replaceAValueByKeyWithOldValue() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replace", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OLD_VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REPLACE);
}
});
assertTrue(exchange.getIn().getBody(Boolean.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
}
@Test
public void replaceAValueByKeyWithLifespanWithOldValue() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replace", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OLD_VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REPLACE);
}
});
assertTrue(exchange.getIn().getBody(Boolean.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
waitForNullValue(KEY_ONE);
}
@Test
public void replaceAValueByKeyWithLifespanAndMaxIdleTimeWithOldValue() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replace", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.OLD_VALUE, VALUE_ONE);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REPLACE);
}
});
assertTrue(exchange.getIn().getBody(Boolean.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
Thread.sleep(300);
waitForNullValue(KEY_ONE);
}
@Test
public void replaceAValueByKeyAsync() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replaceasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
}
});
assertEquals(VALUE_ONE, exchange.getIn().getBody(String.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
}
@Test
public void replaceAValueByKeyWithLifespanAsync() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replaceasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_TIME));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
assertEquals(exchange.getIn().getBody(String.class), VALUE_ONE);
assertEquals(currentCache().get(KEY_ONE), VALUE_TWO);
waitForNullValue(KEY_ONE);
}
@Test
public void replaceAValueByKeyWithLifespanAndMaxIdleTimeAsync() throws Exception {
currentCache().put(KEY_ONE, VALUE_ONE);
Exchange exchange = template.request("direct:replaceasync", new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, new Long(LIFESPAN_FOR_MAX_IDLE));
exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, new Long(MAX_IDLE_TIME));
exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
}
});
assertEquals(VALUE_ONE, exchange.getIn().getBody(String.class));
assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
Thread.sleep(300);
waitForNullValue(KEY_ONE);
}
/** Conditional async REPLACE: succeeds only when the supplied old value matches the cached one. */
@Test
public void replaceAValueByKeyAsyncWithOldValue() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Processor setReplaceHeaders = new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            inner.getIn().setHeader(InfinispanConstants.OLD_VALUE, VALUE_ONE);
            inner.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
        }
    };
    Exchange result = template.request("direct:replaceasync", setReplaceHeaders);
    // Conditional replace reports success as a boolean body.
    assertTrue(result.getIn().getBody(Boolean.class));
    assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
}
/** Conditional async REPLACE with a lifespan: succeeds, then the new entry expires. */
@Test
public void replaceAValueByKeyWithLifespanAsyncWithOldValue() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Exchange exchange = template.request("direct:replaceasync", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
            exchange.getIn().setHeader(InfinispanConstants.OLD_VALUE, VALUE_ONE);
            // Long.valueOf instead of the deprecated Long(long) constructor.
            exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, Long.valueOf(LIFESPAN_TIME));
            exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
        }
    });
    // Conditional replace reports success as a boolean body.
    assertTrue(exchange.getIn().getBody(Boolean.class));
    assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
    waitForNullValue(KEY_ONE);
}
/** Conditional async REPLACE with lifespan and max-idle: succeeds, then expires after idling. */
@Test
public void replaceAValueByKeyWithLifespanAndMaxIdleTimeAsyncWithOldValue() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Exchange exchange = template.request("direct:replaceasync", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            exchange.getIn().setHeader(InfinispanConstants.VALUE, VALUE_TWO);
            exchange.getIn().setHeader(InfinispanConstants.OLD_VALUE, VALUE_ONE);
            // Long.valueOf instead of the deprecated Long(long) constructor.
            exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME, Long.valueOf(LIFESPAN_FOR_MAX_IDLE));
            exchange.getIn().setHeader(InfinispanConstants.LIFESPAN_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
            exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME, Long.valueOf(MAX_IDLE_TIME));
            exchange.getIn().setHeader(InfinispanConstants.MAX_IDLE_TIME_UNIT, TimeUnit.MILLISECONDS.toString());
        }
    });
    assertTrue(exchange.getIn().getBody(Boolean.class));
    assertEquals(VALUE_TWO, currentCache().get(KEY_ONE));
    // Let the max-idle window elapse before polling for expiry.
    Thread.sleep(300);
    waitForNullValue(KEY_ONE);
}
/** REMOVE by key returns the removed value in the body and deletes the entry. */
@Test
public void deletesExistingValueByKey() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Processor removeByKey = new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            inner.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REMOVE);
        }
    };
    Exchange result = template.request("direct:start", removeByKey);
    assertEquals(VALUE_ONE, result.getIn().getBody(String.class));
    assertNull(currentCache().get(KEY_ONE));
}
/** Async REMOVE by key: the returned future completes and the entry is gone. */
@Test
public void deletesExistingValueByKeyAsync() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Exchange result = template.request("direct:removeasync", new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            inner.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REMOVEASYNC);
        }
    });
    // Give the asynchronous removal a moment to finish before inspecting the future.
    Thread.sleep(100);
    assertTrue(result.getIn().getBody(NotifyingFuture.class).isDone());
    assertNull(currentCache().get(KEY_ONE));
}
/** Conditional REMOVE (key + value): returns true and deletes the entry when the value matches. */
@Test
public void deletesExistingValueByKeyWithValue() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Processor removeByKeyAndValue = new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            inner.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
            inner.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REMOVE);
        }
    };
    Exchange result = template.request("direct:start", removeByKeyAndValue);
    assertTrue(result.getIn().getBody(Boolean.class));
    assertNull(currentCache().get(KEY_ONE));
}
/** Conditional async REMOVE (key + value): future completes and the entry is gone. */
@Test
public void deletesExistingValueByKeyAsyncWithValue() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    Exchange result = template.request("direct:removeasync", new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.KEY, KEY_ONE);
            inner.getIn().setHeader(InfinispanConstants.VALUE, VALUE_ONE);
            inner.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.REMOVEASYNC);
        }
    });
    // Give the asynchronous removal a moment to finish before inspecting the future.
    Thread.sleep(100);
    assertTrue(result.getIn().getBody(NotifyingFuture.class).isDone());
    assertNull(currentCache().get(KEY_ONE));
}
/** CLEAR removes every entry from the cache. */
@Test
public void clearsAllValues() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    assertFalse(currentCache().isEmpty());
    Processor clearOperation = new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.CLEAR);
        }
    };
    template.send("direct:start", clearOperation);
    assertTrue(currentCache().isEmpty());
}
/** Exercises endpoints whose operation is fixed in the endpoint URI rather than a message header. */
@Test
public void testUriCommandOption() throws Exception {
    // PUT via the URI-configured operation.
    template.send("direct:put", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, COMMAND_KEY);
            exchange.getIn().setHeader(InfinispanConstants.VALUE, COMMAND_VALUE);
        }
    });
    assertEquals(COMMAND_VALUE, currentCache().get(COMMAND_KEY));
    // GET returns the stored value in the message body.
    Exchange getExchange = template.send("direct:get", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, COMMAND_KEY);
        }
    });
    assertEquals(COMMAND_VALUE, getExchange.getIn().getBody(String.class));
    // REMOVE returns the removed value and empties the cache.
    Exchange removeExchange = template.send("direct:remove", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, COMMAND_KEY);
        }
    });
    assertEquals(COMMAND_VALUE, removeExchange.getIn().getBody(String.class));
    assertNull(currentCache().get(COMMAND_KEY));
    assertTrue(currentCache().isEmpty());
    // CLEAR wipes every entry that was added.
    currentCache().put(COMMAND_KEY, COMMAND_VALUE);
    currentCache().put("keyTest", "valueTest");
    template.send("direct:clear", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
        }
    });
    assertTrue(currentCache().isEmpty());
}
/** The deprecated {@code command} URI option must still map to the PUT operation. */
@Test
public void testDeprecatedUriOption() throws Exception {
    template.send("direct:put-deprecated-option", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, COMMAND_KEY);
            exchange.getIn().setHeader(InfinispanConstants.VALUE, COMMAND_VALUE);
        }
    });
    // Single assertion: the original asserted the same cache entry twice.
    assertEquals(COMMAND_VALUE, currentCache().get(COMMAND_KEY));
}
/** The deprecated long-form {@code command} value must still map to the PUT operation. */
@Test
public void testDeprecatedUriCommand() throws Exception {
    template.send("direct:put-deprecated-command", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
            exchange.getIn().setHeader(InfinispanConstants.KEY, COMMAND_KEY);
            exchange.getIn().setHeader(InfinispanConstants.VALUE, COMMAND_VALUE);
        }
    });
    // Single assertion: the original asserted the same cache entry twice.
    assertEquals(COMMAND_VALUE, currentCache().get(COMMAND_KEY));
}
/** Async CLEAR: the returned future completes and the cache ends up empty. */
@Test
public void clearAsyncTest() throws Exception {
    currentCache().put(KEY_ONE, VALUE_ONE);
    currentCache().put(KEY_TWO, VALUE_TWO);
    Exchange result = template.request("direct:clearasync", new Processor() {
        @Override
        public void process(Exchange inner) throws Exception {
            inner.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.CLEARASYNC);
        }
    });
    // Give the asynchronous clear a moment to finish before inspecting the future.
    Thread.sleep(100);
    assertTrue(result.getIn().getBody(NotifyingFuture.class).isDone());
    assertTrue(currentCache().isEmpty());
}
/** STATS reports the number of entries after two PUTs through the generic endpoint. */
@Test
public void statsOperation() throws Exception {
    ((Cache) currentCache()).getAdvancedCache().getStats().setStatisticsEnabled(true);
    // Store two entries through the header-driven endpoint, verifying each one.
    final String[] keys = {KEY_ONE, KEY_TWO};
    final String[] values = {VALUE_ONE, VALUE_TWO};
    for (int i = 0; i < keys.length; i++) {
        final String key = keys[i];
        final String value = values[i];
        template.send("direct:start", new Processor() {
            @Override
            public void process(Exchange exchange) throws Exception {
                exchange.getIn().setHeader(InfinispanConstants.KEY, key);
                exchange.getIn().setHeader(InfinispanConstants.VALUE, value);
                exchange.getIn().setHeader(InfinispanConstants.OPERATION, InfinispanOperation.PUT);
            }
        });
        assertEquals(value, currentCache().get(key).toString());
    }
    // The stats endpoint needs no headers; the body carries the Stats snapshot.
    Exchange statsExchange = template.send("direct:stats", new Processor() {
        @Override
        public void process(Exchange exchange) throws Exception {
        }
    });
    assertEquals(2L, statsExchange.getIn().getBody(Stats.class).getTotalNumberOfEntries());
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
    return new RouteBuilder() {
        @Override
        public void configure() {
            // Shared endpoint prefix; concatenation yields exactly the same URIs as before.
            final String cache = "infinispan?cacheContainer=#cacheContainer";
            from("direct:start").to(cache);
            from("direct:put").to(cache + "&operation=PUT");
            from("direct:put-deprecated-option").to(cache + "&command=PUT");
            from("direct:put-deprecated-command").to(cache + "&command=CamelInfinispanOperationPut");
            from("direct:putifabsent").to(cache + "&operation=PUTIFABSENT");
            from("direct:get").to(cache + "&operation=GET");
            from("direct:remove").to(cache + "&operation=REMOVE");
            from("direct:clear").to(cache + "&operation=CLEAR");
            from("direct:replace").to(cache + "&operation=REPLACE");
            from("direct:containskey").to(cache + "&operation=CONTAINSKEY");
            from("direct:containsvalue").to(cache + "&operation=CONTAINSVALUE");
            from("direct:size").to(cache + "&operation=SIZE");
            from("direct:putasync").to(cache + "&operation=PUTASYNC");
            from("direct:putallasync").to(cache + "&operation=PUTALLASYNC");
            from("direct:putifabsentasync").to(cache + "&operation=PUTIFABSENTASYNC");
            from("direct:replaceasync").to(cache + "&operation=REPLACEASYNC");
            from("direct:removeasync").to(cache + "&operation=REMOVEASYNC");
            from("direct:clearasync").to(cache + "&operation=CLEARASYNC");
            from("direct:stats").to(cache + "&operation=STATS");
        }
    };
}
/** Polls the GET endpoint (for up to 1000 ms) until the given key no longer resolves to a value. */
private void waitForNullValue(final String key) {
    Condition bodyIsNull = new Condition() {
        @Override
        public boolean isSatisfied() throws Exception {
            Exchange response = template.send("direct:get", new Processor() {
                @Override
                public void process(Exchange exchange) throws Exception {
                    exchange.getIn().setHeader(InfinispanConstants.KEY, key);
                }
            });
            return response.getIn().getBody(String.class) == null;
        }
    };
    waitFor(bodyIsNull, 1000);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.complex;
import static java.util.Collections.singletonList;
import static org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt;
import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt;
import static org.apache.arrow.util.Preconditions.checkArgument;
import static org.apache.arrow.vector.complex.BaseRepeatedValueVector.DATA_VECTOR_NAME;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import org.apache.arrow.memory.ArrowBuf;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.OutOfMemoryException;
import org.apache.arrow.memory.util.ArrowBufPointer;
import org.apache.arrow.memory.util.ByteFunctionHelpers;
import org.apache.arrow.memory.util.CommonUtil;
import org.apache.arrow.memory.util.hash.ArrowBufHasher;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.AddOrGetResult;
import org.apache.arrow.vector.BaseValueVector;
import org.apache.arrow.vector.BitVectorHelper;
import org.apache.arrow.vector.BufferBacked;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.ZeroVector;
import org.apache.arrow.vector.compare.VectorVisitor;
import org.apache.arrow.vector.complex.impl.UnionFixedSizeListReader;
import org.apache.arrow.vector.complex.impl.UnionFixedSizeListWriter;
import org.apache.arrow.vector.ipc.message.ArrowFieldNode;
import org.apache.arrow.vector.types.Types.MinorType;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.CallBack;
import org.apache.arrow.vector.util.JsonStringArrayList;
import org.apache.arrow.vector.util.OversizedAllocationException;
import org.apache.arrow.vector.util.SchemaChangeRuntimeException;
import org.apache.arrow.vector.util.TransferPair;
/** A ListVector where every list value is of the same size. */
public class FixedSizeListVector extends BaseValueVector implements BaseListVector, PromotableVector {

  /**
   * Creates a new nullable FixedSizeListVector whose lists each hold {@code size} elements.
   * The inner data vector remains {@link ZeroVector#INSTANCE} until {@link #addOrGetVector} is called.
   */
  public static FixedSizeListVector empty(String name, int size, BufferAllocator allocator) {
    FieldType fieldType = FieldType.nullable(new ArrowType.FixedSizeList(size));
    return new FixedSizeListVector(name, allocator, fieldType, null);
  }

  // Inner vector holding the flattened element data (listSize elements per list value).
  private FieldVector vector;
  // One validity bit per list value; a set bit marks the list at that index as non-null.
  private ArrowBuf validityBuffer;
  // Number of elements in every list; fixed at construction from the FixedSizeList type.
  private final int listSize;
  private final FieldType fieldType;
  private final String name;
  // Lazily created by getReader(); dropped via invalidateReader() when the inner vector changes.
  private UnionFixedSizeListReader reader;
  private int valueCount;
  // Byte size to use for the next validity-buffer allocation.
  private int validityAllocationSizeInBytes;

  /**
   * Creates a new instance.
   *
   * @param name The name for the vector.
   * @param allocator The allocator to use for creating/reallocating buffers for the vector.
   * @param fieldType The underlying data type of the vector.
   * @param unusedSchemaChangeCallback Currently unused.
   */
  public FixedSizeListVector(String name,
                             BufferAllocator allocator,
                             FieldType fieldType,
                             CallBack unusedSchemaChangeCallback) {
    super(allocator);
    this.name = name;
    // Start with the allocator's shared zero-length buffer; real allocation happens later.
    this.validityBuffer = allocator.getEmpty();
    this.vector = ZeroVector.INSTANCE;
    this.fieldType = fieldType;
    this.listSize = ((ArrowType.FixedSizeList) fieldType.getType()).getListSize();
    Preconditions.checkArgument(listSize >= 0, "list size must be non-negative");
    this.valueCount = 0;
    this.validityAllocationSizeInBytes = getValidityBufferSizeFromCount(INITIAL_VALUE_ALLOCATION);
  }

  @Override
  public Field getField() {
    // The single child field comes from the inner data vector.
    List<Field> children = Collections.singletonList(getDataVector().getField());
    return new Field(name, fieldType, children);
  }

  @Override
  public MinorType getMinorType() {
    return MinorType.FIXED_SIZE_LIST;
  }

  @Override
  public String getName() {
    return name;
  }

  /** Get the fixed size for each list. */
  public int getListSize() {
    return listSize;
  }

  @Override
  public void initializeChildrenFromFields(List<Field> children) {
    // A fixed-size list has exactly one child field describing its elements.
    checkArgument(children.size() == 1,
        "Lists have one child Field. Found: %s", children.isEmpty() ? "none" : children);
    Field field = children.get(0);
    AddOrGetResult<FieldVector> addOrGetVector = addOrGetVector(field.getFieldType());
    // Initialization must create the child; an already-present child indicates a schema conflict.
    checkArgument(addOrGetVector.isCreated(), "Child vector already existed: %s", addOrGetVector.getVector());
    addOrGetVector.getVector().initializeChildrenFromFields(field.getChildren());
  }

  @Override
  public List<FieldVector> getChildrenFromFields() {
    return singletonList(vector);
  }

  @Override
  public void loadFieldBuffers(ArrowFieldNode fieldNode, List<ArrowBuf> ownBuffers) {
    // This vector owns only the validity buffer; element data lives in the child vector.
    if (ownBuffers.size() != 1) {
      throw new IllegalArgumentException("Illegal buffer count, expected " + 1 + ", got: " + ownBuffers.size());
    }
    ArrowBuf bitBuffer = ownBuffers.get(0);
    // Release the old buffer before adopting the incoming one.
    validityBuffer.getReferenceManager().release();
    validityBuffer = BitVectorHelper.loadValidityBuffer(fieldNode, bitBuffer, allocator);
    valueCount = fieldNode.getLength();
    validityAllocationSizeInBytes = checkedCastToInt(validityBuffer.capacity());
  }

  @Override
  public List<ArrowBuf> getFieldBuffers() {
    List<ArrowBuf> result = new ArrayList<>(1);
    setReaderAndWriterIndex();
    result.add(validityBuffer);
    return result;
  }

  // Positions the validity buffer's reader/writer indices to cover exactly valueCount values.
  private void setReaderAndWriterIndex() {
    validityBuffer.readerIndex(0);
    validityBuffer.writerIndex(getValidityBufferSizeFromCount(valueCount));
  }

  /**
   * Get the inner vectors.
   *
   * @deprecated This API will be removed as the current implementations no longer support inner vectors.
   *
   * @return the inner vectors for this field as defined by the TypeLayout
   */
  @Deprecated
  @Override
  public List<BufferBacked> getFieldInnerVectors() {
    throw new UnsupportedOperationException("There are no inner vectors. Use getFieldBuffers");
  }

  @Override
  public UnionFixedSizeListReader getReader() {
    // Created lazily and cached; invalidateReader() clears the cache.
    if (reader == null) {
      reader = new UnionFixedSizeListReader(this);
    }
    return reader;
  }

  // Drops the cached reader so the next getReader() reflects the current inner vector.
  private void invalidateReader() {
    reader = null;
  }

  @Override
  public void allocateNew() throws OutOfMemoryException {
    if (!allocateNewSafe()) {
      throw new OutOfMemoryException("Failure while allocating memory");
    }
  }

  @Override
  public boolean allocateNewSafe() {
    /* boolean to keep track if all the memory allocation were successful
     * Used in the case of composite vectors when we need to allocate multiple
     * buffers for multiple vectors. If one of the allocations failed we need to
     * clear all the memory that we allocated
     */
    boolean success = false;
    try {
      /* we are doing a new allocation -- release the current buffers */
      clear();
      /* allocate validity buffer */
      allocateValidityBuffer(validityAllocationSizeInBytes);
      success = vector.allocateNewSafe();
    } finally {
      if (!success) {
        // Roll back any partial allocation so no memory is leaked.
        clear();
        return false;
      }
    }
    return true;
  }

  // Allocates a fresh, zeroed validity buffer of the given size (bytes).
  private void allocateValidityBuffer(final long size) {
    final int curSize = (int) size;
    validityBuffer = allocator.buffer(curSize);
    validityBuffer.readerIndex(0);
    validityAllocationSizeInBytes = curSize;
    // Zero the bits so every value starts out null.
    validityBuffer.setZero(0, validityBuffer.capacity());
  }

  @Override
  public void reAlloc() {
    reallocValidityBuffer();
    vector.reAlloc();
  }

  // Doubles the validity buffer (power-of-two sized), copying and zero-extending the old bits.
  private void reallocValidityBuffer() {
    final int currentBufferCapacity = checkedCastToInt(validityBuffer.capacity());
    long newAllocationSize = currentBufferCapacity * 2;
    if (newAllocationSize == 0) {
      // Nothing allocated yet: fall back to the configured or default initial size.
      if (validityAllocationSizeInBytes > 0) {
        newAllocationSize = validityAllocationSizeInBytes;
      } else {
        newAllocationSize = getValidityBufferSizeFromCount(INITIAL_VALUE_ALLOCATION) * 2;
      }
    }
    newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize);
    assert newAllocationSize >= 1;
    if (newAllocationSize > MAX_ALLOCATION_SIZE) {
      throw new OversizedAllocationException("Unable to expand the buffer");
    }
    final ArrowBuf newBuf = allocator.buffer((int) newAllocationSize);
    // Copy existing validity bits, then zero the newly added tail.
    newBuf.setBytes(0, validityBuffer, 0, currentBufferCapacity);
    newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity);
    validityBuffer.getReferenceManager().release(1);
    validityBuffer = newBuf;
    validityAllocationSizeInBytes = (int) newAllocationSize;
  }

  /** Returns the inner vector holding the flattened list elements. */
  public FieldVector getDataVector() {
    return vector;
  }

  /**
   * Start a new value in the list vector.
   *
   * @param index index of the value to start
   * @return the offset in the data vector of the first element of the new list
   */
  public int startNewValue(int index) {
    // Grow the validity buffer until the index fits.
    while (index >= getValidityBufferValueCapacity()) {
      reallocValidityBuffer();
    }
    BitVectorHelper.setBit(validityBuffer, index);
    return index * listSize;
  }

  /** Returns a new writer over this vector. */
  public UnionFixedSizeListWriter getWriter() {
    return new UnionFixedSizeListWriter(this);
  }

  @Override
  public void setInitialCapacity(int numRecords) {
    validityAllocationSizeInBytes = getValidityBufferSizeFromCount(numRecords);
    // Each record occupies listSize slots of the inner vector.
    vector.setInitialCapacity(numRecords * listSize);
  }

  @Override
  public int getValueCapacity() {
    if (vector == ZeroVector.INSTANCE || listSize == 0) {
      return 0;
    }
    // Capacity is bounded by both the data vector and the validity buffer.
    return Math.min(vector.getValueCapacity() / listSize, getValidityBufferValueCapacity());
  }

  @Override
  public int getBufferSize() {
    if (getValueCount() == 0) {
      return 0;
    }
    return getValidityBufferSizeFromCount(valueCount) + vector.getBufferSize();
  }

  @Override
  public int getBufferSizeFor(int valueCount) {
    if (valueCount == 0) {
      return 0;
    }
    return getValidityBufferSizeFromCount(valueCount) +
        vector.getBufferSizeFor(valueCount * listSize);
  }

  @Override
  public Iterator<ValueVector> iterator() {
    return Collections.<ValueVector>singleton(vector).iterator();
  }

  @Override
  public void clear() {
    // releaseBuffer returns the allocator's empty buffer after releasing the old one.
    validityBuffer = releaseBuffer(validityBuffer);
    vector.clear();
    valueCount = 0;
    super.clear();
  }

  @Override
  public void reset() {
    // Keep the allocated buffers but zero the validity bits and child data.
    validityBuffer.setZero(0, validityBuffer.capacity());
    vector.reset();
    valueCount = 0;
  }

  @Override
  public ArrowBuf[] getBuffers(boolean clear) {
    setReaderAndWriterIndex();
    final ArrowBuf[] buffers;
    if (getBufferSize() == 0) {
      buffers = new ArrowBuf[0];
    } else {
      List<ArrowBuf> list = new ArrayList<>();
      list.add(validityBuffer);
      list.addAll(Arrays.asList(vector.getBuffers(false)));
      buffers = list.toArray(new ArrowBuf[list.size()]);
    }
    if (clear) {
      // Transfer ownership to the caller: retain each buffer, then release our references.
      for (ArrowBuf buffer : buffers) {
        buffer.getReferenceManager().retain();
      }
      clear();
    }
    return buffers;
  }

  /**
   * Get value indicating if inner vector is set.
   * @return 1 if inner vector is explicitly set via #addOrGetVector else 0
   */
  public int size() {
    return vector == ZeroVector.INSTANCE ? 0 : 1;
  }

  @Override
  @SuppressWarnings("unchecked")
  public <T extends ValueVector> AddOrGetResult<T> addOrGetVector(FieldType type) {
    boolean created = false;
    if (vector == ZeroVector.INSTANCE) {
      vector = type.createNewSingleVector(DATA_VECTOR_NAME, allocator, null);
      invalidateReader();
      created = true;
    }
    // returned vector must have the same field
    if (!Objects.equals(vector.getField().getType(), type.getType())) {
      final String msg = String.format("Inner vector type mismatch. Requested type: [%s], actual type: [%s]",
          type.getType(), vector.getField().getType());
      throw new SchemaChangeRuntimeException(msg);
    }
    return new AddOrGetResult<>((T) vector, created);
  }

  @Override
  public void copyFromSafe(int inIndex, int outIndex, ValueVector from) {
    // copyFrom already grows capacity via the transfer pair, so "safe" delegates directly.
    copyFrom(inIndex, outIndex, from);
  }

  @Override
  public void copyFrom(int fromIndex, int thisIndex, ValueVector from) {
    Preconditions.checkArgument(this.getMinorType() == from.getMinorType());
    TransferPair pair = from.makeTransferPair(this);
    pair.copyValueSafe(fromIndex, thisIndex);
  }

  @Override
  public UnionVector promoteToUnion() {
    UnionVector vector = new UnionVector(name, allocator, /* field type */ null, /* call-back */ null);
    // Discard the old inner vector and replace it with the union.
    this.vector.clear();
    this.vector = vector;
    invalidateReader();
    return vector;
  }

  @Override
  public long getValidityBufferAddress() {
    return validityBuffer.memoryAddress();
  }

  @Override
  public long getDataBufferAddress() {
    // Element data is owned by the child vector, not this one.
    throw new UnsupportedOperationException();
  }

  @Override
  public long getOffsetBufferAddress() {
    // Fixed-size lists need no offset buffer.
    throw new UnsupportedOperationException();
  }

  @Override
  public ArrowBuf getValidityBuffer() {
    return validityBuffer;
  }

  @Override
  public ArrowBuf getDataBuffer() {
    // Element data is owned by the child vector, not this one.
    throw new UnsupportedOperationException();
  }

  @Override
  public ArrowBuf getOffsetBuffer() {
    // Fixed-size lists need no offset buffer.
    throw new UnsupportedOperationException();
  }

  @Override
  public List<?> getObject(int index) {
    if (isSet(index) == 0) {
      return null;
    }
    // Materialize the listSize elements stored contiguously at index * listSize.
    final List<Object> vals = new JsonStringArrayList<>(listSize);
    for (int i = 0; i < listSize; i++) {
      vals.add(vector.getObject(index * listSize + i));
    }
    return vals;
  }

  /**
   * Returns whether the value at index null.
   */
  public boolean isNull(int index) {
    return (isSet(index) == 0);
  }

  /**
   * Returns non-zero when the value at index is non-null.
   */
  public int isSet(int index) {
    // Extract the single validity bit for this index.
    final int byteIndex = index >> 3;
    final byte b = validityBuffer.getByte(byteIndex);
    final int bitIndex = index & 7;
    return (b >> bitIndex) & 0x01;
  }

  @Override
  public int getNullCount() {
    return BitVectorHelper.getNullCount(validityBuffer, valueCount);
  }

  @Override
  public int getValueCount() {
    return valueCount;
  }

  /**
   * Returns the number of elements the validity buffer can represent with its
   * current capacity.
   */
  private int getValidityBufferValueCapacity() {
    return capAtMaxInt(validityBuffer.capacity() * 8);
  }

  /**
   * Sets the value at index to null. Reallocates if index is larger than capacity.
   */
  public void setNull(int index) {
    while (index >= getValidityBufferValueCapacity()) {
      reallocValidityBuffer();
    }
    BitVectorHelper.unsetBit(validityBuffer, index);
  }

  /** Sets the value at index to not-null. Reallocates if index is larger than capacity. */
  public void setNotNull(int index) {
    while (index >= getValidityBufferValueCapacity()) {
      reallocValidityBuffer();
    }
    BitVectorHelper.setBit(validityBuffer, index);
  }

  @Override
  public void setValueCount(int valueCount) {
    this.valueCount = valueCount;
    while (valueCount > getValidityBufferValueCapacity()) {
      reallocValidityBuffer();
    }
    // The child vector stores listSize elements per list value.
    vector.setValueCount(valueCount * listSize);
  }

  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
    return getTransferPair(ref, allocator, null);
  }

  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) {
    return new TransferImpl(ref, allocator, callBack);
  }

  @Override
  public TransferPair makeTransferPair(ValueVector target) {
    return new TransferImpl((FixedSizeListVector) target);
  }

  @Override
  public int hashCode(int index) {
    return hashCode(index, null);
  }

  @Override
  public int hashCode(int index, ArrowBufHasher hasher) {
    if (isSet(index) == 0) {
      return ArrowBufPointer.NULL_HASH_CODE;
    }
    // Combine the hashes of all listSize elements belonging to this list.
    int hash = 0;
    for (int i = 0; i < listSize; i++) {
      hash = ByteFunctionHelpers.combineHash(hash, vector.hashCode(index * listSize + i, hasher));
    }
    return hash;
  }

  @Override
  public <OUT, IN> OUT accept(VectorVisitor<OUT, IN> visitor, IN value) {
    return visitor.visit(this, value);
  }

  @Override
  public int getElementStartIndex(int index) {
    return listSize * index;
  }

  @Override
  public int getElementEndIndex(int index) {
    return listSize * (index + 1);
  }

  /** TransferPair implementation moving or copying lists between two FixedSizeListVectors. */
  private class TransferImpl implements TransferPair {

    FixedSizeListVector to;
    // Pair used to move/copy the child (data) vector alongside the validity bits.
    TransferPair dataPair;

    public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) {
      this(new FixedSizeListVector(name, allocator, fieldType, callBack));
    }

    public TransferImpl(FixedSizeListVector to) {
      this.to = to;
      // Ensure the target has a matching child vector before pairing the data vectors.
      to.addOrGetVector(vector.getField().getFieldType());
      dataPair = vector.makeTransferPair(to.vector);
    }

    @Override
    public void transfer() {
      to.clear();
      dataPair.transfer();
      // Move the validity buffer (ownership transfer, not a copy) and then empty this vector.
      to.validityBuffer = BaseValueVector.transferBuffer(validityBuffer, to.allocator);
      to.setValueCount(valueCount);
      clear();
    }

    @Override
    public void splitAndTransfer(int startIndex, int length) {
      Preconditions.checkArgument(startIndex >= 0 && length >= 0 && startIndex + length <= valueCount,
          "Invalid parameters startIndex: %s, length: %s for valueCount: %s", startIndex, length, valueCount);
      // List positions map to element positions by multiplying with listSize.
      final int startPoint = listSize * startIndex;
      final int sliceLength = listSize * length;
      to.clear();
      /* splitAndTransfer validity buffer */
      splitAndTransferValidityBuffer(startIndex, length, to);
      /* splitAndTransfer data buffer */
      dataPair.splitAndTransfer(startPoint, sliceLength);
      to.setValueCount(length);
    }

    /*
     * transfer the validity.
     */
    private void splitAndTransferValidityBuffer(int startIndex, int length, FixedSizeListVector target) {
      int firstByteSource = BitVectorHelper.byteIndex(startIndex);
      int lastByteSource = BitVectorHelper.byteIndex(valueCount - 1);
      int byteSizeTarget = getValidityBufferSizeFromCount(length);
      int offset = startIndex % 8;
      if (length > 0) {
        if (offset == 0) {
          // slice
          if (target.validityBuffer != null) {
            target.validityBuffer.getReferenceManager().release();
          }
          target.validityBuffer = validityBuffer.slice(firstByteSource, byteSizeTarget);
          target.validityBuffer.getReferenceManager().retain(1);
        } else {
          /* Copy data
           * When the first bit starts from the middle of a byte (offset != 0),
           * copy data from src BitVector.
           * Each byte in the target is composed by a part in i-th byte,
           * another part in (i+1)-th byte.
           */
          target.allocateValidityBuffer(byteSizeTarget);
          for (int i = 0; i < byteSizeTarget - 1; i++) {
            byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer, firstByteSource + i, offset);
            byte b2 = BitVectorHelper.getBitsFromNextByte(validityBuffer, firstByteSource + i + 1, offset);
            target.validityBuffer.setByte(i, (b1 + b2));
          }
          /* Copying the last piece is done in the following manner:
           * if the source vector has 1 or more bytes remaining, we copy
           * the last piece as a byte formed by shifting data
           * from the current byte and the next byte.
           *
           * if the source vector has no more bytes remaining
           * (we are at the last byte), we copy the last piece as a byte
           * by shifting data from the current byte.
           */
          if ((firstByteSource + byteSizeTarget - 1) < lastByteSource) {
            byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer,
                firstByteSource + byteSizeTarget - 1, offset);
            byte b2 = BitVectorHelper.getBitsFromNextByte(validityBuffer,
                firstByteSource + byteSizeTarget, offset);
            target.validityBuffer.setByte(byteSizeTarget - 1, b1 + b2);
          } else {
            byte b1 = BitVectorHelper.getBitsFromCurrentByte(validityBuffer,
                firstByteSource + byteSizeTarget - 1, offset);
            target.validityBuffer.setByte(byteSizeTarget - 1, b1);
          }
        }
      }
    }

    @Override
    public ValueVector getTo() {
      return to;
    }

    @Override
    public void copyValueSafe(int fromIndex, int toIndex) {
      // Grow the target until the destination index fits.
      while (toIndex >= to.getValueCapacity()) {
        to.reAlloc();
      }
      BitVectorHelper.setValidityBit(to.validityBuffer, toIndex, isSet(fromIndex));
      // Copy each of the listSize child elements of the source list.
      int fromOffset = fromIndex * listSize;
      int toOffset = toIndex * listSize;
      for (int i = 0; i < listSize; i++) {
        dataPair.copyValueSafe(fromOffset + i, toOffset + i);
      }
    }
  }
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2007 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.gwt.dev.asm.util;
import com.google.gwt.dev.asm.Opcodes;
import com.google.gwt.dev.asm.signature.SignatureVisitor;
/**
* A {@link SignatureVisitor} that prints a disassembled view of the signature
* it visits.
*
* @author Eugene Kuleshov
* @author Eric Bruneton
*/
public class TraceSignatureVisitor implements SignatureVisitor {
    // Buffer accumulating the disassembled declaration text.
    private final StringBuffer declaration;
    // Whether the visited class is an interface; selects " extends " vs
    // " implements " when printing super-interfaces.
    private boolean isInterface;
    // True once the first formal type parameter has been printed (controls
    // whether to open with '<' or continue with ", ").
    private boolean seenFormalParameter;
    // True once an interface bound has been printed for the current formal
    // type parameter.
    private boolean seenInterfaceBound;
    // True once the first method parameter type has been printed.
    private boolean seenParameter;
    // True once the first super-interface has been printed.
    private boolean seenInterface;
    // Return type text; created lazily by visitReturnType(), may stay null.
    private StringBuffer returnType;
    // Exception type text; created lazily by visitExceptionType(), may stay null.
    private StringBuffer exceptions;
    /**
     * Stack used to keep track of class types that have arguments. Each element
     * of this stack is a boolean encoded in one bit. The top of the stack is
     * the lowest order bit. Pushing false = *2, pushing true = *2+1, popping =
     * /2.
     */
    private int argumentStack;
    /**
     * Stack used to keep track of array class types. Each element of this stack
     * is a boolean encoded in one bit. The top of the stack is the lowest order
     * bit. Pushing false = *2, pushing true = *2+1, popping = /2.
     */
    private int arrayStack;
    // Text emitted before the next type name: "", ", ", " extends ", etc.
    private String separator = "";
    /**
     * Creates a visitor for a class signature.
     * Only the ACC_INTERFACE bit of {@code access} is inspected.
     */
    public TraceSignatureVisitor(final int access) {
        isInterface = (access & Opcodes.ACC_INTERFACE) != 0;
        this.declaration = new StringBuffer();
    }
    // Internal constructor: return-type and exception visitors share this
    // class but write into their own dedicated buffers.
    private TraceSignatureVisitor(final StringBuffer buf) {
        this.declaration = buf;
    }
    // Opens the formal type parameter list on the first parameter, otherwise
    // appends a separator; resets the per-parameter interface-bound flag.
    public void visitFormalTypeParameter(final String name) {
        declaration.append(seenFormalParameter ? ", " : "<").append(name);
        seenFormalParameter = true;
        seenInterfaceBound = false;
    }
    // The class bound of the current formal parameter follows as a type.
    public SignatureVisitor visitClassBound() {
        separator = " extends ";
        startType();
        return this;
    }
    // Interface bounds after the first are joined with ", ".
    public SignatureVisitor visitInterfaceBound() {
        separator = seenInterfaceBound ? ", " : " extends ";
        seenInterfaceBound = true;
        startType();
        return this;
    }
    // Superclass of a class signature; also closes any open formal list.
    public SignatureVisitor visitSuperclass() {
        endFormals();
        separator = " extends ";
        startType();
        return this;
    }
    // Super-interfaces: interfaces "extend", classes "implement".
    public SignatureVisitor visitInterface() {
        separator = seenInterface ? ", " : isInterface
                ? " extends "
                : " implements ";
        seenInterface = true;
        startType();
        return this;
    }
    // Method parameter type; opens '(' before the first parameter.
    public SignatureVisitor visitParameterType() {
        endFormals();
        if (seenParameter) {
            declaration.append(", ");
        } else {
            seenParameter = true;
            declaration.append('(');
        }
        startType();
        return this;
    }
    // Closes the parameter list (emitting "()" for a no-arg method) and
    // redirects the visit into a fresh buffer for the return type.
    public SignatureVisitor visitReturnType() {
        endFormals();
        if (seenParameter) {
            seenParameter = false;
        } else {
            declaration.append('(');
        }
        declaration.append(')');
        returnType = new StringBuffer();
        return new TraceSignatureVisitor(returnType);
    }
    // Each declared exception is accumulated into the shared exceptions
    // buffer, comma-separated.
    public SignatureVisitor visitExceptionType() {
        if (exceptions == null) {
            exceptions = new StringBuffer();
        } else {
            exceptions.append(", ");
        }
        // startType();
        return new TraceSignatureVisitor(exceptions);
    }
    // Primitive type descriptors; the default arm handles 'D' (double),
    // the only remaining base-type descriptor.
    public void visitBaseType(final char descriptor) {
        switch (descriptor) {
            case 'V':
                declaration.append("void");
                break;
            case 'B':
                declaration.append("byte");
                break;
            case 'J':
                declaration.append("long");
                break;
            case 'Z':
                declaration.append("boolean");
                break;
            case 'I':
                declaration.append("int");
                break;
            case 'S':
                declaration.append("short");
                break;
            case 'C':
                declaration.append("char");
                break;
            case 'F':
                declaration.append("float");
                break;
            // case 'D':
            default:
                declaration.append("double");
                break;
        }
        endType();
    }
    // Type variable reference, e.g. "T".
    public void visitTypeVariable(final String name) {
        declaration.append(name);
        endType();
    }
    // Push "true" on the array stack; endType() later appends one "[]"
    // per set bit popped.
    public SignatureVisitor visitArrayType() {
        startType();
        arrayStack |= 1;
        return this;
    }
    public void visitClassType(final String name) {
        if ("java/lang/Object".equals(name)) {
            // Map<java.lang.Object,java.util.List>
            // or
            // abstract public V get(Object key); (seen in Dictionary.class)
            // should have Object
            // but java.lang.String extends java.lang.Object is unnecessary
            boolean needObjectClass = argumentStack % 2 != 0 || seenParameter;
            if (needObjectClass) {
                declaration.append(separator).append(name.replace('/', '.'));
            }
        } else {
            declaration.append(separator).append(name.replace('/', '.'));
        }
        separator = "";
        // Push false: no type arguments seen for this class type yet.
        argumentStack *= 2;
    }
    // Inner class of the current class type: close any open argument list
    // of the outer type, pop it, then start the inner type after a '.'.
    public void visitInnerClassType(final String name) {
        if (argumentStack % 2 != 0) {
            declaration.append('>');
        }
        argumentStack /= 2;
        declaration.append('.');
        declaration.append(separator).append(name.replace('/', '.'));
        separator = "";
        argumentStack *= 2;
    }
    // Unbounded wildcard argument "?"; first argument opens the '<'.
    public void visitTypeArgument() {
        if (argumentStack % 2 == 0) {
            ++argumentStack;
            declaration.append('<');
        } else {
            declaration.append(", ");
        }
        declaration.append('?');
    }
    // Bounded or exact type argument; tag is INSTANCEOF, EXTENDS or SUPER.
    public SignatureVisitor visitTypeArgument(final char tag) {
        if (argumentStack % 2 == 0) {
            ++argumentStack;
            declaration.append('<');
        } else {
            declaration.append(", ");
        }
        if (tag == EXTENDS) {
            declaration.append("? extends ");
        } else if (tag == SUPER) {
            declaration.append("? super ");
        }
        startType();
        return this;
    }
    // End of a class type: close its argument list if one was opened, pop
    // the argument stack, then resolve pending array dimensions.
    public void visitEnd() {
        if (argumentStack % 2 != 0) {
            declaration.append('>');
        }
        argumentStack /= 2;
        endType();
    }
    /** Returns the accumulated declaration text. */
    public String getDeclaration() {
        return declaration.toString();
    }
    /** Returns the return type text, or null for class signatures. */
    public String getReturnType() {
        return returnType == null ? null : returnType.toString();
    }
    /** Returns the exception list text, or null when none were declared. */
    public String getExceptions() {
        return exceptions == null ? null : exceptions.toString();
    }
    // -----------------------------------------------
    // Closes the formal type parameter list if one is open.
    private void endFormals() {
        if (seenFormalParameter) {
            declaration.append('>');
            seenFormalParameter = false;
        }
    }
    // Push false on the array stack: the upcoming type is not (yet) an array.
    private void startType() {
        arrayStack *= 2;
    }
    // Pop the array stack; for each true bit popped append one "[]".
    private void endType() {
        if (arrayStack % 2 == 0) {
            arrayStack /= 2;
        } else {
            while (arrayStack % 2 != 0) {
                arrayStack /= 2;
                declaration.append("[]");
            }
        }
    }
}
| |
/*
* Copyright 2015 Synced Synapse. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xbmc.kore.ui.sections.audio;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.BaseColumns;
import android.support.annotation.Nullable;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.view.View;
import org.xbmc.kore.jsonrpc.event.MediaSyncEvent;
import org.xbmc.kore.jsonrpc.type.PlaylistType;
import org.xbmc.kore.provider.MediaContract;
import org.xbmc.kore.provider.MediaDatabase;
import org.xbmc.kore.provider.MediaProvider;
import org.xbmc.kore.service.library.LibrarySyncService;
import org.xbmc.kore.ui.AbstractAdditionalInfoFragment;
import org.xbmc.kore.ui.AbstractInfoFragment;
import org.xbmc.kore.ui.generic.RefreshItem;
import org.xbmc.kore.ui.widgets.fabspeeddial.FABSpeedDial;
import org.xbmc.kore.utils.FileDownloadHelper;
import org.xbmc.kore.utils.LogUtils;
import org.xbmc.kore.utils.MediaPlayerUtils;
import org.xbmc.kore.utils.UIUtils;
import java.util.ArrayList;
public class ArtistInfoFragment extends AbstractInfoFragment
implements LoaderManager.LoaderCallbacks<Cursor> {
private static final String TAG = LogUtils.makeLogTag(ArtistInfoFragment.class);
// Loader IDs
private static final int LOADER_ARTIST = 0,
LOADER_SONGS = 1;
/**
* Handler on which to post RPC callbacks
*/
private Handler callbackHandler = new Handler();
@Override
protected AbstractAdditionalInfoFragment getAdditionalInfoFragment() {
return null;
}
@Override
protected RefreshItem createRefreshItem() {
RefreshItem refreshItem = new RefreshItem(getActivity(), LibrarySyncService.SYNC_ALL_MUSIC);
refreshItem.setListener(new RefreshItem.RefreshItemListener() {
@Override
public void onSyncProcessEnded(MediaSyncEvent event) {
if (event.status == MediaSyncEvent.STATUS_SUCCESS)
getLoaderManager().restartLoader(LOADER_ARTIST, null, ArtistInfoFragment.this);
}
});
return refreshItem;
}
@Override
protected boolean setupMediaActionBar() {
setOnAddToPlaylistListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
final PlaylistType.Item playListItem = new PlaylistType.Item();
playListItem.artistid = getDataHolder().getId();
MediaPlayerUtils.queue(ArtistInfoFragment.this, playListItem, PlaylistType.GetPlaylistsReturnType.AUDIO);
}
});
setOnDownloadListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
getLoaderManager().initLoader(LOADER_SONGS, null, ArtistInfoFragment.this);
}
});
return true;
}
@Override
protected boolean setupFAB(FABSpeedDial FAB) {
FAB.setOnFabClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
PlaylistType.Item item = new PlaylistType.Item();
item.artistid = getDataHolder().getId();
playItemOnKodi(item);
}
});
return true;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setExpandDescription(true);
}
@Override
public void onActivityCreated (Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
getLoaderManager().initLoader(LOADER_ARTIST, null, this);
setHasOptionsMenu(false);
}
@Override
public void onPause() {
//Make sure loader is not reloaded for albums and songs when we return
//These loaders should only be activated by the user pressing the download button
getLoaderManager().destroyLoader(LOADER_SONGS);
super.onPause();
}
/**
* Loader callbacks
*/
/** {@inheritDoc} */
@Override
public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
Uri uri;
switch (i) {
case LOADER_ARTIST:
uri = MediaContract.Artists.buildArtistUri(getHostInfo().getId(), getDataHolder().getId());
return new CursorLoader(getActivity(), uri,
DetailsQuery.PROJECTION, null, null, null);
case LOADER_SONGS:
uri = MediaContract.Songs.buildArtistSongsListUri(getHostInfo().getId(), getDataHolder().getId());
return new CursorLoader(getActivity(), uri,
SongsListQuery.PROJECTION, null, null, SongsListQuery.SORT);
default:
return null;
}
}
/** {@inheritDoc} */
@Override
public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
if (cursor != null && cursor.getCount() > 0) {
switch (cursorLoader.getId()) {
case LOADER_ARTIST:
cursor.moveToFirst();
FileDownloadHelper.SongInfo songInfo = new FileDownloadHelper.SongInfo(
cursor.getString(DetailsQuery.ARTIST),null, -1, -1, null, null);
setDownloadButtonState(songInfo.downloadDirectoryExists());
DataHolder dataHolder = getDataHolder();
dataHolder.setTitle(cursor.getString(DetailsQuery.ARTIST));
dataHolder.setUndertitle(cursor.getString(DetailsQuery.GENRE));
dataHolder.setDescription(cursor.getString(DetailsQuery.DESCRIPTION));
dataHolder.setPosterUrl(cursor.getString(DetailsQuery.THUMBNAIL));
dataHolder.setFanArtUrl(cursor.getString(DetailsQuery.FANART));
updateView(dataHolder);
break;
case LOADER_SONGS:
final ArrayList<FileDownloadHelper.SongInfo> songInfoList = new ArrayList<>(cursor.getCount());
if (cursor.moveToFirst()) {
do {
songInfoList.add(createSongInfo(cursor));
} while (cursor.moveToNext());
}
UIUtils.downloadSongs(getActivity(), songInfoList, getHostInfo(), callbackHandler);
}
}
}
/** {@inheritDoc} */
@Override
public void onLoaderReset(Loader<Cursor> cursorLoader) {
// Release loader's data
}
private FileDownloadHelper.SongInfo createSongInfo(Cursor cursor) {
return new FileDownloadHelper.SongInfo(
cursor.getString(SongsListQuery.DISPLAYARTIST),
cursor.getString(SongsListQuery.ALBUMTITLE),
cursor.getInt(SongsListQuery.SONGID),
cursor.getInt(SongsListQuery.TRACK),
cursor.getString(SongsListQuery.TITLE),
cursor.getString(SongsListQuery.FILE));
}
private interface DetailsQuery {
String[] PROJECTION = {
BaseColumns._ID,
MediaContract.Artists.ARTISTID,
MediaContract.Artists.ARTIST,
MediaContract.Artists.GENRE,
MediaContract.Artists.THUMBNAIL,
MediaContract.Artists.DESCRIPTION,
MediaContract.Artists.FANART
};
int ID = 0;
int ARTISTID = 1;
int ARTIST = 2;
int GENRE = 3;
int THUMBNAIL = 4;
int DESCRIPTION = 5;
int FANART = 6;
}
/**
* Song list query parameters.
*/
private interface SongsListQuery {
String[] PROJECTION = {
MediaDatabase.Tables.SONGS + "." + BaseColumns._ID,
MediaProvider.Qualified.SONGS_TITLE,
MediaProvider.Qualified.SONGS_TRACK,
MediaProvider.Qualified.SONGS_DURATION,
MediaProvider.Qualified.SONGS_FILE,
MediaProvider.Qualified.SONGS_SONGID,
MediaProvider.Qualified.SONGS_ALBUMID,
MediaProvider.Qualified.ALBUMS_TITLE,
MediaProvider.Qualified.SONGS_DISPLAYARTIST
};
String SORT = MediaContract.Songs.TRACK + " ASC";
int ID = 0;
int TITLE = 1;
int TRACK = 2;
int DURATION = 3;
int FILE = 4;
int SONGID = 5;
int ALBUMID = 6;
int ALBUMTITLE = 7;
int DISPLAYARTIST = 8;
}
}
| |
/*
* Copyright 2017 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.util;
import com.navercorp.pinpoint.common.server.bo.event.MonitorInfoBo;
import com.navercorp.pinpoint.common.server.bo.event.ThreadDumpBo;
import com.navercorp.pinpoint.common.server.bo.event.ThreadState;
import com.navercorp.pinpoint.common.util.CollectionUtils;
import com.navercorp.pinpoint.thrift.dto.command.TMonitorInfo;
import com.navercorp.pinpoint.thrift.dto.command.TThreadDump;
import com.navercorp.pinpoint.thrift.dto.command.TThreadState;
import org.apache.commons.lang3.StringUtils;
import java.util.List;
/**
* @author Taejin Koo
* @author jaehong.kim - Add createDumpMessage() for ThreadDumpBo
*/
public final class ThreadDumpUtils {
    /** Platform line separator appended after every dump line. */
    public static final String LINE_SEPARATOR = System.lineSeparator();
    /** Indentation unit; a tab rendered as 4 spaces. */
    public static final String TAB_SEPARATOR = "    "; // tab to 4 spaces
    private ThreadDumpUtils() {
        // utility class: static methods only, never instantiated
    }
    /**
     * Renders a Thrift thread dump into the classic jstack-like text format.
     *
     * @param threadDump the dump to render; must not be null
     * @return the formatted multi-line dump text
     */
    public static String createDumpMessage(TThreadDump threadDump) {
        TThreadState threadState = getThreadState(threadDump.getThreadState());
        // Header: "threadName" Id=0x<hex> STATE [lock/owner/flags]
        StringBuilder message = new StringBuilder("\"" + threadDump.getThreadName() + "\"");
        message.append(" Id=0x").append(Long.toHexString(threadDump.getThreadId()));
        message.append(' ').append(threadState.name());
        if (!StringUtils.isBlank(threadDump.getLockName())) {
            message.append(" on ").append(threadDump.getLockName());
        }
        if (!StringUtils.isBlank(threadDump.getLockOwnerName())) {
            message.append(" owned by \"").append(threadDump.getLockOwnerName()).append("\" Id=").append(threadDump.getLockOwnerId());
        }
        if (threadDump.isSuspended()) {
            message.append(" (suspended)");
        }
        if (threadDump.isInNative()) {
            message.append(" (in native)");
        }
        message.append(LINE_SEPARATOR);
        // Stack trace, annotated with lock info on the top frame and with
        // "- locked" lines at each monitor's recorded stack depth.
        for (int i = 0; i < threadDump.getStackTraceSize(); i++) {
            String stackTrace = threadDump.getStackTrace().get(i);
            message.append(TAB_SEPARATOR).append("at ").append(stackTrace);
            message.append(LINE_SEPARATOR);
            if (i == 0 && !StringUtils.isBlank(threadDump.getLockName())) {
                switch (threadState) {
                    case BLOCKED:
                        message.append(TAB_SEPARATOR).append("- blocked on ").append(threadDump.getLockName());
                        message.append(LINE_SEPARATOR);
                        break;
                    case WAITING:
                    case TIMED_WAITING:
                        // both waiting states render the same annotation
                        message.append(TAB_SEPARATOR).append("- waiting on ").append(threadDump.getLockName());
                        message.append(LINE_SEPARATOR);
                        break;
                    default:
                }
            }
            if (threadDump.getLockedMonitors() != null) {
                for (TMonitorInfo lockedMonitor : threadDump.getLockedMonitors()) {
                    if (lockedMonitor.getStackDepth() == i) {
                        message.append(TAB_SEPARATOR).append("- locked ").append(lockedMonitor.getStackFrame());
                        message.append(LINE_SEPARATOR);
                    }
                }
            }
        }
        // Locked ownable synchronizers section, if any.
        List<String> lockedSynchronizers = threadDump.getLockedSynchronizers();
        if (!CollectionUtils.isEmpty(lockedSynchronizers)) {
            message.append(LINE_SEPARATOR).append(TAB_SEPARATOR).append("Number of locked synchronizers = ").append(lockedSynchronizers.size());
            message.append(LINE_SEPARATOR);
            for (String lockedSynchronizer : lockedSynchronizers) {
                message.append(TAB_SEPARATOR).append("- ").append(lockedSynchronizer);
                message.append(LINE_SEPARATOR);
            }
        }
        message.append(LINE_SEPARATOR);
        return message.toString();
    }
    // Thrift may deliver a null state; normalize it to UNKNOWN.
    private static TThreadState getThreadState(TThreadState threadState) {
        return threadState == null ? TThreadState.UNKNOWN : threadState;
    }
    /**
     * Renders a server-model thread dump into the classic jstack-like text
     * format. Mirrors {@link #createDumpMessage(TThreadDump)} for the
     * {@link ThreadDumpBo} type.
     *
     * @param threadDump the dump to render; must not be null
     * @return the formatted multi-line dump text
     */
    public static String createDumpMessage(ThreadDumpBo threadDump) {
        ThreadState threadState = getThreadState(threadDump.getThreadState());
        // Header: "threadName" Id=0x<hex> STATE [lock/owner/flags]
        StringBuilder message = new StringBuilder("\"" + threadDump.getThreadName() + "\"");
        message.append(" Id=0x").append(Long.toHexString(threadDump.getThreadId()));
        message.append(' ').append(threadState.name());
        if (!StringUtils.isBlank(threadDump.getLockName())) {
            message.append(" on ").append(threadDump.getLockName());
        }
        if (!StringUtils.isBlank(threadDump.getLockOwnerName())) {
            message.append(" owned by \"").append(threadDump.getLockOwnerName()).append("\" Id=").append(threadDump.getLockOwnerId());
        }
        if (threadDump.isSuspended()) {
            message.append(" (suspended)");
        }
        if (threadDump.isInNative()) {
            message.append(" (in native)");
        }
        message.append(LINE_SEPARATOR);
        // Stack trace with lock annotations, as above.
        final int stackTraceSize = threadDump.getStackTraceList().size();
        for (int i = 0; i < stackTraceSize; i++) {
            final String stackTrace = threadDump.getStackTraceList().get(i);
            message.append(TAB_SEPARATOR).append("at ").append(stackTrace);
            message.append(LINE_SEPARATOR);
            if (i == 0 && !StringUtils.isBlank(threadDump.getLockName())) {
                switch (threadState) {
                    case BLOCKED:
                        message.append(TAB_SEPARATOR).append("- blocked on ").append(threadDump.getLockName());
                        message.append(LINE_SEPARATOR);
                        break;
                    case WAITING:
                    case TIMED_WAITING:
                        // both waiting states render the same annotation
                        message.append(TAB_SEPARATOR).append("- waiting on ").append(threadDump.getLockName());
                        message.append(LINE_SEPARATOR);
                        break;
                    default:
                }
            }
            if (CollectionUtils.hasLength(threadDump.getLockedMonitorInfoList())) {
                for (MonitorInfoBo lockedMonitor : threadDump.getLockedMonitorInfoList()) {
                    if (lockedMonitor.getStackDepth() == i) {
                        message.append(TAB_SEPARATOR).append("- locked ").append(lockedMonitor.getStackFrame());
                        message.append(LINE_SEPARATOR);
                    }
                }
            }
        }
        // Locked ownable synchronizers section, if any.
        List<String> lockedSynchronizerList = threadDump.getLockedSynchronizerList();
        if (CollectionUtils.hasLength(lockedSynchronizerList)) {
            message.append(LINE_SEPARATOR).append(TAB_SEPARATOR).append("Number of locked synchronizers = ").append(lockedSynchronizerList.size());
            message.append(LINE_SEPARATOR);
            for (String lockedSynchronizer : lockedSynchronizerList) {
                message.append(TAB_SEPARATOR).append("- ").append(lockedSynchronizer);
                message.append(LINE_SEPARATOR);
            }
        }
        message.append(LINE_SEPARATOR);
        return message.toString();
    }
    // The server model may also deliver a null state; normalize to UNKNOWN.
    private static ThreadState getThreadState(ThreadState threadState) {
        return threadState == null ? ThreadState.UNKNOWN : threadState;
    }
}
| |
/*
* Copyright 2014-2016 Hans-Christoph Steiner
* Copyright 2012-2016 Nathan Freitas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.guardianproject.netcipher;
import android.app.Application;
import android.net.Uri;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.text.TextUtils;
import android.util.JsonReader;
import android.util.Log;
import info.guardianproject.netcipher.client.StrongBuilderBase;
import info.guardianproject.netcipher.client.TlsOnlySocketFactory;
import info.guardianproject.netcipher.proxy.NetCipherURLStreamHandlerFactory;
import info.guardianproject.netcipher.proxy.OrbotHelper;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandlerFactory;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
public class NetCipher {
private static final String TAG = "NetCipher";
    /** Private constructor: utility class, never instantiated. */
    private NetCipher() {
        // this is a utility class with only static methods
    }
public final static Proxy ORBOT_HTTP_PROXY = new Proxy(Proxy.Type.HTTP,
new InetSocketAddress("127.0.0.1", OrbotHelper.DEFAULT_PROXY_HTTP_PORT));
public final static Proxy ORBOT_SOCKS_PROXY = new Proxy(Proxy.Type.SOCKS,
new InetSocketAddress("127.0.0.1", OrbotHelper.DEFAULT_PROXY_SOCKS_PORT));
private static Proxy proxy;
/**
* Set the global HTTP proxy for all new {@link HttpURLConnection}s and
* {@link HttpsURLConnection}s that are created after this is called.
* <p>
* {@link #useTor()} will override this setting. Traffic must be directed
* to Tor using the proxy settings, and Orbot has its own proxy settings
* for connections that need proxies to work. So if "use Tor" is enabled,
* as tested by looking for the static instance of Proxy, then no other
* proxy settings are allowed to override the current Tor proxy.
*
* @param host the IP address for the HTTP proxy to use globally
* @param port the port number for the HTTP proxy to use globally
* @see #setProxy(Proxy)
* @see #clearProxy()
*/
public static void setProxy(String host, int port) {
if (!TextUtils.isEmpty(host) && port > 0) {
InetSocketAddress isa = new InetSocketAddress(host, port);
setProxy(new Proxy(Proxy.Type.HTTP, isa));
} else if (NetCipher.proxy != ORBOT_HTTP_PROXY) {
setProxy(null);
}
}
/**
* Set the global HTTP proxy for all new {@link HttpURLConnection}s and
* {@link HttpsURLConnection}s that are created after this is called.
* <p>
* {@link #useTor()} will override this setting. Traffic must be directed
* to Tor using the proxy settings, and Orbot has its own proxy settings
* for connections that need proxies to work. So if "use Tor" is enabled,
* as tested by looking for the static instance of Proxy, then no other
* proxy settings are allowed to override the current Tor proxy.
*
* @param proxy the HTTP proxy to use globally
* @see #setProxy(String, int)
* @see #clearProxy()
*/
public static void setProxy(Proxy proxy) {
if (proxy != null && NetCipher.proxy == ORBOT_HTTP_PROXY) {
Log.w(TAG, "useTor is enabled, ignoring new proxy settings!");
} else {
NetCipher.proxy = proxy;
}
}
/**
* Get the currently active global HTTP {@link Proxy}.
*
* @return the active HTTP {@link Proxy}
*/
public static Proxy getProxy() {
return proxy;
}
/**
* Clear the global HTTP proxy for all new {@link HttpURLConnection}s and
* {@link HttpsURLConnection}s that are created after this is called. This
* returns things to the default, proxy-less state.
*/
public static void clearProxy() {
setProxy(null);
}
/**
* Set Orbot as the global HTTP proxy for all new {@link HttpURLConnection}
* s and {@link HttpsURLConnection}s that are created after this is called.
* This overrides all future calls to {@link #setProxy(Proxy)}, except to
* clear the proxy, e.g. {@code #setProxy(null)} or {@link #clearProxy()}.
* <p>
* Traffic must be directed to Tor using the proxy settings, and Orbot has its
* own proxy settings for connections that need proxies to work. So if "use
* Tor" is enabled, as tested by looking for the static instance of Proxy,
* then no other proxy settings are allowed to override the current Tor proxy.
*
* @see #clearProxy()
* @see #useGlobalProxy()
*/
public static void useTor() {
if (Build.VERSION.SDK_INT < 24) {
setProxy(ORBOT_HTTP_PROXY);
} else {
setProxy(ORBOT_SOCKS_PROXY);
}
}
/**
* Makes a connection to {@code check.torproject.org} to read its results
* of whether the connection came via Tor or not.
*
* @return true if {@code check.torproject.org} says connection is via Tor, false if not or on error
* @see <a href="https://check.torproject.org">check.torproject.org</a>
*/
@RequiresApi(api = 11)
public static boolean isURLConnectionUsingTor() {
if (Build.VERSION.SDK_INT < 11) {
throw new UnsupportedOperationException("only works on android-11 or higher");
}
try {
URL url = new URL(StrongBuilderBase.TOR_CHECK_URL);
return checkIsTor(url.openConnection());
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
@RequiresApi(api = 11)
public static boolean isNetCipherGetHttpURLConnectionUsingTor() {
if (Build.VERSION.SDK_INT < 11) {
throw new UnsupportedOperationException("only works on android-11 or higher");
}
try {
URL url = new URL(StrongBuilderBase.TOR_CHECK_URL);
return checkIsTor(NetCipher.getHttpURLConnection(url));
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
@RequiresApi(api = 11)
private static boolean checkIsTor(URLConnection connection) throws IOException {
boolean isTor = false;
JsonReader jsonReader = new JsonReader(new InputStreamReader(connection.getInputStream()));
jsonReader.beginObject();
while (jsonReader.hasNext()) {
String name = jsonReader.nextName();
if ("IsTor".equals(name)) {
isTor = jsonReader.nextBoolean();
break;
} else {
jsonReader.skipValue();
}
}
return isTor;
}
/**
* Call this method in {@link Application#onCreate()} to enable NetCipher
* to control the proxying. This only works on
* {@link Build.VERSION_CODES#O Android 8.0 Oreo} or newer. There needs to
* be a separate call to {@link #setProxy(Proxy)} or {@link #useTor()} for
* proxying to actually be enabled. {@link #clearProxy()} will then remove
* the proxying when the global proxy control is in place, but the
* {@link URLStreamHandlerFactory} will stay in place until app restart.
*
* @see #useTor()
* @see #setProxy(Proxy)
* @see #setProxy(String, int)
* @see #clearProxy()
* @see URL#setURLStreamHandlerFactory(URLStreamHandlerFactory)
*/
@RequiresApi(api = 26)
public static void useGlobalProxy() {
if (Build.VERSION.SDK_INT < 26) {
throw new UnsupportedOperationException("only works on Android 8.0 (26) or higher");
}
URL.setURLStreamHandlerFactory(new NetCipherURLStreamHandlerFactory());
}
/**
* This is the same as {@link #useGlobalProxy()} except that it can run on
* Android 7.x (SDK 24 and 25). The global proxying leaks DNS on Android 7.x,
* so this is not suitable for a privacy proxy. It will make access proxying
* work. It can also be used as a failsafe to help prevent leaks when the
* proxying is configured per-connection.
*
* @see #useGlobalProxy()
* @see #useTor()
* @see #setProxy(Proxy)
* @see #setProxy(String, int)
* @see #clearProxy()
* @see URL#setURLStreamHandlerFactory(URLStreamHandlerFactory)
*/
@Deprecated
@RequiresApi(api = 24)
public static void useGlobalProxyWithDNSLeaksOnAndroid7x() {
if (Build.VERSION.SDK_INT >= 26) {
useGlobalProxy();
return;
}
if (Build.VERSION.SDK_INT < 24) {
throw new UnsupportedOperationException("only works on Android 7.0 (24) or higher");
}
Log.w(TAG, "Android 7.x fails to globally proxy DNS! DNS will leak and .onion addresses will always fail!");
URL.setURLStreamHandlerFactory(new NetCipherURLStreamHandlerFactory());
}
/**
* Get a {@link TlsOnlySocketFactory} from NetCipher.
*
* @see HttpsURLConnection#setDefaultSSLSocketFactory(SSLSocketFactory)
*/
public static TlsOnlySocketFactory getTlsOnlySocketFactory() {
return getTlsOnlySocketFactory(false);
}
/**
* Get a {@link TlsOnlySocketFactory} from NetCipher, and specify whether
* it should use a more compatible, but less strong, suite of ciphers.
*
* @see HttpsURLConnection#setDefaultSSLSocketFactory(SSLSocketFactory)
*/
public static TlsOnlySocketFactory getTlsOnlySocketFactory(boolean compatible) {
SSLContext sslcontext;
try {
sslcontext = SSLContext.getInstance(TlsOnlySocketFactory.TLSV1);
sslcontext.init(null, null, null);
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException(e);
} catch (KeyManagementException e) {
throw new IllegalArgumentException(e);
}
return new TlsOnlySocketFactory(sslcontext.getSocketFactory(), compatible);
}
/**
* Get a {@link HttpURLConnection} from a {@link URL}, and specify whether
* it should use a more compatible, but less strong, suite of ciphers.
* <p>
* If {@link #useGlobalProxy()} is called, this method will use the global
* proxy settings. For {@code .onion} addresses, this will still directly
* configure the proxy, but that should be the same exact settings.
*
* @param url
* @param compatible
* @return the {@code url} in an instance of {@link HttpURLConnection}
* @throws IOException
* @throws IllegalArgumentException if the proxy or TLS setup is incorrect
*/
public static HttpURLConnection getHttpURLConnection(URL url, boolean compatible)
throws IOException {
// .onion addresses only work via Tor, so force Tor for all of them
Proxy proxy = NetCipher.proxy;
if (OrbotHelper.isOnionAddress(url)) {
if (Build.VERSION.SDK_INT < 24) {
proxy = ORBOT_HTTP_PROXY;
} else {
proxy = ORBOT_SOCKS_PROXY;
}
}
HttpURLConnection connection;
if (proxy != null) {
connection = (HttpURLConnection) url.openConnection(proxy);
} else {
connection = (HttpURLConnection) url.openConnection();
}
if (connection instanceof HttpsURLConnection) {
HttpsURLConnection httpsConnection = ((HttpsURLConnection) connection);
SSLSocketFactory tlsOnly = getTlsOnlySocketFactory(compatible);
httpsConnection.setSSLSocketFactory(tlsOnly);
if (Build.VERSION.SDK_INT < 16) {
httpsConnection.setHostnameVerifier(org.apache.http.conn.ssl.SSLSocketFactory.STRICT_HOSTNAME_VERIFIER);
}
}
return connection;
}
/**
* Get a {@link HttpsURLConnection} from a URL {@link String} using the best
* TLS configuration available on the device.
*
* @param urlString
* @return the URL in an instance of {@link HttpsURLConnection}
* @throws IOException
* @throws IllegalArgumentException if the proxy or TLS setup is incorrect,
* or if an HTTP URL is given that does not support HTTPS
*/
public static HttpsURLConnection getHttpsURLConnection(String urlString) throws IOException {
URL url = new URL(urlString.replaceFirst("^[Hh][Tt][Tt][Pp]:", "https:"));
return getHttpsURLConnection(url, false);
}
/**
* Get a {@link HttpsURLConnection} from a {@link Uri} using the best TLS
* configuration available on the device.
*
* @param uri
* @return the {@code uri} in an instance of {@link HttpsURLConnection}
* @throws IOException
* @throws IllegalArgumentException if the proxy or TLS setup is incorrect,
* or if an HTTP URL is given that does not support HTTPS
*/
public static HttpsURLConnection getHttpsURLConnection(Uri uri) throws IOException {
return getHttpsURLConnection(uri.toString());
}
/**
 * Get a {@link HttpsURLConnection} from a {@link URI} using the best TLS
 * configuration available on the device.
 *
 * @param uri the URI to open
 * @return the {@code uri} in an instance of {@link HttpsURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect,
 *         or if an HTTP URL is given that does not support HTTPS
 */
public static HttpsURLConnection getHttpsURLConnection(URI uri) throws IOException {
    if (!TextUtils.equals(uri.getScheme(), "https")) {
        // not https: go through the String overload, which forces the scheme to https
        return getHttpsURLConnection(uri.toString());
    }
    return getHttpsURLConnection(uri.toURL(), false);
}
/**
 * Get a {@link HttpsURLConnection} from a {@link URL} using the best TLS
 * configuration available on the device.
 *
 * @param url the URL to open
 * @return the {@code url} in an instance of {@link HttpsURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect,
 *         or if an HTTP URL is given that does not support HTTPS
 */
public static HttpsURLConnection getHttpsURLConnection(URL url) throws IOException {
    // "compatible" mode off: prefer the strongest cipher suite available
    return getHttpsURLConnection(url, false);
}
/**
 * Get a {@link HttpsURLConnection} from a {@link URL} using a more
 * compatible, but less strong, suite of ciphers.
 *
 * @param url the URL to open
 * @return the {@code url} in an instance of {@link HttpsURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect,
 *         or if an HTTP URL is given that does not support HTTPS
 */
public static HttpsURLConnection getCompatibleHttpsURLConnection(URL url) throws IOException {
    // "compatible" mode on: trade cipher strength for broader server support
    return getHttpsURLConnection(url, true);
}
/**
 * Get a {@link HttpsURLConnection} from a {@link URL}, and specify whether
 * it should use a more compatible, but less strong, suite of ciphers.
 *
 * @param url the URL to open
 * @param compatible whether to favor compatibility over cipher strength
 * @return the {@code url} in an instance of {@link HttpsURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect,
 *         or if an HTTP URL is given that does not support HTTPS
 */
public static HttpsURLConnection getHttpsURLConnection(URL url, boolean compatible)
        throws IOException {
    // delegate to the generic method, then require that the result is HTTPS
    final HttpURLConnection connection = getHttpURLConnection(url, compatible);
    if (!(connection instanceof HttpsURLConnection)) {
        throw new IllegalArgumentException("not an HTTPS connection!");
    }
    return (HttpsURLConnection) connection;
}
/**
 * Get a {@link HttpURLConnection} from a {@link URL}. If the connection is
 * {@code https://}, it will use a more compatible, but less strong, TLS
 * configuration.
 *
 * @param url the URL to open
 * @return the {@code url} in an instance of {@link HttpURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect
 */
public static HttpURLConnection getCompatibleHttpURLConnection(URL url) throws IOException {
    // "compatible" mode on: trade cipher strength for broader server support
    return getHttpURLConnection(url, true);
}
/**
 * Get a {@link HttpURLConnection} from a URL {@link String}. If it is an
 * {@code https://} link, then this will use the best TLS configuration
 * available on the device.
 *
 * @param urlString the URL to open
 * @return the URL in an instance of {@link HttpURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect
 */
public static HttpURLConnection getHttpURLConnection(String urlString) throws IOException {
    final URL url = new URL(urlString);
    return getHttpURLConnection(url);
}
/**
 * Get a {@link HttpURLConnection} from a {@link Uri}. If it is an
 * {@code https://} link, then this will use the best TLS configuration
 * available on the device.
 *
 * @param uri the URI to open
 * @return the {@code uri} in an instance of {@link HttpURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect
 */
public static HttpURLConnection getHttpURLConnection(Uri uri) throws IOException {
    final String urlString = uri.toString();
    return getHttpURLConnection(urlString);
}
/**
 * Get a {@link HttpURLConnection} from a {@link URI}. If it is an
 * {@code https://} link, then this will use the best TLS configuration
 * available on the device.
 *
 * @param uri the URI to open
 * @return the {@code uri} in an instance of {@link HttpURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect
 */
public static HttpURLConnection getHttpURLConnection(URI uri) throws IOException {
    final URL url = uri.toURL();
    return getHttpURLConnection(url);
}
/**
 * Get a {@link HttpURLConnection} from a {@link URL}. If it is an
 * {@code https://} link, then this will use the best TLS configuration
 * available on the device.
 *
 * @param url the URL to open
 * @return the {@code url} in an instance of {@link HttpURLConnection}
 * @throws IOException if an I/O error occurs while opening the connection
 * @throws IllegalArgumentException if the proxy or TLS setup is incorrect
 */
public static HttpURLConnection getHttpURLConnection(URL url) throws IOException {
    // getHttpURLConnection(URL, boolean) is already declared to return
    // HttpURLConnection, so the previous cast here was redundant.
    return getHttpURLConnection(url, false);
}
}
| |
package org.asciidoctor;
import org.asciidoctor.arquillian.api.Unshared;
import org.asciidoctor.ast.Cursor;
import org.asciidoctor.internal.JRubyAsciidoctor;
import org.asciidoctor.log.LogHandler;
import org.asciidoctor.log.LogRecord;
import org.asciidoctor.log.TestLogHandlerService;
import org.asciidoctor.util.ClasspathResources;
import org.hamcrest.Matchers;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import static org.asciidoctor.OptionsBuilder.options;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
 * Tests the routing of Asciidoctor log messages: redirection to
 * {@code java.util.logging}, notification of registered {@link LogHandler}s,
 * and notification of handlers discovered via {@link TestLogHandlerService}.
 * All tests render {@code documentwithnotexistingfile.adoc}, whose missing
 * include produces four log records.
 */
@RunWith(Arquillian.class)
public class WhenAsciidoctorLogsToConsole {

    @ArquillianResource
    private ClasspathResources classpath = new ClasspathResources();

    @ArquillianResource
    private TemporaryFolder testFolder;

    @ArquillianResource(Unshared.class)
    private Asciidoctor asciidoctor;

    @Before
    public void before() {
        asciidoctor = JRubyAsciidoctor.create();
        TestLogHandlerService.clear();
    }

    @After
    public void cleanup() throws IOException {
        // Reset the JUL configuration so handlers registered by one test do
        // not leak into the following tests.
        LogManager.getLogManager().readConfiguration();
        TestLogHandlerService.clear();
    }

    /**
     * Renders {@code inputFile} in-place with the standard option set used by
     * these tests, then deletes the generated {@code .html} sibling file.
     */
    private static void renderAndDiscardOutput(Asciidoctor instance, File inputFile) {
        instance.renderFile(inputFile,
                options()
                        .inPlace(true)
                        .safe(SafeMode.SERVER)
                        .attributes(
                                AttributesBuilder.attributes().allowUriRead(true))
                        .asMap());
        File renderedFile = new File(inputFile.getParent(),
                inputFile.getName().replaceAll("\\.adoc$", ".html"));
        renderedFile.delete();
    }

    /** Creates a {@link LogHandler} that appends every received record to {@code sink}. */
    private static LogHandler recordingHandler(final List<LogRecord> sink) {
        return new LogHandler() {
            @Override
            public void log(LogRecord logRecord) {
                sink.add(logRecord);
            }
        };
    }

    /**
     * Asserts that {@code logRecord} is the expected "include file not found"
     * record, with a cursor pointing at line 3 of {@code inputFile}.
     */
    private static void assertIncludeNotFoundRecord(LogRecord logRecord, File inputFile) {
        assertThat(logRecord.getMessage(), containsString("include file not found"));
        final Cursor cursor = logRecord.getCursor();
        // Normalize path separators so the assertion also holds on Windows.
        assertThat(cursor.getDir().replace('\\', '/'), is(inputFile.getParent().replace('\\', '/')));
        assertThat(cursor.getFile(), is(inputFile.getName()));
        assertThat(cursor.getLineNumber(), is(3));
    }

    @Test
    public void shouldRedirectToJUL() throws Exception {
        final MemoryLogHandler memoryLogHandler = registerMemoryLogHandler();

        File inputFile = classpath.getResource("documentwithnotexistingfile.adoc");
        renderAndDiscardOutput(asciidoctor, inputFile);

        assertEquals(4, memoryLogHandler.getLogRecords().size());
        assertThat(memoryLogHandler.getLogRecords().get(0).getMessage(),
                both(containsString("include file not found"))
                        .and(containsString("documentwithnotexistingfile.adoc: line 3")));
    }

    /** Attaches a fresh {@link MemoryLogHandler} to the "asciidoctor" JUL logger. */
    private MemoryLogHandler registerMemoryLogHandler() {
        final Logger logger = Logger.getLogger("asciidoctor");
        final MemoryLogHandler handler = new MemoryLogHandler();
        logger.addHandler(handler);
        return handler;
    }

    @Test
    public void shouldNotifyLogHandler() throws Exception {
        final List<LogRecord> logRecords = new ArrayList<>();
        asciidoctor.registerLogHandler(recordingHandler(logRecords));

        File inputFile = classpath.getResource("documentwithnotexistingfile.adoc");
        renderAndDiscardOutput(asciidoctor, inputFile);

        assertEquals(4, logRecords.size());
        assertIncludeNotFoundRecord(logRecords.get(0), inputFile);
        // Every record must carry a fully populated cursor.
        for (LogRecord logRecord : logRecords) {
            assertThat(logRecord.getCursor(), not(nullValue()));
            assertThat(logRecord.getCursor().getFile(), not(nullValue()));
            assertThat(logRecord.getCursor().getDir(), not(nullValue()));
        }
    }

    @Test
    @Ignore("Until logging of invalid refs is enabled by default")
    public void shouldLogInvalidRefs() throws Exception {
        final List<LogRecord> logRecords = new ArrayList<>();
        asciidoctor.registerLogHandler(recordingHandler(logRecords));

        // This test needs toFile(false), so it does not use the shared
        // renderAndDiscardOutput() helper.
        File inputFile = classpath.getResource("documentwithinvalidrefs.adoc");
        asciidoctor.renderFile(inputFile,
                options()
                        .inPlace(true)
                        .safe(SafeMode.SERVER)
                        .toFile(false)
                        .attributes(
                                AttributesBuilder.attributes().allowUriRead(true))
                        .asMap());

        assertThat(logRecords, hasSize(1));
        assertThat(logRecords.get(0).getMessage(), containsString("invalid reference: invalidref"));
        final Cursor cursor = logRecords.get(0).getCursor();
        assertThat(cursor, is(nullValue()));
    }

    @Test
    public void shouldOnlyNotifyFromRegisteredAsciidoctor() throws Exception {
        final List<LogRecord> logRecords = new ArrayList<>();
        final Asciidoctor secondInstance = Asciidoctor.Factory.create();

        // Register at first instance only!
        asciidoctor.registerLogHandler(recordingHandler(logRecords));

        // Render via the second instance and check that there is no notification.
        File inputFile = classpath.getResource("documentwithnotexistingfile.adoc");
        renderAndDiscardOutput(secondInstance, inputFile);
        assertEquals(0, logRecords.size());

        // Now render via the first instance and check that notifications appeared.
        renderAndDiscardOutput(asciidoctor, inputFile);
        assertEquals(4, logRecords.size());
        assertIncludeNotFoundRecord(logRecords.get(0), inputFile);
    }

    @Test
    public void shouldNoLongerNotifyAfterUnregisterOnlyNotifyFromRegisteredAsciidoctor() throws Exception {
        final List<LogRecord> logRecords = new ArrayList<>();
        final LogHandler logHandler = recordingHandler(logRecords);

        asciidoctor.registerLogHandler(logHandler);
        File inputFile = classpath.getResource("documentwithnotexistingfile.adoc");
        renderAndDiscardOutput(asciidoctor, inputFile);
        assertEquals(4, logRecords.size());

        // After unregistering, a second render must not notify the handler.
        logRecords.clear();
        asciidoctor.unregisterLogHandler(logHandler);
        renderAndDiscardOutput(asciidoctor, inputFile);
        assertEquals(0, logRecords.size());
    }

    @Test
    public void shouldNotifyLogHandlerService() throws Exception {
        File inputFile = classpath.getResource("documentwithnotexistingfile.adoc");
        renderAndDiscardOutput(asciidoctor, inputFile);

        // Handlers registered through the service loader must also be notified.
        final List<LogRecord> logRecords = TestLogHandlerService.getLogRecords();
        for (LogRecord logRecord : logRecords) {
            System.err.println(">> " + logRecord.getMessage());
        }
        assertThat(logRecords, hasSize(4));
        assertIncludeNotFoundRecord(logRecords.get(0), inputFile);
        for (LogRecord logRecord : logRecords) {
            assertThat(logRecord.getCursor(), not(Matchers.nullValue()));
            assertThat(logRecord.getCursor().getFile(), not(Matchers.nullValue()));
            assertThat(logRecord.getCursor().getDir(), not(Matchers.nullValue()));
        }
    }
}
| |
/*
* Copyright 2014-2015 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.avro.ui.gwt.client.widget;
import org.kaaproject.avro.ui.gwt.client.util.Utils;
import com.google.gwt.animation.client.Animation;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Display;
import com.google.gwt.dom.client.Style.Position;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.logical.shared.ResizeEvent;
import com.google.gwt.event.logical.shared.ResizeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Event.NativePreviewEvent;
import com.google.gwt.user.client.Event.NativePreviewHandler;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HasVerticalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.SimplePanel;
/**
 * Full-window "busy" indicator: a small spinner panel that rolls down from the
 * top of the window over a glass pane that cancels all native input events
 * while it is visible.  Used as a lazily-created singleton through
 * {@link #showPopup()} / {@link #hidePopup()}.
 */
public class BusyPopup extends SimplePanel {

    // Lazily created singleton used by showPopup()/hidePopup().
    private static BusyPopup instance;

    /** Shows the shared popup, creating it on first use. */
    public static void showPopup() {
        if (instance == null) {
            instance = new BusyPopup();
        }
        instance.rollDown();
    }

    /** Hides the shared popup, if it was ever created. */
    public static void hidePopup() {
        if (instance != null) {
            instance.hide();
        }
    }

    private static final int ANIMATION_DURATION = 300;
    // Glass sits one z-level below the popup so the popup always covers it.
    private static final int GLASS_Z_INDEX = 32766;
    private static final int POPUP_Z_INDEX = 32767;

    // Resizes the glass to cover the larger of the window size and the
    // document scroll size.  The glass is hidden and zero-sized before
    // measuring — presumably so the glass itself does not inflate the
    // reported scroll size (TODO confirm).
    private ResizeHandler glassResizer = new ResizeHandler() {
        public void onResize(ResizeEvent event) {
            Style style = glass.getStyle();
            int winWidth = Window.getClientWidth();
            int winHeight = Window.getClientHeight();
            style.setDisplay(Display.NONE);
            style.setWidth(0, Unit.PX);
            style.setHeight(0, Unit.PX);
            int width = Document.get().getScrollWidth();
            int height = Document.get().getScrollHeight();
            style.setWidth(Math.max(width, winWidth), Unit.PX);
            style.setHeight(Math.max(height, winHeight), Unit.PX);
            style.setDisplay(Display.BLOCK);
        }
    };

    // True while the popup is logically shown; mutated by RollAnimation.
    private boolean showing;
    private boolean isAnimationEnabled = true;
    // Glass pane element, attached directly to the document body while shown.
    private Element glass;
    // Registration for the input-cancelling preview handler; null when hidden.
    private HandlerRegistration nativePreviewHandlerRegistration;
    // Last requested popup position; -1 means "not positioned yet".
    private int leftPosition = -1;
    private int topPosition = -1;
    private RollAnimation rollAnimation = new RollAnimation(this);

    /** Builds the glass pane and the spinner + text panel. */
    public BusyPopup() {
        glass = Document.get().createDivElement();
        glass.setClassName(Utils.avroUiStyle.busyGlass());
        glass.getStyle().setPosition(Position.ABSOLUTE);
        glass.getStyle().setLeft(0, Unit.PX);
        glass.getStyle().setTop(0, Unit.PX);
        glass.getStyle().setZIndex(GLASS_Z_INDEX);
        getElement().getStyle().setZIndex(POPUP_Z_INDEX);
        HorizontalPanel panel = new HorizontalPanel();
        panel.setSize("320px", "70px");
        panel.addStyleName(Utils.avroUiStyle.busyPopup());
        Image image = new Image();
        image.setResource(Utils.resources.busyIndicator());
        panel.add(image);
        panel.setCellWidth(image, "60px");
        panel.setCellHorizontalAlignment(image, HasHorizontalAlignment.ALIGN_CENTER);
        panel.setCellVerticalAlignment(image, HasVerticalAlignment.ALIGN_MIDDLE);
        Label label = new Label();
        label.setText(Utils.constants.busyPopupText());
        label.getElement().getStyle().setPaddingRight(15, Unit.PX);
        panel.add(label);
        panel.setCellHorizontalAlignment(label, HasHorizontalAlignment.ALIGN_CENTER);
        panel.setCellVerticalAlignment(label, HasVerticalAlignment.ALIGN_MIDDLE);
        setWidget(panel);
    }

    /** Enables or disables the roll-down/roll-up animation. */
    public void setAnimationEnabled(boolean enable) {
        isAnimationEnabled = enable;
    }

    /**
     * Positions the popup in absolute page coordinates, correcting for any
     * body offset, and remembers the requested position.
     */
    public void setPopupPosition(int left, int top) {
        leftPosition = left;
        topPosition = top;
        // Account for the difference between absolute and body coordinates.
        left -= Document.get().getBodyOffsetLeft();
        top -= Document.get().getBodyOffsetTop();
        Element elem = getElement();
        elem.getStyle().setPropertyPx("left", left);
        elem.getStyle().setPropertyPx("top", top);
    }

    /**
     * Shows the popup horizontally centered, rolling it down from above the
     * top edge of the window when animation is enabled.
     */
    public void rollDown() {
        boolean initiallyShowing = showing;
        boolean initiallyAnimated = isAnimationEnabled;
        if (!initiallyShowing) {
            // Attach invisibly (and without animating) so the offset size can
            // be measured before the popup is positioned.
            setVisible(false);
            setAnimationEnabled(false);
            show();
        }
        // Reset to (0, 0) before measuring the offset size.
        Element elem = getElement();
        elem.getStyle().setPropertyPx("left", 0);
        elem.getStyle().setPropertyPx("top", 0);
        // Center horizontally; start fully above the visible area.
        int left = (Window.getClientWidth() - getOffsetWidth()) >> 1;
        int top = -getOffsetHeight();
        setPopupPosition(Math.max(Window.getScrollLeft() + left, 0), Math.max(
                Window.getScrollTop() + top, -getOffsetHeight()));
        if (!initiallyShowing) {
            setAnimationEnabled(initiallyAnimated);
            if (initiallyAnimated) {
                setVisible(true);
                rollAnimation.run(ANIMATION_DURATION);
            } else {
                setVisible(true);
            }
        }
    }

    /** Toggles CSS visibility of both the popup and its glass pane. */
    @Override
    public void setVisible(boolean visible) {
        getElement().getStyle().setProperty("visibility", visible ? "visible" : "hidden");
        if (glass != null) {
            glass.getStyle().setProperty("visibility", visible ? "visible" : "hidden");
        }
    }

    /** Returns whether the popup is currently (logically) shown. */
    public boolean isShowing() {
        return showing;
    }

    /** Attaches and shows the popup; no-op when already showing. */
    public void show() {
        if (showing) {
            return;
        } else if (isAttached()) {
            // Detach from any previous parent before RollAnimation re-adds
            // the popup to the RootPanel.
            this.removeFromParent();
        }
        rollAnimation.setState(true, false);
    }

    /** Hides the popup; no-op when not showing. */
    public void hide() {
        if (!isShowing()) {
            return;
        }
        rollAnimation.setState(false, false);
    }

    // Cancels every previewed native event, blocking all user input while the
    // popup is shown.
    private void previewNativeEvent(NativePreviewEvent event) {
        event.cancel();
        return;
    }

    // (Re)installs the input-blocking preview handler when showing, and
    // removes it when hidden.
    private void updateHandlers() {
        if (nativePreviewHandlerRegistration != null) {
            nativePreviewHandlerRegistration.removeHandler();
            nativePreviewHandlerRegistration = null;
        }
        if (showing) {
            nativePreviewHandlerRegistration = Event.addNativePreviewHandler(new NativePreviewHandler() {
                public void onPreviewNativeEvent(NativePreviewEvent event) {
                    previewNativeEvent(event);
                }
            });
        }
    }

    /**
     * Animation that slides the popup down into view (or back up out of
     * view), managing the glass pane and attachment to the RootPanel.
     */
    static class RollAnimation extends Animation {

        private BusyPopup curPanel = null;
        private boolean isUnloading;
        // Direction of the current run: true = rolling into view.
        private boolean showing;
        // One-shot timer that defers the animation start by one tick so the
        // freshly attached popup is laid out first.
        private Timer showTimer;
        private boolean glassShowing;
        private HandlerRegistration resizeRegistration;
        // Popup height captured at animation start; -1 until first run.
        private int offsetHeight = -1;

        public RollAnimation(BusyPopup panel) {
            this.curPanel = panel;
        }

        /**
         * Starts showing or hiding the popup, cancelling any run already in
         * progress.  {@code isUnloading} suppresses RootPanel detachment.
         */
        public void setState(boolean showing, boolean isUnloading) {
            this.isUnloading = isUnloading;
            // Abort any animation currently in flight.
            cancel();
            if (showTimer != null) {
                // A deferred start was pending: drop it and finish that run.
                showTimer.cancel();
                showTimer = null;
                onComplete();
            }
            curPanel.showing = showing;
            curPanel.updateHandlers();
            boolean animate = !isUnloading && curPanel.isAnimationEnabled;
            this.showing = showing;
            if (animate) {
                if (showing) {
                    maybeShowGlass();
                    curPanel.getElement().getStyle().setProperty("position", "absolute");
                    if (curPanel.topPosition != -1) {
                        curPanel.setPopupPosition(curPanel.leftPosition,
                                curPanel.topPosition);
                    }
                    RootPanel.get().add(curPanel);
                    // Defer the actual run by one tick so the browser lays the
                    // popup out (and its offset height is measurable) first.
                    showTimer = new Timer() {
                        @Override
                        public void run() {
                            showTimer = null;
                            RollAnimation.this.run(ANIMATION_DURATION);
                        }
                    };
                    showTimer.schedule(1);
                } else {
                    run(ANIMATION_DURATION);
                }
            } else {
                onInstantaneousRun();
            }
        }

        @Override
        protected void onComplete() {
            if (!showing) {
                // Rolling up finished: drop the glass and detach the popup.
                maybeShowGlass();
                if (!isUnloading) {
                    RootPanel.get().remove(curPanel);
                }
            }
            curPanel.getElement().getStyle().setProperty("overflow", "visible");
        }

        @Override
        protected void onStart() {
            // Capture the height once so onUpdate interpolates consistently.
            offsetHeight = curPanel.getOffsetHeight();
            super.onStart();
        }

        @Override
        protected void onUpdate(double progress) {
            if (!showing) {
                // Hiding runs the same interpolation in reverse.
                progress = 1.0 - progress;
            }
            // Slide from fully above the window edge (-offsetHeight) to 0.
            int topPosition = (int) (progress * offsetHeight) - offsetHeight;
            curPanel.setPopupPosition(curPanel.leftPosition, Math.max(
                    Window.getScrollTop() + topPosition, -offsetHeight));
        }

        // Attaches the glass and its resize handler when showing; detaches
        // both when hiding (only if the glass was actually attached).
        private void maybeShowGlass() {
            if (showing) {
                Document.get().getBody().appendChild(curPanel.glass);
                resizeRegistration = Window.addResizeHandler(curPanel.glassResizer);
                curPanel.glassResizer.onResize(null);
                glassShowing = true;
            } else if (glassShowing) {
                Document.get().getBody().removeChild(curPanel.glass);
                resizeRegistration.removeHandler();
                resizeRegistration = null;
                glassShowing = false;
            }
        }

        // Non-animated fallback: attach/detach the popup immediately.
        private void onInstantaneousRun() {
            maybeShowGlass();
            if (showing) {
                curPanel.getElement().getStyle().setProperty("position", "absolute");
                if (curPanel.topPosition != -1) {
                    curPanel.setPopupPosition(curPanel.leftPosition, curPanel.topPosition);
                }
                RootPanel.get().add(curPanel);
            } else {
                if (!isUnloading) {
                    RootPanel.get().remove(curPanel);
                }
            }
            curPanel.getElement().getStyle().setProperty("overflow", "visible");
        }
    }
}
| |
/*
* Copyright 2009-2014 DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.mrgeo.ingest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Mapper;
import org.mrgeo.image.MrsImageException;
import org.mrgeo.image.MrsImagePyramidMetadata;
import org.mrgeo.image.MrsImagePyramidMetadata.Classification;
import org.mrgeo.data.DataProviderFactory;
import org.mrgeo.data.DataProviderFactory.AccessMode;
import org.mrgeo.data.adhoc.AdHocDataProvider;
import org.mrgeo.data.raster.RasterWritable;
import org.mrgeo.data.tile.TileIdWritable;
import org.mrgeo.utils.Bounds;
import org.mrgeo.utils.TMSUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.awt.image.Raster;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
/**
 * Mapper for the image ingest map/reduce job.  Every input tile is written to
 * the output unchanged; as a side effect the mapper accumulates partial
 * {@link MrsImagePyramidMetadata} (band count, tile type, default/nodata
 * values, and data bounds cropped to non-nodata pixels), which is saved
 * through an ad hoc data provider in {@link #cleanup}.
 */
public class IngestImageMapper extends Mapper<TileIdWritable, RasterWritable, TileIdWritable, RasterWritable>
{
  private static Logger log = LoggerFactory.getLogger(IngestImageMapper.class);

  private Counter tileCounter = null;
  // Partial metadata accumulated while mapping; saved in cleanup().
  private MrsImagePyramidMetadata metadata = null;
  // Nodata value shared by all bands, parsed from the job configuration.
  Number nodata = 0;

  // Edge of a tile to crop towards when trimming nodata borders.
  enum Direction {
    TOP,
    BOTTOM,
    LEFT,
    RIGHT
  }

  /**
   * Reads zoomlevel, tilesize, classification and nodata from the job
   * configuration and seeds the partial metadata.
   *
   * @throws MrsImageException if any required parameter is missing
   */
  @SuppressWarnings("rawtypes")
  @Override
  public void setup(Mapper.Context context)
  {
    tileCounter = context.getCounter("Ingest Mapper", "Mapper Tiles Processed");

    Configuration conf = context.getConfiguration();
    metadata = new MrsImagePyramidMetadata();

    // these should have been verified before here. but we'll keep the check, just in case
    int zoomlevel = conf.getInt("zoomlevel", -1);
    if (zoomlevel < 0)
    {
      throw new MrsImageException(
          "Error, no \"zoomlevel\" parameter in configuration, zoomlevel needs to be calculated & set before map/reduce");
    }
    metadata.setMaxZoomLevel(zoomlevel);

    int tilesize = conf.getInt("tilesize", -1);
    if (tilesize < 0)
    {
      throw new MrsImageException(
          "Error, no \"tilesize\" parameter in configuration, tilesize needs to be calculated & set before map/reduce");
    }
    metadata.setTilesize(tilesize);

    String cl = conf.get("classification", null);
    if (cl == null)
    {
      throw new MrsImageException(
          "Error, no \"classification\" parameter in configuration, classification needs to be calculated & set before map/reduce");
    }
    metadata.setClassification(Classification.valueOf(cl));

    String nd = conf.get("nodata", null);
    if (nd == null)
    {
      throw new MrsImageException(
          "Error, no \"nodata\" parameter in configuration, nodata needs to be calculated & set before map/reduce");
    }
    nodata = Double.parseDouble(nd);

    // Start with invalid bounds; map() expands them tile by tile.  Bands,
    // tile type and default values are also filled in lazily in map().
    metadata.setBounds(new Bounds());
  }

  /**
   * Saves the (partial) metadata accumulated during mapping through the
   * configured ad hoc data provider.
   *
   * @throws IOException if the "metadata.provider" setting is missing or the
   *         save fails
   */
  @Override
  public void cleanup(Context context) throws IOException, InterruptedException
  {
    if (metadata != null)
    {
      // save the (partial) metadata with the adhoc provider.
      String adhoc = context.getConfiguration().get("metadata.provider", null);
      if (adhoc == null)
      {
        throw new IOException("Metadata provider not set");
      }
      AdHocDataProvider provider = DataProviderFactory.getAdHocDataProvider(adhoc,
          AccessMode.WRITE, context.getConfiguration());
      OutputStream os = provider.add();
      try
      {
        metadata.save(os);
      }
      finally
      {
        // Close in a finally block so the stream is not leaked if save() throws.
        os.close();
      }
    }
  }

  /**
   * Passes the tile through unchanged while updating the partial metadata:
   * band count / tile type on the first tile, and data bounds (cropped to
   * non-nodata pixels) for every tile that extends the bounds so far.
   */
  @Override
  public void map(TileIdWritable key, RasterWritable value, Context context) throws IOException, InterruptedException
  {
    if (log.isDebugEnabled())
    {
      logTileStats(key, value);
    }

    // are we calculating metadata here?
    if (metadata != null)
    {
      Raster raster = RasterWritable.toRaster(value);
      if (metadata.getBands() <= 0)
      {
        // First tile seen: record the band layout and tile type.
        metadata.setBands(raster.getNumBands());
        double[] defaults = new double[raster.getNumBands()];
        Arrays.fill(defaults, nodata.doubleValue());
        metadata.setDefaultValues(defaults);
        metadata.setTileType(raster.getTransferType());
      }

      // need to expand the bounds by the tile, but we also need to crop based on the nodata
      // values...
      TMSUtils.Tile tile = TMSUtils.tileid(key.get(), metadata.getMaxZoomLevel());
      TMSUtils.Bounds tb = TMSUtils.tileBounds(tile.tx, tile.ty, metadata.getMaxZoomLevel(), metadata.getTilesize());
      if (!metadata.getBounds().isValid())
      {
        // First tile: crop all four edges.  In this case, order is important! (B, T, R, L)
        tb = cropToData(Direction.BOTTOM, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
        tb = cropToData(Direction.TOP, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
        tb = cropToData(Direction.RIGHT, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
        tb = cropToData(Direction.LEFT, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
        metadata.setBounds(new Bounds(tb.w, tb.s, tb.e, tb.n));
      }
      else
      {
        Bounds bounds = metadata.getBounds();
        boolean expand = false;
        // Only crop an edge when this tile would push the bounds outward on
        // that edge.  In this case, order is important! (B, T, R, L)
        if (tb.s < bounds.getMinY())
        {
          tb = cropToData(Direction.BOTTOM, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
          expand = true;
        }
        if (tb.n > bounds.getMaxY())
        {
          tb = cropToData(Direction.TOP, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
          expand = true;
        }
        if (tb.e > bounds.getMaxX())
        {
          tb = cropToData(Direction.RIGHT, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
          expand = true;
        }
        if (tb.w < bounds.getMinX())
        {
          tb = cropToData(Direction.LEFT, raster, tb, metadata.getDefaultValuesDouble(), metadata.getMaxZoomLevel(), metadata.getTilesize());
          expand = true;
        }
        if (expand)
        {
          bounds.expand(tb.w, tb.s, tb.e, tb.n);
        }
      }
    }

    tileCounter.increment(1);
    context.write(key, value);
  }

  /**
   * Debug-only: logs zero/non-zero byte counts of the serialized raster
   * payload (skipping the header words).
   * NOTE(review): the zoom level 10 here is hard-coded for debug output only —
   * TODO confirm whether it should use metadata.getMaxZoomLevel().
   */
  private static void logTileStats(TileIdWritable key, RasterWritable value)
  {
    long zerocnt = 0;
    long nonzerocnt = 0;
    byte[] data = value.get();
    byte headerlen = data[3];
    for (int i = ((headerlen + 1) * 4); i < data.length; i++)
    {
      if (data[i] == 0)
      {
        zerocnt++;
      }
      else
      {
        nonzerocnt++;
      }
    }
    TMSUtils.Tile t = TMSUtils.tileid(key.get(), 10);
    log.debug("key: " + key.get() + " (" + t.tx + "," + t.ty + ")");
    log.debug("data stats: nonzero bytes: " + nonzerocnt + " zero bytes: " + zerocnt);
  }

  /**
   * Returns true when the pixel at (x, y) contains data, i.e. at least one
   * band differs from its nodata value.  NaN nodata needs an explicit isNaN()
   * check because NaN != NaN is always true.
   */
  private static boolean hasData(Raster raster, int x, int y, double[] nodata)
  {
    for (int b = 0; b < raster.getNumBands(); b++)
    {
      double v = raster.getSampleDouble(x, y, b);
      if (Double.isNaN(nodata[b]))
      {
        if (!Double.isNaN(v))
        {
          return true;
        }
      }
      else if (nodata[b] != v)
      {
        return true;
      }
    }
    return false;
  }

  /**
   * Shrinks {@code bounds} on the given edge so it stops at the first
   * row/column (scanning inward from that edge) containing a non-nodata
   * sample, and converts the result back to lat/lon bounds.
   */
  private static TMSUtils.Bounds cropToData(Direction direction, Raster raster, TMSUtils.Bounds bounds, double[] nodata, int zoom, int tilesize)
  {
    TMSUtils.Pixel ll = TMSUtils.latLonToPixels(bounds.s, bounds.w, zoom, tilesize);
    TMSUtils.Pixel ur = TMSUtils.latLonToPixels(bounds.n, bounds.e, zoom, tilesize);

    int x, y;
    boolean stop = false;
    switch (direction)
    {
    case BOTTOM:
      // scan rows bottom-up until a row with data is found
      for (y = raster.getHeight() - 1; y >= 0 && !stop; y--)
      {
        for (x = 0; x < raster.getWidth() && !stop; x++)
        {
          stop = hasData(raster, x, y, nodata);
        }
      }
      ll = new TMSUtils.Pixel(ll.px, ur.py - y + 2);
      break;
    case LEFT:
      // scan columns left-to-right until a column with data is found
      for (x = 0; x < raster.getWidth() && !stop; x++)
      {
        for (y = 0; y < raster.getHeight() && !stop; y++)
        {
          stop = hasData(raster, x, y, nodata);
        }
      }
      ll = new TMSUtils.Pixel(ll.px + x, ll.py);
      break;
    case RIGHT:
      // scan columns right-to-left until a column with data is found
      for (x = raster.getWidth() - 1; x >= 0 && !stop; x--)
      {
        for (y = 0; y < raster.getHeight() && !stop; y++)
        {
          stop = hasData(raster, x, y, nodata);
        }
      }
      ur = new TMSUtils.Pixel(ll.px + x + 2, ur.py);
      break;
    case TOP:
      // scan rows top-down until a row with data is found
      for (y = 0; y < raster.getHeight() && !stop; y++)
      {
        for (x = 0; x < raster.getWidth() && !stop; x++)
        {
          stop = hasData(raster, x, y, nodata);
        }
      }
      ur = new TMSUtils.Pixel(ur.px, ur.py - y + 1);
      break;
    default:
      break;
    }

    TMSUtils.LatLon llll = TMSUtils.pixelToLatLon(ll.px, ll.py, zoom, tilesize);
    TMSUtils.LatLon urll = TMSUtils.pixelToLatLon(ur.px, ur.py, zoom, tilesize);
    return new TMSUtils.Bounds(llll.lon, llll.lat, urll.lon, urll.lat);
  }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypeInfoImpl;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.scope.JavaCompletionProcessor;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementDecorator;
import com.intellij.patterns.ElementPattern;
import com.intellij.psi.*;
import com.intellij.psi.filters.ElementExtractorFilter;
import com.intellij.psi.filters.ElementFilter;
import com.intellij.psi.filters.OrFilter;
import com.intellij.psi.filters.getters.ExpectedTypesGetter;
import com.intellij.psi.filters.getters.JavaMembersGetter;
import com.intellij.psi.filters.types.AssignableFromFilter;
import com.intellij.psi.filters.types.AssignableToFilter;
import com.intellij.psi.infos.CandidateInfo;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.proximity.ReferenceListWeigher;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.patterns.StandardPatterns.or;
/**
 * Contributes variants for Java {@code CompletionType.SMART} completion: suggestions
 * filtered and ranked by the type expected at the caret (throwable classes after
 * {@code throw new}, assignable classes after {@code new}, expected-type members,
 * chained calls, casts, functional expressions and method references).
 *
 * @author peter
 */
public class JavaSmartCompletionContributor extends CompletionContributor {
  // Treats two ExpectedTypeInfos as equal when their PsiTypes are equal, so that
  // expected types differing only in tail/default info are merged before processing.
  private static final TObjectHashingStrategy<ExpectedTypeInfo> EXPECTED_TYPE_INFO_STRATEGY = new TObjectHashingStrategy<ExpectedTypeInfo>() {
    @Override
    public int computeHashCode(final ExpectedTypeInfo object) {
      return object.getType().hashCode();
    }

    @Override
    public boolean equals(final ExpectedTypeInfo o1, final ExpectedTypeInfo o2) {
      return o1.getType().equals(o2.getType());
    }
  };

  // Accepts only classes assignable to java.lang.Throwable (used after "throw new").
  private static final ElementExtractorFilter THROWABLES_FILTER = new ElementExtractorFilter(new AssignableFromFilter(CommonClassNames.JAVA_LANG_THROWABLE));

  // Position directly after a "new" keyword that is NOT itself preceded by "throw".
  public static final ElementPattern<PsiElement> AFTER_NEW =
      psiElement().afterLeaf(
          psiElement().withText(PsiKeyword.NEW).andNot(
              psiElement().afterLeaf(
                  psiElement().withText(PsiKeyword.THROW))));

  // Position directly after "throw new".
  static final ElementPattern<PsiElement> AFTER_THROW_NEW = psiElement().afterLeaf(psiElement().withText(PsiKeyword.NEW).afterLeaf(PsiKeyword.THROW));

  // Position inside an expression where expected-type-based completion applies;
  // literals and method references are explicitly excluded.
  public static final ElementPattern<PsiElement> INSIDE_EXPRESSION = or(
      psiElement().withParent(PsiExpression.class).andNot(psiElement().withParent(PsiLiteralExpression.class)).andNot(psiElement().withParent(PsiMethodReferenceExpression.class)),
      psiElement().inside(PsiClassObjectAccessExpression.class),
      psiElement().inside(PsiThisExpression.class),
      psiElement().inside(PsiSuperExpression.class)
  );

  // A reference expression immediately following the ")" of a type cast.
  static final ElementPattern<PsiElement> INSIDE_TYPECAST_EXPRESSION = psiElement().withParent(
      psiElement(PsiReferenceExpression.class).afterLeaf(
          psiElement().withText(")").withParent(PsiTypeCastExpression.class)));

  /**
   * Returns the filter restricting which classes may be suggested for a class
   * reference at {@code element}, or {@code null} when no class-reference
   * filtering applies at this position.
   *
   * @param element   the completion position element
   * @param inRefList whether the position is inside an extends/implements/throws list
   */
  @Nullable
  private static ElementFilter getClassReferenceFilter(final PsiElement element, final boolean inRefList) {
    //throw new foo
    if (AFTER_THROW_NEW.accepts(element)) {
      return THROWABLES_FILTER;
    }

    //new xxx.yyy
    if (psiElement().afterLeaf(psiElement().withText(".")).withSuperParent(2, psiElement(PsiNewExpression.class)).accepts(element)) {
      if (((PsiNewExpression)element.getParent().getParent()).getClassReference() == element.getParent()) {
        // Accept classes assignable to any of the types expected for the new-expression.
        PsiType[] types = ExpectedTypesGetter.getExpectedTypes(element, false);
        return new OrFilter(ContainerUtil.map2Array(types, ElementFilter.class, (Function<PsiType, ElementFilter>)type -> new AssignableFromFilter(type)));
      }
    }

    // extends/implements/throws
    if (inRefList) {
      return new ElementExtractorFilter(new ElementFilter() {
        @Override
        public boolean isAcceptable(Object aClass, @Nullable PsiElement context) {
          // Keep only classes that are applicable in the surrounding reference list.
          return aClass instanceof PsiClass && ReferenceListWeigher.INSTANCE.getApplicability((PsiClass)aClass, element) !=
                                               ReferenceListWeigher.ReferenceListApplicability.inapplicable;
        }

        @Override
        public boolean isClassAcceptable(Class hintClass) {
          return true;
        }
      });
    }

    return null;
  }

  /** Registers all smart-completion providers with the platform. */
  public JavaSmartCompletionContributor() {
    extend(CompletionType.SMART, SmartCastProvider.TYPECAST_TYPE_CANDIDATE, new SmartCastProvider());
    extend(CompletionType.SMART, SameSignatureCallParametersProvider.IN_CALL_ARGUMENT, new SameSignatureCallParametersProvider());
    extend(CompletionType.SMART, MethodReturnTypeProvider.IN_METHOD_RETURN_TYPE, new MethodReturnTypeProvider());
    extend(CompletionType.SMART, InstanceofTypeProvider.AFTER_INSTANCEOF, new InstanceofTypeProvider());

    // Class-reference completion: throwables, new-expression targets,
    // extends/implements/throws lists, and references right after a type cast.
    extend(CompletionType.SMART, psiElement(), new CompletionProvider<CompletionParameters>() {
      @Override
      protected void addCompletions(@NotNull final CompletionParameters parameters, @NotNull final ProcessingContext context, @NotNull final CompletionResultSet result) {
        if (SmartCastProvider.shouldSuggestCast(parameters)) return;

        final PsiElement element = parameters.getPosition();
        final PsiJavaCodeReferenceElement reference =
          PsiTreeUtil.findElementOfClassAtOffset(element.getContainingFile(), parameters.getOffset(), PsiJavaCodeReferenceElement.class, false);
        if (reference != null) {
          boolean inRefList = ReferenceListWeigher.INSIDE_REFERENCE_LIST.accepts(element);
          ElementFilter filter = getClassReferenceFilter(element, inRefList);
          if (filter != null) {
            final List<ExpectedTypeInfo> infos = Arrays.asList(getExpectedTypes(parameters));
            for (LookupElement item : completeReference(element, reference, filter, true, false, parameters, result.getPrefixMatcher())) {
              if (item.getObject() instanceof PsiClass) {
                if (!inRefList) {
                  // Outside reference lists a chosen class should be instantiated on insert.
                  item = LookupElementDecorator.withInsertHandler(item, ConstructorInsertHandler.SMART_INSTANCE);
                }
                result.addElement(decorate(item, infos));
              }
            }
          }
          else if (INSIDE_TYPECAST_EXPRESSION.accepts(element)) {
            // After "(Type) <caret>": suggest values assignable TO the cast type.
            final PsiTypeCastExpression cast = PsiTreeUtil.getContextOfType(element, PsiTypeCastExpression.class, true);
            if (cast != null && cast.getCastType() != null) {
              filter = new AssignableToFilter(cast.getCastType().getType());
              for (final LookupElement item : completeReference(element, reference, filter, false, true, parameters, result.getPrefixMatcher())) {
                result.addElement(item);
              }
            }
          }
        }
      }
    });

    // Main expression completion driven by the types expected at the caret.
    extend(CompletionType.SMART, INSIDE_EXPRESSION, new ExpectedTypeBasedCompletionProvider() {
      @Override
      protected void addCompletions(final CompletionParameters params, final CompletionResultSet result, final Collection<ExpectedTypeInfo> _infos) {
        if (SmartCastProvider.shouldSuggestCast(params)) return;

        Consumer<LookupElement> noTypeCheck = decorateWithoutTypeCheck(result, _infos);

        // Deduplicate expected types by PsiType before generating variants.
        THashSet<ExpectedTypeInfo> mergedInfos = new THashSet<>(_infos, EXPECTED_TYPE_INFO_STRATEGY);
        List<Runnable> chainedEtc = new ArrayList<>();
        for (final ExpectedTypeInfo info : mergedInfos) {
          Runnable slowContinuation =
            ReferenceExpressionCompletionContributor.fillCompletionVariants(new JavaSmartCompletionParameters(params, info), noTypeCheck);
          ContainerUtil.addIfNotNull(chainedEtc, slowContinuation);
        }
        addExpectedTypeMembers(params, mergedInfos, true, noTypeCheck);

        PsiElement parent = params.getPosition().getParent();
        if (parent instanceof PsiReferenceExpression) {
          CollectConversion.addCollectConversion((PsiReferenceExpression)parent, mergedInfos, noTypeCheck);
        }

        for (final ExpectedTypeInfo info : mergedInfos) {
          BasicExpressionCompletionContributor.fillCompletionVariants(new JavaSmartCompletionParameters(params, info), lookupElement -> {
            // Only add basic variants whose type is assignable to the expected type.
            final PsiType psiType = JavaCompletionUtil.getLookupElementType(lookupElement);
            if (psiType != null && info.getType().isAssignableFrom(psiType)) {
              result.addElement(decorate(lookupElement, _infos));
            }
          }, result.getPrefixMatcher());
        }

        // Run the slow continuations (chained calls etc.) after the quick variants.
        for (Runnable runnable : chainedEtc) {
          runnable.run();
        }

        // Non-quick (inheritor-searching) members only on repeated invocation — they are expensive.
        final boolean searchInheritors = params.getInvocationCount() > 1;
        if (searchInheritors) {
          addExpectedTypeMembers(params, mergedInfos, false, noTypeCheck);
        }
      }
    });

    extend(CompletionType.SMART, ExpectedAnnotationsProvider.ANNOTATION_ATTRIBUTE_VALUE, new ExpectedAnnotationsProvider());
    extend(CompletionType.SMART, CatchTypeProvider.CATCH_CLAUSE_TYPE, new CatchTypeProvider());
    extend(CompletionType.SMART, TypeArgumentCompletionProvider.IN_TYPE_ARGS, new TypeArgumentCompletionProvider(true, null));
    extend(CompletionType.SMART, AFTER_NEW, new JavaInheritorsGetter(ConstructorInsertHandler.SMART_INSTANCE));
    extend(CompletionType.SMART, LabelReferenceCompletion.LABEL_REFERENCE, new LabelReferenceCompletion());
    extend(CompletionType.SMART, psiElement(), new FunctionalExpressionCompletionProvider());
    extend(CompletionType.SMART, psiElement().afterLeaf("::"), new MethodReferenceCompletionProvider());
  }

  /** Returns a consumer that adds elements decorated with {@code infos} without type filtering. */
  @NotNull
  private static Consumer<LookupElement> decorateWithoutTypeCheck(final CompletionResultSet result, final Collection<? extends ExpectedTypeInfo> infos) {
    return lookupElement -> result.addElement(decorate(lookupElement, infos));
  }

  /**
   * Feeds members of the expected types (and their default types) to {@code consumer}.
   * When {@code quick} is true only the cheap members are produced.
   */
  private static void addExpectedTypeMembers(CompletionParameters params,
                                             THashSet<? extends ExpectedTypeInfo> mergedInfos,
                                             boolean quick,
                                             Consumer<LookupElement> consumer) {
    PsiElement position = params.getPosition();
    if (!JavaKeywordCompletion.AFTER_DOT.accepts(position)) {
      for (ExpectedTypeInfo info : mergedInfos) {
        new JavaMembersGetter(info.getType(), params).addMembers(!quick, consumer);
        if (!info.getDefaultType().equals(info.getType())) {
          // The default type may differ (e.g. a more concrete type) — suggest its members too.
          new JavaMembersGetter(info.getDefaultType(), params).addMembers(!quick, consumer);
        }
      }
    }
  }

  /** Skips completion inside comments; otherwise delegates with Java-specific sorting applied. */
  @Override
  public void fillCompletionVariants(@NotNull CompletionParameters parameters, @NotNull CompletionResultSet result) {
    if (parameters.getPosition() instanceof PsiComment) {
      return;
    }
    super.fillCompletionVariants(parameters, JavaCompletionSorting.addJavaSorting(parameters, result));
  }

  /** Wraps {@code lookupElement} so it is presented/inserted according to the expected types. */
  public static SmartCompletionDecorator decorate(LookupElement lookupElement, Collection<? extends ExpectedTypeInfo> infos) {
    return new SmartCompletionDecorator(lookupElement, infos);
  }

  /** Expected types at the completion position; voidable only for smart completion. */
  @NotNull
  public static ExpectedTypeInfo[] getExpectedTypes(final CompletionParameters parameters) {
    return getExpectedTypes(parameters.getPosition(), parameters.getCompletionType() == CompletionType.SMART);
  }

  /**
   * Computes the expected types at {@code position}. Inside a {@code throw} statement the
   * result is RuntimeException plus every type from the enclosing method's throws list;
   * otherwise the standard {@link ExpectedTypesProvider} is consulted.
   */
  @NotNull
  public static ExpectedTypeInfo[] getExpectedTypes(PsiElement position, boolean voidable) {
    if (psiElement().withParent(psiElement(PsiReferenceExpression.class).withParent(PsiThrowStatement.class)).accepts(position)) {
      final PsiElementFactory factory = JavaPsiFacade.getElementFactory(position.getProject());
      final PsiClassType classType = factory
          .createTypeByFQClassName(CommonClassNames.JAVA_LANG_RUNTIME_EXCEPTION, position.getResolveScope());
      final List<ExpectedTypeInfo> result = new SmartList<>();
      result.add(new ExpectedTypeInfoImpl(classType, ExpectedTypeInfo.TYPE_OR_SUBTYPE, classType, TailType.SEMICOLON, null, ExpectedTypeInfoImpl.NULL));
      final PsiMethod method = PsiTreeUtil.getContextOfType(position, PsiMethod.class, true);
      if (method != null) {
        for (final PsiClassType type : method.getThrowsList().getReferencedTypes()) {
          result.add(new ExpectedTypeInfoImpl(type, ExpectedTypeInfo.TYPE_OR_SUBTYPE, type, TailType.SEMICOLON, null, ExpectedTypeInfoImpl.NULL));
        }
      }
      return result.toArray(ExpectedTypeInfo.EMPTY_ARRAY);
    }

    PsiExpression expression = PsiTreeUtil.getContextOfType(position, PsiExpression.class, true);
    if (expression == null) return ExpectedTypeInfo.EMPTY_ARRAY;

    return ExpectedTypesProvider.getExpectedTypes(expression, true, voidable, false);
  }

  /**
   * Resolves {@code reference} and returns the lookup elements that pass {@code filter},
   * optionally restricted to classes and/or members (variables, methods, candidates).
   */
  static Set<LookupElement> completeReference(final PsiElement element,
                                              PsiJavaCodeReferenceElement reference,
                                              final ElementFilter filter,
                                              final boolean acceptClasses,
                                              final boolean acceptMembers,
                                              CompletionParameters parameters, final PrefixMatcher matcher) {
    ElementFilter checkClass = new ElementFilter() {
      @Override
      public boolean isAcceptable(Object element, PsiElement context) {
        return filter.isAcceptable(element, context);
      }

      @Override
      public boolean isClassAcceptable(Class hintClass) {
        if (ReflectionUtil.isAssignable(PsiClass.class, hintClass)) {
          return acceptClasses;
        }
        if (ReflectionUtil.isAssignable(PsiVariable.class, hintClass) ||
            ReflectionUtil.isAssignable(PsiMethod.class, hintClass) ||
            ReflectionUtil.isAssignable(CandidateInfo.class, hintClass)) {
          return acceptMembers;
        }
        return false;
      }
    };
    JavaCompletionProcessor.Options options =
      JavaCompletionProcessor.Options.DEFAULT_OPTIONS.withFilterStaticAfterInstance(parameters.getInvocationCount() <= 1);
    return JavaCompletionUtil.processJavaReference(element, reference, checkClass, options, matcher, parameters);
  }

  /**
   * Before smart completion starts: extends the replacement range over the whole
   * reference/call chain at the caret, and chooses the dummy identifier.
   */
  @Override
  public void beforeCompletion(@NotNull CompletionInitializationContext context) {
    if (context.getCompletionType() != CompletionType.SMART) {
      return;
    }

    if (!context.getEditor().getSelectionModel().hasSelection()) {
      final PsiFile file = context.getFile();
      PsiElement element = file.findElementAt(context.getStartOffset());
      if (element instanceof PsiIdentifier) {
        element = element.getParent();
        // Walk up the chain so a selected item replaces the entire qualified
        // reference / call / type element, not just the identifier under the caret.
        while (element instanceof PsiJavaCodeReferenceElement || element instanceof PsiCall ||
               element instanceof PsiThisExpression || element instanceof PsiSuperExpression ||
               element instanceof PsiTypeElement ||
               element instanceof PsiClassObjectAccessExpression) {
          int newEnd = element.getTextRange().getEndOffset();
          if (element instanceof PsiMethodCallExpression) {
            // Keep the argument list: replace only up to the end of the method expression.
            newEnd = ((PsiMethodCallExpression)element).getMethodExpression().getTextRange().getEndOffset();
          }
          else if (element instanceof PsiNewExpression) {
            final PsiJavaCodeReferenceElement classReference = ((PsiNewExpression)element).getClassReference();
            if (classReference != null) {
              newEnd = classReference.getTextRange().getEndOffset();
            }
          }
          context.setReplacementOffset(newEnd);
          element = element.getParent();
        }
      }
    }

    PsiElement lastElement = context.getFile().findElementAt(context.getStartOffset() - 1);
    if (lastElement != null && lastElement.getText().equals("(") && lastElement.getParent() instanceof PsiParenthesizedExpression) {
      // don't trim dummy identifier or we won't be able to determine the type of the expression after '('
      // which is needed to insert correct cast
      return;
    }
    context.setDummyIdentifier(CompletionUtil.DUMMY_IDENTIFIER_TRIMMED);
  }
}
| |
package com.fillumina.lcs;
import java.util.AbstractList;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
 * Item of a linked list of ordered LCS matches.
 * {@link LcsItem}s are created by the Myers algorithm by always adding a lower
 * match to a chain of matches. Because of that the first element of the chain
 * (the head) is always the last added one and so it contains an updated
 * LCS length. Because this implementation is so tied to the internals of
 * the Myers algorithm it cannot be part of the public API.
 */
class LcsItemImpl extends AbstractList<LcsItem>
        implements LcsItem {
    private static final long serialVersionUID = 1L;

    // Sentinel terminating a chain / representing "no match" (steps == 0).
    static final LcsItemImpl NULL = new LcsItemImpl(-1, -1, 0);

    // Start index of this match in the first sequence.
    private final int x;
    // Start index of this match in the second sequence.
    private final int y;
    // Number of consecutive matching indexes covered by this item.
    private final int steps;
    // Next item in the chain; null for the tail.
    private LcsItemImpl next;
    // Cached tail of this chain so chain() appends in O(1); null while the
    // chain consists of this item only.
    private LcsItemImpl last;
    // Total LCS length of the chain; only meaningful on the head item.
    private int lcs;

    LcsItemImpl(int x, int y, int steps) {
        this.x = x;
        this.y = y;
        this.steps = steps;
        this.lcs = steps;
    }

    /**
     * Appends {@code other}'s chain to the end of this chain and accumulates
     * its LCS length into this head. This is NOT a general chain algorithm:
     * it works only because of the way matches are generated by the Myers LCS
     * algorithms (each item is appended exactly once, to the current tail).
     *
     * @return always returns {@code this}.
     */
    LcsItemImpl chain(final LcsItemImpl other) {
        // Current tail of this chain (this itself if nothing was chained yet).
        LcsItemImpl current = (last != null) ? last : this;
        current.next = other;
        lcs += other.lcs;
        // The new tail is other's tail, or other itself if it has none cached.
        if (other.last != null) {
            current = other.last;
        } else {
            current = other;
        }
        last = current;
        return this;
    }

    /** The value is valid only for the head item of the sequence. */
    @Override
    public int size() {
        return lcs;
    }

    /**
     * @return the index in the first sequence from which the match starts.
     */
    @Override
    public int getFirstSequenceIndex() {
        return x;
    }

    /**
     * @return the index in the second sequence from which the match starts.
     */
    @Override
    public int getSecondSequenceIndex() {
        return y;
    }

    /** @return how many subsequent indexes are there. */
    @Override
    public int getSteps() {
        return steps;
    }

    /**
     * Being a linked list accessing elements with this method is very
     * inefficient. Use iterators instead.
     * NOTE(review): no bounds check — an out-of-range index walks off the
     * chain (NullPointerException) rather than throwing IndexOutOfBoundsException.
     */
    @Override
    public LcsItem get(int index) {
        LcsItemImpl current = this;
        for (int i=0; i<index; i++) {
            current = current.next;
        }
        return current;
    }

    /**
     * Iterates over the individual matched indexes of the chain, expanding each
     * item into {@code steps} consecutive positions. Subclasses pick which
     * coordinate (x or y) to report in {@code next()}.
     */
    abstract class IndexIterator implements Iterator<Integer> {
        private final Iterator<LcsItem> i = LcsItemImpl.this.iterator();
        // Offset of the current index within the current item's run of steps.
        protected int step = 0;
        protected LcsItemImpl current;
        protected boolean hasNext;

        public IndexIterator() {
            // Prime the iterator so hasNext/current reflect the first index.
            increment();
        }

        protected final void increment() {
            // Advance within the current item, skipping empty (steps == 0)
            // items; fetch the next chain item when this one is exhausted.
            // step is reset to -1 so the step++ below yields offset 0.
            while (current == null ||
                    current.steps == 0 ||
                    (step + 1) == current.steps) {
                if (i.hasNext()) {
                    current = (LcsItemImpl) i.next();
                    step = -1;
                } else {
                    hasNext = false;
                    return;
                }
            }
            step++;
            hasNext = true;
        }

        @Override
        public boolean hasNext() {
            return hasNext;
        }
    }

    /**
     * @return an iterable of matching indexes on the first sequence
     * starting from the present match.
     */
    @Override
    public Iterable<Integer> lcsIndexesOfTheFirstSequence() {
        return new Iterable<Integer>() {
            @Override
            public Iterator<Integer> iterator() {
                return new IndexIterator() {
                    @Override
                    public Integer next() {
                        if (!hasNext) {
                            throw new NoSuchElementException();
                        }
                        final int result = current.x + step;
                        increment();
                        return result;
                    }
                };
            }
        };
    }

    /**
     * @return an iterable of matching indexes on the second sequence
     * starting from the present match.
     */
    @Override
    public Iterable<Integer> lcsIndexesOfTheSecondSequence() {
        return new Iterable<Integer>() {
            @Override
            public Iterator<Integer> iterator() {
                return new IndexIterator() {
                    @Override
                    public Integer next() {
                        if (!hasNext) {
                            throw new NoSuchElementException();
                        }
                        final int result = current.y + step;
                        increment();
                        return result;
                    }
                };
            }
        };
    }

    /** Iterates the chain items, stopping at the end or at the NULL sentinel. */
    public Iterator<LcsItem> iterator() {
        return new Iterator<LcsItem>() {
            private LcsItemImpl current = LcsItemImpl.this;

            @Override
            public boolean hasNext() {
                return current != null && current != NULL;
            }

            @Override
            public LcsItemImpl next() {
                LcsItemImpl tmp = current;
                current = current.next;
                return tmp;
            }
        };
    }

    @Override
    public String toString() {
        final String className = getClass().getSimpleName();
        if (this == NULL) {
            return className + "{NULL}";
        }
        return className +
                "{xStart=" + x + ", yStart=" + y + ", steps=" + steps + '}';
    }

    /**
     * Chains up to three (possibly null) segments in order, skipping nulls.
     *
     * @return the head of the combined chain, or {@code null} if all are null.
     */
    public static LcsItemImpl chain(LcsItemImpl before,
            LcsItemImpl middle, LcsItemImpl after) {
        if (middle == null) {
            if (after == null) {
                return before;
            }
            if (before == null) {
                return after;
            }
            return before.chain(after);
        }
        if (after == null) {
            if (before == null) {
                return middle;
            }
            return before.chain(middle);
        }
        if (before == null) {
            return middle.chain(after);
        }
        return before.chain(middle.chain(after));
    }
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.nn.conf.constraints;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.TestUtils;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.api.layers.LayerConstraint;
import org.deeplearning4j.nn.conf.BackpropType;
import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.constraint.MaxNormConstraint;
import org.deeplearning4j.nn.conf.constraint.MinMaxNormConstraint;
import org.deeplearning4j.nn.conf.constraint.NonNegativeConstraint;
import org.deeplearning4j.nn.conf.constraint.UnitNormConstraint;
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.graph.MergeVertex;
import org.deeplearning4j.nn.conf.graph.rnn.LastTimeStepVertex;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.RmsProp;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Tests that {@link LayerConstraint}s configured on layers (recurrent weights, biases,
 * weights, all parameters) or at the model level are actually enforced after fitting:
 * the constrained parameters must satisfy the constraint's invariant
 * (max norm, min/max norm, non-negativity, or unit norm).
 */
public class TestConstraints extends BaseDL4JTest {

    /** @return a fresh instance of each constraint type exercised by these tests. */
    private static LayerConstraint[] allConstraints() {
        return new LayerConstraint[]{
                new MaxNormConstraint(0.5, 1),
                new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
                new NonNegativeConstraint(),
                new UnitNormConstraint(1)
        };
    }

    /**
     * Asserts that {@code param} satisfies the invariant of constraint {@code lc}.
     * Norms are taken along dimension 1, matching the dimension the constraints
     * from {@link #allConstraints()} were constructed with.
     */
    private static void assertConstraintApplied(LayerConstraint lc, INDArray param) {
        if (lc instanceof MaxNormConstraint) {
            assertTrue(param.norm2(1).maxNumber().doubleValue() <= 0.5);
        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(param.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(param.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(param.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(param.norm2(1).minNumber().doubleValue(), 1.0, 1e-6);
            assertEquals(param.norm2(1).maxNumber().doubleValue(), 1.0, 1e-6);
        }
    }

    /** Asserts that layer {@code layerIdx} of {@code net} carries an equal (cloned) copy of {@code lc}. */
    private static void assertConstraintConfigured(MultiLayerNetwork net, int layerIdx, LayerConstraint lc) {
        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(layerIdx).conf().getLayer().getConstraints().get(0).toString());
    }

    @Test
    public void testLayerRecurrentConstraints() throws Exception {
        for (LayerConstraint lc : allConstraints()) {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(0.0))
                    .dist(new NormalDistribution(0, 5))
                    .list()
                    .layer(new LSTM.Builder().nIn(12).nOut(10)
                            .constrainRecurrent(lc).build())
                    .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            assertConstraintConfigured(net, 0, lc);

            INDArray input = Nd4j.rand(3, 12);
            INDArray labels = Nd4j.rand(3, 8);
            // LSTM expects rank-3 [minibatch, nIn, timeSeriesLength] input.
            net.fit(input.reshape(3, 12, 1), labels);

            // Only the recurrent weights were constrained.
            assertConstraintApplied(lc, net.getParam("0_RW"));
            TestUtils.testModelSerialization(net);
        }
    }

    @Test
    public void testLayerBiasConstraints() throws Exception {
        for (LayerConstraint lc : allConstraints()) {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(0.0))
                    .dist(new NormalDistribution(0, 5))
                    // Large bias init so the constraint has a visible effect.
                    .biasInit(10.0)
                    .list()
                    .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                            .constrainBias(lc).build())
                    .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            assertConstraintConfigured(net, 0, lc);

            INDArray input = Nd4j.rand(3, 12);
            INDArray labels = Nd4j.rand(3, 8);
            net.fit(input, labels);

            assertConstraintApplied(lc, net.getParam("0_b"));
            TestUtils.testModelSerialization(net);
        }
    }

    @Test
    public void testLayerWeightsConstraints() throws Exception {
        for (LayerConstraint lc : allConstraints()) {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(0.0))
                    .dist(new NormalDistribution(0, 5))
                    .list()
                    .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                            .constrainWeights(lc).build())
                    .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            assertConstraintConfigured(net, 0, lc);

            INDArray input = Nd4j.rand(3, 12);
            INDArray labels = Nd4j.rand(3, 8);
            net.fit(input, labels);

            assertConstraintApplied(lc, net.getParam("0_W"));
            TestUtils.testModelSerialization(net);
        }
    }

    @Test
    public void testLayerWeightsAndBiasConstraints() throws Exception {
        for (LayerConstraint lc : allConstraints()) {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(0.0))
                    .dist(new NormalDistribution(0, 5))
                    .biasInit(0.2)
                    .list()
                    .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                            .constrainAllParameters(lc).build())
                    .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            assertConstraintConfigured(net, 0, lc);

            INDArray input = Nd4j.rand(3, 12);
            INDArray labels = Nd4j.rand(3, 8);
            net.fit(input, labels);

            // constrainAllParameters applies to both weights and biases.
            assertConstraintApplied(lc, net.getParam("0_W"));
            assertConstraintApplied(lc, net.getParam("0_b"));
            TestUtils.testModelSerialization(net);
        }
    }

    @Test
    public void testLayerWeightsAndBiasSeparateConstraints() throws Exception {
        for (LayerConstraint lc : allConstraints()) {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(0.0))
                    .dist(new NormalDistribution(0, 5))
                    .biasInit(0.2)
                    .list()
                    .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                            .constrainWeights(lc).constrainBias(lc).build())
                    .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            assertConstraintConfigured(net, 0, lc);

            INDArray input = Nd4j.rand(3, 12);
            INDArray labels = Nd4j.rand(3, 8);
            net.fit(input, labels);

            assertConstraintApplied(lc, net.getParam("0_W"));
            assertConstraintApplied(lc, net.getParam("0_b"));
            TestUtils.testModelSerialization(net);
        }
    }

    @Test
    public void testModelConstraints() throws Exception {
        for (LayerConstraint lc : allConstraints()) {
            // Constraint set at the builder (model) level: applied to the weights of every layer.
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .constrainWeights(lc)
                    .updater(new Sgd(0.0))
                    .dist(new NormalDistribution(0, 5))
                    .biasInit(1)
                    .list()
                    .layer(new DenseLayer.Builder().nIn(12).nOut(10).build())
                    .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            assertConstraintConfigured(net, 0, lc);
            assertConstraintConfigured(net, 1, lc);

            INDArray input = Nd4j.rand(3, 12);
            INDArray labels = Nd4j.rand(3, 8);
            net.fit(input, labels);

            // Both layers' weights must satisfy the constraint. (The original test
            // omitted the layer-1 check for NonNegativeConstraint only; it is now
            // verified uniformly for all constraint types.)
            assertConstraintApplied(lc, net.getParam("0_W"));
            assertConstraintApplied(lc, net.getParam("1_W"));
            TestUtils.testModelSerialization(net);
        }
    }

    /**
     * End-to-end check on a {@link ComputationGraph}: every weight array of a graph
     * whose layers are all non-negativity-constrained stays non-negative while fitting.
     */
    @Test
    public void testConstraints() {
        double learningRate = 0.001;
        int nIn = 10;
        int lstmLayerSize = 32;

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .weightInit(WeightInit.RELU_UNIFORM)
                .updater(new RmsProp(learningRate))
                .graphBuilder()
                .addInputs("input_lstm", "input_cpc")
                .addLayer("first_lstm_layer",
                        new LSTM.Builder()
                                .nIn(nIn)
                                .nOut(lstmLayerSize)
                                .activation(Activation.RELU)
                                .constrainWeights(new NonNegativeConstraint())
                                .build(),
                        "input_lstm")
                .addVertex("lastTimeStep", new LastTimeStepVertex("input_lstm"), "first_lstm_layer")
                .addVertex("merge", new MergeVertex(),
                        "lastTimeStep", "input_cpc")
                .addLayer("dense",
                        new DenseLayer.Builder()
                                .constrainWeights(new NonNegativeConstraint())
                                // Merged input: lstmLayerSize from the LSTM + 1 from input_cpc.
                                .nIn(lstmLayerSize + 1)
                                .nOut(lstmLayerSize / 2)
                                .activation(Activation.RELU)
                                .build(),
                        "merge")
                .addLayer("second_dense",
                        new DenseLayer.Builder()
                                .constrainWeights(new NonNegativeConstraint())
                                .nIn(lstmLayerSize / 2)
                                .nOut(lstmLayerSize / 8)
                                .activation(Activation.RELU)
                                .build(),
                        "dense")
                .addLayer("output_layer",
                        new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                .constrainWeights(new NonNegativeConstraint())
                                .nIn(lstmLayerSize / 8)
                                .nOut(1)
                                .activation(Activation.IDENTITY)
                                .build(),
                        "second_dense")
                .setOutputs("output_layer")
                .backpropType(BackpropType.Standard)
                .build();

        ComputationGraph g = new ComputationGraph(conf);
        g.init();

        for (int i = 0; i < 100; i++) {
            INDArray in1 = Nd4j.rand(new int[]{1, nIn, 5});
            INDArray in2 = Nd4j.rand(new int[]{1, 1});
            INDArray label = Nd4j.rand(new int[]{1, 1});
            g.fit(new INDArray[]{in1, in2}, new INDArray[]{label});

            // After every fit, all weight parameters must remain non-negative.
            for (Map.Entry<String, INDArray> e : g.paramTable().entrySet()) {
                if (!e.getKey().contains("W")) {
                    continue;
                }
                double min = e.getValue().minNumber().doubleValue();
                assertTrue(min >= 0.0);
            }
        }
    }
}
| |
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* DatabaseSaver.java
* Copyright (C) 2004 University of Waikato, Hamilton, New Zealand
*
*/
package weka.core.converters;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.Enumeration;
import java.util.Properties;
import java.util.Vector;
/**
<!-- globalinfo-start -->
* Writes to a database (tested with MySQL, InstantDB, HSQLDB).
* <p/>
<!-- globalinfo-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -url <JDBC URL>
* The JDBC URL to connect to.
* (default: from DatabaseUtils.props file)</pre>
*
* <pre> -user <name>
* The user to connect with to the database.
* (default: none)</pre>
*
* <pre> -password <password>
* The password to connect with to the database.
* (default: none)</pre>
*
* <pre> -T <table name>
* The name of the table.
* (default: the relation name)</pre>
*
* <pre> -P
* Add an ID column as primary key. The name is specified
* in the DatabaseUtils file ('idColumn'). The DatabaseLoader
* won't load this column.</pre>
*
* <pre> -i <input file name>
* Input file in arff format that should be saved in database.</pre>
*
<!-- options-end -->
*
* @author Stefan Mutter (mutter@cs.waikato.ac.nz)
* @version $Revision: 7499 $
*/
public class DatabaseSaver
extends AbstractSaver
implements BatchConverter, IncrementalConverter, DatabaseConverter, OptionHandler {
/** for serialization. */
static final long serialVersionUID = 863971733782624956L;
/** The database connection. */
private DatabaseConnection m_DataBaseConnection;
/** The name of the table in which the instances should be stored. */
private String m_tableName;
/** An input arff file (for command line use). */
private String m_inputFile;
/** The database specific type for a string (read in from the properties file). */
private String m_createText;
/** The database specific type for a double (read in from the properties file). */
private String m_createDouble;
/** The database specific type for an int (read in from the properties file). */
private String m_createInt;
/** The database specific type for a date (read in from the properties file). */
private String m_createDate;
/** For converting the date value into a database string. */
private SimpleDateFormat m_DateFormat;
/** The name of the primary key column that will be automatically generated (if enabled). The name is read from DatabaseUtils.*/
private String m_idColumn;
/** counts the rows and used as a primary key value. */
private int m_count;
/** Flag indicating if a primary key column should be added. */
private boolean m_id;
/** Flag indicating whether the default name of the table is the relation name or not.*/
private boolean m_tabName;
/** the user name for the database. */
private String m_Username;
/** the password for the database. */
private String m_Password;
/** The property file for the database connection. */
protected static String PROPERTY_FILE = DatabaseConnection.PROPERTY_FILE;
/** Properties associated with the database connection. */
protected static Properties PROPERTIES;
/** Reads the property file once at class-load time. On failure PROPERTIES
 *  stays null; an error is reported but the class still loads. */
static {
    try {
        PROPERTIES = Utils.readProperties(PROPERTY_FILE);
    } catch (Exception ex) {
        System.err.println("Problem reading properties. Fix before continuing.");
        System.err.println(ex);
    }
}
/**
 * Constructor. Loads the database-specific SQL type names and the
 * date format from the DatabaseUtils property file.
 *
 * @throws Exception throws Exception if property file cannot be read
 */
public DatabaseSaver() throws Exception{
    resetOptions();
    // column type names used when building CREATE TABLE (database specific)
    m_createText = PROPERTIES.getProperty("CREATE_STRING");
    m_createDouble = PROPERTIES.getProperty("CREATE_DOUBLE");
    m_createInt = PROPERTIES.getProperty("CREATE_INT");
    m_createDate = PROPERTIES.getProperty("CREATE_DATE", "DATETIME");
    // format used to render date values inside INSERT statements
    m_DateFormat = new SimpleDateFormat(PROPERTIES.getProperty("DateFormat", "yyyy-MM-dd HH:mm:ss"));
    m_idColumn = PROPERTIES.getProperty("idColumn");
}
/**
 * Resets the Saver ready to save a new data set: restores all default
 * settings and replaces the current database connection with a fresh,
 * disconnected one.
 */
public void resetOptions(){
    super.resetOptions();
    setRetrieval(NONE);
    m_tableName = "";
    m_Username = "";
    m_Password = "";
    m_count = 1;        // primary-key counter restarts at 1
    m_id = false;
    m_tabName = true;   // default: use the relation name as table name
    try{
        // drop an existing open connection before creating a new one
        if(m_DataBaseConnection != null && m_DataBaseConnection.isConnected())
            m_DataBaseConnection.disconnectFromDatabase();
        m_DataBaseConnection = new DatabaseConnection();
    }catch(Exception ex) {
        printException(ex);
    }
}
/**
 * Cancels the incremental saving process and tries to drop the table if
 * the write mode is CANCEL. Afterwards all options are reset.
 */
public void cancel(){
    if(getWriteMode() == CANCEL){
        try{
            m_DataBaseConnection.update("DROP TABLE "+m_tableName);
            // verify the drop actually succeeded
            if(m_DataBaseConnection.tableExists(m_tableName))
                System.err.println("Table cannot be dropped.");
        }catch(Exception ex) {
            printException(ex);
        }
        resetOptions();
    }
}
/**
 * Returns a string describing this Saver.
 *
 * @return a description of the Saver suitable for
 * displaying in the explorer/experimenter gui
 */
public String globalInfo() {
    return "Writes to a database (tested with MySQL, InstantDB, HSQLDB).";
}

/**
 * Sets the table's name.
 *
 * @param tn the name of the table
 */
public void setTableName(String tn){
    m_tableName = tn;
}

/**
 * Gets the table's name.
 *
 * @return the table's name
 */
public String getTableName(){
    return m_tableName;
}

/**
 * Returns the tip text for this property.
 *
 * @return the tip text for this property
 */
public String tableNameTipText(){
    return "Sets the name of the table.";
}

/**
 * En/Dis-ables the automatic generation of a primary key.
 *
 * @param flag flag for automatic key generation
 */
public void setAutoKeyGeneration(boolean flag){
    m_id = flag;
}

/**
 * Gets whether or not a primary key will be generated automatically.
 *
 * @return true if a primary key column will be generated, false otherwise
 */
public boolean getAutoKeyGeneration(){
    return m_id;
}

/**
 * Returns the tip text for this property.
 *
 * @return tip text for this property
 */
public String autoKeyGenerationTipText(){
    return "If set to true, a primary key column is generated automatically (containing the row number as INTEGER). The name of the key is read from DatabaseUtils (idColumn)"
        +" This primary key can be used for incremental loading (requires an unique key). This primary key will not be loaded as an attribute.";
}

/**
 * En/Dis-ables that the relation name is used for the name of the table (default enabled).
 *
 * @param flag if true the relation name is used as table name
 */
public void setRelationForTableName(boolean flag){
    m_tabName = flag;
}

/**
 * Gets whether or not the relation name is used as name of the table.
 *
 * @return true if the relation name is used as the name of the table, false otherwise
 */
public boolean getRelationForTableName(){
    return m_tabName;
}

/**
 * Returns the tip text for this property.
 *
 * @return the tip text for this property
 */
public String relationForTableNameTipText(){
    return "If set to true, the relation name will be used as name for the database table. Otherwise the user has to provide a table name.";
}
/**
 * Sets the database URL on the current connection object.
 *
 * @param url the URL
 */
public void setUrl(String url){
    m_DataBaseConnection.setDatabaseURL(url);
}

/**
 * Gets the database URL.
 *
 * @return the URL
 */
public String getUrl(){
    return m_DataBaseConnection.getDatabaseURL();
}

/**
 * Returns the tip text for this property.
 *
 * @return the tip text for this property
 */
public String urlTipText(){
    return "The URL of the database";
}

/**
 * Sets the database user. The name is kept locally (for later
 * setDestination() calls) and pushed to the current connection.
 *
 * @param user the user name
 */
public void setUser(String user){
    m_Username = user;
    m_DataBaseConnection.setUsername(user);
}

/**
 * Gets the database user.
 *
 * @return the user name
 */
public String getUser(){
    return m_DataBaseConnection.getUsername();
}

/**
 * Returns the tip text for this property.
 *
 * @return the tip text for this property
 */
public String userTipText(){
    return "The user name for the database";
}

/**
 * Sets the database password. The password is kept locally (for later
 * setDestination() calls) and pushed to the current connection.
 *
 * @param password the password
 */
public void setPassword(String password){
    m_Password = password;
    m_DataBaseConnection.setPassword(password);
}

/**
 * Returns the database password.
 *
 * @return the database password
 */
public String getPassword() {
    return m_DataBaseConnection.getPassword();
}

/**
 * Returns the tip text for this property.
 *
 * @return the tip text for this property
 */
public String passwordTipText(){
    return "The database password";
}
/**
 * Sets the database url, user and password. Note: this replaces the
 * current connection object with a brand-new one.
 *
 * @param url the database url
 * @param userName the user name
 * @param password the password
 */
public void setDestination(String url, String userName, String password){
    try{
        m_DataBaseConnection = new DatabaseConnection();
        m_DataBaseConnection.setDatabaseURL(url);
        m_DataBaseConnection.setUsername(userName);
        m_DataBaseConnection.setPassword(password);
    } catch(Exception ex) {
        printException(ex);
    }
}

/**
 * Sets the database url on a fresh connection, reusing the stored
 * user name and password.
 *
 * @param url the database url
 */
public void setDestination(String url){
    try{
        m_DataBaseConnection = new DatabaseConnection();
        m_DataBaseConnection.setDatabaseURL(url);
        m_DataBaseConnection.setUsername(m_Username);
        m_DataBaseConnection.setPassword(m_Password);
    } catch(Exception ex) {
        printException(ex);
    }
}

/** Sets the database url using the DatabaseUtils file (default URL),
 *  reusing the stored user name and password. */
public void setDestination(){
    try{
        m_DataBaseConnection = new DatabaseConnection();
        m_DataBaseConnection.setUsername(m_Username);
        m_DataBaseConnection.setPassword(m_Password);
    } catch(Exception ex) {
        printException(ex);
    }
}
/**
 * Returns the Capabilities of this saver: all standard attribute and
 * class types (including missing values and no class) are supported.
 *
 * @return the capabilities of this object
 * @see Capabilities
 */
public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // Attribute capabilities followed by class capabilities, enabled in
    // the same order the original hand-written calls used.
    Capability[] supported = {
        // attributes
        Capability.NOMINAL_ATTRIBUTES,
        Capability.NUMERIC_ATTRIBUTES,
        Capability.DATE_ATTRIBUTES,
        Capability.STRING_ATTRIBUTES,
        Capability.MISSING_VALUES,
        // class
        Capability.NOMINAL_CLASS,
        Capability.NUMERIC_CLASS,
        Capability.DATE_CLASS,
        Capability.STRING_CLASS,
        Capability.NO_CLASS,
        Capability.MISSING_CLASS_VALUES
    };

    for (Capability cap : supported)
        result.enable(cap);

    return result;
}
/**
 * Opens a connection to the database. No-op when already connected;
 * connection errors are reported via printException.
 */
public void connectToDatabase() {
    try{
        if(!m_DataBaseConnection.isConnected())
            m_DataBaseConnection.connectToDatabase();
    } catch(Exception ex) {
        printException(ex);
    }
}
/**
 * Writes the structure (header information) to a database by creating a
 * new table: one column per attribute (typed via the CREATE_* property
 * values) plus an optional INTEGER primary-key column.
 *
 * @throws Exception if something goes wrong
 */
private void writeStructure() throws Exception{
    StringBuffer query = new StringBuffer();
    Instances structure = getInstances();
    query.append("CREATE TABLE ");
    // derive the table name from the relation name unless the user set one
    if(m_tabName || m_tableName.equals(""))
        m_tableName = m_DataBaseConnection.maskKeyword(structure.relationName());
    if(m_DataBaseConnection.getUpperCase()){
        // some databases expect identifiers/type names in upper case
        m_tableName = m_tableName.toUpperCase();
        m_createInt = m_createInt.toUpperCase();
        m_createDouble = m_createDouble.toUpperCase();
        m_createText = m_createText.toUpperCase();
        m_createDate = m_createDate.toUpperCase();
    }
    // replace characters that are not legal in SQL identifiers
    m_tableName = m_tableName.replaceAll("[^\\w]","_");
    m_tableName = m_DataBaseConnection.maskKeyword(m_tableName);
    query.append(m_tableName);
    if(structure.numAttributes() == 0)
        throw new Exception("Instances have no attribute.");
    query.append(" ( ");
    if(m_id){
        // optional auto-generated primary-key column (name from DatabaseUtils)
        if(m_DataBaseConnection.getUpperCase())
            m_idColumn = m_idColumn.toUpperCase();
        query.append(m_DataBaseConnection.maskKeyword(m_idColumn));
        query.append(" ");
        query.append(m_createInt);
        query.append(" PRIMARY KEY,");
    }
    // one column per attribute, typed according to the attribute type
    for(int i = 0;i < structure.numAttributes(); i++){
        Attribute att = structure.attribute(i);
        String attName = att.name();
        attName = attName.replaceAll("[^\\w]","_");
        attName = m_DataBaseConnection.maskKeyword(attName);
        if(m_DataBaseConnection.getUpperCase())
            query.append(attName.toUpperCase());
        else
            query.append(attName);
        if(att.isDate())
            query.append(" " + m_createDate);
        else{
            if(att.isNumeric())
                query.append(" "+m_createDouble);
            else
                query.append(" "+m_createText);
        }
        if(i != structure.numAttributes()-1)
            query.append(", ");
    }
    query.append(" )");
    //System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +query.toString());
    m_DataBaseConnection.update(query.toString());
    m_DataBaseConnection.close();
    // sanity check: the CREATE TABLE must have taken effect
    if(!m_DataBaseConnection.tableExists(m_tableName)){
        throw new IOException("Table cannot be built.");
    }
}
/**
 * Inserts the given instance into the table. Missing values become NULL,
 * date values are rendered with m_DateFormat, numeric values are written
 * verbatim, and nominal/string values are single-quoted with embedded
 * quotes escaped.
 *
 * NOTE(review): the statement is still built by string concatenation;
 * a PreparedStatement would be the robust long-term fix.
 *
 * @param inst the instance to insert
 * @throws Exception if something goes wrong
 */
private void writeInstance(Instance inst) throws Exception{
    StringBuffer insert = new StringBuffer();
    insert.append("INSERT INTO ");
    insert.append(m_tableName);
    insert.append(" VALUES ( ");
    if(m_id){
        // auto-generated primary key value
        insert.append(m_count);
        insert.append(", ");
        m_count++;
    }
    for(int j = 0; j < inst.numAttributes(); j++){
        if(inst.isMissing(j))
            insert.append("NULL");
        else{
            if((inst.attribute(j)).isDate())
                insert.append("'" + m_DateFormat.format((long) inst.value(j)) + "'");
            else if((inst.attribute(j)).isNumeric())
                insert.append(inst.value(j));
            else{
                // BUGFIX: escape embedded single quotes by doubling them
                // (SQL standard). The previous code did the opposite -
                // replaceAll("''","'") collapsed quotes - so a value such
                // as "O'Brien" produced an invalid INSERT statement.
                String stringInsert = "'" + inst.stringValue(j).replace("'", "''") + "'";
                insert.append(stringInsert);
            }
        }
        if(j != inst.numAttributes()-1)
            insert.append(", ");
    }
    insert.append(" )");
    //System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +insert.toString());
    if (m_DataBaseConnection.update(insert.toString()) < 1) {
        throw new IOException("Tuple cannot be inserted.");
    }
    else {
        m_DataBaseConnection.close();
    }
}
/**
 * Saves an instances incrementally. Structure has to be set by using the
 * setStructure() method or setInstances() method. When a structure is set,
 * a table is created. Passing a null instance ends the incremental session
 * (disconnects, resets the structure and the key counter).
 *
 * @param inst the instance to save
 * @throws IOException throws IOEXception.
 */
public void writeIncremental(Instance inst) throws IOException{
    int writeMode = getWriteMode();
    Instances structure = getInstances();
    if(m_DataBaseConnection == null)
        throw new IOException("No database has been set up.");
    if(getRetrieval() == BATCH)
        throw new IOException("Batch and incremental saving cannot be mixed.");
    setRetrieval(INCREMENTAL);
    try{
        if(!m_DataBaseConnection.isConnected())
            connectToDatabase();
        // state machine: WAIT -> STRUCTURE_READY -> WRITE (or CANCEL)
        if(writeMode == WAIT){
            if(structure == null){
                // no header known yet: cancel, and complain if data arrived
                setWriteMode(CANCEL);
                if(inst != null)
                    throw new Exception("Structure(Header Information) has to be set in advance");
            }
            else
                setWriteMode(STRUCTURE_READY);
            writeMode = getWriteMode();
        }
        if(writeMode == CANCEL){
            cancel();
        }
        if(writeMode == STRUCTURE_READY){
            // create the table before the first row is written
            setWriteMode(WRITE);
            writeStructure();
            writeMode = getWriteMode();
        }
        if(writeMode == WRITE){
            if(structure == null)
                throw new IOException("No instances information available.");
            if(inst != null){
                //write instance
                writeInstance(inst);
            }
            else{
                //close: null instance signals the end of the stream
                m_DataBaseConnection.disconnectFromDatabase();
                resetStructure();
                m_count = 1;
            }
        }
    }catch(Exception ex) {
        printException(ex);
    }
}
/**
 * Writes a Batch of instances: creates the table, inserts every instance
 * of the current data set, then disconnects and resets the saver state.
 *
 * @throws IOException throws IOException
 */
public void writeBatch() throws IOException {
    Instances instances = getInstances();
    if(instances == null)
        throw new IOException("No instances to save");
    if(getRetrieval() == INCREMENTAL)
        throw new IOException("Batch and incremental saving cannot be mixed.");
    if(m_DataBaseConnection == null)
        throw new IOException("No database has been set up.");
    setRetrieval(BATCH);
    try{
        if(!m_DataBaseConnection.isConnected())
            connectToDatabase();
        setWriteMode(WRITE);
        writeStructure();
        for(int i = 0; i < instances.numInstances(); i++){
            writeInstance(instances.instance(i));
        }
        m_DataBaseConnection.disconnectFromDatabase();
        // reset state so the saver can be reused
        setWriteMode(WAIT);
        resetStructure();
        m_count = 1;
    } catch(Exception ex) {
        printException(ex);
    }
}
/**
 * Prints an exception to stdout; for SQLExceptions the whole chain of
 * nested exceptions is walked, including SQL state and vendor error code.
 *
 * NOTE(review): the Thread.currentThread().getStackTrace()[1].getClassName()
 * prefix on each line looks like leftover instrumentation - confirm intent.
 *
 * @param ex the exception to print
 */
private void printException(Exception ex){
    System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +"\n--- Exception caught ---\n");
    while (ex != null) {
        System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +"Message: "
            + ex.getMessage ());
        if(ex instanceof SQLException){
            System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +"SQLState: "
                + ((SQLException)ex).getSQLState ());
            System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +"ErrorCode: "
                + ((SQLException)ex).getErrorCode ());
            // SQLExceptions can be chained; walk the whole chain
            ex = ((SQLException)ex).getNextException();
        }
        else
            ex = null;
        System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +"");
    }
}
/**
 * Gets the current option settings as a command-line style array.
 * Only options that differ from the empty default are emitted.
 *
 * @return the current setting
 */
public String[] getOptions() {
    // typed Vector: removes the raw-type warning and the cast on toArray
    Vector<String> options = new Vector<String>();

    if ( (getUrl() != null) && (getUrl().length() != 0) ) {
        options.add("-url");
        options.add(getUrl());
    }

    if ( (getUser() != null) && (getUser().length() != 0) ) {
        options.add("-user");
        options.add(getUser());
    }

    if ( (getPassword() != null) && (getPassword().length() != 0) ) {
        options.add("-password");
        options.add(getPassword());
    }

    if ( (m_tableName != null) && (m_tableName.length() != 0) ) {
        options.add("-T");
        options.add(m_tableName);
    }

    if (m_id)
        options.add("-P");

    if ( (m_inputFile != null) && (m_inputFile.length() != 0) ) {
        options.add("-i");
        options.add(m_inputFile);
    }

    return options.toArray(new String[options.size()]);
}
/**
 * Lists the available options: JDBC URL, user, password, table name,
 * primary-key flag and an optional ARFF input file.
 *
 * @return an enumeration of the available options
 */
public java.util.Enumeration listOptions() {
    FastVector newVector = new FastVector();
    newVector.addElement(new Option(
        "\tThe JDBC URL to connect to.\n"
        + "\t(default: from DatabaseUtils.props file)",
        "url", 1, "-url <JDBC URL>"));
    newVector.addElement(new Option(
        "\tThe user to connect with to the database.\n"
        + "\t(default: none)",
        "user", 1, "-user <name>"));
    newVector.addElement(new Option(
        "\tThe password to connect with to the database.\n"
        + "\t(default: none)",
        "password", 1, "-password <password>"));
    newVector.addElement(new Option(
        "\tThe name of the table.\n"
        + "\t(default: the relation name)",
        "T", 1, "-T <table name>"));
    newVector.addElement(new Option(
        "\tAdd an ID column as primary key. The name is specified\n"
        + "\tin the DatabaseUtils file ('idColumn'). The DatabaseLoader\n"
        + "\twon't load this column.",
        "P", 0, "-P"));
    newVector.addElement(new Option(
        "\tInput file in arff format that should be saved in database.",
        "i", 1, "-i <input file name>"));
    return newVector.elements();
}
/**
 * Sets the options. <p/>
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -url <JDBC URL>
 * The JDBC URL to connect to.
 * (default: from DatabaseUtils.props file)</pre>
 *
 * <pre> -user <name>
 * The user to connect with to the database.
 * (default: none)</pre>
 *
 * <pre> -password <password>
 * The password to connect with to the database.
 * (default: none)</pre>
 *
 * <pre> -T <table name>
 * The name of the table.
 * (default: the relation name)</pre>
 *
 * <pre> -P
 * Add an ID column as primary key. The name is specified
 * in the DatabaseUtils file ('idColumn'). The DatabaseLoader
 * won't load this column.</pre>
 *
 * <pre> -i <input file name>
 * Input file in arff format that should be saved in database.</pre>
 *
 <!-- options-end -->
 *
 * @param options the options
 * @throws Exception if options cannot be set
 */
public void setOptions(String[] options) throws Exception {
    String tableString, inputString, tmpStr;
    resetOptions();
    tmpStr = Utils.getOption("url", options);
    if (tmpStr.length() != 0)
        setUrl(tmpStr);
    tmpStr = Utils.getOption("user", options);
    if (tmpStr.length() != 0)
        setUser(tmpStr);
    tmpStr = Utils.getOption("password", options);
    if (tmpStr.length() != 0)
        setPassword(tmpStr);
    tableString = Utils.getOption('T', options);
    inputString = Utils.getOption('i', options);
    if(tableString.length() != 0){
        // explicit table name overrides the relation-name default
        m_tableName = tableString;
        m_tabName = false;
    }
    m_id = Utils.getFlag('P', options);
    if(inputString.length() != 0){
        // load the ARFF file given on the command line as the data set
        try{
            m_inputFile = inputString;
            ArffLoader al = new ArffLoader();
            File inputFile = new File(inputString);
            al.setSource(inputFile);
            setInstances(al.getDataSet());
            //System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +getInstances());
            if(tableString.length() == 0)
                m_tableName = getInstances().relationName();
        }catch(Exception ex) {
            printException(ex);
            ex.printStackTrace();
        }
    }
}
/**
 * Returns the revision string.
 *
 * @return the revision
 */
public String getRevision() {
    return RevisionUtils.extract("$Revision: 7499 $");
}
/**
 * Main method: prints the available options, configures the saver from
 * the command line and performs a batch save.
 *
 * @param options should contain the options of a Saver.
 */
public static void main(String [] options) {
    StringBuffer text = new StringBuffer();
    text.append("\n\nDatabaseSaver options:\n");
    try {
        DatabaseSaver asv = new DatabaseSaver();
        try {
            // collect the option synopsis/description for the usage message
            Enumeration enumi = asv.listOptions();
            while (enumi.hasMoreElements()) {
                Option option = (Option)enumi.nextElement();
                text.append(option.synopsis()+'\n');
                text.append(option.description()+'\n');
            }
            asv.setOptions(options);
            asv.setDestination();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        //incremental
        /*asv.setRetrieval(INCREMENTAL);
        Instances instances = asv.getInstances();
        asv.setStructure(instances);
        for(int i = 0; i < instances.numInstances(); i++){ //last instance is null and finishes incremental saving
            asv.writeIncremental(instances.instance(i));
        }
        asv.writeIncremental(null);*/
        //batch
        asv.writeBatch();
    } catch (Exception ex) {
        // on failure, print the usage text together with the stack trace
        ex.printStackTrace();
        System.out.println(Thread.currentThread().getStackTrace()[1].getClassName() +text);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.managers.communication.GridMessageListener;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfo;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.CacheGetFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtAffinityAssignmentRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLockRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtLockResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxFinishRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxFinishResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxPrepareRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTxPrepareResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridPartitionedSingleGetFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicAbstractUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicSingleUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicUpdateResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicAbstractUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicFullUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicSingleUpdateFilterRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicSingleUpdateInvokeRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicSingleUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicUpdateResponse;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtForceKeysRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtForceKeysResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearGetRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearGetResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearSingleGetRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearSingleGetResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxFinishRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxFinishResponse;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareResponse;
import org.apache.ignite.internal.processors.cache.query.GridCacheQueryRequest;
import org.apache.ignite.internal.processors.cache.query.GridCacheQueryResponse;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxState;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxStateAware;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.F0;
import org.apache.ignite.internal.util.GridLeanSet;
import org.apache.ignite.internal.util.StripedCompositeReadWriteLock;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static org.apache.ignite.internal.GridTopic.TOPIC_CACHE;
/**
* Cache communication manager.
*/
public class GridCacheIoManager extends GridCacheSharedManagerAdapter {
/** Communication topic prefix for distributed queries. */
private static final String QUERY_TOPIC_PREFIX = "QUERY";
/** Message ID generator. */
private static final AtomicLong idGen = new AtomicLong();
/** Delay in milliseconds between retries. */
private long retryDelay;
/** Number of retries using to send messages. */
private int retryCnt;
/** Indexed class handlers. */
private volatile Map<Integer, IgniteBiInClosure[]> idxClsHandlers = new HashMap<>();
/** Handler registry. */
private ConcurrentMap<ListenerKey, IgniteBiInClosure<UUID, GridCacheMessage>>
clsHandlers = new ConcurrentHashMap8<>();
/** Ordered handler registry. */
private ConcurrentMap<Object, IgniteBiInClosure<UUID, ? extends GridCacheMessage>> orderedHandlers =
new ConcurrentHashMap8<>();
/** Stopping flag. */
private boolean stopping;
/** Mutex. */
private final StripedCompositeReadWriteLock rw =
new StripedCompositeReadWriteLock(Runtime.getRuntime().availableProcessors());
/** Deployment enabled. */
private boolean depEnabled;
/** Message listener. Defers processing until the topology/affinity version
 *  required by the message is ready on this node. */
private GridMessageListener lsnr = new GridMessageListener() {
    @Override public void onMessage(final UUID nodeId, final Object msg) {
        if (log.isDebugEnabled())
            log.debug("Received unordered cache communication message [nodeId=" + nodeId +
                ", locId=" + cctx.localNodeId() + ", msg=" + msg + ']');
        final GridCacheMessage cacheMsg = (GridCacheMessage)msg;
        // Future to wait on (if any) before the message may be processed.
        IgniteInternalFuture<?> fut = null;
        if (cacheMsg.partitionExchangeMessage()) {
            if (cacheMsg instanceof GridDhtAffinityAssignmentRequest) {
                assert cacheMsg.topologyVersion() != null : cacheMsg;
                AffinityTopologyVersion startTopVer = new AffinityTopologyVersion(cctx.localNode().order());
                DynamicCacheDescriptor cacheDesc = cctx.cache().cacheDescriptor(cacheMsg.cacheId());
                // Prefer the version the cache actually started on, if known.
                if (cacheDesc != null) {
                    if (cacheDesc.startTopologyVersion() != null)
                        startTopVer = cacheDesc.startTopologyVersion();
                    else if (cacheDesc.receivedFromStartVersion() != null)
                        startTopVer = cacheDesc.receivedFromStartVersion();
                }
                // Need to wait for exchange to avoid race between cache start and affinity request.
                fut = cctx.exchange().affinityReadyFuture(startTopVer);
                if (fut != null && !fut.isDone()) {
                    if (log.isDebugEnabled()) {
                        log.debug("Wait for exchange before processing message [msg=" + msg +
                            ", node=" + nodeId +
                            ", waitVer=" + startTopVer +
                            ", cacheDesc=" + cacheDesc + ']');
                    }
                    // Re-dispatch once the exchange completes (off the listener thread).
                    fut.listen(new CI1<IgniteInternalFuture<?>>() {
                        @Override public void apply(IgniteInternalFuture<?> fut) {
                            cctx.kernalContext().closure().runLocalSafe(new Runnable() {
                                @Override public void run() {
                                    handleMessage(nodeId, cacheMsg);
                                }
                            });
                        }
                    });
                    return;
                }
            }
            long locTopVer = cctx.discovery().topologyVersion();
            long rmtTopVer = cacheMsg.topologyVersion().topologyVersion();
            // Sender is ahead of us: wait for our discovery topology to catch up.
            if (locTopVer < rmtTopVer) {
                if (log.isDebugEnabled())
                    log.debug("Received message has higher topology version [msg=" + msg +
                        ", locTopVer=" + locTopVer + ", rmtTopVer=" + rmtTopVer + ']');
                fut = cctx.discovery().topologyFuture(rmtTopVer);
            }
        }
        else {
            AffinityTopologyVersion locAffVer = cctx.exchange().readyAffinityVersion();
            AffinityTopologyVersion rmtAffVer = cacheMsg.topologyVersion();
            // Sender's affinity is ahead of ours: wait for affinity readiness.
            if (locAffVer.compareTo(rmtAffVer) < 0) {
                IgniteLogger log = cacheMsg.messageLogger(cctx);
                if (log.isDebugEnabled()) {
                    StringBuilder msg0 = new StringBuilder("Received message has higher affinity topology version [");
                    appendMessageInfo(cacheMsg, nodeId, msg0);
                    msg0.append(", locTopVer=").append(locAffVer).
                        append(", rmtTopVer=").append(rmtAffVer).
                        append(']');
                    log.debug(msg0.toString());
                }
                fut = cctx.exchange().affinityReadyFuture(rmtAffVer);
            }
        }
        if (fut != null && !fut.isDone()) {
            // Process the message only after the awaited version is ready.
            fut.listen(new CI1<IgniteInternalFuture<?>>() {
                @Override public void apply(IgniteInternalFuture<?> t) {
                    cctx.kernalContext().closure().runLocalSafe(new Runnable() {
                        @Override public void run() {
                            IgniteLogger log = cacheMsg.messageLogger(cctx);
                            if (log.isDebugEnabled()) {
                                StringBuilder msg0 = new StringBuilder("Process cache message after wait for " +
                                    "affinity topology version [");
                                appendMessageInfo(cacheMsg, nodeId, msg0).append(']');
                                log.debug(msg0.toString());
                            }
                            handleMessage(nodeId, cacheMsg);
                        }
                    });
                }
            });
            return;
        }
        handleMessage(nodeId, cacheMsg);
    }
};
/**
 * Routes an incoming cache message to its registered handler.
 *
 * Lookup is two-tiered: messages declaring a non-negative lookup index are
 * resolved through the per-cache handler array (fast path); everything else
 * through the (cacheId, class)-keyed map.
 *
 * @param nodeId Sender node ID.
 * @param cacheMsg Message.
 */
@SuppressWarnings("unchecked")
private void handleMessage(UUID nodeId, GridCacheMessage cacheMsg) {
    int msgIdx = cacheMsg.lookupIndex();

    IgniteBiInClosure<UUID, GridCacheMessage> c = null;

    if (msgIdx >= 0) {
        // Read the field once into a local: the map reference may be
        // re-assigned concurrently by addHandler().
        Map<Integer, IgniteBiInClosure[]> idxClsHandlers0 = idxClsHandlers;

        IgniteBiInClosure[] cacheClsHandlers = idxClsHandlers0.get(cacheMsg.cacheId());

        if (cacheClsHandlers != null)
            c = cacheClsHandlers[msgIdx];
    }

    // Slow path: class-keyed lookup.
    if (c == null)
        c = clsHandlers.get(new ListenerKey(cacheMsg.cacheId(), cacheMsg.getClass()));

    if (c == null) {
        // No handler registered: log full diagnostics (including every
        // registered listener) and drop the message.
        IgniteLogger log = cacheMsg.messageLogger(cctx);

        StringBuilder msg0 = new StringBuilder("Received message without registered handler (will ignore) [");

        appendMessageInfo(cacheMsg, nodeId, msg0);

        msg0.append(", locTopVer=").append(cctx.exchange().readyAffinityVersion()).
            append(", msgTopVer=").append(cacheMsg.topologyVersion()).
            append(", cacheDesc=").append(cctx.cache().cacheDescriptor(cacheMsg.cacheId())).
            append(']');

        msg0.append(U.nl()).append("Registered listeners:");

        Map<Integer, IgniteBiInClosure[]> idxClsHandlers0 = idxClsHandlers;

        for (Map.Entry<Integer, IgniteBiInClosure[]> e : idxClsHandlers0.entrySet())
            msg0.append(U.nl()).append(e.getKey()).append("=").append(Arrays.toString(e.getValue()));

        // Missing handlers are expected during node stop, so demote to debug.
        if (cctx.kernalContext().isStopping()) {
            if (log.isDebugEnabled())
                log.debug(msg0.toString());
        }
        else
            U.error(log, msg0.toString());

        return;
    }

    onMessage0(nodeId, cacheMsg, c);
}
/** {@inheritDoc} */
@Override public void start0() throws IgniteCheckedException {
    // Cache the network retry settings from grid configuration.
    retryCnt = cctx.gridConfig().getNetworkSendRetryCount();
    retryDelay = cctx.gridConfig().getNetworkSendRetryDelay();

    // Remember whether peer class loading is enabled; consulted per message.
    depEnabled = cctx.gridDeploy().enabled();

    // Subscribe the main cache communication listener.
    cctx.gridIO().addMessageListener(TOPIC_CACHE, lsnr);
}
/** {@inheritDoc} */
@SuppressWarnings("BusyWait")
@Override protected void onKernalStop0(boolean cancel) {
    // Stop accepting new cache messages first.
    cctx.gridIO().removeMessageListener(TOPIC_CACHE);

    for (Object ordTopic : orderedHandlers.keySet())
        cctx.gridIO().removeMessageListener(ordTopic);

    boolean interrupted = false;

    // Busy wait is intentional.
    // Keep retrying the write lock so in-flight message processing (which
    // holds the read lock in onMessage0) can drain before we flag 'stopping'.
    while (true) {
        try {
            if (rw.writeLock().tryLock(200, TimeUnit.MILLISECONDS))
                break;
            else
                Thread.sleep(200);
        }
        catch (InterruptedException ignore) {
            // Preserve interrupt status & ignore.
            // Note that interrupted flag is cleared.
            interrupted = true;
        }
    }

    // Restore the interrupt flag cleared by tryLock()/sleep().
    if (interrupted)
        Thread.currentThread().interrupt();

    try {
        // Set under the write lock: onMessage0 reads it under the read lock.
        stopping = true;
    }
    finally {
        rw.writeLock().unlock();
    }
}
/**
 * Processes a cache message under the stop-guard read lock: unmarshals it,
 * then dispatches either to the regular path or, if unmarshalling recorded
 * a class error, to the failed-message path.
 *
 * @param nodeId Node ID.
 * @param cacheMsg Cache message.
 * @param c Handler closure.
 */
@SuppressWarnings({"unchecked", "ConstantConditions", "ThrowableResultOfMethodCallIgnored"})
private void onMessage0(final UUID nodeId, final GridCacheMessage cacheMsg,
    final IgniteBiInClosure<UUID, GridCacheMessage> c) {
    Lock lock = rw.readLock();

    lock.lock();

    try {
        // 'stopping' is only written under the write lock, so holding the
        // read lock gives a consistent view.
        if (stopping) {
            if (log.isDebugEnabled())
                log.debug("Received cache communication message while stopping (will ignore) [nodeId=" +
                    nodeId + ", msg=" + cacheMsg + ']');

            return;
        }

        if (depEnabled)
            cctx.deploy().ignoreOwnership(true);

        unmarshall(nodeId, cacheMsg);

        if (cacheMsg.classError() != null)
            processFailedMessage(nodeId, cacheMsg, c);
        else
            processMessage(nodeId, cacheMsg, c);
    }
    catch (Throwable e) {
        U.error(log, "Failed to process message [senderId=" + nodeId + ", messageType=" + cacheMsg.getClass() + ']', e);

        // Errors are fatal and must not be swallowed.
        if (e instanceof Error)
            throw (Error)e;
    }
    finally {
        if (depEnabled)
            cctx.deploy().ignoreOwnership(false);

        lock.unlock();
    }
}
/**
 * Sends response on failed message. Best effort: a send failure is only
 * logged, since the sender may have already left the topology.
 *
 * @param nodeId node id.
 * @param res response.
 * @param cctx shared context.
 * @param plc grid io policy.
 */
private void sendResponseOnFailedMessage(UUID nodeId, GridCacheMessage res, GridCacheSharedContext cctx,
    byte plc) {
    try {
        cctx.io().send(nodeId, res, plc);
    }
    catch (IgniteCheckedException e) {
        // Fixed log formatting: was "[nodeId=...,res=...]" with a missing
        // separator space, inconsistent with other log messages in this file.
        U.error(log, "Failed to send response to node (is node still alive?) [nodeId=" + nodeId +
            ", res=" + res + ']', e);
    }
}
/**
 * Appends identifying information about a cache message to the builder:
 * transaction IDs for tx messages, future/write versions for atomic
 * messages, and the sender node in all cases.
 *
 * The ID lookups are hoisted into locals: {@code txId}/{@code atomicFututeId}
 * each walk an instanceof chain, and the original called each of them twice.
 *
 * @param cacheMsg Cache message.
 * @param nodeId Node ID.
 * @param builder Message builder.
 * @return Message builder.
 */
private StringBuilder appendMessageInfo(GridCacheMessage cacheMsg, UUID nodeId, StringBuilder builder) {
    GridCacheVersion txId = txId(cacheMsg);

    if (txId != null) {
        builder.append("txId=").append(txId).
            append(", dhtTxId=").append(dhtTxId(cacheMsg)).
            append(", msg=").append(cacheMsg);
    }
    else {
        GridCacheVersion futId = atomicFututeId(cacheMsg);

        if (futId != null) {
            builder.append("futId=").append(futId).
                append(", writeVer=").append(atomicWriteVersion(cacheMsg)).
                append(", msg=").append(cacheMsg);
        }
        else
            builder.append("msg=").append(cacheMsg);
    }

    builder.append(", node=").append(nodeId);

    return builder;
}
/**
 * Extracts the near transaction version from a message when the message
 * type carries one.
 *
 * @param cacheMsg Cache message.
 * @return Transaction ID if applicable for message.
 */
@Nullable private GridCacheVersion txId(GridCacheMessage cacheMsg) {
    if (cacheMsg instanceof GridDhtTxPrepareRequest)
        return ((GridDhtTxPrepareRequest)cacheMsg).nearXidVersion();

    if (cacheMsg instanceof GridNearTxPrepareRequest)
        return ((GridNearTxPrepareRequest)cacheMsg).version();

    if (cacheMsg instanceof GridNearTxPrepareResponse)
        return ((GridNearTxPrepareResponse)cacheMsg).version();

    if (cacheMsg instanceof GridNearTxFinishRequest)
        return ((GridNearTxFinishRequest)cacheMsg).version();

    if (cacheMsg instanceof GridNearTxFinishResponse)
        return ((GridNearTxFinishResponse)cacheMsg).xid();

    return null;
}
/**
 * Extracts the DHT transaction version from a message when the message
 * type carries one.
 *
 * @param cacheMsg Cache message.
 * @return Transaction ID if applicable for message.
 */
@Nullable private GridCacheVersion dhtTxId(GridCacheMessage cacheMsg) {
    if (cacheMsg instanceof GridDhtTxPrepareRequest)
        return ((GridDhtTxPrepareRequest)cacheMsg).version();

    if (cacheMsg instanceof GridDhtTxPrepareResponse)
        return ((GridDhtTxPrepareResponse)cacheMsg).version();

    if (cacheMsg instanceof GridDhtTxFinishRequest)
        return ((GridDhtTxFinishRequest)cacheMsg).version();

    if (cacheMsg instanceof GridDhtTxFinishResponse)
        return ((GridDhtTxFinishResponse)cacheMsg).xid();

    return null;
}
/**
 * Extracts the atomic update future version from a message when the message
 * type carries one.
 *
 * @param cacheMsg Cache message.
 * @return Atomic future ID if applicable for message.
 */
@Nullable private GridCacheVersion atomicFututeId(GridCacheMessage cacheMsg) {
    if (cacheMsg instanceof GridNearAtomicAbstractUpdateRequest)
        return ((GridNearAtomicAbstractUpdateRequest)cacheMsg).futureVersion();

    if (cacheMsg instanceof GridNearAtomicUpdateResponse)
        return ((GridNearAtomicUpdateResponse)cacheMsg).futureVersion();

    if (cacheMsg instanceof GridDhtAtomicAbstractUpdateRequest)
        return ((GridDhtAtomicAbstractUpdateRequest)cacheMsg).futureVersion();

    if (cacheMsg instanceof GridDhtAtomicUpdateResponse)
        return ((GridDhtAtomicUpdateResponse)cacheMsg).futureVersion();

    return null;
}
/**
 * Extracts the atomic write (update) version from a message when the
 * message type carries one.
 *
 * @param cacheMsg Cache message.
 * @return Atomic write version if applicable for message.
 */
@Nullable private GridCacheVersion atomicWriteVersion(GridCacheMessage cacheMsg) {
    if (cacheMsg instanceof GridNearAtomicAbstractUpdateRequest)
        return ((GridNearAtomicAbstractUpdateRequest)cacheMsg).updateVersion();

    if (cacheMsg instanceof GridDhtAtomicAbstractUpdateRequest)
        return ((GridDhtAtomicAbstractUpdateRequest)cacheMsg).writeVersion();

    return null;
}
/**
 * Processes failed messages.
 *
 * For a request whose unmarshalling failed, sends back a response carrying
 * the class error so the sender's future completes; for a failed response,
 * completes the matching local future with the error. Dispatch is by the
 * message's direct (wire) type code.
 *
 * @param nodeId Node ID.
 * @param msg Message.
 * @param c Handler closure (used for messages delegated to regular processing).
 * @throws IgniteCheckedException If failed.
 */
private void processFailedMessage(UUID nodeId, GridCacheMessage msg, IgniteBiInClosure<UUID, GridCacheMessage> c)
    throws IgniteCheckedException {
    GridCacheContext ctx = cctx.cacheContext(msg.cacheId());

    switch (msg.directType()) {
        // Eviction request.
        case 14: {
            GridCacheEvictionRequest req = (GridCacheEvictionRequest)msg;

            GridCacheEvictionResponse res = new GridCacheEvictionResponse(
                ctx.cacheId(),
                req.futureId(),
                req.classError() != null
            );

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // DHT lock request.
        case 30: {
            GridDhtLockRequest req = (GridDhtLockRequest)msg;

            GridDhtLockResponse res = new GridDhtLockResponse(
                ctx.cacheId(),
                req.version(),
                req.futureId(),
                req.miniId(),
                0,
                ctx.deploymentEnabled());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // DHT tx prepare request.
        case 34: {
            GridDhtTxPrepareRequest req = (GridDhtTxPrepareRequest)msg;

            GridDhtTxPrepareResponse res = new GridDhtTxPrepareResponse(
                req.version(),
                req.futureId(),
                req.miniId(),
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, req.policy());
        }

        break;

        // DHT atomic update request.
        case 38: {
            GridDhtAtomicUpdateRequest req = (GridDhtAtomicUpdateRequest)msg;

            GridDhtAtomicUpdateResponse res = new GridDhtAtomicUpdateResponse(
                ctx.cacheId(),
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.onError(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near atomic full update request.
        case 40: {
            GridNearAtomicFullUpdateRequest req = (GridNearAtomicFullUpdateRequest)msg;

            GridNearAtomicUpdateResponse res = new GridNearAtomicUpdateResponse(
                ctx.cacheId(),
                nodeId,
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // DHT force-keys request.
        case 42: {
            GridDhtForceKeysRequest req = (GridDhtForceKeysRequest)msg;

            GridDhtForceKeysResponse res = new GridDhtForceKeysResponse(
                ctx.cacheId(),
                req.futureId(),
                req.miniId(),
                ctx.deploymentEnabled()
            );

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        case 45: {
            processMessage(nodeId, msg, c);// Will be handled by Rebalance Demander.
        }

        break;

        // Near get request.
        case 49: {
            GridNearGetRequest req = (GridNearGetRequest)msg;

            GridNearGetResponse res = new GridNearGetResponse(
                ctx.cacheId(),
                req.futureId(),
                req.miniId(),
                req.version(),
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near get response: complete the local future with the error.
        case 50: {
            GridNearGetResponse res = (GridNearGetResponse)msg;

            CacheGetFuture fut = (CacheGetFuture)ctx.mvcc().future(res.futureId());

            if (fut == null) {
                if (log.isDebugEnabled())
                    log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');

                return;
            }

            res.error(res.classError());

            fut.onResult(nodeId, res);
        }

        break;

        // Near lock request.
        case 51: {
            GridNearLockRequest req = (GridNearLockRequest)msg;

            GridNearLockResponse res = new GridNearLockResponse(
                ctx.cacheId(),
                req.version(),
                req.futureId(),
                req.miniId(),
                false,
                0,
                req.classError(),
                null,
                ctx.deploymentEnabled());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near tx prepare request.
        case 55: {
            GridNearTxPrepareRequest req = (GridNearTxPrepareRequest)msg;

            GridNearTxPrepareResponse res = new GridNearTxPrepareResponse(
                req.version(),
                req.futureId(),
                req.miniId(),
                req.version(),
                req.version(),
                null,
                null,
                null,
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, req.policy());
        }

        break;

        // Query request: error response goes over the per-query ordered topic.
        case 58: {
            GridCacheQueryRequest req = (GridCacheQueryRequest)msg;

            GridCacheQueryResponse res = new GridCacheQueryResponse(
                req.cacheId(),
                req.id(),
                req.classError(),
                cctx.deploymentEnabled());

            cctx.io().sendOrderedMessage(
                ctx.node(nodeId),
                TOPIC_CACHE.topic(QUERY_TOPIC_PREFIX, nodeId, req.id()),
                res,
                ctx.ioPolicy(),
                Long.MAX_VALUE);
        }

        break;

        case 114: {
            processMessage(nodeId, msg, c);// Will be handled by Rebalance Demander.
        }

        break;

        // Near single get request.
        case 116: {
            GridNearSingleGetRequest req = (GridNearSingleGetRequest)msg;

            GridNearSingleGetResponse res = new GridNearSingleGetResponse(
                ctx.cacheId(),
                req.futureId(),
                req.topologyVersion(),
                null,
                false,
                req.deployInfo() != null);

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near single get response: complete the local future with the error.
        case 117: {
            GridNearSingleGetResponse res = (GridNearSingleGetResponse)msg;

            GridPartitionedSingleGetFuture fut = (GridPartitionedSingleGetFuture)ctx.mvcc()
                .future(new IgniteUuid(IgniteUuid.VM_ID, res.futureId()));

            if (fut == null) {
                if (log.isDebugEnabled())
                    log.debug("Failed to find future for get response [sender=" + nodeId + ", res=" + res + ']');

                return;
            }

            res.error(res.classError());

            fut.onResult(nodeId, res);
        }

        break;

        // Near atomic single update request.
        case 125: {
            GridNearAtomicSingleUpdateRequest req = (GridNearAtomicSingleUpdateRequest)msg;

            GridNearAtomicUpdateResponse res = new GridNearAtomicUpdateResponse(
                ctx.cacheId(),
                nodeId,
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near atomic single update (invoke) request.
        case 126: {
            GridNearAtomicSingleUpdateInvokeRequest req = (GridNearAtomicSingleUpdateInvokeRequest)msg;

            GridNearAtomicUpdateResponse res = new GridNearAtomicUpdateResponse(
                ctx.cacheId(),
                nodeId,
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // Near atomic single update (filter) request.
        case 127: {
            GridNearAtomicSingleUpdateFilterRequest req = (GridNearAtomicSingleUpdateFilterRequest)msg;

            GridNearAtomicUpdateResponse res = new GridNearAtomicUpdateResponse(
                ctx.cacheId(),
                nodeId,
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.error(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        // DHT atomic single update request.
        case -36: {
            GridDhtAtomicSingleUpdateRequest req = (GridDhtAtomicSingleUpdateRequest)msg;

            GridDhtAtomicUpdateResponse res = new GridDhtAtomicUpdateResponse(
                ctx.cacheId(),
                req.futureVersion(),
                ctx.deploymentEnabled());

            res.onError(req.classError());

            sendResponseOnFailedMessage(nodeId, res, cctx, ctx.ioPolicy());
        }

        break;

        default:
            throw new IgniteCheckedException("Failed to send response to node. Unsupported direct type [message="
                + msg + "]", msg.classError());
    }
}
/**
 * Invokes the handler closure for a successfully unmarshalled message, then
 * unconditionally resets per-thread context and unwinds pending evictions.
 *
 * @param nodeId Node ID.
 * @param msg Message.
 * @param c Closure.
 */
private void processMessage(UUID nodeId, GridCacheMessage msg, IgniteBiInClosure<UUID, GridCacheMessage> c) {
    try {
        c.apply(nodeId, msg);

        if (log.isDebugEnabled())
            log.debug("Finished processing cache communication message [nodeId=" + nodeId + ", msg=" + msg + ']');
    }
    catch (Throwable e) {
        U.error(log, "Failed processing message [senderId=" + nodeId + ", msg=" + msg + ']', e);

        // Errors are fatal and must propagate.
        if (e instanceof Error)
            throw e;
    }
    finally {
        // Reset thread local context.
        cctx.tm().resetContext();
        cctx.mvcc().contextReset();

        // Unwind eviction notifications.
        // Tx-aware messages unwind via their tx state; others via the
        // message's cache context (which may be gone if the cache stopped).
        if (msg instanceof IgniteTxStateAware) {
            IgniteTxState txState = ((IgniteTxStateAware)msg).txState();

            if (txState != null)
                txState.unwindEvicts(cctx);
        }
        else {
            GridCacheContext ctx = cctx.cacheContext(msg.cacheId());

            if (ctx != null)
                CU.unwindEvicts(ctx);
        }
    }
}
/**
 * Pre-processes message prior to send.
 *
 * @param msg Message to send.
 * @param destNodeId Destination node ID.
 * @return {@code True} if should send message.
 * @throws IgniteCheckedException If failed.
 */
private boolean onSend(GridCacheMessage msg, @Nullable UUID destNodeId) throws IgniteCheckedException {
    // Drop error-carrying messages while the local node shuts down.
    if (msg.error() != null && cctx.kernalContext().isStopping())
        return false;

    // Assign a message ID if one was not set yet.
    if (msg.messageId() < 0)
        msg.messageId(idGen.incrementAndGet());

    boolean loc = destNodeId != null && cctx.localNodeId().equals(destNodeId);

    // Marshalling (and deployment preparation) only matters for remote sends.
    if (!loc) {
        msg.prepareMarshal(cctx);

        if (msg instanceof GridCacheDeployable && msg.addDeploymentInfo())
            cctx.deploy().prepare((GridCacheDeployable)msg);
    }

    return true;
}
/**
 * Sends communication message.
 *
 * Retries up to {@code retryCnt} times with {@code retryDelay} between
 * attempts; a dead receiver is reported as a topology exception.
 *
 * @param node Node to send the message to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @throws IgniteCheckedException If sending failed.
 * @throws ClusterTopologyCheckedException If receiver left.
 */
@SuppressWarnings("unchecked")
public void send(ClusterNode node, GridCacheMessage msg, byte plc) throws IgniteCheckedException {
    assert !node.isLocal();

    if (!onSend(msg, node.id()))
        return;

    if (log.isDebugEnabled())
        log.debug("Sending cache message [msg=" + msg + ", node=" + U.toShortString(node) + ']');

    int cnt = 0;

    while (cnt <= retryCnt) {
        try {
            cnt++;

            cctx.gridIO().send(node, TOPIC_CACHE, msg, plc);

            return;
        }
        catch (IgniteCheckedException e) {
            // Distinguish "receiver left" from transient send failures.
            if (!cctx.discovery().alive(node.id()) || !cctx.discovery().pingNode(node.id()))
                throw new ClusterTopologyCheckedException("Node left grid while sending message to: " + node.id(), e);

            if (cnt == retryCnt || cctx.kernalContext().isStopping())
                throw e;
            else if (log.isDebugEnabled())
                log.debug("Failed to send message to node (will retry): " + node.id());
        }

        U.sleep(retryDelay);
    }

    // NOTE(review): with retryCnt >= 1 every loop iteration returns or
    // throws, so this log is normally unreachable; with retryCnt == 0 the
    // loop can fall through after a failed attempt and log success
    // misleadingly. Confirm retryCnt is always positive.
    if (log.isDebugEnabled())
        log.debug("Sent cache message [msg=" + msg + ", node=" + U.toShortString(node) + ']');
}
/**
 * Sends message and automatically accounts for lefts nodes.
 *
 * Nodes detected as dead are accumulated in {@code leftIds}; for each such
 * node the {@code fallback} predicate is consulted and may abort the whole
 * send by returning {@code false}.
 *
 * @param nodes Nodes to send to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @param fallback Callback for failed nodes.
 * @throws IgniteCheckedException If send failed.
 */
@SuppressWarnings({"BusyWait", "unchecked"})
public void safeSend(Collection<? extends ClusterNode> nodes, GridCacheMessage msg, byte plc,
    @Nullable IgnitePredicate<ClusterNode> fallback) throws IgniteCheckedException {
    assert nodes != null;
    assert msg != null;

    if (nodes.isEmpty()) {
        if (log.isDebugEnabled())
            log.debug("Message will not be sent as collection of nodes is empty: " + msg);

        return;
    }

    if (!onSend(msg, null))
        return;

    if (log.isDebugEnabled())
        log.debug("Sending cache message [msg=" + msg + ", nodes=" + U.toShortString(nodes) + ']');

    final Collection<UUID> leftIds = new GridLeanSet<>();

    int cnt = 0;

    while (cnt < retryCnt) {
        try {
            // Live view over 'nodes' excluding nodes already known to be gone.
            Collection<? extends ClusterNode> nodesView = F.view(nodes, new P1<ClusterNode>() {
                @Override public boolean apply(ClusterNode e) {
                    return !leftIds.contains(e.id());
                }
            });

            cctx.gridIO().send(nodesView, TOPIC_CACHE, msg, plc);

            boolean added = false;

            // Even if there is no exception, we still check here, as node could have
            // ignored the message during stopping.
            for (ClusterNode n : nodes) {
                if (!leftIds.contains(n.id()) && !cctx.discovery().alive(n.id())) {
                    leftIds.add(n.id());

                    if (fallback != null && !fallback.apply(n))
                        // If fallback signalled to stop.
                        return;

                    added = true;
                }
            }

            if (added) {
                // All recipients gone: nothing more to do.
                if (!F.exist(F.nodeIds(nodes), F0.not(F.contains(leftIds)))) {
                    if (log.isDebugEnabled())
                        log.debug("Message will not be sent because all nodes left topology [msg=" + msg +
                            ", nodes=" + U.toShortString(nodes) + ']');

                    return;
                }
            }

            break;
        }
        catch (IgniteCheckedException e) {
            // On failure, ping suspects; only count a retry (and possibly
            // rethrow) when no node was newly detected as left.
            boolean added = false;

            for (ClusterNode n : nodes) {
                if (!leftIds.contains(n.id()) &&
                    (!cctx.discovery().alive(n.id()) || !cctx.discovery().pingNode(n.id()))) {
                    leftIds.add(n.id());

                    if (fallback != null && !fallback.apply(n))
                        // If fallback signalled to stop.
                        return;

                    added = true;
                }
            }

            if (!added) {
                cnt++;

                if (cnt == retryCnt)
                    throw e;

                U.sleep(retryDelay);
            }

            if (!F.exist(F.nodeIds(nodes), F0.not(F.contains(leftIds)))) {
                if (log.isDebugEnabled())
                    log.debug("Message will not be sent because all nodes left topology [msg=" + msg + ", nodes=" +
                        U.toShortString(nodes) + ']');

                return;
            }

            if (log.isDebugEnabled())
                log.debug("Message send will be retried [msg=" + msg + ", nodes=" + U.toShortString(nodes) +
                    ", leftIds=" + leftIds + ']');
        }
    }

    if (log.isDebugEnabled())
        log.debug("Sent cache message [msg=" + msg + ", nodes=" + U.toShortString(nodes) + ']');
}
/**
 * Sends communication message.
 *
 * @param nodeId ID of node to send the message to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @throws IgniteCheckedException If sending failed.
 */
public void send(UUID nodeId, GridCacheMessage msg, byte plc) throws IgniteCheckedException {
    // Resolve the node; a missing node means it has already left the grid.
    ClusterNode node = cctx.discovery().node(nodeId);

    if (node == null)
        throw new ClusterTopologyCheckedException("Failed to send message because node left grid [nodeId=" + nodeId +
            ", msg=" + msg + ']');

    send(node, msg, plc);
}
/**
 * Sends an ordered communication message with retries.
 *
 * @param node Destination node.
 * @param topic Topic to send the message to.
 * @param msg Message to send.
 * @param plc IO policy.
 * @param timeout Timeout to keep a message on receiving queue.
 * @throws IgniteCheckedException Thrown in case of any errors.
 */
public void sendOrderedMessage(ClusterNode node, Object topic, GridCacheMessage msg, byte plc,
    long timeout) throws IgniteCheckedException {
    if (!onSend(msg, node.id()))
        return;

    int cnt = 0;

    // Retry loop: return on success, throw when the node left or retries
    // are exhausted, otherwise sleep and try again.
    while (cnt <= retryCnt) {
        try {
            cnt++;

            cctx.gridIO().sendOrderedMessage(node, topic, msg, plc, timeout, false);

            if (log.isDebugEnabled())
                log.debug("Sent ordered cache message [topic=" + topic + ", msg=" + msg +
                    ", nodeId=" + node.id() + ']');

            return;
        }
        catch (IgniteCheckedException e) {
            if (cctx.discovery().node(node.id()) == null)
                throw new ClusterTopologyCheckedException("Node left grid while sending ordered message to: " + node.id(), e);

            if (cnt == retryCnt)
                throw e;
            else if (log.isDebugEnabled())
                log.debug("Failed to send message to node (will retry): " + node.id());
        }

        U.sleep(retryDelay);
    }
}
/**
 * @return ID that auto-grows based on local counter and counters received from other nodes.
 */
public long nextIoId() {
    // Atomically bump and hand out the next message ID.
    long nextId = idGen.incrementAndGet();

    return nextId;
}
/**
 * Sends message without retries and node ping in case of error.
 *
 * @param node Node to send message to.
 * @param msg Message.
 * @param plc IO policy.
 * @throws IgniteCheckedException If send failed.
 */
public void sendNoRetry(ClusterNode node,
    GridCacheMessage msg,
    byte plc)
    throws IgniteCheckedException {
    assert node != null;
    assert msg != null;

    if (!onSend(msg, null))
        return;

    try {
        cctx.gridIO().send(node, TOPIC_CACHE, msg, plc);
    }
    catch (IgniteCheckedException e) {
        // A dead receiver is reported as a topology error; any other failure
        // propagates unchanged.
        if (!cctx.discovery().alive(node.id()))
            throw new ClusterTopologyCheckedException("Node left grid while sending message to: " + node.id(), e);

        throw e;
    }

    if (log.isDebugEnabled())
        log.debug("Sent cache message [msg=" + msg + ", node=" + U.toShortString(node) + ']');
}
/**
 * Adds message handler.
 *
 * Message classes that declare a lookup index are registered in a per-cache
 * handler array for O(1) dispatch; all other classes go into the
 * (cacheId, class)-keyed map.
 *
 * @param cacheId Cache ID.
 * @param type Type of message.
 * @param c Handler.
 */
@SuppressWarnings({"unchecked"})
public void addHandler(
    int cacheId,
    Class<? extends GridCacheMessage> type,
    IgniteBiInClosure<UUID, ? extends GridCacheMessage> c) {
    int msgIdx = messageIndex(type);

    if (msgIdx != -1) {
        Map<Integer, IgniteBiInClosure[]> idxClsHandlers0 = idxClsHandlers;

        IgniteBiInClosure[] cacheClsHandlers = idxClsHandlers0.get(cacheId);

        if (cacheClsHandlers == null) {
            cacheClsHandlers = new IgniteBiInClosure[GridCacheMessage.MAX_CACHE_MSG_LOOKUP_INDEX];

            idxClsHandlers0.put(cacheId, cacheClsHandlers);
        }

        if (cacheClsHandlers[msgIdx] != null)
            throw new IgniteException("Duplicate cache message ID found [cacheId=" + cacheId +
                ", type=" + type + ']');

        cacheClsHandlers[msgIdx] = c;

        // Write the reference back so the mutation is published to readers
        // that re-read the field (see handleMessage()).
        idxClsHandlers = idxClsHandlers0;

        return;
    }
    else {
        ListenerKey key = new ListenerKey(cacheId, type);

        // Duplicate class-keyed registrations indicate a programming error.
        if (clsHandlers.putIfAbsent(key,
            (IgniteBiInClosure<UUID, GridCacheMessage>)c) != null)
            assert false : "Handler for class already registered [cacheId=" + cacheId + ", cls=" + type +
                ", old=" + clsHandlers.get(key) + ", new=" + c + ']';
    }

    IgniteLogger log0 = log;

    if (log0 != null && log0.isTraceEnabled())
        log0.trace(
            "Registered cache communication handler [cacheId=" + cacheId + ", type=" + type +
            ", msgIdx=" + msgIdx + ", handler=" + c + ']');
}
/**
 * Removes every handler (both indexed and class-keyed) registered for the
 * given cache.
 *
 * @param cacheId Cache ID to remove handlers for.
 */
public void removeHandlers(int cacheId) {
    assert cacheId != 0;

    idxClsHandlers.remove(cacheId);

    Iterator<ListenerKey> it = clsHandlers.keySet().iterator();

    while (it.hasNext()) {
        if (it.next().cacheId == cacheId)
            it.remove();
    }
}
/**
 * Removes the class-keyed handler registered for the given cache and type.
 *
 * @param cacheId Cache ID to remove handlers for.
 * @param type Message type.
 */
public void removeHandler(int cacheId, Class<? extends GridCacheMessage> type) {
    ListenerKey key = new ListenerKey(cacheId, type);

    clsHandlers.remove(key);
}
/**
 * Resolves the lookup index declared by a message class via its static
 * {@code CACHE_MSG_INDEX_FIELD_NAME} field.
 *
 * @param msgCls Message class to check.
 * @return Message index, or {@code -1} if none is declared.
 */
private int messageIndex(Class<?> msgCls) {
    try {
        Integer msgIdx = U.field(msgCls, GridCacheMessage.CACHE_MSG_INDEX_FIELD_NAME);

        return msgIdx != null && msgIdx >= 0 ? msgIdx : -1;
    }
    catch (IgniteCheckedException ignored) {
        // Field not present -- class is not index-addressable.
        return -1;
    }
}
/**
 * Adds ordered message handler.
 *
 * @param topic Topic.
 * @param c Handler.
 */
@SuppressWarnings({"unchecked"})
public void addOrderedHandler(Object topic, IgniteBiInClosure<UUID, ? extends GridCacheMessage> c) {
    IgniteLogger log0 = log;

    // Atomically claim the topic; a non-null result means it was taken.
    if (orderedHandlers.putIfAbsent(topic, c) != null) {
        if (log0 != null)
            U.warn(log0, "Failed to register ordered cache communication handler because it is already " +
                "registered for this topic [topic=" + topic + ", handler=" + c + ']');

        return;
    }

    cctx.gridIO().addMessageListener(topic, new OrderedMessageListener(
        (IgniteBiInClosure<UUID, GridCacheMessage>)c));

    if (log0 != null && log0.isTraceEnabled())
        log0.trace("Registered ordered cache communication handler [topic=" + topic + ", handler=" + c + ']');
}
/**
 * Removed ordered message handler.
 *
 * @param topic Topic.
 */
public void removeOrderedHandler(Object topic) {
    // Nothing registered for the topic: warn and bail out.
    if (orderedHandlers.remove(topic) == null) {
        if (log != null)
            U.warn(log, "Failed to unregister ordered cache communication handler because it was not found " +
                "for topic: " + topic);

        return;
    }

    cctx.gridIO().removeMessageListener(topic);

    if (log != null && log.isDebugEnabled())
        log.debug("Unregistered ordered cache communication handler for topic:" + topic);
}
/**
 * Finishes unmarshalling of a message received from a remote node, setting
 * up the peer class loading (P2P) context first when deployment info is
 * attached. Unmarshalling failures are recorded on the message via
 * {@code onClassError} rather than thrown, so the caller can route the
 * message to the failed-message path.
 *
 * @param nodeId Sender node ID.
 * @param cacheMsg Message.
 */
@SuppressWarnings({"ErrorNotRethrown", "unchecked"})
private void unmarshall(UUID nodeId, GridCacheMessage cacheMsg) {
    // Loopback messages were never marshalled.
    if (cctx.localNodeId().equals(nodeId))
        return;

    GridDeploymentInfo bean = cacheMsg.deployInfo();

    if (bean != null) {
        assert depEnabled : "Received deployment info while peer class loading is disabled [nodeId=" + nodeId +
            ", msg=" + cacheMsg + ']';

        cctx.deploy().p2pContext(nodeId, bean.classLoaderId(), bean.userVersion(),
            bean.deployMode(), bean.participants(), bean.localDeploymentOwner());

        if (log.isDebugEnabled())
            log.debug("Set P2P context [senderId=" + nodeId + ", msg=" + cacheMsg + ']');
    }

    try {
        cacheMsg.finishUnmarshal(cctx, cctx.deploy().globalLoader());
    }
    catch (IgniteCheckedException e) {
        cacheMsg.onClassError(e);
    }
    catch (BinaryObjectException e) {
        cacheMsg.onClassError(new IgniteCheckedException(e));
    }
    catch (Error e) {
        // Missing/incompatible classes can surface as Errors; convert to a
        // class error only when the message opts in, otherwise rethrow.
        if (cacheMsg.ignoreClassErrors() && X.hasCause(e, NoClassDefFoundError.class,
            UnsupportedClassVersionError.class))
            cacheMsg.onClassError(new IgniteCheckedException("Failed to load class during unmarshalling: " + e, e));
        else
            throw e;
    }
}
/** {@inheritDoc} */
@Override public void printMemoryStats() {
    // Dumps the sizes of both handler registries for diagnostics.
    X.println(">>> ");
    X.println(">>> Cache IO manager memory stats [grid=" + cctx.gridName() + ']');
    X.println(">>>   clsHandlersSize: " + clsHandlers.size());
    X.println(">>>   orderedHandlersSize: " + orderedHandlers.size());
}
/**
 * Ordered message listener: adapts a handler closure to the grid IO
 * listener contract for ordered topics.
 */
private class OrderedMessageListener implements GridMessageListener {
    /** Handler closure invoked for every received message. */
    private final IgniteBiInClosure<UUID, GridCacheMessage> c;

    /**
     * @param c Handler closure.
     */
    OrderedMessageListener(IgniteBiInClosure<UUID, GridCacheMessage> c) {
        this.c = c;
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"CatchGenericClass", "unchecked"})
    @Override public void onMessage(final UUID nodeId, Object msg) {
        if (log.isDebugEnabled())
            log.debug("Received cache ordered message [nodeId=" + nodeId + ", msg=" + msg + ']');

        onMessage0(nodeId, (GridCacheMessage)msg, c);
    }
}
/**
 * Key identifying a class-keyed handler registration by the
 * (cache ID, message class) pair.
 */
private static class ListenerKey {
    /** Cache ID. */
    private int cacheId;

    /** Message class. */
    private Class<? extends GridCacheMessage> msgCls;

    /**
     * @param cacheId Cache ID.
     * @param msgCls Message class.
     */
    private ListenerKey(int cacheId, Class<? extends GridCacheMessage> msgCls) {
        this.cacheId = cacheId;
        this.msgCls = msgCls;
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (this == o)
            return true;

        if (!(o instanceof ListenerKey))
            return false;

        ListenerKey other = (ListenerKey)o;

        return cacheId == other.cacheId && msgCls.equals(other.msgCls);
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        // Same value as the classic res = 31 * cacheId + msgCls.hashCode().
        return 31 * cacheId + msgCls.hashCode();
    }
}
}
| |
package com.solderbyte.openfit;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattService;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothProfile;
import android.bluetooth.BluetoothServerSocket;
import android.bluetooth.BluetoothSocket;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Binder;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.util.Log;
public class BluetoothLeService extends Service {
private static final String LOG_TAG = "OpenFit:BluetoothLeService";

// Handler used to post connection-state messages back to the bound client.
private static Handler mHandler;

// Target device MAC address.
private static String mDeviceMac;
private String mBluetoothDeviceAddress;

// Classic (RFCOMM) socket streams.
public static InputStream mInStream;
public static OutputStream mOutStream;

// Preference-list entries for paired and scanned devices.
public static CharSequence[] pairedEntries;
public static CharSequence[] pairedEntryValues;
public static CharSequence[] scannedEntries;
public static CharSequence[] scannedEntryValues;

// Bluetooth plumbing: adapter/manager, GATT handle, classic sockets and
// the worker threads that manage them.
private static EnableBluetoothThread eBluetooth;
private static BluetoothGatt mBluetoothGatt;
private static BluetoothSocket mBluetoothSocket;
private static BluetoothDevice mBluetoothDevice;
private static BluetoothAdapter mBluetoothAdapter;
private static BluetoothManager mBluetoothManager;
private static Set<BluetoothDevice> pairedDevices;
private static Set<BluetoothDevice> scannedDevices;
private static BluetoothServerSocket mBluetoothServerSocket;
public static BluetoothGattCharacteristic mWriteCharacteristic;

// Connection state (one of the STATE_* constants below).
public int mConnectionState = 0;
public static boolean isEnabled = false;
public static boolean isConnected = false;
public static boolean isScanning = false;
public static volatile boolean isThreadRunning = false;
private static onConnectThread onconnect;
private static ConnectThread connect;

// Connection state constants; STATE_FORCE marks a forced (re)connect.
private static final int STATE_FORCE = 3;
// BLE scan duration, in milliseconds.
private static final long SCAN_PERIOD = 5000;
private static final int STATE_CONNECTED = 2;
private static final int STATE_CONNECTING = 1;
private static final int STATE_DISCONNECTED = 0;

// Broadcast actions/extras published by this service.
public final static String EXTRA_DATA = "EXTRA_DATA";
public final static String ACTION_DATA_AVAILABLE = "ACTION_DATA_AVAILABLE";
public final static String ACTION_GATT_CONNECTED = "ACTION_GATT_CONNECTED";
public final static String ACTION_GATT_DISCONNECTED = "ACTION_GATT_DISCONNECTED";
public final static String ACTION_GATT_SERVICES_DISCOVERED = "ACTION_GATT_SERVICES_DISCOVERED";

// Well-known SPP-style UUID used for the secure RFCOMM channel.
private static final UUID MY_UUID_SECURE = UUID.fromString("9c86c750-870d-11e3-baa7-0800200c9a66");

// Human-readable labels indexed by GATT status/state/service-type codes.
public static String[] gattStatus = {"Success", "Failure"};
public static String[] gattState = {"Disconnected", "Connecting", "Connected", "Disconnecting"};
public static String[] gattServiceType = {"Primary", "Secondary"};
//00001801-0000-1000-8000-00805f9b34fb
// bluetoothle callback
private final BluetoothGattCallback mGattCallback = new BluetoothGattCallback() {
@Override
public void onConnectionStateChange(BluetoothGatt gatt, int status, int newState) {
    // Called by the Android BLE stack whenever the GATT link state changes.
    Log.d(LOG_TAG, "BluetoothLe onConnectionStateChange: "+status);

    if(newState == BluetoothProfile.STATE_CONNECTED) {
        Log.d(LOG_TAG, "BluetoothLe Connected to GATT: status:"+status+", state: "+gattState[newState]);
        isConnected = true;
        mConnectionState = STATE_CONNECTED;
        broadcastUpdate(ACTION_GATT_CONNECTED);

        // Notify the bound client, if any, via its Handler.
        if(mHandler != null) {
            Message msg = mHandler.obtainMessage();
            Bundle b = new Bundle();
            b.putString("bluetooth", "isConnected");
            msg.setData(b);
            mHandler.sendMessage(msg);
        }

        // attempts to discover services after successful connection.
        Log.d(LOG_TAG, "Starting discoverServices");
        mBluetoothGatt.discoverServices();
    }
    else if(newState == BluetoothProfile.STATE_DISCONNECTED) {
        Log.d(LOG_TAG, "BluetoothLe Disconnected from GATT");
        isConnected = false;
        mConnectionState = STATE_DISCONNECTED;
        broadcastUpdate(ACTION_GATT_DISCONNECTED);

        // Mirror the disconnect to the bound client.
        if(mHandler != null) {
            Message msg = mHandler.obtainMessage();
            Bundle b = new Bundle();
            b.putString("bluetooth", "isDisconnected");
            msg.setData(b);
            mHandler.sendMessage(msg);
        }
    }
}
@Override
public void onServicesDiscovered(BluetoothGatt gatt, int status) {
Log.d(LOG_TAG, "onServicesDiscovered: "+status);
if(status == BluetoothGatt.GATT_SUCCESS) {
// loops through available GATT Services.
for(BluetoothGattService gattService : gatt.getServices()) {
String uuid = gattService.getUuid().toString();
String type = gattServiceType[gattService.getType()];
Log.d(LOG_TAG, "onServicesDiscovered type: "+type);
Log.d(LOG_TAG, "onServicesDiscovered uuid: "+uuid);
//Log.d(LOG_TAG, "onServicesDiscovered: getCharacteristic: "+mWriteCharacteristic);
for(BluetoothGattCharacteristic gattCharacteristic : gattService.getCharacteristics()) {
String cUuid = gattCharacteristic.getUuid().toString();
int cInstanceId = gattCharacteristic.getInstanceId();
int cPermissions = gattCharacteristic.getPermissions();
int cProperties = gattCharacteristic.getProperties();
byte[] cValue = gattCharacteristic.getValue();
int cWriteType = gattCharacteristic.getWriteType();
Log.d(LOG_TAG, "onServicesDiscovered cUuid: "+cUuid);
Log.d(LOG_TAG, "onServicesDiscovered cInstanceId: "+cInstanceId);
Log.d(LOG_TAG, "onServicesDiscovered cPermissions: "+cPermissions);
Log.d(LOG_TAG, "onServicesDiscovered cProperties: "+cProperties);
Log.d(LOG_TAG, "onServicesDiscovered cValue: "+cValue);
Log.d(LOG_TAG, "onServicesDiscovered cWriteType: "+cWriteType);
}
}
Log.d(LOG_TAG, "BluetoothLe Service discovered: "+status);
broadcastUpdate(ACTION_GATT_SERVICES_DISCOVERED);
}
else {
Log.d(LOG_TAG, "BluetoothLe onServicesDiscovered received: "+status);
}
}
@Override
public void onCharacteristicRead(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic, int status) {
if(status == BluetoothGatt.GATT_SUCCESS) {
Log.d(LOG_TAG, "BluetoothLe onCharacteristicRead received: "+status);
broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
}
}
@Override
public void onCharacteristicChanged(BluetoothGatt gatt, BluetoothGattCharacteristic characteristic) {
Log.d(LOG_TAG, "BluetoothLe onCharacteristicChanged received: "+characteristic);
broadcastUpdate(ACTION_DATA_AVAILABLE, characteristic);
}
};
/** Broadcasts a bare Intent carrying only the given action string. */
private void broadcastUpdate(final String action) {
    sendBroadcast(new Intent(action));
}
/**
 * Broadcasts {@code action} with the characteristic's current value attached
 * under EXTRA_DATA as "raw-string\nHEX HEX ...". Sends an empty Intent when
 * the characteristic carries no data.
 */
private void broadcastUpdate(final String action, final BluetoothGattCharacteristic characteristic) {
    Log.d(LOG_TAG, "BluetoothLe broadcastUpdate received: "+characteristic);
    final Intent intent = new Intent(action);
    // for all other profiles, writes the data formatted in HEX.
    final byte[] data = characteristic.getValue();
    if(data != null && data.length > 0) {
        // Each byte renders as two hex digits plus a trailing space, so presize
        // to 3 chars per byte (was data.length, forcing immediate regrowth).
        final StringBuilder stringBuilder = new StringBuilder(data.length * 3);
        for(byte byteChar : data) {
            stringBuilder.append(String.format("%02X ", byteChar));
        }
        // NOTE(review): new String(data) decodes with the platform default
        // charset — confirm payloads are UTF-8 text.
        intent.putExtra(EXTRA_DATA, new String(data) + "\n" + stringBuilder.toString());
    }
    sendBroadcast(intent);
}
// Binder that exposes this service instance to in-process bound clients.
public class LocalBinder extends Binder {
BluetoothLeService getService() {
Log.d(LOG_TAG, "getService");
return BluetoothLeService.this;
}
}
@Override
public IBinder onBind(Intent intent) {
// Hands out the LocalBinder so clients can fetch this service instance.
Log.d(LOG_TAG, "BluetoothLe onBind.");
return mBinder;
}
@Override
public boolean onUnbind(Intent intent) {
// Last client detached: stop the service, tear down the GATT/RFCOMM links,
// and drop the UI handler so no further messages are posted to it.
stopSelf();
close();
Log.d(LOG_TAG, "unbind.");
mHandler = null;
return super.onUnbind(intent);
}
// Binder instance handed to clients in onBind().
private final IBinder mBinder = new LocalBinder();
/**
 * Lazily acquires the BluetoothManager/BluetoothAdapter, resets the scanned
 * device set, and records whether Bluetooth is currently enabled.
 *
 * @return false when either system service is unavailable, true otherwise.
 */
public boolean initialize() {
    Log.d(LOG_TAG, "BLE Initialize.");
    if(mBluetoothManager == null) {
        Log.d(LOG_TAG, "Initialize BluetoothManager.");
        mBluetoothManager = (BluetoothManager)getSystemService(Context.BLUETOOTH_SERVICE);
        if(mBluetoothManager == null) {
            Log.e(LOG_TAG, "Unable to initialize BluetoothManager.");
            return false;
        }
    }
    scannedDevices = new LinkedHashSet<BluetoothDevice>();
    mBluetoothAdapter = mBluetoothManager.getAdapter();
    if(mBluetoothAdapter == null) {
        Log.e(LOG_TAG, "Unable to obtain a BluetoothAdapter.");
        return false;
    }
    // Record current adapter state; a single query instead of an if/else pair.
    isEnabled = mBluetoothAdapter.isEnabled();
    if(!isEnabled) {
        Log.d(LOG_TAG, "Bluetooth is not enabled.");
    }
    return true;
}
/**
 * Connects to the GATT server at the given MAC address. Re-uses the existing
 * BluetoothGatt when reconnecting to the same device; otherwise opens a new
 * auto-connecting GATT link.
 *
 * @return true when a connection attempt was started, false otherwise.
 */
public boolean connect(final String address) {
Log.d(LOG_TAG, "BLE connect: "+address);
if(mBluetoothAdapter == null || address == null) {
Log.d(LOG_TAG, "BluetoothAdapter not initialized or unspecified address.");
return false;
}
// Re-use the previously created GATT object when reconnecting to the same device.
if(mBluetoothDeviceAddress != null && address.equals(mBluetoothDeviceAddress) && mBluetoothGatt != null) {
Log.d(LOG_TAG, "Trying to use an existing mBluetoothGatt for connection.");
if(mBluetoothGatt.connect()) {
mConnectionState = STATE_CONNECTING;
// Immediately forces a fresh connect cycle on top of the cached GATT.
forceConnect();
return true;
}
else {
return false;
}
}
final BluetoothDevice device = mBluetoothAdapter.getRemoteDevice(address);
if (device == null) {
Log.d(LOG_TAG, "Device not found. Unable to connect.");
return false;
}
// auto connect to the device (autoConnect=true: the stack retries in the background)
mBluetoothGatt = device.connectGatt(this, true, mGattCallback);
Log.d(LOG_TAG, "Trying to create a new connection to: "+address);
mBluetoothDeviceAddress = address;
mConnectionState = STATE_CONNECTING;
return true;
}
/** Asks the stack to drop the GATT link; result arrives via onConnectionStateChange. */
public void disconnect() {
Log.d(LOG_TAG, "BLE disconnect");
if(mBluetoothAdapter == null || mBluetoothGatt == null) {
Log.d(LOG_TAG, "BluetoothAdapter not initialized");
return;
}
mBluetoothGatt.disconnect();
}
/**
 * Tears down the current GATT link and immediately re-connects to the last
 * known device address. Only meaningful after a previous connect().
 */
public void forceConnect() {
if(mBluetoothDeviceAddress != null) {
Log.d(LOG_TAG, "Trying to force connection: "+mBluetoothDeviceAddress);
mConnectionState = STATE_FORCE;
mBluetoothGatt.disconnect();
final BluetoothDevice device = mBluetoothAdapter.getRemoteDevice(mBluetoothDeviceAddress);
mBluetoothGatt = device.connectGatt(this, true, mGattCallback);
}
else {
Log.d(LOG_TAG, "Force connect called without previous connection");
}
}
/** Releases the GATT handle and shuts down the RFCOMM connect thread, if any. */
public void close() {
Log.d(LOG_TAG, "BLE close");
if(mBluetoothGatt != null) {
mBluetoothGatt.close();
mBluetoothGatt = null;
}
/*if(onconnect != null) {
onconnect.close();
onconnect = null;
}*/
// ConnectThread.close() also shuts down the reader thread (onconnect).
if(connect != null) {
connect.close();
connect = null;
}
}
/** Issues an asynchronous GATT read; the result arrives via onCharacteristicRead. */
public void readCharacteristic(BluetoothGattCharacteristic characteristic) {
    if(mBluetoothAdapter != null && mBluetoothGatt != null) {
        mBluetoothGatt.readCharacteristic(characteristic);
    }
    else {
        Log.w(LOG_TAG, "BluetoothAdapter not initialized");
    }
}
/** Enables or disables local notifications for the given characteristic. */
public void setCharacteristicNotification(BluetoothGattCharacteristic characteristic, boolean enabled) {
    if(mBluetoothAdapter != null && mBluetoothGatt != null) {
        mBluetoothGatt.setCharacteristicNotification(characteristic, enabled);
    }
    else {
        Log.w(LOG_TAG, "BluetoothAdapter not initialized");
    }
}
/** Issues an asynchronous GATT write of the characteristic's current value. */
public void writeCharacteristic(BluetoothGattCharacteristic characteristic) {
    if(mBluetoothAdapter != null && mBluetoothGatt != null) {
        mBluetoothGatt.writeCharacteristic(characteristic);
    }
    else {
        Log.w(LOG_TAG, "BluetoothAdapter not initialized");
    }
}
/**
 * Returns the services discovered on the connected device, or null when no
 * GATT connection exists.
 */
public List<BluetoothGattService> getSupportedGattServices() {
    if(mBluetoothGatt != null) {
        Log.d(LOG_TAG, "getSupportedGattServices");
        return mBluetoothGatt.getServices();
    }
    Log.w(LOG_TAG, "getSupportedGattServices mBluetoothGatt not initialized");
    return null;
}
/* Helper Functions */
/** Installs the UI Handler that receives status/data messages from this service. */
public void setHandler(Handler mHndlr) {
Log.d(LOG_TAG, "Setting handler");
mHandler = mHndlr;
}
/**
 * Starts a classic RFCOMM connection attempt on a background thread.
 * Requires Bluetooth to be enabled and a target device selected via setDevice().
 */
public void connectRfcomm() {
    if(!mBluetoothAdapter.isEnabled()) {
        Log.d(LOG_TAG, "connect called when Bluetooth is not enabled.");
        return;
    }
    if(mBluetoothDevice == null) {
        Log.d(LOG_TAG, "connectRfcomm called mBluetoothDevice is null");
        return;
    }
    connect = new ConnectThread();
    connect.start();
    Log.d(LOG_TAG, "Connecting to Rfcomm");
}
/** Closes the RFCOMM connection when one exists; logs and returns otherwise. */
public void disconnectRfcomm() {
    if(mBluetoothDevice == null || !isConnected) {
        Log.d(LOG_TAG, "disconnectRfcomm called while not connected");
        return;
    }
    connect.close();
    Log.d(LOG_TAG, "closing connectRmcomm");
}
/** Kicks off the asynchronous adapter-enable thread unless BT is already on. */
public void enableBluetooth() {
    if(mBluetoothAdapter.isEnabled()) {
        Log.d(LOG_TAG, "enableBluetooth called when BT enabled");
        return;
    }
    eBluetooth = new EnableBluetoothThread();
    eBluetooth.start();
}
/** Turns the adapter off and records the state locally. */
public void disableBluetooth() {
mBluetoothAdapter.disable();
isEnabled = false;
}
/** Records the target device MAC and resolves it to a BluetoothDevice. */
public void setDevice(String devMac) {
Log.d(LOG_TAG, "setDevice: " + devMac);
mDeviceMac = devMac;
setBluetoothDevice();
}
/**
 * Resolves mDeviceMac against the known paired and scanned device sets and
 * caches the matching BluetoothDevice in mBluetoothDevice. A scanned match
 * wins over a paired one because it is applied last.
 */
public static void setBluetoothDevice() {
    Log.d(LOG_TAG, "setBluetoothDevice: " + mDeviceMac);
    // loop through paired devices first
    if(pairedDevices != null) {
        Log.d(LOG_TAG, "setting from paired devices");
        for(BluetoothDevice device : pairedDevices) {
            if(device.getAddress().equals(mDeviceMac)) {
                Log.d(LOG_TAG, "Set paired device: "+device.getName()+":"+device.getAddress());
                mBluetoothDevice = device;
            }
        }
    }
    // Guard against scannedDevices not existing yet: it is only created in
    // initialize(), so an unguarded size() call could NPE.
    if(scannedDevices != null && scannedDevices.size() > 0) {
        Log.d(LOG_TAG, "setting from scanned devices");
        for(BluetoothDevice device : scannedDevices) {
            if(device.getAddress().equals(mDeviceMac)) {
                Log.d(LOG_TAG, "Set scanned device: "+device.getName()+":"+device.getAddress());
                mBluetoothDevice = device;
            }
        }
    }
    if(pairedDevices == null && (scannedDevices == null || scannedDevices.size() <= 0)) {
        Log.d(LOG_TAG, "setBluetoothDevice called with empty no paired or scanned devices");
    }
}
/**
 * Rebuilds the device name/address arrays from the bonded and scanned device
 * sets and ships them to the UI via the Handler. No-op when Bluetooth is off.
 */
public void setEntries() {
    Log.d(LOG_TAG, "setEntries");
    if(isEnabled) {
        List<CharSequence> entries = new ArrayList<CharSequence>();
        List<CharSequence> values = new ArrayList<CharSequence>();
        pairedDevices = mBluetoothAdapter.getBondedDevices();
        // loop through paired devices
        if(pairedDevices.size() > 0) {
            for(BluetoothDevice device : pairedDevices) {
                String deviceName = device.getName();
                String deviceAddr = device.getAddress();
                Log.d(LOG_TAG, "Paired Device: "+deviceName+":"+deviceAddr);
                if(deviceName != null && !deviceName.isEmpty() && deviceAddr != null && !deviceAddr.isEmpty()) {
                    entries.add(deviceName);
                    values.add(deviceAddr);
                }
            }
        }
        else {
            Log.d(LOG_TAG, "No pairedDevices");
        }
        // loop through scanned devices; guard against the set not having been
        // created yet (it is only allocated in initialize()).
        if(scannedDevices != null && scannedDevices.size() > 0) {
            for(BluetoothDevice device : scannedDevices) {
                // make sure we dont add duplicates
                if(!entries.contains(device.getName())) {
                    String deviceName = device.getName();
                    String deviceAddr = device.getAddress();
                    Log.d(LOG_TAG, "Scanned Device: "+deviceName+":"+deviceAddr);
                    if(deviceName != null && !deviceName.isEmpty() && deviceAddr != null && !deviceAddr.isEmpty()) {
                        entries.add(deviceName);
                        values.add(deviceAddr);
                    }
                }
            }
        }
        else {
            Log.d(LOG_TAG, "No scannedDevices");
        }
        pairedEntries = entries.toArray(new CharSequence[entries.size()]);
        pairedEntryValues = values.toArray(new CharSequence[values.size()]);
        // Null-guard mHandler like every other call site: it is cleared in
        // onUnbind(), so an unguarded use here could NPE after unbind.
        if(mHandler != null) {
            Message msg = mHandler.obtainMessage();
            Bundle b = new Bundle();
            b.putString("bluetoothDevicesList", "bluetoothDevicesList");
            b.putCharSequenceArray("bluetoothEntries", pairedEntries);
            b.putCharSequenceArray("bluetoothEntryValues", pairedEntryValues);
            msg.setData(b);
            mHandler.sendMessage(msg);
        }
    }
    else {
        Log.d(LOG_TAG, "setEntries called without BT enabled");
    }
}
/**
 * Returns the device-name list last built by setEntries(), or a one-element
 * "No Devices" placeholder when nothing has been discovered yet.
 */
public static CharSequence[] getEntries() {
    if(pairedEntries == null || pairedEntries.length == 0) {
        return new CharSequence[] {"No Devices"};
    }
    return pairedEntries;
}
/**
 * Returns the device-address list last built by setEntries(), or a one-element
 * "None" placeholder when nothing has been discovered yet.
 */
public static CharSequence[] getEntryValues() {
    if(pairedEntryValues == null || pairedEntryValues.length == 0) {
        return new CharSequence[] {"None"};
    }
    return pairedEntryValues;
}
/**
 * Runs a classic discovery scan for SCAN_PERIOD ms. Found devices are
 * reported through mScanReceiver; after the period a delayed Runnable cancels
 * discovery, posts "scanStopped", and refreshes the entry arrays. Requires
 * Bluetooth to be enabled and a Handler to be installed.
 */
public void scanLeDevice() {
Log.d(LOG_TAG, "scanLeDevice");
if(isEnabled) {
if(mHandler != null) {
if(!isScanning) {
/*if(Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
Log.d(LOG_TAG, "scanning with startLeScan");
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
isScanning = false;
mBluetoothAdapter.stopLeScan(mLeScanCallback);
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "scanStopped");
msg.setData(b);
mHandler.sendMessage(msg);
setEntries();
}
}, SCAN_PERIOD);
isScanning = true;
mBluetoothAdapter.startLeScan(mLeScanCallback);
}
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
Log.d(LOG_TAG, "scanning with startScan"+ mBluetoothAdapter);
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
isScanning = false;
BluetoothLeScanner mBluetoothLeScanner = mBluetoothAdapter.getBluetoothLeScanner();
mBluetoothLeScanner.stopScan(mScanCallback);
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "scanStopped");
msg.setData(b);
mHandler.sendMessage(msg);
setEntries();
}
}, SCAN_PERIOD);
isScanning = true;
BluetoothLeScanner mBluetoothLeScanner = mBluetoothAdapter.getBluetoothLeScanner();
mBluetoothLeScanner.startScan(mScanCallback);
}*/
// Stops scanning after a pre-defined scan period.
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
isScanning = false;
mBluetoothAdapter.cancelDiscovery();
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "scanStopped");
msg.setData(b);
mHandler.sendMessage(msg);
setEntries();
}
}, SCAN_PERIOD);
Log.d(LOG_TAG, "scanLeDevice starting scan for: "+SCAN_PERIOD+"ms");
// NOTE(review): mScanReceiver is registered here but not unregistered in
// this method — confirm it is unregistered elsewhere (e.g. onDestroy).
IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
registerReceiver(mScanReceiver, filter);
Log.d(LOG_TAG, "starting discovery");
isScanning = true;
mBluetoothAdapter.startDiscovery();
}
else {
Log.d(LOG_TAG, "scanLeDevice currently scanning");
}
}
else{
Log.d(LOG_TAG, "scanLeDevice no mHandler");
}
}
else {
Log.d(LOG_TAG, "scanLeDevice called without BT enabled");
}
}
/** Sends bytes over the RFCOMM link when connected; logs and drops them otherwise. */
public void write(byte[] bytes) {
    if(onconnect == null) {
        Log.d(LOG_TAG, "write called without BT connected");
        return;
    }
    Log.d(LOG_TAG, "Writting bytes");
    onconnect.write(bytes);
}
/*private LeScanCallback mLeScanCallback = new LeScanCallback() {
@Override
public void onLeScan(BluetoothDevice device, int rssi, byte[] scanRecord) {
if(scannedDevices.add(device)) {
Log.d(LOG_TAG, device.getName()+" : "+device.getAddress()+" : "+device.getType()+" : "+device.getBondState());
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetoothDevice", device.getName()+","+device.getAddress());
msg.setData(b);
mHandler.sendMessage(msg);
setEntries();
}
}
};
@SuppressLint("NewApi")
private ScanCallback mScanCallback = new ScanCallback() {
@Override
public void onScanResult(int callbackType, ScanResult result) {
BluetoothDevice device = result.getDevice();
if(scannedDevices.add(device)) {
Log.d(LOG_TAG, device.getName()+" : "+device.getAddress()+" : "+device.getType()+" : "+device.getBondState());
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetoothDevice", device.getName()+","+device.getAddress());
msg.setData(b);
mHandler.sendMessage(msg);
setEntries();
}
}
};*/
// Receives ACTION_FOUND during discovery; forwards each newly seen device to
// the UI Handler and refreshes the entry arrays.
private final BroadcastReceiver mScanReceiver = new BroadcastReceiver() {
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if(BluetoothDevice.ACTION_FOUND.equals(action)) {
BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
// Set.add() returns false for duplicates, so each device is reported once.
if(scannedDevices.add(device)) {
Log.d(LOG_TAG, device.getName()+" : "+device.getAddress()+" : "+device.getType()+" : "+device.getBondState());
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetoothDevice", device.getName()+","+device.getAddress());
msg.setData(b);
mHandler.sendMessage(msg);
setEntries();
}
}
}
};
// Enables the adapter and polls (100ms interval, 5s budget) until it reports
// enabled, then notifies the UI of success or timeout.
private class EnableBluetoothThread extends Thread {
public void run() {
boolean bluetoothEnabled = true;
long timeStart = Calendar.getInstance().getTimeInMillis();
Log.d(LOG_TAG, "Enabling Bluetooth: "+timeStart);
mBluetoothAdapter.enable();
while(!mBluetoothAdapter.isEnabled()) {
try
{
// Give up after 5 seconds of polling.
long timeDiff = Calendar.getInstance().getTimeInMillis() - timeStart;
if(timeDiff >= 5000) {
bluetoothEnabled = false;
break;
}
Thread.sleep(100L);
}
catch (InterruptedException ie)
{
// unexpected interruption while enabling bluetooth
Thread.currentThread().interrupt(); // restore interrupted flag
return;
}
}
// NOTE(review): unlike other call sites, mHandler is used below without a
// null check — could NPE if the client unbinds mid-enable; confirm.
if(bluetoothEnabled) {
isEnabled = true;
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "isEnabled");
msg.setData(b);
mHandler.sendMessage(msg);
Log.d(LOG_TAG, "Enabled");
}
else {
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "isEnabledFailed");
msg.setData(b);
mHandler.sendMessage(msg);
Log.d(LOG_TAG, "Enabling Bluetooth timed out");
}
}
}
// Opens the secure RFCOMM socket and performs the blocking connect; on
// success it spawns onConnectThread to pump the socket streams.
private class ConnectThread extends Thread {
public ConnectThread() {
Log.d(LOG_TAG, "Initializing ConnectThread");
// get a BluetoothSocket to connect with the given BluetoothDevice
try {
Log.d(LOG_TAG, "try ConnectThread: "+mBluetoothDevice.getName()+" with UUID: "+MY_UUID_SECURE.toString());
mBluetoothSocket = mBluetoothDevice.createRfcommSocketToServiceRecord(MY_UUID_SECURE);
}
catch(Exception e) {
Log.e(LOG_TAG, "Error: mBluetoothDevice.createRfcommSocketToServiceRecord", e);
}
}
public void run() {
Log.d(LOG_TAG, "Running ConnectThread");
// Cancel discovery because it will slow down the connection
mBluetoothAdapter.cancelDiscovery();
try {
// connect the device through the socket. This will block until it succeeds or throws an exception
mBluetoothSocket.connect();
isConnected = true;
if(mHandler != null) {
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "isConnectedRfcomm");
msg.setData(b);
mHandler.sendMessage(msg);
}
}
catch(IOException connectException) {
Log.e(LOG_TAG, "Error: mBluetoothSocket.connect()", connectException);
// Connect failed: close the socket and report failure to the UI.
try {
mBluetoothSocket.close();
if(mHandler != null) {
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "isConnectedRfcommFailed");
msg.setData(b);
mHandler.sendMessage(msg);
}
}
catch (IOException closeException) {
Log.e(LOG_TAG, "Error: mBluetoothSocket.close()", closeException);
}
return;
}
Log.d(LOG_TAG, "ConnectThread connected");
// Do work to manage the connection
onconnect = new onConnectThread();
onconnect.start();
}
// Shuts down the reader thread and reports the disconnect; a no-op until a
// connection succeeded (onconnect remains null before that).
public void close() {
if(onconnect != null) {
isThreadRunning = false;
onconnect.close();
isConnected = false;
Log.d(LOG_TAG, "Bluetooth rfComm Disconnected");
if(mHandler != null) {
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetooth", "isDisconnectedRfComm");
msg.setData(b);
mHandler.sendMessage(msg);
}
}
}
}
// Pumps the RFCOMM streams: reads incoming bytes into Handler messages and
// exposes write() for outgoing data. Runs until isThreadRunning is cleared.
public class onConnectThread extends Thread {
public onConnectThread() {
Log.d(LOG_TAG, "Initializing onConnectThread");
// get the input and output streams
try {
mInStream = mBluetoothSocket.getInputStream();
mOutStream = mBluetoothSocket.getOutputStream();
isThreadRunning = true;
}
catch(IOException e) {
close();
Log.e(LOG_TAG, "Error: mBluetoothSocket.getInputStream()/socket.getOutputStream()", e);
}
}
public void run() {
Log.d(LOG_TAG, "Running onConnectThread");
ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
// listen to the InputStream
while(isThreadRunning) {
try {
// NOTE(review): read() returns -1 at EOF; byteArray.write(buffer, 0, -1)
// would then throw IndexOutOfBoundsException, which this catch does not
// handle — confirm EOF always surfaces as an IOException on this socket.
int bytes = mInStream.read(buffer);
byteArray.write(buffer, 0, bytes);
Log.d(LOG_TAG, "Received: "+byteArray);
try {
Message msg = mHandler.obtainMessage();
Bundle b = new Bundle();
b.putString("bluetoothData", "bluetoothData");
b.putByteArray("data", byteArray.toByteArray());
msg.setData(b);
mHandler.sendMessage(msg);
}
catch(Exception e) {
Log.e(LOG_TAG, "Error: mHandler.obtainMessage()", e);
}
byteArray.reset();
}
catch (IOException e) {
if(isThreadRunning) {
Log.e(LOG_TAG, "Error: mInStream.read()", e);
// NOTE(review): close() and onconnect.close() refer to the same
// instance here, so the streams get closed twice.
close();
onconnect.close();
if(connect != null) {
connect.close();
connect = null;
}
}
}
}
}
// Writes the bytes to the socket's output stream and flushes immediately.
public void write(byte[] bytes) {
try {
ByteArrayOutputStream byteArray = new ByteArrayOutputStream();
byteArray.write(bytes, 0, bytes.length);
Log.d(LOG_TAG, "Sending: "+byteArray);
mOutStream.write(bytes);
mOutStream.flush();
}
catch(IOException e) {
Log.e(LOG_TAG, "Error: mOutStream.write()", e);
}
}
// Closes each resource independently so one failure doesn't skip the rest.
public void close() {
try {
mInStream.close();
}
catch(IOException e) {
Log.e(LOG_TAG, "Error: mInStream.close()", e);
}
try {
mOutStream.close();
}
catch(IOException e) {
Log.e(LOG_TAG, "Error: mOutStream.close()", e);
}
try {
mBluetoothSocket.close();
}
catch(IOException e) {
Log.e(LOG_TAG, "Error: mBluetoothSocket.close()", e);
}
}
}
@SuppressWarnings("unused")
private class ServerThread extends Thread {
public ServerThread() {
Log.d(LOG_TAG, "Initializing ServerThread");
try {
Log.d(LOG_TAG, "try ServerThread with UUID: "+MY_UUID_SECURE);
mBluetoothServerSocket = mBluetoothAdapter.listenUsingRfcommWithServiceRecord("SessionManagerSecure", MY_UUID_SECURE);
} catch (IOException e1) {
Log.e(LOG_TAG, "Error listenUsingRfcommWithServiceRecord");
e1.printStackTrace();
}
}
public void run() {
Log.d(LOG_TAG, "Running ServerThread");
try {
mBluetoothServerSocket.accept();
Log.d(LOG_TAG, "mBluetoothServerSocket.accept() success");
} catch (IOException e1) {
Log.e(LOG_TAG, "Error mBluetoothServerSocket.accept()");
e1.printStackTrace();
}
}
public void close() {
try {
mBluetoothServerSocket.close();
}
catch (IOException e) {
Log.e(LOG_TAG, "Error: mmSocket.close()", e);
}
}
}
}
| |
package org.lantern;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.SocketTimeoutException;
import java.net.URI;
import org.junit.Test;
import org.lantern.event.Events;
import org.lantern.event.ProxyConnectionEvent;
import org.lantern.geoip.GeoIpLookupService;
import org.lantern.kscope.ReceivedKScopeAd;
import org.lantern.network.NetworkTracker;
import org.lantern.proxy.DefaultProxyTracker;
import org.lantern.proxy.ProxyHolder;
import org.lantern.proxy.ProxyInfo;
import org.lantern.state.Model;
import org.lantern.stubs.PeerFactoryStub;
import org.littleshoot.util.FiveTuple;
import com.google.common.eventbus.Subscribe;
public class DefaultProxyTrackerTest {
// Wakes any thread wait()ing on this test instance when a proxy connection
// event fires on the event bus.
@Subscribe
public void onProxyConnectionEvent(final ProxyConnectionEvent pce) {
synchronized (this) {
this.notifyAll();
}
}
/**
 * Exercises DefaultProxyTracker end to end: the always-present flashlight
 * fallback proxy, adding a reachable local proxy, failing it, and restoring
 * it when connectivity flaps. Timing-sensitive (real sockets and sleeps).
 */
@Test
public void testDefaultProxyTracker() throws Exception {
Events.register(this);
final Censored censored = new DefaultCensored();
final CountryService countryService = new CountryService(censored);
Model model = new Model(countryService);
//assume that we are connected to the Internet
model.getConnectivity().setInternet(true);
final GeoIpLookupService geoIpLookupService = new GeoIpLookupService();
PeerFactory peerFactory = new PeerFactoryStub();
LanternTrustStore lanternTrustStore = mock(LanternTrustStore.class);
DefaultProxyTracker tracker = new DefaultProxyTracker(model,
peerFactory, lanternTrustStore, new NetworkTracker<String, URI, ReceivedKScopeAd>());
tracker.init();
tracker.start();
//proxy queue initially empty
ProxyHolder proxy = tracker.firstConnectedTcpProxy();
assertNotNull(proxy);
assertTrue("There should always be a flashlight proxy available", proxy.getJid().toString().contains("flashlight"));
final int port1 = 55077;
final int port2 = 55078;
// Start two tiny accept-only servers on localhost as stand-in proxies.
Miniproxy miniproxy1 = new Miniproxy(port1);
new Thread(miniproxy1).start();
LanternUtils.waitForServer(miniproxy1.port, 4000);
Miniproxy miniproxy2 = new Miniproxy(port2);
new Thread(miniproxy2).start();
LanternUtils.waitForServer(miniproxy2.port, 4000);
InetAddress localhost = org.littleshoot.proxy.impl.NetworkUtils.getLocalHost();
final ProxyInfo info = new ProxyInfo(new URI("proxy1@example.com"), localhost.getHostAddress(), port1, 1000);
assertNotNull(info.fiveTuple());
tracker.addProxy(info);
// Leave time for proxy connectivity check to happen
Thread.sleep(1000);
proxy = waitForProxy(tracker);
assertNotNull(proxy);
assertEquals(port1, getProxyPort(proxy));
//now let's force the proxy to fail.
//miniproxy1.pause();
proxy = tracker.firstConnectedTcpProxy();
// first, we need to clear out the old proxy from the list, by having it
// fail.
tracker.onCouldNotConnect(proxy);
//now wait for the miniproxy to stop accepting.
Thread.sleep(10);
proxy = tracker.firstConnectedTcpProxy();
assertNotNull(proxy);
assertTrue("The remaining proxy should be a flashlight", proxy.getJid().toString().contains("flashlight"));
// now bring miniproxy1 back up
// miniproxy1.unpause();
Thread.sleep(10);
//let's turn off internet, which will restore the dead proxy
model.getConnectivity().setInternet(false);
//Events.eventBus().post(new ConnectivityChangedEvent(true));
tracker.init();
Thread.sleep(10);
proxy = tracker.firstConnectedTcpProxy();
assertNotNull("Recently deceased proxy not restored", proxy);
Thread.sleep(10);
model.getConnectivity().setInternet(true);
//Events.eventBus().post(new ConnectivityChangedEvent(true));
tracker.init();
tracker.firstConnectedTcpProxy();
Thread.sleep(10);
// with multiple proxies, we get a different proxy for each getProxy()
// call
tracker.addProxy(new ProxyInfo(new URI("proxy2@example.com"), localhost.getHostAddress(), port2, 1000));
/*
Thread.sleep(50);
ProxyHolder proxy1 = waitForProxy(tracker);
System.err.println(proxy1);
// Simulate a successful connection to proxy1 to bump its socket count
proxy1.connectionSucceeded();
ProxyHolder proxy2 = waitForProxy(tracker);
System.err.println(proxy2);
assertNotNull(proxy1);
assertNotNull(proxy2);
assertTrue(proxy1 != proxy2);
int port1 = getProxyPort(proxy1);
int port2 = getProxyPort(proxy2);
assertTrue((port1 == 55021 && port2 == 55022) || (port1 == 55022 && port2 == 55021));
*/
}
// Polls the tracker (1000 tries x 10ms, ~10s) for a connected TCP proxy;
// returns null on timeout.
private ProxyHolder waitForProxy(DefaultProxyTracker tracker)
throws Exception {
int tries = 0;
while (tries < 1000) {
final ProxyHolder proxy = tracker.firstConnectedTcpProxy();
if (proxy != null) {
return proxy;
}
Thread.sleep(10);
tries ++;
//return tracker.firstConnectedTcpProxy();
}
return null;
}
// Extracts the remote port a proxy holder points at.
private int getProxyPort(ProxyHolder proxy) {
final FiveTuple ft = proxy.getFiveTuple();
final InetSocketAddress remote = ft.getRemote();
return remote.getPort();
}
// Minimal accept-loop server used as a stand-in proxy endpoint. pause()
// makes it stop accepting without closing the listening socket.
static class Miniproxy implements Runnable {
public volatile boolean done = false;
private final int port;
private boolean paused;
public Miniproxy(int port) {
this.port = port;
}
public void unpause() {
paused = false;
}
public void pause() {
paused = true;
}
@Override
public void run() {
ServerSocket sock;
try {
//InetAddress lh = org.littleshoot.proxy.impl.NetworkUtils.getLocalHost();
sock = new ServerSocket(port);
// Short accept timeout so the loop can observe 'done' and 'paused'.
sock.setSoTimeout(1);
while (!done) {
try {
if (!paused) {
sock.accept();
}
} catch (SocketTimeoutException e) {
// no connections; just loop
}
try {
Thread.sleep(0);
} catch (InterruptedException e) {
// NOTE(review): interrupt is swallowed and the flag is not restored;
// tolerable in this throwaway test loop.
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
| |
package org.apache.solr.handler;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.util.EntityUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.core.ConfigOverlay;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.util.SimplePostTool;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.StringReader;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
public class TestBlobHandler extends AbstractFullDistribZkTestBase {
// Class logger (SLF4J).
static final Logger log = LoggerFactory.getLogger(TestBlobHandler.class);
/**
 * Creates the ".system" blob collection, checks the /blob handler is wired,
 * posts two blob versions and verifies listing plus filestream retrieval.
 */
private void doBlobHandlerTest() throws Exception {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
try {
CollectionAdminResponse response1;
CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
createCollectionRequest.setCollectionName(".system");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setReplicationFactor(2);
response1 = createCollectionRequest.process(client);
assertEquals(0, response1.getStatus());
assertTrue(response1.isSuccess());
// Find the leader of the single .system shard and talk to it directly.
DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system");
Replica replica = sysColl.getActiveSlicesMap().values().iterator().next().getLeader();
String baseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP);
String url = baseUrl + "/.system/config/requestHandler";
Map map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
assertNotNull(map);
// The /blob handler must be registered on the .system collection.
assertEquals("solr.BlobHandler", getObjectByPath(map, true, Arrays.asList(
"config",
"requestHandler",
"/blob",
"class")));
// Two distinguishable payloads (1KB and 2KB) for blob versions 1 and 2.
byte[] bytarr = new byte[1024];
for (int i = 0; i < bytarr.length; i++) bytarr[i]= (byte) (i % 127);
byte[] bytarr2 = new byte[2048];
for (int i = 0; i < bytarr2.length; i++) bytarr2[i]= (byte) (i % 127);
postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr), 1);
postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr2), 2);
url = baseUrl + "/.system/blob/test/1";
map = TestSolrConfigHandlerConcurrent.getAsMap(url,cloudClient);
List l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
assertNotNull(""+map, l);
assertTrue("" + map, l.size() > 0);
map = (Map) l.get(0);
assertEquals(""+bytarr.length,String.valueOf(map.get("size")));
// Unversioned fetch returns the latest blob; /1 returns the first version.
compareInputAndOutput(baseUrl+"/.system/blob/test?wt=filestream", bytarr2);
compareInputAndOutput(baseUrl+"/.system/blob/test/1?wt=filestream", bytarr);
} finally {
client.shutdown();
}
}
/**
 * Creates the single-shard, two-replica ".system" collection used for blob
 * storage and asserts the admin call succeeded.
 */
public static void createSysColl(SolrClient client) throws SolrServerException, IOException {
    CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
    createCollectionRequest.setCollectionName(".system");
    createCollectionRequest.setNumShards(1);
    createCollectionRequest.setReplicationFactor(2);
    CollectionAdminResponse response = createCollectionRequest.process(client);
    assertEquals(0, response.getStatus());
    assertTrue(response.isSuccess());
}
@Override
public void tearDown() throws Exception {
super.tearDown();
// Clear cluster-related system properties set during the test run.
System.clearProperty("numShards");
System.clearProperty("zkHost");
// insurance: restore the default commit-on-close behavior for later tests
DirectUpdateHandler2.commitOnClose = true;
}
/**
 * Posts {@code bytes} to the blob handler, then polls the /blob listing until
 * it reports {@code count} documents and the first doc has the expected size.
 * Fails the test if the blob does not appear within the polling window.
 */
public static void postAndCheck(CloudSolrClient cloudClient, String baseUrl, ByteBuffer bytes, int count) throws Exception {
postData(cloudClient, baseUrl, bytes);
String url;
Map map = null;
List l;
long start = System.currentTimeMillis();
int i=0;
for(;i<150;i++) {// up to 150 polls at 100ms each (~15s)
url = baseUrl + "/.system/blob/test";
map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
String numFound = String.valueOf(ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "numFound")));
if(!(""+count).equals(numFound)) {
Thread.sleep(100);
continue;
}
// Expected doc count reached; verify the first doc's reported size.
l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
assertNotNull(l);
map = (Map) l.get(0);
assertEquals("" + bytes.limit(), String.valueOf(map.get("size")));
return;
}
fail(MessageFormat.format("Could not successfully add blob after {0} attempts. Expecting {1} items. time elapsed {2} output for url is {3}",
i,count, System.currentTimeMillis()-start, getAsString(map)));
}
/** Serializes the given map to JSON and returns it as a UTF-8 string (for failure messages). */
public static String getAsString(Map map) {
    byte[] json = ZkStateReader.toJSON(map);
    return new String(json, StandardCharsets.UTF_8);
}
/**
 * Fetches the given URL as a raw filestream and asserts that the returned
 * bytes exactly match {@code bytarr}.
 *
 * @param url    filestream URL to GET
 * @param bytarr expected payload bytes
 * @throws IOException if the HTTP request or the stream read fails
 */
private void compareInputAndOutput(String url, byte[] bytarr) throws IOException {
    HttpClient httpClient = cloudClient.getLbClient().getHttpClient();
    HttpGet httpGet = new HttpGet(url);
    try {
        // Execute and read inside try so the connection is released even if
        // the request or the stream read throws (the original leaked it).
        HttpResponse response = httpClient.execute(httpGet);
        ByteBuffer b = SimplePostTool.inputStreamToByteArray(response.getEntity().getContent());
        // JUnit convention: expected value first, actual second.
        assertEquals(bytarr.length, b.limit());
        for (int i = 0; i < bytarr.length; i++) {
            assertEquals(bytarr[i], b.get(i));
        }
    } finally {
        httpGet.releaseConnection();
    }
}
/**
 * POSTs the given bytes to the blob handler as an octet-stream and asserts
 * that the JSON response does not report an error.
 *
 * @param cloudClient client whose underlying HttpClient performs the POST
 * @param baseUrl     base URL of a node in the cluster
 * @param bytarr      payload to upload; offset/limit of the buffer are honoured
 * @throws IOException if the HTTP request fails
 */
public static void postData(CloudSolrClient cloudClient, String baseUrl, ByteBuffer bytarr) throws IOException {
    HttpPost httpPost = null;
    HttpEntity entity;
    String response = null;
    try {
        httpPost = new HttpPost(baseUrl + "/.system/blob/test");
        httpPost.setHeader("Content-Type", "application/octet-stream");
        // Use arrayOffset()/limit() rather than assuming the whole backing array.
        httpPost.setEntity(new ByteArrayEntity(bytarr.array(), bytarr.arrayOffset(), bytarr.limit()));
        entity = cloudClient.getLbClient().getHttpClient().execute(httpPost).getEntity();
        try {
            response = EntityUtils.toString(entity, StandardCharsets.UTF_8);
            Map m = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
            assertFalse("Error in posting blob " + getAsString(m), m.containsKey("error"));
        } catch (JSONParser.ParseException e) {
            log.error(response);
            // Include the raw response so the failure is diagnosable from the test report.
            fail("Could not parse blob-post response: " + response);
        }
    } finally {
        // httpPost is still null if the constructor threw; guard against the NPE
        // that the unconditional releaseConnection() in the original could cause.
        if (httpPost != null) {
            httpPost.releaseConnection();
        }
    }
}
/** Entry point invoked by the distributed-test framework; delegates to the blob handler test. */
@Override
public void doTest() throws Exception {
    doBlobHandlerTest();
}
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.replica.replicaisland;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.text.Html;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageView;
/**
 * Main menu screen: shows the Start/Continue, Options and Extras buttons,
 * animates transitions into the other activities, and performs first-run /
 * upgrade housekeeping (control-scheme auto-selection, safe-mode defaults,
 * "what's new" dialogs) based on saved preferences.
 */
public class MainMenuActivity extends Activity {
    // True while a button press is being handled or the activity is paused;
    // guards the click listeners against double activation.
    private boolean mPaused;
    private View mStartButton;
    private View mOptionsButton;
    private View mExtrasButton;
    private View mBackground;
    private View mTicker;
    private Animation mButtonFlickerAnimation;
    private Animation mFadeOutAnimation;
    private Animation mAlternateFadeOutAnimation;
    private Animation mFadeInAnimation;
    // Set in onCreate() so onResume() can run the one-time button slide-in animations.
    private boolean mJustCreated;
    // Human-readable name of the auto-selected control scheme, shown by CONTROL_SETUP_DIALOG.
    private String mSelectedControlsString;
    // Dialog ids dispatched by onCreateDialog().
    private final static int WHATS_NEW_DIALOG = 0;
    private final static int TILT_TO_SCREEN_CONTROLS_DIALOG = 1;
    private final static int CONTROL_SETUP_DIALOG = 2;

    // "Continue" button: fades the whole menu out, then launches the game (AndouKun).
    private View.OnClickListener sContinueButtonListener = new View.OnClickListener() {
        public void onClick(View v) {
            if (!mPaused) {
                Intent i = new Intent(getBaseContext(), AndouKun.class);
                v.startAnimation(mButtonFlickerAnimation);
                // The activity is started only once the background fade-out finishes.
                mFadeOutAnimation.setAnimationListener(new StartActivityAfterAnimation(i));
                mBackground.startAnimation(mFadeOutAnimation);
                mOptionsButton.startAnimation(mAlternateFadeOutAnimation);
                mExtrasButton.startAnimation(mAlternateFadeOutAnimation);
                mTicker.startAnimation(mAlternateFadeOutAnimation);
                mPaused = true;
            }
        }
    };

    // Options button: fades the menu out, then opens the preferences screen.
    private View.OnClickListener sOptionButtonListener = new View.OnClickListener() {
        public void onClick(View v) {
            if (!mPaused) {
                Intent i = new Intent(getBaseContext(), SetPreferencesActivity.class);
                v.startAnimation(mButtonFlickerAnimation);
                mFadeOutAnimation.setAnimationListener(new StartActivityAfterAnimation(i));
                mBackground.startAnimation(mFadeOutAnimation);
                mStartButton.startAnimation(mAlternateFadeOutAnimation);
                mExtrasButton.startAnimation(mAlternateFadeOutAnimation);
                mTicker.startAnimation(mAlternateFadeOutAnimation);
                mPaused = true;
            }
        }
    };

    // Extras button: only the pressed button flickers before the extras menu opens.
    private View.OnClickListener sExtrasButtonListener = new View.OnClickListener() {
        public void onClick(View v) {
            if (!mPaused) {
                Intent i = new Intent(getBaseContext(), ExtrasMenuActivity.class);
                v.startAnimation(mButtonFlickerAnimation);
                mButtonFlickerAnimation.setAnimationListener(new StartActivityAfterAnimation(i));
                mPaused = true;
            }
        }
    };

    // "Start" button (no saved game): opens the difficulty menu with the newGame flag set.
    private View.OnClickListener sStartButtonListener = new View.OnClickListener() {
        public void onClick(View v) {
            if (!mPaused) {
                Intent i = new Intent(getBaseContext(), DifficultyMenuActivity.class);
                i.putExtra("newGame", true);
                v.startAnimation(mButtonFlickerAnimation);
                mButtonFlickerAnimation.setAnimationListener(new StartActivityAfterAnimation(i));
                mPaused = true;
            }
        }
    };

    /**
     * Inflates the menu layout, wires up the option/extras buttons, loads the
     * shared animations, and pre-loads the level tree matching the saved game
     * (linear or standard). The start button listener is assigned in onResume()
     * because it depends on whether a saved game exists.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.mainmenu);
        mPaused = true;
        mStartButton = findViewById(R.id.startButton);
        mOptionsButton = findViewById(R.id.optionButton);
        mBackground = findViewById(R.id.mainMenuBackground);
        if (mOptionsButton != null) {
            mOptionsButton.setOnClickListener(sOptionButtonListener);
        }
        mExtrasButton = findViewById(R.id.extrasButton);
        mExtrasButton.setOnClickListener(sExtrasButtonListener);
        mButtonFlickerAnimation = AnimationUtils.loadAnimation(this, R.anim.button_flicker);
        mFadeOutAnimation = AnimationUtils.loadAnimation(this, R.anim.fade_out);
        mAlternateFadeOutAnimation = AnimationUtils.loadAnimation(this, R.anim.fade_out);
        mFadeInAnimation = AnimationUtils.loadAnimation(this, R.anim.fade_in);
        SharedPreferences prefs = getSharedPreferences(PreferenceConstants.PREFERENCE_NAME, MODE_PRIVATE);
        final int row = prefs.getInt(PreferenceConstants.PREFERENCE_LEVEL_ROW, 0);
        final int index = prefs.getInt(PreferenceConstants.PREFERENCE_LEVEL_INDEX, 0);
        int levelTreeResource = R.xml.level_tree;
        // A non-zero saved position means a game is in progress; pick the level
        // tree that matches the saved game's mode.
        if (row != 0 || index != 0) {
            final int linear = prefs.getInt(PreferenceConstants.PREFERENCE_LINEAR_MODE, 0);
            if (linear != 0) {
                levelTreeResource = R.xml.linear_level_tree;
            }
        }
        if (!LevelTree.isLoaded(levelTreeResource)) {
            LevelTree.loadLevelTree(levelTreeResource, this);
            LevelTree.loadAllDialog(this);
        }
        mTicker = findViewById(R.id.ticker);
        if (mTicker != null) {
            mTicker.setFocusable(true);
            mTicker.requestFocus();
            mTicker.setSelected(true);
        }
        mJustCreated = true;
        // Keep the volume control type consistent across all activities.
        setVolumeControlStream(AudioManager.STREAM_MUSIC);
        //MediaPlayer mp = MediaPlayer.create(this, R.raw.bwv_115);
        //mp.start();
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Block button handling while the activity is not in the foreground.
        mPaused = true;
    }

    /**
     * Re-enables the menu, assigns the correct start/continue listener based on
     * saved progress, and on first run / upgrade performs one-time setup:
     * device-appropriate control defaults, safe-mode for known-problematic
     * devices, extras unlocking, and the informational dialogs.
     */
    @Override
    protected void onResume() {
        super.onResume();
        mPaused = false;
        // Clear any listener left over from a previous button press so stale
        // intents are not fired by a re-run animation.
        mButtonFlickerAnimation.setAnimationListener(null);
        if (mStartButton != null) {
            // Change "start" to "continue" if there's a saved game.
            SharedPreferences prefs = getSharedPreferences(PreferenceConstants.PREFERENCE_NAME, MODE_PRIVATE);
            final int row = prefs.getInt(PreferenceConstants.PREFERENCE_LEVEL_ROW, 0);
            final int index = prefs.getInt(PreferenceConstants.PREFERENCE_LEVEL_INDEX, 0);
            if (row != 0 || index != 0) {
                ((ImageView)mStartButton).setImageDrawable(getResources().getDrawable(R.drawable.ui_button_continue));
                mStartButton.setOnClickListener(sContinueButtonListener);
            } else {
                ((ImageView)mStartButton).setImageDrawable(getResources().getDrawable(R.drawable.ui_button_start));
                mStartButton.setOnClickListener(sStartButtonListener);
            }
            // Pick a touch filter implementation matching the platform version;
            // multi-touch support only exists from Eclair (API 5) onward.
            TouchFilter touch;
            final int sdkVersion = Integer.parseInt(Build.VERSION.SDK);
            if (sdkVersion < Build.VERSION_CODES.ECLAIR) {
                touch = new SingleTouchFilter();
            } else {
                touch = new MultiTouchFilter();
            }
            final int lastVersion = prefs.getInt(PreferenceConstants.PREFERENCE_LAST_VERSION, 0);
            if (lastVersion == 0) {
                // This is the first time the game has been run.
                // Pre-configure the control options to match the device.
                // The resource system can tell us what this device has.
                // TODO: is there a better way to do this? Seems like a kind of neat
                // way to do custom device profiles.
                final String navType = getString(R.string.nav_type);
                mSelectedControlsString = getString(R.string.control_setup_dialog_trackball);
                if (navType != null) {
                    if (navType.equalsIgnoreCase("DPad")) {
                        // Turn off the click-to-attack pref on devices that have a dpad.
                        SharedPreferences.Editor editor = prefs.edit();
                        editor.putBoolean(PreferenceConstants.PREFERENCE_CLICK_ATTACK, false);
                        editor.commit();
                        mSelectedControlsString = getString(R.string.control_setup_dialog_dpad);
                    } else if (navType.equalsIgnoreCase("None")) {
                        SharedPreferences.Editor editor = prefs.edit();
                        // This test relies on the PackageManager if api version >= 5.
                        if (touch.supportsMultitouch(this)) {
                            // Default to screen controls.
                            editor.putBoolean(PreferenceConstants.PREFERENCE_SCREEN_CONTROLS, true);
                            mSelectedControlsString = getString(R.string.control_setup_dialog_screen);
                        } else {
                            // Turn on tilt controls if there's nothing else.
                            editor.putBoolean(PreferenceConstants.PREFERENCE_TILT_CONTROLS, true);
                            mSelectedControlsString = getString(R.string.control_setup_dialog_tilt);
                        }
                        editor.commit();
                    }
                }
            }
            if (Math.abs(lastVersion) < Math.abs(AndouKun.VERSION)) {
                // This is a new install or an upgrade.
                // Check the safe mode option.
                // Useful reference: http://en.wikipedia.org/wiki/List_of_Android_devices
                if (Build.PRODUCT.contains("morrison") ||  // Motorola Cliq/Dext
                        Build.MODEL.contains("Pulse") ||   // Huawei Pulse
                        Build.MODEL.contains("U8220") ||   // Huawei Pulse
                        Build.MODEL.contains("U8230") ||   // Huawei U8230
                        Build.MODEL.contains("MB300") ||   // Motorola Backflip
                        Build.MODEL.contains("MB501") ||   // Motorola Quench / Cliq XT
                        Build.MODEL.contains("Behold+II")) {   // Samsung Behold II
                    // These are all models that users have complained about. They likely use
                    // the same buggy QTC graphics driver. Turn on Safe Mode by default
                    // for these devices.
                    SharedPreferences.Editor editor = prefs.edit();
                    editor.putBoolean(PreferenceConstants.PREFERENCE_SAFE_MODE, true);
                    editor.commit();
                }
                SharedPreferences.Editor editor = prefs.edit();
                if (lastVersion > 0 && lastVersion < 14) {
                    // if the user has beat the game once, go ahead and unlock stuff for them.
                    if (prefs.getInt(PreferenceConstants.PREFERENCE_LAST_ENDING, -1) != -1) {
                        editor.putBoolean(PreferenceConstants.PREFERENCE_EXTRAS_UNLOCKED, true);
                    }
                }
                // show what's new message
                editor.putInt(PreferenceConstants.PREFERENCE_LAST_VERSION, AndouKun.VERSION);
                editor.commit();
                showDialog(WHATS_NEW_DIALOG);
                // screen controls were added in version 14
                if (lastVersion > 0 && lastVersion < 14 &&
                        prefs.getBoolean(PreferenceConstants.PREFERENCE_TILT_CONTROLS, false)) {
                    if (touch.supportsMultitouch(this)) {
                        // show message about switching from tilt to screen controls
                        showDialog(TILT_TO_SCREEN_CONTROLS_DIALOG);
                    }
                } else if (lastVersion == 0) {
                    // show message about auto-selected control schemes.
                    showDialog(CONTROL_SETUP_DIALOG);
                }
            }
        }
        if (mBackground != null) {
            mBackground.clearAnimation();
        }
        if (mTicker != null) {
            mTicker.clearAnimation();
            mTicker.setAnimation(mFadeInAnimation);
        }
        if (mJustCreated) {
            // First resume after creation: slide the buttons in, staggered by 500ms each.
            if (mStartButton != null) {
                mStartButton.startAnimation(AnimationUtils.loadAnimation(this, R.anim.button_slide));
            }
            if (mExtrasButton != null) {
                Animation anim = AnimationUtils.loadAnimation(this, R.anim.button_slide);
                anim.setStartOffset(500L);
                mExtrasButton.startAnimation(anim);
            }
            if (mOptionsButton != null) {
                Animation anim = AnimationUtils.loadAnimation(this, R.anim.button_slide);
                anim.setStartOffset(1000L);
                mOptionsButton.startAnimation(anim);
            }
            mJustCreated = false;
        } else {
            mStartButton.clearAnimation();
            mOptionsButton.clearAnimation();
            mExtrasButton.clearAnimation();
        }
    }

    /**
     * Builds the informational dialogs requested via showDialog():
     * what's-new, tilt-to-screen-controls migration, and control-setup summary.
     */
    @Override
    protected Dialog onCreateDialog(int id) {
        Dialog dialog;
        if (id == WHATS_NEW_DIALOG) {
            dialog = new AlertDialog.Builder(this)
                    .setTitle(R.string.whats_new_dialog_title)
                    .setPositiveButton(R.string.whats_new_dialog_ok, null)
                    .setMessage(R.string.whats_new_dialog_message)
                    .create();
        } else if (id == TILT_TO_SCREEN_CONTROLS_DIALOG) {
            dialog = new AlertDialog.Builder(this)
                    .setTitle(R.string.onscreen_tilt_dialog_title)
                    .setPositiveButton(R.string.onscreen_tilt_dialog_ok, new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int whichButton) {
                            // User accepted: switch the preference over to screen controls.
                            SharedPreferences prefs = getSharedPreferences(PreferenceConstants.PREFERENCE_NAME, MODE_PRIVATE);
                            SharedPreferences.Editor editor = prefs.edit();
                            editor.putBoolean(PreferenceConstants.PREFERENCE_SCREEN_CONTROLS, true);
                            editor.commit();
                        }
                    })
                    .setNegativeButton(R.string.onscreen_tilt_dialog_cancel, null)
                    .setMessage(R.string.onscreen_tilt_dialog_message)
                    .create();
        } else if (id == CONTROL_SETUP_DIALOG) {
            String messageFormat = getResources().getString(R.string.control_setup_dialog_message);
            String message = String.format(messageFormat, mSelectedControlsString);
            // Html.fromHtml lets the string resource carry simple markup. (Variable
            // name "sytled" is a historical typo; kept to avoid a code change here.)
            CharSequence sytledMessage = Html.fromHtml(message);
            dialog = new AlertDialog.Builder(this)
                    .setTitle(R.string.control_setup_dialog_title)
                    .setPositiveButton(R.string.control_setup_dialog_ok, null)
                    .setNegativeButton(R.string.control_setup_dialog_change, new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int whichButton) {
                            // Jump straight to the control-configuration preferences.
                            Intent i = new Intent(getBaseContext(), SetPreferencesActivity.class);
                            i.putExtra("controlConfig", true);
                            startActivity(i);
                        }
                    })
                    .setMessage(sytledMessage)
                    .create();
        } else {
            dialog = super.onCreateDialog(id);
        }
        return dialog;
    }

    /**
     * Animation listener that launches the given Intent when the animation
     * ends, applying a fade transition when the platform supports
     * overridePendingTransition (resolved reflectively for old API levels).
     */
    protected class StartActivityAfterAnimation implements Animation.AnimationListener {
        private Intent mIntent;

        StartActivityAfterAnimation(Intent intent) {
            mIntent = intent;
        }

        public void onAnimationEnd(Animation animation) {
            startActivity(mIntent);
            if (UIConstants.mOverridePendingTransition != null) {
                try {
                    UIConstants.mOverridePendingTransition.invoke(MainMenuActivity.this, R.anim.activity_fade_in, R.anim.activity_fade_out);
                } catch (InvocationTargetException ite) {
                    DebugLog.d("Activity Transition", "Invocation Target Exception");
                } catch (IllegalAccessException ie) {
                    DebugLog.d("Activity Transition", "Illegal Access Exception");
                }
            }
        }

        public void onAnimationRepeat(Animation animation) {
            // No-op: not needed for one-shot menu animations.
        }

        public void onAnimationStart(Animation animation) {
            // No-op: not needed for one-shot menu animations.
        }
    }
}
| |
/*
* Copyright 2015-2021 OpenEstate.org.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openestate.io.examples;
import com.thedeanda.lorem.Lorem;
import com.thedeanda.lorem.LoremIpsum;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.io.output.NullWriter;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.openestate.io.is24_xml.Is24XmlDocument;
import org.openestate.io.is24_xml.Is24XmlUtils;
import org.openestate.io.is24_xml.xml.AktionsTyp;
import org.openestate.io.is24_xml.xml.AusstattungsqualitaetsTyp;
import org.openestate.io.is24_xml.xml.BauphaseTyp;
import org.openestate.io.is24_xml.xml.EnergieausweistypTyp;
import org.openestate.io.is24_xml.xml.GenehmigungTyp;
import org.openestate.io.is24_xml.xml.HausKategorienTyp;
import org.openestate.io.is24_xml.xml.HausKauf;
import org.openestate.io.is24_xml.xml.HausMiete;
import org.openestate.io.is24_xml.xml.HeizungsartTyp;
import org.openestate.io.is24_xml.xml.ISOLaenderCodeTyp;
import org.openestate.io.is24_xml.xml.ImmobilieBaseTyp;
import org.openestate.io.is24_xml.xml.ImmobilienTransferTyp;
import org.openestate.io.is24_xml.xml.ImmobilienTransferTyp.Anbieter;
import org.openestate.io.is24_xml.xml.MMAnhangArtenTyp;
import org.openestate.io.is24_xml.xml.MultimediaAnhangTyp;
import org.openestate.io.is24_xml.xml.ObjectFactory;
import org.openestate.io.is24_xml.xml.ObjektZustandTyp;
import org.openestate.io.is24_xml.xml.StatusTyp;
import org.openestate.io.is24_xml.xml.StellplatzKategorieTyp;
import org.openestate.io.is24_xml.xml.WaehrungTyp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Example for writing IS24-XML files.
* <p>
* This example illustrates the programmatic creation of IS24-XML documents and how they are written into XML.
*
* @author Andreas Rudolph
* @since 1.0
*/
public class Is24XmlWritingExample {
    @SuppressWarnings("unused")
    private final static Logger LOGGER = LoggerFactory.getLogger(Is24XmlWritingExample.class);
    // Shared JAXB object factory for all IS24-XML element types.
    private final static ObjectFactory FACTORY = Is24XmlUtils.getFactory();
    // Generator for random placeholder text used throughout the example data.
    private final static Lorem RANDOMIZER = new LoremIpsum();
    // Whether the produced XML is indented for readability.
    private final static boolean PRETTY_PRINT = true;

    /**
     * Start the example application.
     *
     * @param args command line arguments
     */
    @SuppressWarnings("Duplicates")
    public static void main(String[] args) {
        // create a ImmobilienTransferTyp object with some example data
        // this object corresponds to the <IS24ImmobilienTransfer> root element in XML
        ImmobilienTransferTyp transfer = FACTORY.createImmobilienTransferTyp();
        transfer.setEmailBeiFehler(RANDOMIZER.getEmail());
        transfer.setErstellerSoftware(RANDOMIZER.getName());
        transfer.setErstellerSoftwareVersion(RandomStringUtils.randomNumeric(2));
        transfer.setAnbieter(createAnbieter());
        // convert the ImmobilienTransferTyp object into an XML document
        Is24XmlDocument doc = null;
        try {
            doc = Is24XmlDocument.newDocument(transfer);
        } catch (Exception ex) {
            // The example treats any failure as fatal; exit code 1 signals the error.
            LOGGER.error("Can't create XML document!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
        // write XML document into a java.io.File
        try {
            write(doc, File.createTempFile("output-", ".xml"));
        } catch (IOException ex) {
            LOGGER.error("Can't create temporary file!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
        // write XML document into a java.io.OutputStream
        write(doc, NullOutputStream.NULL_OUTPUT_STREAM);
        // write XML document into a java.io.Writer
        write(doc, new NullWriter());
        // write XML document into a string and send it to the console
        writeToConsole(doc);
    }

    /**
     * Create an {@link Anbieter} with some example data.
     *
     * @return created example object
     */
    private static Anbieter createAnbieter() {
        // create an example agency
        Anbieter anbieter = FACTORY.createImmobilienTransferTypAnbieter();
        anbieter.setScoutKundenID(RandomStringUtils.randomAlphanumeric(2, 5));
        // add some real estates to the agency (a random mix of sale and rental houses)
        int hausKaufCount = RandomUtils.nextInt(1, 5);
        for (int i = 0; i < hausKaufCount; i++) {
            anbieter.getImmobilie().add(createImmobilieHausKauf());
        }
        int hausMieteCount = RandomUtils.nextInt(1, 5);
        for (int i = 0; i < hausMieteCount; i++) {
            anbieter.getImmobilie().add(createImmobilieHausMiete());
        }
        return anbieter;
    }

    /**
     * Create an {@link HausKauf} with some example data.
     *
     * @return created example object
     */
    @SuppressWarnings("Duplicates")
    private static HausKauf createImmobilieHausKauf() {
        // create an example real estate (house for sale) with randomized attributes
        HausKauf.Type obj = FACTORY.createHausKaufType();
        initImmobilie(obj);
        obj.setAlsFerienwohnungGeeignet(RandomUtils.nextBoolean());
        obj.setAnzahlBadezimmer(RandomUtils.nextLong(1, 5));
        obj.setAnzahlGaragenStellplaetze(RandomUtils.nextLong(0, 3));
        obj.setAnzahlSchlafzimmer(RandomUtils.nextLong(1, 5));
        obj.setAusstattungsqualitaet(randomValue(AusstattungsqualitaetsTyp.values()));
        obj.setBarrierefrei(RandomUtils.nextBoolean());
        obj.setBaujahr(RandomUtils.nextLong(1900, 2010));
        obj.setBauphase(randomValue(BauphaseTyp.values()));
        obj.setDenkmalschutzobjekt(RandomUtils.nextBoolean());
        obj.setEtagenzahl(RandomUtils.nextLong(1, 10));
        obj.setFreiAb(RANDOMIZER.getWords(3, 10));
        obj.setGaesteWC(RandomUtils.nextBoolean());
        obj.setGrundstuecksFlaeche(BigDecimal.valueOf(RandomUtils.nextDouble(100, 1500)));
        obj.setHausKategorie(randomValue(HausKategorienTyp.values()));
        obj.setHeizungsart(randomValue(HeizungsartTyp.values()));
        obj.setJahrLetzteModernisierung(RandomUtils.nextLong(1980, 2000));
        obj.setKeller(RandomUtils.nextBoolean());
        obj.setMitEinliegerwohnung(RandomUtils.nextBoolean());
        obj.setNutzflaeche(BigDecimal.valueOf(RandomUtils.nextDouble(100, 1000)));
        obj.setObjektzustand(randomValue(ObjektZustandTyp.values()));
        obj.setParkplatz(randomValue(StellplatzKategorieTyp.values()));
        obj.setRollstuhlgerecht(RandomUtils.nextBoolean());
        obj.setVermietet(RandomUtils.nextBoolean());
        obj.setWohnflaeche(BigDecimal.valueOf(RandomUtils.nextDouble(50, 500)));
        obj.setZimmer(BigDecimal.valueOf(RandomUtils.nextDouble(1, 10)));
        // heating / fuel type
        obj.setBefeuerungsArt(FACTORY.createBefeuerungsArtTyp());
        obj.getBefeuerungsArt().setOel(
                FACTORY.createBefeuerungsArtTypOel(RandomUtils.nextBoolean()));
        obj.getBefeuerungsArt().setGas(
                FACTORY.createBefeuerungsArtTypGas(RandomUtils.nextBoolean()));
        // energy certificate
        obj.setEnergieausweis(FACTORY.createEnergieausweisTyp());
        obj.getEnergieausweis().setEnergieausweistyp(randomValue(EnergieausweistypTyp.values()));
        obj.getEnergieausweis().setEnergieverbrauchskennwert(BigDecimal.valueOf(RandomUtils.nextDouble(50, 500)));
        obj.getEnergieausweis().setWarmwasserEnthalten(RandomUtils.nextBoolean());
        // purchase prices
        obj.setKaufpreise(FACTORY.createVermarktungWohnKaufTyp());
        obj.getKaufpreise().setKaufpreis(BigDecimal.valueOf(RandomUtils.nextDouble(100000, 9999999)));
        obj.getKaufpreise().setMieteinnahmenProMonat(BigDecimal.valueOf(RandomUtils.nextDouble(5000, 50000)));
        obj.getKaufpreise().setStellplatzKaufpreis(BigDecimal.valueOf(RandomUtils.nextDouble(1000, 10000)));
        obj.getKaufpreise().setWohngeld(BigDecimal.valueOf(RandomUtils.nextDouble(500, 5000)));
        return FACTORY.createHausKauf(obj);
    }

    /**
     * Create an {@link HausMiete} with some example data.
     *
     * @return created example object
     */
    @SuppressWarnings("Duplicates")
    private static HausMiete createImmobilieHausMiete() {
        // create an example real estate (house for rent) with randomized attributes
        HausMiete.Type obj = FACTORY.createHausMieteType();
        initImmobilie(obj);
        obj.setAnzahlBadezimmer(RandomUtils.nextLong(1, 5));
        obj.setAnzahlGaragenStellplaetze(RandomUtils.nextLong(0, 3));
        obj.setAnzahlSchlafzimmer(RandomUtils.nextLong(1, 5));
        obj.setAusstattungsqualitaet(randomValue(AusstattungsqualitaetsTyp.values()));
        obj.setBarrierefrei(RandomUtils.nextBoolean());
        obj.setBaujahr(RandomUtils.nextLong(1900, 2010));
        obj.setBetreutesWohnen(RandomUtils.nextBoolean());
        obj.setEinbaukueche(RandomUtils.nextBoolean());
        obj.setEtagenzahl(RandomUtils.nextLong(1, 10));
        obj.setFreiAb(RANDOMIZER.getWords(3, 10));
        obj.setGaesteWC(RandomUtils.nextBoolean());
        obj.setGrundstuecksFlaeche(BigDecimal.valueOf(RandomUtils.nextDouble(100, 1500)));
        obj.setHausKategorie(randomValue(HausKategorienTyp.values()));
        obj.setHaustiere(randomValue(GenehmigungTyp.values()));
        obj.setHeizungsart(randomValue(HeizungsartTyp.values()));
        obj.setJahrLetzteModernisierung(RandomUtils.nextLong(1980, 2000));
        obj.setKeller(RandomUtils.nextBoolean());
        obj.setNutzflaeche(BigDecimal.valueOf(RandomUtils.nextDouble(150, 500)));
        obj.setObjektzustand(randomValue(ObjektZustandTyp.values()));
        obj.setParkplatz(randomValue(StellplatzKategorieTyp.values()));
        obj.setRollstuhlgerecht(RandomUtils.nextBoolean());
        obj.setWohnflaeche(BigDecimal.valueOf(RandomUtils.nextDouble(50, 300)));
        obj.setZimmer(BigDecimal.valueOf(RandomUtils.nextDouble(1, 5)));
        // heating / fuel type
        obj.setBefeuerungsArt(FACTORY.createBefeuerungsArtTyp());
        obj.getBefeuerungsArt().setErdwaerme(
                FACTORY.createBefeuerungsArtTypErdwaerme(RandomUtils.nextBoolean()));
        obj.getBefeuerungsArt().setPelletheizung(
                FACTORY.createBefeuerungsArtTypPelletheizung(RandomUtils.nextBoolean()));
        // energy certificate
        obj.setEnergieausweis(FACTORY.createEnergieausweisTyp());
        obj.getEnergieausweis().setEnergieausweistyp(randomValue(EnergieausweistypTyp.values()));
        obj.getEnergieausweis().setEnergieverbrauchskennwert(BigDecimal.valueOf(RandomUtils.nextDouble(50, 500)));
        obj.getEnergieausweis().setWarmwasserEnthalten(RandomUtils.nextBoolean());
        // rental prices
        obj.setMietpreise(FACTORY.createVermarktungWohnMieteTyp());
        obj.getMietpreise().setHeizkosten(BigDecimal.valueOf(RandomUtils.nextDouble(100, 500)));
        obj.getMietpreise().setHeizkostenInWarmmieteEnthalten(RandomUtils.nextBoolean());
        obj.getMietpreise().setKaltmiete(BigDecimal.valueOf(RandomUtils.nextDouble(150, 1500)));
        obj.getMietpreise().setKaution(RANDOMIZER.getWords(3, 10));
        obj.getMietpreise().setNebenkosten(BigDecimal.valueOf(RandomUtils.nextDouble(50, 500)));
        obj.getMietpreise().setStellplatzMiete(BigDecimal.valueOf(RandomUtils.nextDouble(50, 500)));
        obj.getMietpreise().setWarmmiete(BigDecimal.valueOf(RandomUtils.nextDouble(250, 2500)));
        return FACTORY.createHausMiete(obj);
    }

    /**
     * Init common values of a property.
     *
     * @param immobilie property object
     */
    private static void initImmobilie(ImmobilieBaseTyp immobilie) {
        // general attributes shared by all property types
        immobilie.setAdressdruck(RandomUtils.nextBoolean());
        immobilie.setAktiveGruppen(RANDOMIZER.getWords(1, 5));
        immobilie.setAnbieterObjektID(RandomStringUtils.randomNumeric(2, 5));
        immobilie.setAusstattung(RANDOMIZER.getWords(5, 50));
        immobilie.setGruppierungsID(RandomUtils.nextLong(1, 9999));
        immobilie.setImportmodus(AktionsTyp.AKTUALISIEREN);
        immobilie.setLage(RANDOMIZER.getWords(5, 50));
        immobilie.setObjektbeschreibung(RANDOMIZER.getWords(5, 50));
        immobilie.setProvision(RANDOMIZER.getWords(1, 10));
        immobilie.setProvisionshinweis(RANDOMIZER.getWords(5, 50));
        immobilie.setProvisionspflichtig(RandomUtils.nextBoolean());
        immobilie.setScoutObjektID(BigInteger.valueOf(RandomUtils.nextInt(1, 1000)));
        immobilie.setSonstigeAngaben(RANDOMIZER.getWords(5, 50));
        immobilie.setStatusHP(randomValue(StatusTyp.values()));
        immobilie.setStatusIS24(randomValue(StatusTyp.values()));
        immobilie.setStatusVBM(randomValue(StatusTyp.values()));
        immobilie.setUeberschrift(RANDOMIZER.getWords(1, 5));
        immobilie.setWaehrung(randomValue(WaehrungTyp.values()));
        // property address
        immobilie.setAdresse(FACTORY.createImmobilienAdresseTyp());
        immobilie.getAdresse().setHausnummer(RandomStringUtils.randomNumeric(1, 4));
        immobilie.getAdresse().setInternationaleRegion(RANDOMIZER.getStateFull());
        immobilie.getAdresse().setLaenderkennzeichen(randomValue(ISOLaenderCodeTyp.values()));
        immobilie.getAdresse().setOrt(RANDOMIZER.getCity());
        immobilie.getAdresse().setPostleitzahl(RANDOMIZER.getZipCode());
        immobilie.getAdresse().setStrasse(RANDOMIZER.getWords(1, 5));
        // API search fields
        immobilie.setApiSuchfelder(FACTORY.createImmobilieBaseTypApiSuchfelder(FACTORY.createApiSuchfelderTyp()));
        immobilie.getApiSuchfelder().getValue().setApiSuchfeld1(FACTORY.createApiSuchfelderTypApiSuchfeld1("value1"));
        immobilie.getApiSuchfelder().getValue().setApiSuchfeld2(FACTORY.createApiSuchfelderTypApiSuchfeld2("value2"));
        immobilie.getApiSuchfelder().getValue().setApiSuchfeld3(FACTORY.createApiSuchfelderTypApiSuchfeld3("value3"));
        // contact person
        immobilie.setKontaktperson(FACTORY.createKontaktAdresseTyp());
        immobilie.getKontaktperson().setAnrede(RANDOMIZER.getWords(1));
        immobilie.getKontaktperson().setEMail(RANDOMIZER.getEmail());
        immobilie.getKontaktperson().setHausnummer(RandomStringUtils.randomNumeric(1, 4));
        immobilie.getKontaktperson().setLaenderkennzeichen(randomValue(ISOLaenderCodeTyp.values()));
        immobilie.getKontaktperson().setMobiltelefon(RANDOMIZER.getPhone());
        immobilie.getKontaktperson().setNachname(RANDOMIZER.getLastName());
        immobilie.getKontaktperson().setOrt(RANDOMIZER.getCity());
        immobilie.getKontaktperson().setPostleitzahl(RANDOMIZER.getZipCode());
        immobilie.getKontaktperson().setStrasse(RANDOMIZER.getWords(1, 5));
        immobilie.getKontaktperson().setTelefax(RANDOMIZER.getPhone());
        immobilie.getKontaktperson().setTelefon(RANDOMIZER.getPhone());
        immobilie.getKontaktperson().setVorname(RANDOMIZER.getFirstName());
        //noinspection CatchMayIgnoreException
        try {
            immobilie.getKontaktperson().setHomepage(new URI("https://www.example.com"));
        } catch (URISyntaxException ex) {
            // ignored: the URI literal above is always valid
        }
        // manual geo coding
        immobilie.setManuelleGeoCodierung(FACTORY.createManuellGeoCodingTyp());
        immobilie.getManuelleGeoCodierung().setTermsRegion(RANDOMIZER.getStateFull());
        immobilie.getManuelleGeoCodierung().setTermsStadt(RANDOMIZER.getCity());
        immobilie.getManuelleGeoCodierung().setTermsStadtTeil(RANDOMIZER.getWords(1, 3));
        // media attachments
        int attachmentCount = RandomUtils.nextInt(3, 10);
        for (int i = 0; i < attachmentCount; i++) {
            MultimediaAnhangTyp attachment = FACTORY.createMultimediaAnhangTyp();
            attachment.setAnhangArt(randomValue(MMAnhangArtenTyp.values()));
            attachment.setDateiname("attachment-" + i + ".jpg");
            attachment.setDateityp("jpg");
            attachment.setTitel(RANDOMIZER.getWords(2, 10));
            immobilie.getMultimediaAnhang().add(attachment);
        }
    }

    /**
     * Get a random value from an array.
     *
     * @param values array containing values to select from
     * @param <T>    type of contained values
     * @return randomly selected value, or {@code null} if the array is null or empty
     */
    private static <T> T randomValue(T[] values) {
        return (values != null && values.length > 0) ?
                values[RandomUtils.nextInt(0, values.length)] :
                null;
    }

    /**
     * Write an {@link Is24XmlDocument} into a {@link File}.
     *
     * @param doc  the document to write
     * @param file the file, where the document is written to
     */
    @SuppressWarnings("Duplicates")
    private static void write(Is24XmlDocument doc, File file) {
        LOGGER.info("writing document");
        try {
            doc.toXml(file, PRETTY_PRINT);
            LOGGER.info("> written to: " + file.getAbsolutePath());
        } catch (Exception ex) {
            LOGGER.error("Can't write document into a file!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }

    /**
     * Write an {@link Is24XmlDocument} into an {@link OutputStream}.
     *
     * @param doc    the document to write
     * @param output the stream, where the document is written to
     */
    @SuppressWarnings({"Duplicates", "SameParameterValue"})
    private static void write(Is24XmlDocument doc, OutputStream output) {
        LOGGER.info("writing document");
        try {
            doc.toXml(output, PRETTY_PRINT);
            LOGGER.info("> written to a java.io.OutputStream");
        } catch (Exception ex) {
            LOGGER.error("Can't write document into an OutputStream!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }

    /**
     * Write an {@link Is24XmlDocument} into a {@link Writer}.
     *
     * @param doc    the document to write
     * @param output the writer, where the document is written to
     */
    @SuppressWarnings("Duplicates")
    private static void write(Is24XmlDocument doc, Writer output) {
        LOGGER.info("writing document");
        try {
            doc.toXml(output, PRETTY_PRINT);
            LOGGER.info("> written to a java.io.Writer");
        } catch (Exception ex) {
            LOGGER.error("Can't write document into a Writer!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }

    /**
     * Write an {@link Is24XmlDocument} into a {@link String} and print the
     * results to the console.
     *
     * @param doc the document to write
     */
    @SuppressWarnings("Duplicates")
    private static void writeToConsole(Is24XmlDocument doc) {
        LOGGER.info("writing document");
        try {
            String xml = doc.toXmlString(PRETTY_PRINT);
            LOGGER.info(StringUtils.repeat("-", 50)
                    + System.lineSeparator() + xml);
        } catch (Exception ex) {
            LOGGER.error("Can't write document into a string!");
            LOGGER.error("> " + ex.getLocalizedMessage(), ex);
            System.exit(1);
        }
    }
}
| |
/*
* Copyright 2012 NGDATA nv
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lilyproject.indexer.batchbuild.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import com.google.common.base.Charsets;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;
import com.ngdata.hbaseindexer.SolrConnectionParams;
import com.ngdata.hbaseindexer.model.api.IndexerDefinition;
import com.ngdata.hbaseindexer.model.api.IndexerDefinitionBuilder;
import com.ngdata.hbaseindexer.model.api.WriteableIndexerModel;
import org.apache.commons.io.IOUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.lilyproject.client.LilyClient;
import org.lilyproject.indexer.derefmap.DependantRecordIdsIterator;
import org.lilyproject.indexer.derefmap.DerefMap;
import org.lilyproject.indexer.derefmap.DerefMapHbaseImpl;
import org.lilyproject.indexer.hbase.mapper.LilyIndexerComponentFactory;
import org.lilyproject.indexer.model.api.LResultToSolrMapper;
import org.lilyproject.lilyservertestfw.LilyProxy;
import org.lilyproject.lilyservertestfw.LilyServerProxy;
import org.lilyproject.repository.api.AbsoluteRecordId;
import org.lilyproject.repository.api.FieldType;
import org.lilyproject.repository.api.LRepository;
import org.lilyproject.repository.api.LTable;
import org.lilyproject.repository.api.Link;
import org.lilyproject.repository.api.QName;
import org.lilyproject.repository.api.Record;
import org.lilyproject.repository.api.RecordId;
import org.lilyproject.repository.api.RecordType;
import org.lilyproject.repository.api.SchemaId;
import org.lilyproject.repository.api.Scope;
import org.lilyproject.repository.api.TypeManager;
import org.lilyproject.solrtestfw.SolrProxy;
import org.lilyproject.util.hbase.LilyHBaseSchema.Table;
import org.lilyproject.util.io.Closer;
import org.lilyproject.util.repo.VersionTag;
/**
 * Integration test for batch index building: boots an embedded Lily/Solr stack,
 * creates records, triggers batch index builds and verifies the resulting Solr index.
 */
public class BatchBuildTest {
    private static LilyProxy lilyProxy;
    private static LilyClient lilyClient;
    private static LRepository repository;
    private static LTable table;
    private static TypeManager typeManager;
    private static SolrServer solrServer;
    private static SolrProxy solrProxy;
    private static LilyServerProxy lilyServerProxy;
    private static WriteableIndexerModel model;
    // Maximum time in milliseconds to wait for a batch index build to finish.
    private static final int BUILD_TIMEOUT = 240000;
    // Field and record types used by the tests; resolved per-test in setup().
    private FieldType ft1;
    private FieldType ft2;
    private RecordType rt1;
    private final static String REPO_NAME = "batchtestrepo";
    private final static String INDEX_NAME = "batchtest";
    private static final String COUNTER_NUM_FAILED_RECORDS =
            "org.lilyproject.indexer.batchbuild.IndexBatchBuildCounters:NUM_FAILED_RECORDS";
    /**
     * Boots the Lily proxy, creates the test repository and schema (field types plus
     * record type) and registers the indexer definition shared by all tests.
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        lilyProxy = new LilyProxy(null, null, null, true);
        // Read the Solr schema through the helper so the stream is always closed.
        byte[] solrSchema = getResourceAsByteArray("solrschema.xml");
        lilyProxy.start(solrSchema);
        solrProxy = lilyProxy.getSolrProxy();
        solrServer = solrProxy.getSolrServer();
        lilyServerProxy = lilyProxy.getLilyServerProxy();
        lilyServerProxy.createRepository(REPO_NAME);
        lilyClient = lilyServerProxy.getClient();
        repository = lilyClient.getRepository(REPO_NAME);
        table = repository.getDefaultTable();
        typeManager = repository.getTypeManager();
        // Locals renamed: they previously shadowed the instance fields ft1/ft2,
        // which are resolved independently in setup() before each test.
        FieldType field1Type = typeManager.createFieldType("STRING", new QName("batchindex-test", "field1"),
                Scope.NON_VERSIONED);
        FieldType linkFieldType =
                typeManager.createFieldType("LINK", new QName("batchindex-test", "linkField"), Scope.NON_VERSIONED);
        typeManager.recordTypeBuilder()
                .defaultNamespace("batchindex-test")
                .name("rt1")
                .fieldEntry().use(field1Type).add()
                .fieldEntry().use(linkFieldType).add()
                .create();
        model = lilyServerProxy.getIndexerModel();
        // Previously this stream was read with ByteStreams.toByteArray() and never
        // closed; the helper closes it in a finally block.
        byte[] indexerConfiguration = getResourceAsByteArray("indexerconf.xml");
        Map<String, String> connectionParams = Maps.newHashMap();
        connectionParams.put(SolrConnectionParams.ZOOKEEPER, "localhost:2181/solr");
        connectionParams.put(SolrConnectionParams.COLLECTION, "core0");
        connectionParams.put(LResultToSolrMapper.REPO_KEY, REPO_NAME);
        connectionParams.put(LResultToSolrMapper.ZOOKEEPER_KEY, "localhost:2181");
        IndexerDefinition index = new IndexerDefinitionBuilder()
                .name(INDEX_NAME)
                .connectionType("solr")
                .connectionParams(connectionParams)
                .indexerComponentFactory(LilyIndexerComponentFactory.class.getName())
                .configuration(indexerConfiguration)
                .incrementalIndexingState(IndexerDefinition.IncrementalIndexingState.DO_NOT_SUBSCRIBE)
                .build();
        model.addIndexer(index);
    }
    @AfterClass
    public static void tearDownAfterClass() throws Exception {
        Closer.close(lilyClient);
        Closer.close(solrServer);
        Closer.close(solrProxy);
        Closer.close(lilyServerProxy);
        lilyProxy.stop();
    }
    /** Resolves the schema types created in {@link #setUpBeforeClass()}. */
    @Before
    public void setup() throws Exception {
        this.ft1 = typeManager.getFieldTypeByName(new QName("batchindex-test", "field1"));
        this.ft2 = typeManager.getFieldTypeByName(new QName("batchindex-test", "linkField"));
        this.rt1 = typeManager.getRecordTypeByName(new QName("batchindex-test", "rt1"), null);
    }
    /** Creates one record, runs a batch build and checks it ended up in the index. */
    @Test
    public void testBatchIndex() throws Exception {
        String assertId = "batch-index-test";
        //
        // First create some content
        //
        table.recordBuilder()
                .id(assertId)
                .recordType(rt1.getName())
                .field(ft1.getName(), "test1")
                .create();
        this.buildAndCommit();
        QueryResponse response = solrServer.query(new SolrQuery("field1:test1*"));
        assertEquals(1, response.getResults().size());
        assertEquals("USER." + assertId, response.getResults().get(0).getFieldValue("lily.id"));
    }
    @Test
    public void testClearIndex() throws Exception {
        doTestClearIndex("clearIndex", true);
    }
    @Test
    public void testNoClearIndex() throws Exception {
        doTestClearIndex("dontClearIndex", false);
    }
    /**
     * Seeds the index with an extra (stale) document, rebuilds, and verifies that the
     * stale document survives or disappears depending on the clear-index CLI argument.
     *
     * @param assertId record id and field prefix used for this scenario
     * @param clear    whether the batch build is expected to wipe pre-existing documents
     */
    public void doTestClearIndex(String assertId, boolean clear) throws Exception {
        String[] defaultConf = getBatchCliArgs(String.format("batchIndexCliArgs-testClearIndex-%s.txt", clear));
        setBatchIndexConf(defaultConf, null, false);
        SolrInputDocument extraDoc = new SolrInputDocument();
        extraDoc.addField("field1", assertId + "extra");
        extraDoc.addField("lily.id", "doesnotmatter");
        extraDoc.addField("lily.key", "doesnotmatter2");
        extraDoc.addField("lily.table", "record");
        solrServer.add(extraDoc);
        solrServer.commit();
        //
        // First create some content
        //
        table.recordBuilder()
                .id(assertId)
                .recordType(rt1.getName())
                .field(ft1.getName(), assertId)
                .create();
        this.buildAndCommit();
        QueryResponse response = solrServer.query(new SolrQuery("field1:" + assertId + "*"));
        if (clear) {
            // The extra document must have been wiped by the build.
            assertEquals(1, response.getResults().size());
            assertEquals("USER." + assertId, response.getResults().get(0).getFieldValue("lily.id"));
        } else {
            // Both the extra document and the freshly indexed record remain.
            assertEquals(2, response.getResults().size());
        }
    }
    /** Reads a resource file containing whitespace-separated batch CLI arguments. */
    private String[] getBatchCliArgs(String name) throws IOException {
        String argString = new String(getResourceAsByteArray(name), Charsets.UTF_8);
        return Iterables.toArray(Splitter.on(" ").trimResults().omitEmptyStrings().split(argString), String.class);
    }
    /**
     * Test if the default batch index conf setting works
     */
    @Test
    public void testDefaultBatchIndexConf() throws Exception {
        String[] defaultConf = getBatchCliArgs("defaultBatchIndexCliArgs-test2.txt");
        setBatchIndexConf(defaultConf, null, false);
        String assertId = "batch-index-test2";
        //
        // First create some content
        //
        table.recordBuilder()
                .id(assertId)
                .recordType(rt1.getName())
                .field(ft1.getName(), "test2 index")
                .create();
        table.recordBuilder()
                .id("batch-noindex-test2")
                .recordType(rt1.getName())
                .field(ft1.getName(), "test2 noindex")
                .create();
        // Now start the batch index
        this.buildAndCommit();
        // Check if 1 record and not 2 are in the index
        QueryResponse response = solrServer.query(new SolrQuery("field1:test2*"));
        assertEquals(1, response.getResults().size());
        assertEquals("USER." + assertId, response.getResults().get(0).getFieldValue("lily.id"));
        // check that the last used batch index conf = default
        IndexerDefinition index = model.getIndexer(INDEX_NAME);
        assertTrue(Lists.newArrayList(index.getLastBatchBuildInfo().getBatchIndexCliArguments())
                .containsAll(Lists.newArrayList(defaultConf)));
    }
    /**
     * Test setting a custom batch index conf.
     */
    @Test
    public void testCustomBatchIndexConf() throws Exception {
        String[] defaultConf = getBatchCliArgs("defaultBatchIndexCliArgs-test2.txt");
        setBatchIndexConf(defaultConf, null, false);
        String assertId1 = "batch-index-custom-test3";
        String assertId2 = "batch-index-test3";
        //
        // First create some content
        //
        Record recordToChange1 = table.recordBuilder()
                .id(assertId2)
                .recordType(rt1.getName())
                .field(ft1.getName(), "test3 index run1")
                .create();
        Record recordToChange2 = table.recordBuilder()
                .id(assertId1)
                .recordType(rt1.getName())
                .field(ft1.getName(), "test3 index run1")
                .create();
        table.recordBuilder()
                .id("batch-noindex-test3")
                .recordType(rt1.getName())
                .field(ft1.getName(), "test3 noindex run1")
                .create();
        // Index everything with the default conf
        this.buildAndCommit();
        SolrDocumentList results = solrServer.query(new SolrQuery("field1:test3*").
                addSortField("lily.id", ORDER.asc)).getResults();
        assertEquals(2, results.size());
        assertEquals("USER." + assertId1, results.get(0).getFieldValue("lily.id"));
        assertEquals("USER." + assertId2, results.get(1).getFieldValue("lily.id"));
        // change some fields and reindex using a specific configuration. Only one of the 2 changes should be picked up
        recordToChange1.setField(ft1.getName(), "test3 index run2");
        recordToChange2.setField(ft1.getName(), "test3 index run2");
        table.update(recordToChange1);
        table.update(recordToChange2);
        String[] batchConf = getBatchCliArgs("batchIndexCliArgs-test3.txt");
        setBatchIndexConf(defaultConf, batchConf, true);
        waitForIndexAndCommit(BUILD_TIMEOUT);
        // Check if 1 record and not 2 are in the index
        QueryResponse response = solrServer.query(new SolrQuery("field1:test3\\ index\\ run2"));
        assertEquals(1, response.getResults().size());
        assertEquals("USER." + assertId1, response.getResults().get(0).getFieldValue("lily.id"));
        // check that the last used batch index conf = default
        assertTrue(Lists.newArrayList(model.getIndexer(INDEX_NAME).getLastBatchBuildInfo().getBatchIndexCliArguments())
                .containsAll(Lists.newArrayList(batchConf)));
        // Set things up for run 3 where the default configuration should be used again
        recordToChange1.setField(ft1.getName(), "test3 index run3");
        recordToChange2.setField(ft1.getName(), "test3 index run3");
        table.update(recordToChange1);
        table.update(recordToChange2);
        // Now rebuild the index and see if the default indexer has kicked in
        this.buildAndCommit();
        response = solrServer.query(new SolrQuery("field1:test3\\ index\\ run3").
                addSortField("lily.id", ORDER.asc));
        assertEquals(2, response.getResults().size());
        assertEquals("USER." + assertId1, response.getResults().get(0).getFieldValue("lily.id"));
        assertEquals("USER." + assertId2, response.getResults().get(1).getFieldValue("lily.id"));
        // check that the last used batch index conf = default
        assertTrue(Lists.newArrayList(model.getIndexer(INDEX_NAME).getLastBatchBuildInfo().getBatchIndexCliArguments())
                .containsAll(Lists.newArrayList(defaultConf)));
    }
    /**
     * This test should cause a failure when adding a custom batchindex conf without setting a buildrequest
     */
    @Test(expected = com.ngdata.hbaseindexer.model.api.IndexerValidityException.class)
    public void testCustomBatchIndexConf_NoBuild() throws Exception {
        setBatchIndexConf(getBatchCliArgs("defaultBatchIndexCliArgs-test2.txt"),
                getBatchCliArgs("batchIndexCliArgs-test3.txt"), false);
        //waitForIndexAndCommit(BUILD_TIMEOUT);
        // remove when we can do this with hbase-indexer
        buildAndCommit();
    }
    /**
     * Reads a classpath resource fully into a byte array, always closing the stream.
     * Static so it can be shared with {@link #setUpBeforeClass()}.
     */
    private static byte[] getResourceAsByteArray(String name) throws IOException {
        InputStream is = null;
        try {
            is = BatchBuildTest.class.getResourceAsStream(name);
            return IOUtils.toByteArray(is);
        } finally {
            IOUtils.closeQuietly(is);
        }
    }
    @Test
    @Ignore
    public void testClearDerefMap() throws Exception {
        DerefMap derefMap = DerefMapHbaseImpl
                .create(REPO_NAME, INDEX_NAME, lilyProxy.getHBaseProxy().getConf(), null, repository.getIdGenerator());
        Record linkedRecord = table.recordBuilder()
                .id("deref-test-linkedrecord")
                .recordType(rt1.getName())
                .field(ft1.getName(), "deref test linkedrecord")
                .create();
        Record record = table.recordBuilder()
                .id("deref-test-main")
                .recordType(rt1.getName())
                .field(ft1.getName(), "deref test main")
                .field(ft2.getName(), new Link(linkedRecord.getId()))
                .create();
        SchemaId vtag = typeManager.getFieldTypeByName(VersionTag.LAST).getId();
        // The iterator is obtained before entering the try block so that a failure in
        // findDependantsOf() can no longer trigger an NPE on it.close() in the finally
        // block (previously `it` was initialized to null).
        DependantRecordIdsIterator it = derefMap.findDependantsOf(absId(linkedRecord.getId()),
                ft1.getId(), vtag);
        try {
            assertTrue(!it.hasNext());
        } finally {
            it.close();
        }
        setBatchIndexConf(getBatchCliArgs("batchIndexCliArgs-testClearDerefmap-false.txt"), null, false);
        buildAndCommit();
        QueryResponse response = solrServer.query(new SolrQuery("field1:deref\\ test\\ main"));
        assertEquals(1, response.getResults().size());
        it = derefMap.findDependantsOf(absId(linkedRecord.getId()), ft1.getId(), vtag);
        try {
            assertTrue(it.hasNext());
        } finally {
            it.close();
        }
        setBatchIndexConf(null, getBatchCliArgs("batchIndexCliArgs-testClearDerefmap-true.txt"), true);
        //waitForIndexAndCommit(BUILD_TIMEOUT);
        // remove when we can do this with hbase-indexer
        buildAndCommit();
        it = derefMap.findDependantsOf(absId(linkedRecord.getId()), ft1.getId(), vtag);
        try {
            assertTrue(!it.hasNext());
        } finally {
            it.close();
        }
    }
    /** Runs a batch build with the configured args and commits Solr afterwards. */
    private void buildAndCommit() throws Exception {
        lilyServerProxy.batchBuildIndex(INDEX_NAME, BUILD_TIMEOUT);
        solrServer.commit();
    }
    /**
     * Polls (up to {@code timeout} ms) until the batch build reaches the INACTIVE state,
     * fails the test if the build was unsuccessful, and commits Solr on success.
     */
    private void waitForIndexAndCommit(long timeout) throws Exception {
        boolean indexSuccess = false;
        try {
            // Now wait until its finished
            long tryUntil = System.currentTimeMillis() + timeout;
            while (System.currentTimeMillis() < tryUntil) {
                Thread.sleep(100);
                IndexerDefinition definition = model.getIndexer(INDEX_NAME);
                if (definition.getBatchIndexingState() == IndexerDefinition.BatchIndexingState.INACTIVE) {
                    Long amountFailed = null;
                    // NOTE(review): the failed-record counter lookup is disabled, so
                    // amountFailed always stays null and only the success flag is checked.
                    //amountFailed = definition.getLastBatchBuildInfo().getCounters().get(COUNTER_NUM_FAILED_RECORDS);
                    boolean successFlag = definition.getLastBatchBuildInfo().isFinishedSuccessful();
                    indexSuccess = successFlag && (amountFailed == null || amountFailed == 0L);
                    if (!indexSuccess) {
                        fail("Batch index build did not finish successfully: success flag = " +
                                successFlag + ", amount failed records = " + amountFailed + ", job url = " +
                                definition.getLastBatchBuildInfo().getMapReduceJobTrackingUrls());
                    } else {
                        break;
                    }
                }
            }
        } catch (Exception e) {
            throw new Exception("Error checking if batch index job ended.", e);
        }
        if (!indexSuccess) {
            // Report the timeout actually used rather than the BUILD_TIMEOUT constant.
            fail("Batch build did not end after " + timeout + " millis");
        } else {
            solrServer.commit();
        }
    }
    /**
     * Updates the indexer definition's default/custom batch CLI arguments under the
     * indexer lock, optionally requesting an immediate build.
     */
    private static void setBatchIndexConf(String[] defaultConf, String[] customConf, boolean buildNow) throws Exception {
        String lock = model.lockIndexer(INDEX_NAME);
        try {
            IndexerDefinitionBuilder index = new IndexerDefinitionBuilder().startFrom(model.getIndexer(INDEX_NAME));
            if (defaultConf != null) {
                index.defaultBatchIndexCliArguments(defaultConf);
            }
            if (customConf != null) {
                index.batchIndexCliArguments(customConf);
            }
            if (buildNow) {
                index.batchIndexingState(IndexerDefinition.BatchIndexingState.BUILD_REQUESTED);
            }
            model.updateIndexer(index.build(), lock);
        } finally {
            model.unlockIndexer(lock);
        }
    }
    /** Wraps a record id into an absolute id within the default record table. */
    private static AbsoluteRecordId absId(RecordId recordId) {
        return repository.getIdGenerator().newAbsoluteRecordId(Table.RECORD.name, recordId);
    }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import com.facebook.buck.android.dalvik.EstimateDexWeightStep;
import com.facebook.buck.android.toolchain.AndroidPlatformTarget;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.jvm.core.JavaLibrary;
import com.facebook.buck.jvm.java.DefaultJavaLibrary;
import com.facebook.buck.jvm.java.FakeJavaLibrary;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildOutputInitializer;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultSourcePathResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildContext;
import com.facebook.buck.rules.FakeBuildableContext;
import com.facebook.buck.rules.SingleThreadedBuildRuleResolver;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TestBuildRuleParams;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.MoreAsserts;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
/**
 * Tests for {@code DexProducedFromJavaLibrary}: the build steps it emits when the backing
 * Java library does or does not contain classes, its observer methods, and the ABI key
 * computation. Code is unchanged; several assertions below depend on the exact order and
 * indices of the generated steps.
 */
public class DexProducedFromJavaLibraryThatContainsClassFilesTest {
  /**
   * When the library reports at least one class, the rule must emit the full step
   * sequence: rm, mkdir, weight estimate, dx invocation, zip-scrub, and success record.
   */
  @Test
  public void testGetBuildStepsWhenThereAreClassesToDex() throws IOException, InterruptedException {
    ProjectFilesystem filesystem = FakeProjectFilesystem.createJavaOnlyFilesystem();
    BuildRuleResolver resolver =
        new SingleThreadedBuildRuleResolver(
            TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathResolver pathResolver =
        DefaultSourcePathResolver.from(new SourcePathRuleFinder(resolver));
    // Fake library that reports a single class so that dexing steps are generated.
    FakeJavaLibrary javaLibraryRule =
        new FakeJavaLibrary(
            BuildTargetFactory.newInstance(filesystem.getRootPath(), "//foo:bar"),
            filesystem,
            ImmutableSortedSet.of()) {
          @Override
          public ImmutableSortedMap<String, HashCode> getClassNamesToHashes() {
            return ImmutableSortedMap.of("com/example/Foo", HashCode.fromString("cafebabe"));
          }
        };
    resolver.addToIndex(javaLibraryRule);
    Path jarOutput =
        BuildTargets.getGenPath(filesystem, javaLibraryRule.getBuildTarget(), "%s.jar");
    javaLibraryRule.setOutputFile(jarOutput.toString());
    BuildContext context =
        FakeBuildContext.withSourcePathResolver(pathResolver)
            .withBuildCellRootPath(filesystem.getRootPath());
    FakeBuildableContext buildableContext = new FakeBuildableContext();
    Path dexOutput =
        BuildTargets.getGenPath(
            filesystem,
            javaLibraryRule.getBuildTarget().withFlavors(AndroidBinaryGraphEnhancer.DEX_FLAVOR),
            "%s.dex.jar");
    // The input jar and output dex paths must exist on disk for the steps to run.
    createFiles(filesystem, dexOutput.toString(), jarOutput.toString());
    // Mostly-empty platform target; only the dx executable path matters for the
    // expected command string below.
    AndroidPlatformTarget androidPlatformTarget =
        AndroidPlatformTarget.of(
            "android",
            Paths.get(""),
            Collections.emptyList(),
            Paths.get(""),
            Paths.get(""),
            Paths.get(""),
            Paths.get(""),
            Paths.get(""),
            Paths.get("/usr/bin/dx"),
            Paths.get(""),
            Paths.get(""),
            Paths.get(""),
            Paths.get(""));
    BuildTarget buildTarget =
        BuildTargetFactory.newInstance(filesystem.getRootPath(), "//foo:bar#dex");
    BuildRuleParams params = TestBuildRuleParams.create();
    DexProducedFromJavaLibrary preDex =
        new DexProducedFromJavaLibrary(
            buildTarget, filesystem, androidPlatformTarget, params, javaLibraryRule, DxStep.DX);
    List<Step> steps = preDex.getBuildSteps(context, buildableContext);
    ExecutionContext executionContext = TestExecutionContext.newBuilder().build();
    String expectedDxCommand =
        String.format(
            "%s --dex --no-optimize --force-jumbo --output %s %s",
            Paths.get("/usr/bin/dx"), filesystem.resolve(dexOutput), filesystem.resolve(jarOutput));
    // The descriptions below pin both the content and the ORDER of the steps; the
    // index-based accesses further down (steps.get(2), steps.get(5)) rely on it.
    MoreAsserts.assertSteps(
        "Generate bar.dex.jar.",
        ImmutableList.of(
            String.format("rm -f %s", dexOutput),
            String.format("mkdir -p %s", dexOutput.getParent()),
            "estimate_dex_weight",
            "(cd " + filesystem.getRootPath() + " && " + expectedDxCommand + ")",
            String.format("zip-scrub %s", filesystem.resolve(dexOutput)),
            "record_dx_success"),
        steps,
        executionContext);
    // Inject a weight estimate so the final record step has something to persist.
    ((EstimateDexWeightStep) steps.get(2)).setWeightEstimateForTesting(250);
    Step recordArtifactAndMetadataStep = steps.get(5);
    int exitCode = recordArtifactAndMetadataStep.execute(executionContext).getExitCode();
    assertEquals(0, exitCode);
    MoreAsserts.assertContainsOne(
        "The generated .dex.jar file should be in the set of recorded artifacts.",
        buildableContext.getRecordedArtifacts(),
        BuildTargets.getGenPath(filesystem, buildTarget, "%s.dex.jar"));
    // The weight estimate set above must round-trip through the on-disk metadata.
    BuildOutputInitializer<DexProducedFromJavaLibrary.BuildOutput> outputInitializer =
        preDex.getBuildOutputInitializer();
    outputInitializer.initializeFromDisk();
    assertEquals(250, outputInitializer.getBuildOutput().weightEstimate);
  }
  /** Creates each given path (and its parent directories) as an empty file. */
  private void createFiles(ProjectFilesystem filesystem, String... paths) throws IOException {
    Path root = filesystem.getRootPath();
    for (String path : paths) {
      Path resolved = root.resolve(path);
      Files.createDirectories(resolved.getParent());
      Files.write(resolved, "".getBytes(UTF_8));
    }
  }
  /**
   * When the library reports no classes, only rm/mkdir and an "empty dx" record step are
   * emitted — no dx invocation at all.
   */
  @Test
  public void testGetBuildStepsWhenThereAreNoClassesToDex() throws Exception {
    BuildRuleResolver resolver =
        new SingleThreadedBuildRuleResolver(
            TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    DefaultJavaLibrary javaLibrary = JavaLibraryBuilder.createBuilder("//foo:bar").build(resolver);
    // Empty class-name map: nothing to dex.
    javaLibrary
        .getBuildOutputInitializer()
        .setBuildOutputForTests(new JavaLibrary.Data(ImmutableSortedMap.of()));
    BuildContext context = FakeBuildContext.NOOP_CONTEXT;
    FakeBuildableContext buildableContext = new FakeBuildableContext();
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    BuildTarget buildTarget = BuildTargetFactory.newInstance("//foo:bar#dex");
    BuildRuleParams params = TestBuildRuleParams.create();
    DexProducedFromJavaLibrary preDex =
        new DexProducedFromJavaLibrary(
            buildTarget,
            projectFilesystem,
            TestAndroidPlatformTargetFactory.create(),
            params,
            javaLibrary,
            DxStep.DX);
    List<Step> steps = preDex.getBuildSteps(context, buildableContext);
    Path dexOutput = BuildTargets.getGenPath(projectFilesystem, buildTarget, "%s.dex.jar");
    ExecutionContext executionContext = TestExecutionContext.newBuilder().build();
    MoreAsserts.assertSteps(
        "Do not generate a .dex.jar file.",
        ImmutableList.of(
            String.format("rm -f %s", dexOutput),
            String.format("mkdir -p %s", dexOutput.getParent()),
            "record_empty_dx"),
        steps,
        executionContext);
    // The last step must be the metadata-recording step, and it must succeed.
    Step recordArtifactAndMetadataStep = steps.get(2);
    assertThat(recordArtifactAndMetadataStep.getShortName(), startsWith("record_"));
    int exitCode = recordArtifactAndMetadataStep.execute(executionContext).getExitCode();
    assertEquals(0, exitCode);
  }
  /** Checks the rule's accessor methods before any build output exists. */
  @Test
  public void testObserverMethods() throws Exception {
    BuildRuleResolver resolver =
        new SingleThreadedBuildRuleResolver(
            TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    DefaultJavaLibrary accumulateClassNames =
        JavaLibraryBuilder.createBuilder("//foo:bar").build(resolver);
    accumulateClassNames
        .getBuildOutputInitializer()
        .setBuildOutputForTests(
            new JavaLibrary.Data(
                ImmutableSortedMap.of("com/example/Foo", HashCode.fromString("cafebabe"))));
    BuildTarget buildTarget = BuildTargetFactory.newInstance("//foo:bar");
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    BuildRuleParams params = TestBuildRuleParams.create();
    DexProducedFromJavaLibrary preDexWithClasses =
        new DexProducedFromJavaLibrary(
            buildTarget,
            projectFilesystem,
            TestAndroidPlatformTargetFactory.create(),
            params,
            accumulateClassNames,
            DxStep.DX);
    assertNull(preDexWithClasses.getSourcePathToOutput());
    assertEquals(
        BuildTargets.getGenPath(projectFilesystem, buildTarget, "%s.dex.jar"),
        preDexWithClasses.getPathToDex());
  }
  /**
   * The ABI key is a SHA-1 over the class-name/hash pairs in the sorted-map iteration
   * order, with each string followed by a 0 byte, mirrored by the hasher chain below.
   */
  @Test
  public void testComputeAbiKey() {
    ImmutableSortedMap<String, HashCode> classNamesAndHashes =
        ImmutableSortedMap.of(
            "com/example/Foo", HashCode.fromString("e4fccb7520b7795e632651323c63217c9f59f72a"),
            "com/example/Bar", HashCode.fromString("087b7707a5f8e0a2adf5652e3cd2072d89a197dc"),
            "com/example/Baz", HashCode.fromString("62b1c2510840c0de55c13f66065a98a719be0f19"));
    String observedSha1 = DexProducedFromJavaLibrary.computeAbiKey(classNamesAndHashes).getHash();
    // Expected digest is built in sorted key order (Bar, Baz, Foo).
    String expectedSha1 =
        Hashing.sha1()
            .newHasher()
            .putUnencodedChars("com/example/Bar")
            .putByte((byte) 0)
            .putUnencodedChars("087b7707a5f8e0a2adf5652e3cd2072d89a197dc")
            .putByte((byte) 0)
            .putUnencodedChars("com/example/Baz")
            .putByte((byte) 0)
            .putUnencodedChars("62b1c2510840c0de55c13f66065a98a719be0f19")
            .putByte((byte) 0)
            .putUnencodedChars("com/example/Foo")
            .putByte((byte) 0)
            .putUnencodedChars("e4fccb7520b7795e632651323c63217c9f59f72a")
            .putByte((byte) 0)
            .hash()
            .toString();
    assertEquals(expectedSha1, observedSha1);
  }
}
| |
// Copyright 2012 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.impala.analysis;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.impala.authorization.Privilege;
import com.cloudera.impala.authorization.PrivilegeRequestBuilder;
import com.cloudera.impala.catalog.Column;
import com.cloudera.impala.catalog.HBaseTable;
import com.cloudera.impala.catalog.HdfsTable;
import com.cloudera.impala.catalog.Table;
import com.cloudera.impala.catalog.Type;
import com.cloudera.impala.catalog.View;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.planner.DataSink;
import com.cloudera.impala.thrift.THdfsFileFormat;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
* Representation of a single insert statement, including the select statement
* whose results are to be inserted.
*/
public class InsertStmt extends StatementBase {
private final static Logger LOG = LoggerFactory.getLogger(InsertStmt.class);
// Insert formats currently supported by Impala.
private final static EnumSet<THdfsFileFormat> SUPPORTED_INSERT_FORMATS =
EnumSet.of(THdfsFileFormat.PARQUET, THdfsFileFormat.TEXT);
// List of inline views that may be referenced in queryStmt.
private final WithClause withClause_;
// Target table name as seen by the parser
private final TableName originalTableName_;
// Target table into which to insert. May be qualified by analyze()
private TableName targetTableName_;
// Differentiates between INSERT INTO and INSERT OVERWRITE.
private final boolean overwrite_;
// List of column:value elements from the PARTITION (...) clause.
// Set to null if no partition was given.
private final List<PartitionKeyValue> partitionKeyValues_;
// User-supplied hints to control hash partitioning before the table sink in the plan.
private final List<String> planHints_;
// Select or union whose results are to be inserted. If null, will be set after
// analysis.
private QueryStmt queryStmt_;
// False if the original insert statement had a query statement, true if we need to
// auto-generate one (for insert into tbl();) during analysis.
private final boolean needsGeneratedQueryStatement_;
// Set in analyze(). Contains metadata of target table to determine type of sink.
private Table table_;
// Set in analyze(). Exprs corresponding to the partitionKeyValues,
private final List<Expr> partitionKeyExprs_ = new ArrayList<Expr>();
// True to force re-partitioning before the table sink, false to prevent it. Set in
// analyze() based on planHints_. Null if no explicit hint was given (the planner
// should decide whether to re-partition or not).
private Boolean isRepartition_ = null;
// Output expressions that produce the final results to write to the target table. May
// include casts, and NullLiterals where an output column isn't explicitly mentioned.
// Set in prepareExpressions(). The i'th expr produces the i'th column of the target
// table.
private final ArrayList<Expr> resultExprs_ = new ArrayList<Expr>();
// The column permutation is specified by writing INSERT INTO tbl(col3, col1, col2...)
//
// It is a mapping from select-list expr index to (non-partition) output column. If
// null, will be set to the default permutation of all non-partition columns in Hive
// order.
//
// A column is said to be 'mentioned' if it occurs either in the column permutation, or
// the PARTITION clause. If columnPermutation is null, all non-partition columns are
// considered mentioned.
//
  // Between them, the columnPermutation and the set of partitionKeyValues must
  // mention every partition column in the target table exactly once.
// explicitly mentioned, will be assigned NULL values. Partition columns are not
// defaulted to NULL by design, and are not just for NULL-valued partition slots.
//
// Dynamic partition keys may occur in either the permutation or the PARTITION
// clause. Partition columns with static values may only be mentioned in the PARTITION
// clause, where the static value is specified.
private final List<String> columnPermutation_;
  /**
   * Constructs an InsertStmt from parsed components.
   *
   * @param withClause optional WITH clause; may be null
   * @param targetTable name of the table to insert into
   * @param overwrite true for INSERT OVERWRITE, false for INSERT INTO
   * @param partitionKeyValues entries of the PARTITION clause; may be null
   * @param planHints plan hints; may be null
   * @param queryStmt source query; null for "INSERT INTO tbl()" in which case a
   *     query statement is generated during analysis
   * @param columnPermutation explicit column list, or null for the default
   *     permutation of all non-partition columns in Hive order
   */
  public InsertStmt(WithClause withClause, TableName targetTable, boolean overwrite,
      List<PartitionKeyValue> partitionKeyValues, List<String> planHints,
      QueryStmt queryStmt, List<String> columnPermutation) {
    withClause_ = withClause;
    targetTableName_ = targetTable;
    // Remember the name exactly as written: targetTableName_ may be replaced
    // with a fully-qualified name in setTargetTable(), but toSql() must print
    // the original.
    originalTableName_ = targetTableName_;
    overwrite_ = overwrite;
    partitionKeyValues_ = partitionKeyValues;
    planHints_ = planHints;
    queryStmt_ = queryStmt;
    // With no source query, analysis must generate an all-NULL select statement.
    needsGeneratedQueryStatement_ = (queryStmt == null);
    columnPermutation_ = columnPermutation;
    table_ = null;  // Resolved during analysis (or injected via setTargetTable()).
  }
/**
* C'tor used in clone().
*/
public InsertStmt(InsertStmt other) {
withClause_ = other.withClause_ != null ? other.withClause_.clone() : null;
targetTableName_ = other.targetTableName_;
originalTableName_ = other.targetTableName_;
overwrite_ = other.overwrite_;
partitionKeyValues_ = other.partitionKeyValues_;
planHints_ = other.planHints_;
queryStmt_ = other.queryStmt_ != null ? other.queryStmt_.clone() : null;
needsGeneratedQueryStatement_ = other.needsGeneratedQueryStatement_;
columnPermutation_ = other.columnPermutation_;
table_ = other.table_;
}
@Override
public InsertStmt clone() { return new InsertStmt(this); }
  /**
   * Performs INSERT-specific analysis: analyzes the WITH clause and source
   * query, resolves the target table, maps select-list expressions to target
   * columns via the (possibly implicit) column permutation and the PARTITION
   * clause, and finally validates plan hints.
   */
  @Override
  public void analyze(Analyzer analyzer) throws AnalysisException {
    if (isExplain_) analyzer.setIsExplain();
    try {
      if (withClause_ != null) withClause_.analyze(analyzer);
    } catch (AnalysisException e) {
      // Ignore AnalysisExceptions if tables are missing to ensure the maximum number
      // of missing tables can be collected before failing analyze().
      if (analyzer.getMissingTbls().isEmpty()) throw e;
    }

    List<Expr> selectListExprs = null;
    if (!needsGeneratedQueryStatement_) {
      try {
        // Use a child analyzer for the query stmt to properly scope WITH-clause
        // views and to ignore irrelevant ORDER BYs.
        Analyzer queryStmtAnalyzer = new Analyzer(analyzer);
        queryStmt_.analyze(queryStmtAnalyzer);
        if (analyzer.containsSubquery()) {
          // Subqueries are rewritten in-place; re-analyze the rewritten
          // statement from a clean clone with a fresh child analyzer.
          Preconditions.checkState(queryStmt_ instanceof SelectStmt);
          StmtRewriter.rewriteStatement((SelectStmt)queryStmt_, queryStmtAnalyzer);
          queryStmt_ = queryStmt_.clone();
          queryStmtAnalyzer = new Analyzer(analyzer);
          queryStmt_.analyze(queryStmtAnalyzer);
        }
        selectListExprs = Expr.cloneList(queryStmt_.getBaseTblResultExprs());
      } catch (AnalysisException e) {
        if (analyzer.getMissingTbls().isEmpty()) throw e;
      }
    } else {
      // INSERT INTO tbl(); - there is no source query yet, the generated
      // statement is built at the end of prepareExpressions().
      selectListExprs = Lists.newArrayList();
    }

    // Set target table and perform table-type specific analysis and auth checking.
    // Also checks if the target table is missing.
    setTargetTable(analyzer);

    // Abort analysis if there are any missing tables beyond this point.
    if (!analyzer.getMissingTbls().isEmpty()) {
      throw new AnalysisException("Found missing tables. Aborting analysis.");
    }

    // HBase tables have no clustering (partition) columns.
    boolean isHBaseTable = (table_ instanceof HBaseTable);
    int numClusteringCols = isHBaseTable ? 0 : table_.getNumClusteringCols();

    // Analysis of the INSERT statement from this point is basically the act of matching
    // the set of output columns (which come from a column permutation, perhaps
    // implicitly, and the PARTITION clause) to the set of input columns (which come from
    // the select-list and any statically-valued columns in the PARTITION clause).
    //
    // First, we compute the set of mentioned columns, and reject statements that refer to
    // non-existent columns, or duplicates (we must check both the column permutation, and
    // the set of partition keys). Next, we check that all partition columns are
    // mentioned. During this process we build the map from select-list expr index to
    // column in the targeted table.
    //
    // Then we check that the select-list contains exactly the right number of expressions
    // for all mentioned columns which are not statically-valued partition columns (which
    // get their expressions from partitionKeyValues).
    //
    // Finally, prepareExpressions analyzes the expressions themselves, and confirms that
    // they are type-compatible with the target columns. Where columns are not mentioned
    // (and by this point, we know that missing columns are not partition columns),
    // prepareExpressions assigns them NULL literal expressions.

    // A null permutation clause is the same as listing all non-partition columns in
    // order.
    List<String> analysisColumnPermutation = columnPermutation_;
    if (analysisColumnPermutation == null) {
      analysisColumnPermutation = Lists.newArrayList();
      ArrayList<Column> tableColumns = table_.getColumns();
      for (int i = numClusteringCols; i < tableColumns.size(); ++i) {
        analysisColumnPermutation.add(tableColumns.get(i).getName());
      }
    }

    // selectExprTargetColumns maps from select expression index to a column in the target
    // table. It will eventually include all mentioned columns that aren't static-valued
    // partition columns.
    ArrayList<Column> selectExprTargetColumns = Lists.newArrayList();

    // Tracks the name of all columns encountered in either the permutation clause or the
    // partition clause to detect duplicates.
    Set<String> mentionedColumnNames = Sets.newHashSet();
    for (String columnName: analysisColumnPermutation) {
      Column column = table_.getColumn(columnName);
      if (column == null) {
        throw new AnalysisException(
            "Unknown column '" + columnName + "' in column permutation");
      }
      if (!mentionedColumnNames.add(columnName)) {
        throw new AnalysisException(
            "Duplicate column '" + columnName + "' in column permutation");
      }
      selectExprTargetColumns.add(column);
    }

    int numStaticPartitionExprs = 0;
    if (partitionKeyValues_ != null) {
      for (PartitionKeyValue pkv: partitionKeyValues_) {
        Column column = table_.getColumn(pkv.getColName());
        if (column == null) {
          throw new AnalysisException("Unknown column '" + pkv.getColName() +
              "' in partition clause");
        }
        // Only clustering columns may appear in the PARTITION clause.
        if (column.getPosition() >= numClusteringCols) {
          throw new AnalysisException(
              "Column '" + pkv.getColName() + "' is not a partition column");
        }
        if (!mentionedColumnNames.add(pkv.getColName())) {
          throw new AnalysisException(
              "Duplicate column '" + pkv.getColName() + "' in partition clause");
        }
        // Static partition keys carry their own value expression; dynamic ones
        // consume a select-list expression just like permutation columns.
        if (!pkv.isDynamic()) {
          numStaticPartitionExprs++;
        } else {
          selectExprTargetColumns.add(column);
        }
      }
    }

    // Checks that exactly all columns in the target table are assigned an expr.
    checkColumnCoverage(selectExprTargetColumns, mentionedColumnNames,
        selectListExprs.size(), numStaticPartitionExprs);

    // Make sure static partition key values only contain const exprs.
    if (partitionKeyValues_ != null) {
      for (PartitionKeyValue kv: partitionKeyValues_) {
        kv.analyze(analyzer);
      }
    }

    // Populate partitionKeyExprs from partitionKeyValues and selectExprTargetColumns
    prepareExpressions(selectExprTargetColumns, selectListExprs, table_, analyzer);
    // Analyze plan hints at the end to prefer reporting other error messages first
    // (e.g., the PARTITION clause is not applicable to unpartitioned and HBase tables).
    analyzePlanHints(analyzer);
  }
  /**
   * Sets table_ based on targetTableName_ and performs table-type specific analysis:
   * - Partition clause is invalid for unpartitioned Hdfs tables and HBase tables
   * - Overwrite is invalid for HBase tables
   * - Check INSERT privileges as well as write access to Hdfs paths
   * - Cannot insert into a view
   * Adds table_ to the analyzer's descriptor table if analysis succeeds.
   */
  private void setTargetTable(Analyzer analyzer) throws AnalysisException {
    // If the table has not yet been set, load it from the Catalog. This allows for
    // callers to set a table to analyze that may not actually be created in the Catalog.
    // One example use case is CREATE TABLE AS SELECT which must run analysis on the
    // INSERT before the table has actually been created.
    if (table_ == null) {
      if (!targetTableName_.isFullyQualified()) {
        // Qualify with the session's default database; originalTableName_
        // retains the name as written for toSql().
        targetTableName_ =
            new TableName(analyzer.getDefaultDb(), targetTableName_.getTbl());
      }
      table_ = analyzer.getTable(targetTableName_, Privilege.INSERT);
    } else {
      // Table was injected by the caller (e.g. CTAS); still register the
      // INSERT privilege request so authorization is checked uniformly.
      targetTableName_ = new TableName(table_.getDb().getName(), table_.getName());
      PrivilegeRequestBuilder pb = new PrivilegeRequestBuilder();
      analyzer.registerPrivReq(pb.onTable(table_.getDb().getName(), table_.getName())
          .allOf(Privilege.INSERT).toRequest());
    }

    // We do not support inserting into views.
    if (table_ instanceof View) {
      throw new AnalysisException(
          String.format("Impala does not support inserting into views: %s",
          table_.getFullName()));
    }

    // HBase tables have no clustering columns; a PARTITION clause is therefore
    // invalid for them and for unpartitioned Hdfs tables.
    boolean isHBaseTable = (table_ instanceof HBaseTable);
    int numClusteringCols = isHBaseTable ? 0 : table_.getNumClusteringCols();

    if (partitionKeyValues_ != null && numClusteringCols == 0) {
      if (isHBaseTable) {
        throw new AnalysisException("PARTITION clause is not valid for INSERT into " +
            "HBase tables. '" + targetTableName_ + "' is an HBase table");
      } else {
        // Unpartitioned table, but INSERT has PARTITION clause
        throw new AnalysisException("PARTITION clause is only valid for INSERT into " +
            "partitioned table. '" + targetTableName_ + "' is not partitioned");
      }
    }

    if (table_ instanceof HdfsTable) {
      HdfsTable hdfsTable = (HdfsTable) table_;
      if (!hdfsTable.hasWriteAccess()) {
        throw new AnalysisException(String.format("Unable to INSERT into target table " +
            "(%s) because Impala does not have WRITE access to at least one HDFS path" +
            ": %s", targetTableName_, hdfsTable.getFirstLocationWithoutWriteAccess()));
      }
      for (int colIdx = 0; colIdx < numClusteringCols; ++colIdx) {
        Column col = hdfsTable.getColumns().get(colIdx);
        // Hive has a number of issues handling BOOLEAN partition columns (see HIVE-6590).
        // Instead of working around the Hive bugs, INSERT is disabled for BOOLEAN
        // partitions in Impala. Once the Hive JIRA is resolved, we can remove this
        // analysis check.
        if (col.getType() == Type.BOOLEAN) {
          throw new AnalysisException(String.format("INSERT into table with BOOLEAN " +
              "partition column (%s) is not supported: %s", col.getName(),
              targetTableName_));
        }
      }
    }

    if (isHBaseTable && overwrite_) {
      throw new AnalysisException("HBase doesn't have a way to perform INSERT OVERWRITE");
    }

    // Add target table to descriptor table.
    analyzer.getDescTbl().addReferencedTable(table_);
  }
  /**
   * Checks that the column permutation + select list + static partition exprs +
   * dynamic partition exprs collectively cover exactly all columns in the target table
   * (no more or fewer).
   */
  private void checkColumnCoverage(ArrayList<Column> selectExprTargetColumns,
      Set<String> mentionedColumnNames, int numSelectListExprs,
      int numStaticPartitionExprs) throws AnalysisException {
    boolean isHBaseTable = (table_ instanceof HBaseTable);
    int numClusteringCols = isHBaseTable ? 0 : table_.getNumClusteringCols();
    // Check that all columns are mentioned by the permutation and partition clauses
    if (selectExprTargetColumns.size() + numStaticPartitionExprs !=
        table_.getColumns().size()) {
      // We've already ruled out too many columns in the permutation and partition clauses
      // by checking that there are no duplicates and that every column mentioned actually
      // exists. So all columns aren't mentioned in the query. If the unmentioned columns
      // include partition columns, this is an error.
      List<String> missingColumnNames = Lists.newArrayList();
      for (Column column: table_.getColumns()) {
        if (!mentionedColumnNames.contains(column.getName())) {
          // HBase tables have a single row-key column which is always in position 0. It
          // must be mentioned, since it is invalid to set it to NULL (which would
          // otherwise happen by default).
          if (isHBaseTable && column.getPosition() == 0) {
            throw new AnalysisException("Row-key column '" + column.getName() +
                "' must be explicitly mentioned in column permutation.");
          }
          // Unmentioned non-partition columns are allowed (they default to NULL
          // in prepareExpressions()); unmentioned partition columns are not.
          if (column.getPosition() < numClusteringCols) {
            missingColumnNames.add(column.getName());
          }
        }
      }

      if (!missingColumnNames.isEmpty()) {
        throw new AnalysisException(
            "Not enough partition columns mentioned in query. Missing columns are: " +
            Joiner.on(", ").join(missingColumnNames));
      }
    }

    // Expect the selectListExpr to have entries for every target column
    if (selectExprTargetColumns.size() != numSelectListExprs) {
      String comparator =
          (selectExprTargetColumns.size() < numSelectListExprs) ? "fewer" : "more";
      String partitionClause =
          (partitionKeyValues_ == null) ? "returns" : "and PARTITION clause return";
      // If there was no column permutation provided, the error is that the select-list
      // has the wrong number of expressions compared to the number of columns in the
      // table. If there was a column permutation, then the mismatch is between the
      // select-list and the permutation itself.
      if (columnPermutation_ == null) {
        int totalColumnsMentioned = numSelectListExprs + numStaticPartitionExprs;
        throw new AnalysisException(String.format(
            "Target table '%s' has %s columns (%s) than the SELECT / VALUES clause %s" +
            " (%s)", table_.getFullName(), comparator,
            table_.getColumns().size(), partitionClause, totalColumnsMentioned));
      } else {
        String partitionPrefix =
            (partitionKeyValues_ == null) ? "mentions" : "and PARTITION clause mention";
        throw new AnalysisException(String.format(
            "Column permutation %s %s columns (%s) than " +
            "the SELECT / VALUES clause %s (%s)", partitionPrefix, comparator,
            selectExprTargetColumns.size(), partitionClause, numSelectListExprs));
      }
    }
  }
  /**
   * Performs three final parts of the analysis:
   * 1. Checks type compatibility between all expressions and their targets
   *
   * 2. Populates partitionKeyExprs with type-compatible expressions, in Hive
   * partition-column order, for all partition columns
   *
   * 3. Populates resultExprs_ with type-compatible expressions, in Hive column order,
   * for all expressions in the select-list. Unmentioned columns are assigned NULL
   * literal expressions.
   *
   * If necessary, adds casts to the expressions to make them compatible with the type of
   * the corresponding column.
   *
   * @throws AnalysisException
   *           If an expression is not compatible with its target column
   */
  private void prepareExpressions(List<Column> selectExprTargetColumns,
      List<Expr> selectListExprs, Table tbl, Analyzer analyzer)
      throws AnalysisException {
    // Temporary lists of partition key exprs and names in an arbitrary order.
    List<Expr> tmpPartitionKeyExprs = new ArrayList<Expr>();
    List<String> tmpPartitionKeyNames = new ArrayList<String>();

    int numClusteringCols = (tbl instanceof HBaseTable) ? 0 : tbl.getNumClusteringCols();

    // Check dynamic partition columns for type compatibility.
    for (int i = 0; i < selectListExprs.size(); ++i) {
      Column targetColumn = selectExprTargetColumns.get(i);
      Expr compatibleExpr = checkTypeCompatibility(targetColumn, selectListExprs.get(i));
      if (targetColumn.getPosition() < numClusteringCols) {
        // This is a dynamic clustering column
        tmpPartitionKeyExprs.add(compatibleExpr);
        tmpPartitionKeyNames.add(targetColumn.getName());
      }
      // Replace the expr in place so later steps see the (possibly cast) form.
      selectListExprs.set(i, compatibleExpr);
    }

    // Check static partition columns, dynamic entries in partitionKeyValues will already
    // be in selectExprTargetColumns and therefore are ignored in this loop
    if (partitionKeyValues_ != null) {
      for (PartitionKeyValue pkv: partitionKeyValues_) {
        if (pkv.isStatic()) {
          // tableColumns is guaranteed to exist after the earlier analysis checks
          Column tableColumn = table_.getColumn(pkv.getColName());
          Expr compatibleExpr = checkTypeCompatibility(tableColumn, pkv.getValue());
          tmpPartitionKeyExprs.add(compatibleExpr);
          tmpPartitionKeyNames.add(pkv.getColName());
        }
      }
    }

    // Reorder the partition key exprs and names to be consistent with the target table
    // declaration. We need those exprs in the original order to create the corresponding
    // Hdfs folder structure correctly.
    // (Quadratic in the number of partition columns, which is small in practice.)
    for (Column c: table_.getColumns()) {
      for (int j = 0; j < tmpPartitionKeyNames.size(); ++j) {
        if (c.getName().equals(tmpPartitionKeyNames.get(j))) {
          partitionKeyExprs_.add(tmpPartitionKeyExprs.get(j));
          break;
        }
      }
    }

    // Every clustering column must have ended up with exactly one expr.
    Preconditions.checkState(partitionKeyExprs_.size() == numClusteringCols);
    // Make sure we have stats for partitionKeyExprs
    for (Expr expr: partitionKeyExprs_) {
      expr.analyze(analyzer);
    }

    // Finally, 'undo' the permutation so that the selectListExprs are in Hive column
    // order, and add NULL expressions to all missing columns.
    for (Column tblColumn: table_.getColumnsInHiveOrder()) {
      boolean matchFound = false;
      for (int i = 0; i < selectListExprs.size(); ++i) {
        if (selectExprTargetColumns.get(i).getName().equals(tblColumn.getName())) {
          resultExprs_.add(selectListExprs.get(i));
          matchFound = true;
          break;
        }
      }
      // If no match is found, either the column is a clustering column with a static
      // value, or it was unmentioned and therefore should have a NULL select-list
      // expression.
      if (!matchFound) {
        if (tblColumn.getPosition() >= numClusteringCols) {
          // Unmentioned non-clustering columns get NULL literals with the appropriate
          // target type because Parquet cannot handle NULL_TYPE (IMPALA-617).
          resultExprs_.add(NullLiteral.create(tblColumn.getType()));
        }
      }
    }
    // TODO: Check that HBase row-key columns are not NULL? See IMPALA-406
    if (needsGeneratedQueryStatement_) {
      // Build a query statement that returns NULL for every column
      List<SelectListItem> selectListItems = Lists.newArrayList();
      for(Expr e: resultExprs_) {
        selectListItems.add(new SelectListItem(e, null));
      }
      SelectList selectList = new SelectList(selectListItems);
      queryStmt_ = new SelectStmt(selectList, null, null, null, null, null, null);
      queryStmt_.analyze(analyzer);
    }
  }
/**
* Checks for type compatibility of column and expr.
* Returns compatible (possibly cast) expr.
*/
private Expr checkTypeCompatibility(Column column, Expr expr)
throws AnalysisException {
// Check for compatible type, and add casts to the selectListExprs if necessary.
// We don't allow casting to a lower precision type.
Type colType = column.getType();
Type exprType = expr.getType();
// Trivially compatible.
if (colType.equals(exprType)) return expr;
Type compatibleType =
Type.getAssignmentCompatibleType(colType, exprType);
// Incompatible types.
if (!compatibleType.isValid()) {
throw new AnalysisException(
String.format(
"Target table '%s' is incompatible with SELECT / PARTITION expressions.\n" +
"Expression '%s' (type: %s) is not compatible with column '%s' (type: %s)",
targetTableName_, expr.toSql(), exprType, column.getName(), colType));
}
// Loss of precision when inserting into the table.
if (!compatibleType.equals(colType) && !compatibleType.isNull()) {
throw new AnalysisException(
String.format("Possible loss of precision for target table '%s'.\n" +
"Expression '%s' (type: %s) would need to be cast to %s" +
" for column '%s'",
targetTableName_, expr.toSql(), exprType, colType,
column.getName()));
}
// Add a cast to the selectListExpr to the higher type.
return expr.castTo(compatibleType);
}
private void analyzePlanHints(Analyzer analyzer) throws AnalysisException {
if (planHints_ == null) return;
if (!planHints_.isEmpty() &&
(partitionKeyValues_ == null || table_ instanceof HBaseTable)) {
throw new AnalysisException("INSERT hints are only supported for inserting into " +
"partitioned Hdfs tables.");
}
for (String hint: planHints_) {
if (hint.equalsIgnoreCase("SHUFFLE")) {
if (isRepartition_ != null && !isRepartition_) {
throw new AnalysisException("Conflicting INSERT hint: " + hint);
}
isRepartition_ = Boolean.TRUE;
analyzer.setHasPlanHints();
} else if (hint.equalsIgnoreCase("NOSHUFFLE")) {
if (isRepartition_ != null && isRepartition_) {
throw new AnalysisException("Conflicting INSERT hint: " + hint);
}
isRepartition_ = Boolean.FALSE;
analyzer.setHasPlanHints();
} else {
analyzer.addWarning("INSERT hint not recognized: " + hint);
}
}
}
  public List<String> getPlanHints() { return planHints_; }
  // Fully qualified after analysis; see originalTableName_ for the name as written.
  public TableName getTargetTableName() { return targetTableName_; }
  public Table getTargetTable() { return table_; }
  // Allows callers (e.g. CTAS) to inject a table that is not yet in the Catalog.
  public void setTargetTable(Table table) { this.table_ = table; }
  public boolean isOverwrite() { return overwrite_; }

  /**
   * Only valid after analysis
   */
  public QueryStmt getQueryStmt() { return queryStmt_; }
  public void setQueryStmt(QueryStmt stmt) { queryStmt_ = stmt; }
  public List<Expr> getPartitionKeyExprs() { return partitionKeyExprs_; }
  // Null when no SHUFFLE/NOSHUFFLE hint was given; the planner then decides.
  public Boolean isRepartition() { return isRepartition_; }
  public ArrayList<Expr> getResultExprs() { return resultExprs_; }
  /**
   * Creates the data sink that writes the query results into the target table.
   * Only valid after analysis has resolved table_ and partitionKeyExprs_.
   */
  public DataSink createDataSink() {
    // analyze() must have been called before.
    Preconditions.checkState(table_ != null);
    return DataSink.createDataSink(table_, partitionKeyExprs_, overwrite_);
  }
@Override
public String toSql() {
StringBuilder strBuilder = new StringBuilder();
if (withClause_ != null) strBuilder.append(withClause_.toSql() + " ");
strBuilder.append("INSERT ");
if (overwrite_) {
strBuilder.append("OVERWRITE ");
} else {
strBuilder.append("INTO ");
}
strBuilder.append("TABLE " + originalTableName_);
if (columnPermutation_ != null) {
strBuilder.append("(");
strBuilder.append(Joiner.on(", ").join(columnPermutation_));
strBuilder.append(")");
}
if (partitionKeyValues_ != null) {
List<String> values = Lists.newArrayList();
for (PartitionKeyValue pkv: partitionKeyValues_) {
values.add(pkv.getColName() +
(pkv.getValue() != null ? ("=" + pkv.getValue().toSql()) : ""));
}
strBuilder.append(" PARTITION (" + Joiner.on(", ").join(values) + ")");
}
if (planHints_ != null) {
strBuilder.append(" " + ToSqlUtils.getPlanHintsSql(planHints_));
}
if (!needsGeneratedQueryStatement_) {
strBuilder.append(" " + queryStmt_.toSql());
}
return strBuilder.toString();
}
}
| |
package net.ME1312.SubServers.Bungee.Host.External;
import net.ME1312.Galaxi.Library.Config.YAMLConfig;
import net.ME1312.Galaxi.Library.Container.ContainedPair;
import net.ME1312.Galaxi.Library.Container.Container;
import net.ME1312.Galaxi.Library.Container.Pair;
import net.ME1312.Galaxi.Library.Container.Value;
import net.ME1312.Galaxi.Library.Map.ObjectMap;
import net.ME1312.Galaxi.Library.Try;
import net.ME1312.Galaxi.Library.Util;
import net.ME1312.Galaxi.Library.Version.Version;
import net.ME1312.SubData.Server.SubDataClient;
import net.ME1312.SubServers.Bungee.Event.SubCreateEvent;
import net.ME1312.SubServers.Bungee.Event.SubCreatedEvent;
import net.ME1312.SubServers.Bungee.Host.Host;
import net.ME1312.SubServers.Bungee.Host.SubCreator;
import net.ME1312.SubServers.Bungee.Host.SubLogger;
import net.ME1312.SubServers.Bungee.Host.SubServer;
import net.ME1312.SubServers.Bungee.Host.SubServer.StopAction;
import net.ME1312.SubServers.Bungee.Library.Compatibility.Logger;
import net.ME1312.SubServers.Bungee.Network.Packet.PacketExConfigureHost;
import net.ME1312.SubServers.Bungee.Network.Packet.PacketExCreateServer;
import net.ME1312.SubServers.Bungee.Network.Packet.PacketExDownloadTemplates;
import net.ME1312.SubServers.Bungee.Network.Packet.PacketExUploadTemplates;
import net.ME1312.SubServers.Bungee.SubAPI;
import net.ME1312.SubServers.Bungee.SubProxy;
import com.google.common.collect.Range;
import net.md_5.bungee.api.ChatColor;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.net.InetSocketAddress;
import java.util.*;
import java.util.function.Consumer;
/**
* External SubCreator Class
*/
@SuppressWarnings("unchecked")
public class ExternalSubCreator extends SubCreator {
    // Active templates, keyed by lowercase name.
    // NOTE(review): not written in this chunk — presumably swapped in from
    // templatesR elsewhere; confirm against the rest of the class.
    private HashMap<String, ServerTemplate> templates = new HashMap<String, ServerTemplate>();
    // Templates most recently (re)loaded from disk by reload(), keyed by lowercase name.
    private HashMap<String, ServerTemplate> templatesR = new HashMap<String, ServerTemplate>();
    // Whether templates are re-downloaded after upload; null is treated like true.
    private Boolean enableRT = false;
    // Host this creator builds servers for.
    private ExternalHost host;
    // Bounded range of ports to auto-select from.
    private Range<Integer> ports;
    // Whether creator output is logged to console.
    private Value<Boolean> log;
    // Git Bash directory (used for builds on Windows hosts).
    private String gitBash;
    // In-flight creator/updater jobs: lowercase server name -> (port, logger).
    private TreeMap<String, Pair<Integer, ExternalSubLogger>> thread;
    /**
     * Creates an External SubCreator
     *
     * @param host Host
     * @param ports The range of ports to auto-select from
     * @param log Whether SubCreator should log to console
     * @param gitBash The Git Bash directory
     * @throws IllegalArgumentException if the port range is unbounded on either end
     */
    public ExternalSubCreator(ExternalHost host, Range<Integer> ports, boolean log, String gitBash) {
        // Both endpoints are required so ports can be scanned lowest-to-highest.
        if (!ports.hasLowerBound() || !ports.hasUpperBound()) throw new IllegalArgumentException("Port range is not bound");
        Util.nullpo(host, ports, log, gitBash);
        this.host = host;
        this.ports = ports;
        this.log = new Container<Boolean>(log);
        this.gitBash = gitBash;
        this.thread = new TreeMap<String, Pair<Integer, ExternalSubLogger>>();
        reload();  // Load templates from disk immediately.
    }
@Override
public void reload() {
templatesR.clear();
if (new File(host.plugin.dir, "SubServers/Templates").exists()) for (File file : new File(host.plugin.dir, "SubServers/Templates").listFiles()) {
try {
if (file.isDirectory() && !file.getName().endsWith(".x")) {
ObjectMap<String> config = (new File(file, "template.yml").exists())? new YAMLConfig(new File(file, "template.yml")).get().getMap("Template", new ObjectMap<String>()) : new ObjectMap<String>();
ServerTemplate template = loadTemplate(file.getName(), config.getBoolean("Enabled", true), config.getBoolean("Internal", false), config.getString("Icon", "::NULL::"), file, config.getMap("Build", new ObjectMap<String>()), config.getMap("Settings", new ObjectMap<String>()));
templatesR.put(file.getName().toLowerCase(), template);
if (config.getKeys().contains("Display")) template.setDisplayName(Util.unescapeJavaString(config.getString("Display")));
}
} catch (Exception e) {
Logger.get(host.getName()).severe("Couldn't load template: " + file.getName());
e.printStackTrace();
}
}
if (host.available && !Try.all.get(() -> Util.reflect(SubProxy.class.getDeclaredField("reloading"), host.plugin), false)) {
host.queue(new PacketExConfigureHost(host.plugin, host), new PacketExUploadTemplates(host.plugin, () -> {
if (enableRT == null || enableRT) host.queue(new PacketExDownloadTemplates(host.plugin, host));
}));
}
}
    /**
     * Requests creation of a new SubServer on the external host.
     * Returns false without side effects when the host/template is unusable,
     * the name is taken or reserved, a required version is missing, or the
     * SubCreateEvent is cancelled.
     */
    @Override
    public boolean create(UUID player, String name, ServerTemplate template, Version version, Integer port, Consumer<SubServer> callback) {
        Util.nullpo(name, template);
        if (host.isAvailable() && host.isEnabled() && template.isEnabled() && !SubAPI.getInstance().getSubServers().keySet().contains(name.toLowerCase()) && !SubCreator.isReserved(name) && (version != null || !template.requiresVersion())) {
            // Captured so callback failures can be reported against this call site.
            StackTraceElement[] origin = new Throwable().getStackTrace();

            if (port == null) {
                // Auto-select the first port in range that is not already reserved.
                Container<Integer> i = new Container<Integer>(ports.lowerEndpoint() - 1);
                port = Util.getNew(getAllReservedAddresses(), () -> {
                    do {
                        ++i.value;
                        if (i.value > ports.upperEndpoint()) throw new IllegalStateException("There are no more ports available in range: " + ports.toString());
                    } while (!ports.contains(i.value));
                    return new InetSocketAddress(host.getAddress(), i.value);
                }).getPort();
            }

            String prefix = name + File.separator + "Creator";
            ExternalSubLogger logger = new ExternalSubLogger(this, prefix, log, null);
            // Reserve the name (and port) while the job is in flight.
            thread.put(name.toLowerCase(), new ContainedPair<>(port, logger));

            final int fport = port;
            final SubCreateEvent event = new SubCreateEvent(player, host, name, template, version, port);
            host.plugin.getPluginManager().callEvent(event);
            if (!event.isCancelled()) {
                logger.start();
                // The external host does the actual build; finish() runs when it
                // reports back, then the reservation is released.
                host.queue(new PacketExCreateServer(player, name, template, version, port, logger.getExternalAddress(), data -> {
                    finish(player, null, name, template, version, fport, prefix, origin, data, callback);
                    this.thread.remove(name.toLowerCase());
                }));
                return true;
            } else {
                // Event cancelled: release the reservation.
                thread.remove(name.toLowerCase());
                return false;
            }
        } else return false;
    }

    // Invokes a user-supplied callback, reporting any exception it throws
    // against the original call site instead of crashing the packet handler.
    private <T> void callback(StackTraceElement[] origin, Consumer<T> callback, T value) {
        if (callback != null) try {
            callback.accept(value);
        } catch (Throwable e) {
            Throwable ew = new InvocationTargetException(e);
            ew.setStackTrace(origin);
            ew.printStackTrace();
        }
    }
@Override
public boolean update(UUID player, SubServer server, ServerTemplate template, Version version, Consumer<Boolean> callback) {
Util.nullpo(server);
final ServerTemplate ft = (template == null)?server.getTemplate():template;
if (host.isAvailable() && host.isEnabled() && host == server.getHost() && server.isAvailable() && !server.isRunning() && ft != null && ft.isEnabled() && ft.canUpdate() && (version != null || !ft.requiresVersion())) {
StackTraceElement[] origin = new Throwable().getStackTrace();
String name = server.getName();
String prefix = name + File.separator + "Updater";
((ExternalSubServer) server).updating(true);
ExternalSubLogger logger = new ExternalSubLogger(this, prefix, log, null);
thread.put(name.toLowerCase(), new ContainedPair<>(server.getAddress().getPort(), logger));
final SubCreateEvent event = new SubCreateEvent(player, server, ft, version);
host.plugin.getPluginManager().callEvent(event);
if (!event.isCancelled()) {
logger.start();
host.queue(new PacketExCreateServer(player, server, ft, version, logger.getExternalAddress(), data -> {
finish(player, server, server.getName(), ft, version, server.getAddress().getPort(), prefix, origin, data, s -> {
((ExternalSubServer) server).updating(false);
if (callback != null) callback.accept(s != null);
});
this.thread.remove(name.toLowerCase());
}));
return true;
} else {
thread.remove(name.toLowerCase());
return false;
}
} else return false;
}
    /**
     * Handles the external host's response to a create/update job: on success
     * (data[0x0001] == 0) registers/re-registers the server, persists its config
     * where appropriate, optionally starts it, then fires SubCreatedEvent and
     * invokes the callback with the server (or null on failure).
     *
     * @param update the existing server being updated, or null for a fresh create
     * @param data response map: 0x0001 status, 0x0002 server config, 0x0003 error text
     */
    private void finish(UUID player, SubServer update, String name, ServerTemplate template, Version version, int port, String prefix, StackTraceElement[] origin, ObjectMap<Integer> data, Consumer<SubServer> callback) {
        try {
            if (data.getInt(0x0001) == 0) {
                Logger.get(prefix).info("Saving...");
                SubServer subserver = update;
                // Rebuild the server object unless this is an update that kept the
                // same template and doesn't ask for its settings to be re-applied.
                // NOTE(review): reference comparison (!=) on templates — presumably
                // templates are canonical shared instances; confirm.
                if (update == null || update.getTemplate() != template || template.getBuildOptions().getBoolean("Update-Settings", false)) {
                    if (host.plugin.exServers.keySet().contains(name.toLowerCase()))
                        host.plugin.exServers.remove(name.toLowerCase());

                    ObjectMap<String> server = new ObjectMap<String>();
                    ObjectMap<String> config = new ObjectMap<String>((Map<String, ?>) data.getObject(0x0002));
                    // Don't let the remote config override the directory unless a
                    // fresh create's template explicitly configures one.
                    if (config.contains("Directory") && (update != null || !template.getConfigOptions().contains("Directory"))) config.remove("Directory");

                    if (update == null) {
                        // Fresh create: start from default server settings.
                        server.set("Enabled", true);
                        server.set("Display", "");
                        server.set("Host", host.getName());
                        server.set("Template", template.getName());
                        server.set("Group", new ArrayList<String>());
                        server.set("Port", port);
                        server.set("Motd", "Some SubServer");
                        server.set("Log", true);
                        server.set("Directory", "./" + name);
                        server.set("Executable", "java -Xmx1024M -jar " + template.getType().toString() + ".jar");
                        server.set("Stop-Command", "stop");
                        server.set("Stop-Action", "NONE");
                        server.set("Run-On-Launch", false);
                        server.set("Restricted", false);
                        server.set("Incompatible", new ArrayList<String>());
                        server.set("Hidden", false);
                    } else {
                        // Update: carry over the stored settings, refresh the template.
                        server.setAll(host.plugin.servers.get().getMap("Servers").getMap(name, new HashMap<>()));
                        server.set("Template", template.getName());
                    }
                    // Remote-supplied values take precedence over the defaults.
                    server.setAll(config);

                    // Detach the old instance before constructing the replacement.
                    if (update != null) Try.all.run(() -> update.getHost().forceRemoveSubServer(name));
                    subserver = host.constructSubServer(name, server.getBoolean("Enabled"), port, ChatColor.translateAlternateColorCodes('&', Util.unescapeJavaString(server.getString("Motd"))), server.getBoolean("Log"),
                            server.getString("Directory"), server.getString("Executable"), server.getString("Stop-Command"), server.getBoolean("Hidden"), server.getBoolean("Restricted"));
                    if (server.getString("Display").length() > 0) subserver.setDisplayName(Util.unescapeJavaString(server.getString("Display")));
                    subserver.setTemplate(server.getString("Template"));
                    for (String group : server.getStringList("Group")) subserver.addGroup(group);
                    SubServer.StopAction action = Try.all.get(() -> SubServer.StopAction.valueOf(server.getString("Stop-Action").toUpperCase().replace('-', '_').replace(' ', '_')));
                    if (action != null) subserver.setStopAction(action);
                    if (server.contains("Extra")) for (String extra : server.getMap("Extra").getKeys())
                        subserver.addExtra(extra, server.getMap("Extra").getObject(extra));

                    // Persist the config unless the server is a temporary one that
                    // removes/recycles/deletes itself on stop (and wasn't stored before).
                    if ((update != null && host.plugin.servers.get().getMap("Servers").contains(name)) ||
                            !(subserver.getStopAction() == StopAction.REMOVE_SERVER || subserver.getStopAction() == StopAction.RECYCLE_SERVER || subserver.getStopAction() == StopAction.DELETE_SERVER)) {
                        host.plugin.servers.get().getMap("Servers").set(name, server);
                        host.plugin.servers.save();
                    }
                    host.addSubServer(subserver);

                    if (update == null && template.getBuildOptions().getBoolean("Run-On-Finish", true)) {
                        // Wait (250ms polls) for the server to become available, then start it.
                        while (!subserver.isAvailable() && host.isAvailable()) {
                            Thread.sleep(250);
                        }
                        if (subserver.isAvailable()) {
                            subserver.start();
                        }
                    }
                }

                host.plugin.getPluginManager().callEvent(new SubCreatedEvent(player, host, name, template, version, port, subserver, update != null, true));
                callback(origin, callback, subserver);
            } else {
                // Remote build failed: log its error text and report null.
                Logger.get(prefix).info(data.getString(0x0003));
                host.plugin.getPluginManager().callEvent(new SubCreatedEvent(player, host, name, template, version, port, update, update != null, false));
                callback(origin, callback, null);
            }
        } catch (Exception e) {
            e.printStackTrace();
            callback(origin, callback, null);
        }
    }
@Override
public void terminate() {
    // Terminate every tracked creation task. Iterate over a snapshot of the
    // key set, since terminate(String) removes entries from this.thread.
    for (String name : new ArrayList<String>(this.thread.keySet())) {
        terminate(name);
    }
}
@Override
public void terminate(String name) {
    // Lower-case once instead of recomputing on every use, and use
    // containsKey() rather than the equivalent but slower keySet().contains().
    String key = name.toLowerCase();
    if (this.thread.containsKey(key)) {
        // Signal the external host to abort the in-flight creation task.
        ((SubDataClient) host.getSubData()[0]).sendPacket(new PacketExCreateServer(key));
        thread.remove(key);
    }
}
@Override
public void waitFor() throws InterruptedException {
    // Block until every currently-tracked creation task has finished.
    // Snapshot the keys first so concurrent removals don't disturb iteration.
    for (String name : new ArrayList<String>(this.thread.keySet())) {
        waitFor(name);
    }
}
@Override
public void waitFor(String name) throws InterruptedException {
    // Poll until the named task disappears from the tracking map or the
    // host's data connection goes away. Lower-case once and use containsKey()
    // instead of the slower keySet().contains() on every poll iteration.
    String key = name.toLowerCase();
    while (this.thread.containsKey(key) && host.getSubData()[0] != null) {
        Thread.sleep(250);
    }
}
/** Returns the host this creator belongs to. */
@Override
public Host getHost() {
    return host;
}
/** Returns the port range new subservers are allocated from. */
@Override
public Range getPortRange() {
    return ports;
}
/**
 * Replaces the port range used for new subservers.
 * The range must be bounded on both ends.
 */
@Override
public void setPortRange(Range<Integer> value) {
    boolean bounded = value.hasLowerBound() && value.hasUpperBound();
    if (!bounded) throw new IllegalArgumentException("Port range is not bound");
    ports = value;
}
/** Returns the configured Git Bash directory (may be null if not configured — verify against config loader). */
@Override
public String getBashDirectory() {
    return gitBash;
}
/** Returns a logger for every creation task currently being tracked. */
@Override
public List<SubLogger> getLoggers() {
    List<SubLogger> loggers = new ArrayList<SubLogger>();
    // Snapshot the keys so concurrent task completion can't break iteration.
    for (String name : new ArrayList<String>(thread.keySet())) {
        loggers.add(getLogger(name));
    }
    return loggers;
}
/**
 * Returns the logger attached to the named creation task.
 * NOTE(review): throws NullPointerException when no task with that
 * (lower-cased) name is tracked — confirm callers only pass known names.
 */
@Override
public SubLogger getLogger(String name) {
    return this.thread.get(name.toLowerCase()).value();
}
/** Returns whether creation output is currently being logged. */
@Override
public boolean isLogging() {
    return log.value();
}
/** Enables or disables logging of creation output. */
@Override
public void setLogging(boolean value) {
    // NOTE(review): value is a primitive boolean, so this null check is
    // effectively a no-op after autoboxing — presumably kept for consistency.
    Util.nullpo(value);
    log.value(value);
}
/** Returns the server names currently reserved by in-flight creation tasks. */
@Override
public List<String> getReservedNames() {
    List<String> names = new ArrayList<String>();
    names.addAll(thread.keySet());
    return names;
}
/** Returns the ports currently reserved by in-flight creation tasks. */
@Override
public List<Integer> getReservedPorts() {
    List<Integer> reserved = new ArrayList<Integer>();
    for (Map.Entry<String, Pair<Integer, ExternalSubLogger>> entry : thread.entrySet()) {
        reserved.add(entry.getValue().key());
    }
    return reserved;
}
/**
 * Returns all externally visible templates, sorted by name.
 * Local templates take precedence over remote ones (they are inserted last
 * and overwrite same-named remote entries); internal templates are hidden.
 */
@Override
public Map<String, ServerTemplate> getTemplates() {
    TreeMap<String, ServerTemplate> visible = new TreeMap<String, ServerTemplate>();
    boolean remoteEnabled = enableRT != null && enableRT;
    if (remoteEnabled) {
        for (Map.Entry<String, ServerTemplate> entry : templatesR.entrySet()) {
            if (!entry.getValue().isInternal()) visible.put(entry.getKey(), entry.getValue());
        }
    }
    for (Map.Entry<String, ServerTemplate> entry : templates.entrySet()) {
        if (!entry.getValue().isInternal()) visible.put(entry.getKey(), entry.getValue());
    }
    return visible;
}
/**
 * Looks up a template by (case-insensitive) name, checking local templates
 * first and remote templates only when remote templates are enabled.
 * Internal templates are treated as not found.
 */
@Override
public ServerTemplate getTemplate(String name) {
    Util.nullpo(name);
    String key = name.toLowerCase();
    ServerTemplate template = templates.get(key);
    if (template == null && enableRT != null && enableRT) {
        template = templatesR.get(key);
    }
    return (template == null || template.isInternal()) ? null : template;
}
}
| |
/*
* Copyright 2010-2011 LinkedIn Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.linkedin.bowser.core.objects;
import java.util.Iterator;
import java.util.NoSuchElementException;

import com.linkedin.bowser.core.exceptions.IndexError;
import com.linkedin.bowser.core.exceptions.TypeError;
import com.linkedin.bowser.core.expn.Type;
/**
 * NQL string value: an immutable {@link NQLObject} backed by a Java {@link String}.
 * Supports length, integer indexing, per-character iteration, membership tests
 * and python-style slicing via {@link #range(int, int)}.
 */
public class StringObject extends NQLObject implements Sequence
{
  private final String _value;

  /**
   * @param value the backing string
   */
  public StringObject(String value)
  {
    super();
    _value = value;
  }

  @Override
  public Type getType()
  {
    return Type.STRING;
  }

  /** @return the number of characters in the string */
  @Override
  public int len()
  {
    return _value.length();
  }

  /**
   * Returns the one-character string at the given index.
   *
   * @param index a numeric index into the string
   * @throws TypeError  if {@code index} is not numeric
   * @throws IndexError if the index is negative or past the end
   */
  @Override
  public NQLObject get(NQLObject index)
  {
    if (!(index instanceof Numeric))
      throw new TypeError(index, "string indices must be integer, not '%s'");
    int i = ((Numeric) index).getAsInt();
    if (i < 0 || i >= _value.length())
      throw new IndexError(this);
    return Objects.create(_value.substring(i, i + 1));
  }

  /**
   * Iterates over the characters of the string, yielding one single-character
   * {@link NQLObject} at a time.
   */
  @Override
  public Iterator<NQLObject> iterator()
  {
    return new Iterator<NQLObject>()
    {
      private int _index;

      @Override
      public boolean hasNext()
      {
        return _index < _value.length();
      }

      @Override
      public NQLObject next()
      {
        // BUG FIX: the previous implementation never advanced _index, so the
        // iterator returned the first character forever. Advance the cursor
        // and honor the Iterator contract by failing when exhausted.
        if (!hasNext())
          throw new NoSuchElementException();
        NQLObject current = Objects.create(_value.substring(_index, _index + 1));
        _index++;
        return current;
      }

      @Override
      public void remove()
      {
        throw new UnsupportedOperationException();
      }
    };
  }

  /**
   * @return {@code true} iff {@code o} is a string whose text occurs within this one
   */
  @Override
  public boolean contains(NQLObject o)
  {
    if (o.getType() != Type.STRING)
      return false;
    // NOTE(review): relies on o.toString() yielding the raw text — confirm
    // against NQLObject's toString contract.
    return _value.contains(o.toString());
  }

  /**
   * Python-style slice: a negative {@code start} counts from the end, both
   * bounds are clamped to the string, and an empty slice yields "".
   */
  @Override
  public NQLObject range(int start, int end)
  {
    if (start < 0)
      start = Math.max(_value.length() + start, 0);
    end = Math.min(_value.length(), end);
    if (start >= end)
      return new StringObject("");
    else
      return new StringObject(_value.substring(start, end));
  }

  /** @return the value wrapped in single quotes, repr-style */
  @Override
  public String str()
  {
    return "'" + _value + "'";
  }

  @Override
  public int hashCode()
  {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((_value == null) ? 0 : _value.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj)
  {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    StringObject other = (StringObject) obj;
    if (_value == null)
    {
      if (other._value != null)
        return false;
    }
    else if (!_value.equals(other._value))
      return false;
    return true;
  }

  /** @return the raw backing string */
  public String getValue()
  {
    return _value;
  }
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Portions Copyright (c) Microsoft Corporation
*/
package com.azure.cosmos.implementation.guava25.collect;
import static com.azure.cosmos.implementation.guava25.base.Preconditions.checkArgument;
import static com.azure.cosmos.implementation.guava25.base.Preconditions.checkNotNull;
import static com.azure.cosmos.implementation.guava25.collect.CollectPreconditions.checkRemove;
import com.azure.cosmos.implementation.guava25.base.Function;
import com.azure.cosmos.implementation.guava25.base.Optional;
import com.azure.cosmos.implementation.guava25.base.Predicate;
import com.azure.cosmos.implementation.guava25.base.Predicates;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.RandomAccess;
import java.util.Set;
import java.util.Spliterator;
import java.util.function.Consumer;
import java.util.stream.Stream;
/**
* An assortment of mainly legacy static utility methods that operate on or return objects of type
* {@code Iterable}. Except as noted, each method has a corresponding {@link Iterator}-based method
* in the {@link Iterators} class.
*
* <p><b>Java 8 users:</b> several common uses for this class are now more comprehensively addressed
* by the new {@link java.util.stream.Stream} library. Read the method documentation below for
* comparisons. This class is not being deprecated, but we gently encourage you to migrate to
* streams.
*
* <p><i>Performance notes:</i> Unless otherwise noted, all of the iterables produced in this class
* are <i>lazy</i>, which means that their iterators only advance the backing iteration when
* absolutely necessary.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/CollectionUtilitiesExplained#iterables"> {@code
* Iterables}</a>.
*
* @author Kevin Bourrillion
* @author Jared Levy
* @since 2.0
*/
public final class Iterables {
private Iterables() {}
/** Returns an unmodifiable view of {@code iterable}. */
public static <T> Iterable<T> unmodifiableIterable(final Iterable<? extends T> iterable) {
  checkNotNull(iterable);
  boolean alreadyUnmodifiable =
      iterable instanceof UnmodifiableIterable || iterable instanceof ImmutableCollection;
  if (!alreadyUnmodifiable) {
    return new UnmodifiableIterable<>(iterable);
  }
  @SuppressWarnings({"unchecked", "rawtypes"}) // Since it's unmodifiable, the covariant cast is safe
  Iterable<T> view = (Iterable<T>) iterable;
  return view;
}
/**
 * Simply returns its argument.
 *
 * @deprecated no need to use this
 * @since 10.0
 */
@Deprecated
public static <E> Iterable<E> unmodifiableIterable(ImmutableCollection<E> iterable) {
  // An ImmutableCollection is already unmodifiable; just null-check and return.
  return checkNotNull(iterable);
}
/**
 * View of a delegate iterable whose iterators are wrapped in
 * {@link Iterators#unmodifiableIterator}; forEach/spliterator/toString
 * delegate straight through.
 */
private static final class UnmodifiableIterable<T> extends FluentIterable<T> {
  private final Iterable<? extends T> iterable;
  private UnmodifiableIterable(Iterable<? extends T> iterable) {
    this.iterable = iterable;
  }
  @Override
  public Iterator<T> iterator() {
    return Iterators.unmodifiableIterator(iterable.iterator());
  }
  @Override
  public void forEach(Consumer<? super T> action) {
    iterable.forEach(action);
  }
  @SuppressWarnings({"unchecked", "rawtypes"}) // safe upcast, assuming no one has a crazy Spliterator subclass
  @Override
  public Spliterator<T> spliterator() {
    return (Spliterator<T>) iterable.spliterator();
  }
  @Override
  public String toString() {
    return iterable.toString();
  }
  // no equals and hashCode; it would break the contract!
}
/** Returns the number of elements in {@code iterable}. */
public static int size(Iterable<?> iterable) {
  if (iterable instanceof Collection) {
    return ((Collection<?>) iterable).size();
  }
  return Iterators.size(iterable.iterator());
}
/**
 * Returns {@code true} if {@code iterable} contains any element {@code o} for which {@code
 * Objects.equals(o, element)} would return {@code true}. Otherwise returns {@code false}, even in
 * cases where {@link Collection#contains} might throw {@link NullPointerException} or {@link
 * ClassCastException}.
 */
public static boolean contains(Iterable<?> iterable, Object element) {
  return (iterable instanceof Collection)
      ? Collections2.safeContains((Collection<?>) iterable, element)
      : Iterators.contains(iterable.iterator(), element);
}
/**
 * Removes, from an iterable, every element that belongs to the provided collection.
 *
 * <p>This method calls {@link Collection#removeAll} if {@code iterable} is a collection, and
 * {@link Iterators#removeAll} otherwise.
 *
 * @param removeFrom the iterable to (potentially) remove elements from
 * @param elementsToRemove the elements to remove
 * @return {@code true} if any element was removed from {@code iterable}
 */
public static boolean removeAll(Iterable<?> removeFrom, Collection<?> elementsToRemove) {
  if (removeFrom instanceof Collection) {
    return ((Collection<?>) removeFrom).removeAll(checkNotNull(elementsToRemove));
  }
  return Iterators.removeAll(removeFrom.iterator(), elementsToRemove);
}
/**
 * Removes, from an iterable, every element that does not belong to the provided collection.
 *
 * <p>This method calls {@link Collection#retainAll} if {@code iterable} is a collection, and
 * {@link Iterators#retainAll} otherwise.
 *
 * @param removeFrom the iterable to (potentially) remove elements from
 * @param elementsToRetain the elements to retain
 * @return {@code true} if any element was removed from {@code iterable}
 */
public static boolean retainAll(Iterable<?> removeFrom, Collection<?> elementsToRetain) {
  if (removeFrom instanceof Collection) {
    return ((Collection<?>) removeFrom).retainAll(checkNotNull(elementsToRetain));
  }
  return Iterators.retainAll(removeFrom.iterator(), elementsToRetain);
}
/**
 * Removes, from an iterable, every element that satisfies the provided predicate.
 *
 * <p>Removals may or may not happen immediately as each element is tested against the predicate.
 * The behavior of this method is not specified if {@code predicate} is dependent on {@code
 * removeFrom}.
 *
 * <p><b>Java 8 users:</b> if {@code removeFrom} is a {@link Collection}, use {@code
 * removeFrom.removeIf(predicate)} instead.
 *
 * @param removeFrom the iterable to (potentially) remove elements from
 * @param predicate a predicate that determines whether an element should be removed
 * @return {@code true} if any elements were removed from the iterable
 * @throws UnsupportedOperationException if the iterable does not support {@code remove()}.
 * @since 2.0
 */
public static <T> boolean removeIf(Iterable<T> removeFrom, Predicate<? super T> predicate) {
  return (removeFrom instanceof Collection)
      ? ((Collection<T>) removeFrom).removeIf(predicate)
      : Iterators.removeIf(removeFrom.iterator(), predicate);
}
/** Removes and returns the first matching element, or returns {@code null} if there is none. */
static <T> T removeFirstMatching(Iterable<T> removeFrom, Predicate<? super T> predicate) {
  checkNotNull(predicate);
  for (Iterator<T> iterator = removeFrom.iterator(); iterator.hasNext(); ) {
    T candidate = iterator.next();
    if (predicate.apply(candidate)) {
      iterator.remove();
      return candidate;
    }
  }
  return null;
}
/**
 * Determines whether two iterables contain equal elements in the same order. More specifically,
 * this method returns {@code true} if {@code iterable1} and {@code iterable2} contain the same
 * number of elements and every element of {@code iterable1} is equal to the corresponding element
 * of {@code iterable2}.
 */
public static boolean elementsEqual(Iterable<?> iterable1, Iterable<?> iterable2) {
  // Fast path: two collections of different sizes can never be element-equal.
  if (iterable1 instanceof Collection && iterable2 instanceof Collection) {
    int size1 = ((Collection<?>) iterable1).size();
    int size2 = ((Collection<?>) iterable2).size();
    if (size1 != size2) {
      return false;
    }
  }
  return Iterators.elementsEqual(iterable1.iterator(), iterable2.iterator());
}
/**
 * Returns a string representation of {@code iterable}, with the format {@code [e1, e2, ..., en]}
 * (that is, identical to {@link java.util.Arrays Arrays}{@code
 * .toString(Iterables.toArray(iterable))}). Note that for <i>most</i> implementations of {@link
 * Collection}, {@code collection.toString()} also gives the same result, but that behavior is not
 * generally guaranteed.
 */
public static String toString(Iterable<?> iterable) {
  Iterator<?> elements = iterable.iterator();
  return Iterators.toString(elements);
}
/**
 * Returns the single element contained in {@code iterable}.
 *
 * <p><b>Java 8 users:</b> the {@code Stream} equivalent to this method is {@code
 * stream.collect(MoreCollectors.onlyElement())}.
 *
 * @throws NoSuchElementException if the iterable is empty
 * @throws IllegalArgumentException if the iterable contains multiple elements
 */
public static <T> T getOnlyElement(Iterable<T> iterable) {
  // Delegates to the iterator-based implementation.
  return Iterators.getOnlyElement(iterable.iterator());
}
/**
 * Returns the single element contained in {@code iterable}, or {@code defaultValue} if the
 * iterable is empty.
 *
 * <p><b>Java 8 users:</b> the {@code Stream} equivalent to this method is {@code
 * stream.collect(MoreCollectors.toOptional()).orElse(defaultValue)}.
 *
 * @throws IllegalArgumentException if the iterator contains multiple elements
 */
public static <T> T getOnlyElement(Iterable<? extends T> iterable, T defaultValue) {
  // Delegates to the iterator-based implementation.
  return Iterators.getOnlyElement(iterable.iterator(), defaultValue);
}
/**
 * Copies an iterable's elements into an array.
 *
 * @param iterable the iterable to copy
 * @param type the type of the elements
 * @return a newly-allocated array into which all the elements of the iterable have been copied
 */
public static <T> T[] toArray(Iterable<? extends T> iterable, Class<T> type) {
  return toArray(iterable, ObjectArrays.newArray(type, 0));
}
/** Copies the iterable into {@code array} if it fits, otherwise a new array of the same runtime type. */
static <T> T[] toArray(Iterable<? extends T> iterable, T[] array) {
  Collection<? extends T> collection = castOrCopyToCollection(iterable);
  return collection.toArray(array);
}
/**
 * Copies an iterable's elements into an array.
 *
 * @param iterable the iterable to copy
 * @return a newly-allocated array into which all the elements of the iterable have been copied
 */
static Object[] toArray(Iterable<?> iterable) {
  return castOrCopyToCollection(iterable).toArray();
}
/**
 * Converts an iterable into a collection. If the iterable is already a collection, it is
 * returned. Otherwise, an {@link java.util.ArrayList} is created with the contents of the
 * iterable in the same iteration order.
 */
private static <E> Collection<E> castOrCopyToCollection(Iterable<E> iterable) {
  if (iterable instanceof Collection) {
    return (Collection<E>) iterable;
  }
  return Lists.newArrayList(iterable.iterator());
}
/**
 * Adds all elements in {@code iterable} to {@code collection}.
 *
 * @return {@code true} if {@code collection} was modified as a result of this operation.
 */
public static <T> boolean addAll(Collection<T> addTo, Iterable<? extends T> elementsToAdd) {
  if (!(elementsToAdd instanceof Collection)) {
    return Iterators.addAll(addTo, checkNotNull(elementsToAdd).iterator());
  }
  Collection<? extends T> source = Collections2.cast(elementsToAdd);
  return addTo.addAll(source);
}
/**
 * Returns the number of elements in the specified iterable that equal the specified object. This
 * implementation avoids a full iteration when the iterable is a {@link Multiset} or {@link Set}.
 *
 * <p><b>Java 8 users:</b> In most cases, the {@code Stream} equivalent of this method is {@code
 * stream.filter(element::equals).count()}. If {@code element} might be null, use {@code
 * stream.filter(Predicate.isEqual(element)).count()} instead.
 *
 * @see java.util.Collections#frequency(Collection, Object) Collections.frequency(Collection,
 *     Object)
 */
public static int frequency(Iterable<?> iterable, Object element) {
  if (iterable instanceof Multiset) {
    return ((Multiset<?>) iterable).count(element);
  }
  if (iterable instanceof Set) {
    // A set holds at most one occurrence.
    return ((Set<?>) iterable).contains(element) ? 1 : 0;
  }
  return Iterators.frequency(iterable.iterator(), element);
}
/**
 * Returns an iterable whose iterators cycle indefinitely over the elements of {@code iterable}.
 *
 * <p>That iterator supports {@code remove()} if {@code iterable.iterator()} does. After {@code
 * remove()} is called, subsequent cycles omit the removed element, which is no longer in {@code
 * iterable}. The iterator's {@code hasNext()} method returns {@code true} until {@code iterable}
 * is empty.
 *
 * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You
 * should use an explicit {@code break} or be certain that you will eventually remove all the
 * elements.
 *
 * <p>To cycle over the iterable {@code n} times, use the following: {@code
 * Iterables.concat(Collections.nCopies(n, iterable))}
 *
 * <p><b>Java 8 users:</b> The {@code Stream} equivalent of this method is {@code
 * Stream.generate(() -> iterable).flatMap(Streams::stream)}.
 */
public static <T> Iterable<T> cycle(final Iterable<T> iterable) {
  checkNotNull(iterable);
  // Lazy view: nothing is read from `iterable` until iteration begins.
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      return Iterators.cycle(iterable);
    }
    @Override
    public Spliterator<T> spliterator() {
      // Infinite stream that repeatedly emits the iterable, flattened into elements.
      return Stream.generate(() -> iterable).flatMap(Streams::stream).spliterator();
    }
    @Override
    public String toString() {
      return iterable.toString() + " (cycled)";
    }
  };
}
/**
 * Returns an iterable whose iterators cycle indefinitely over the provided elements.
 *
 * <p>After {@code remove} is invoked on a generated iterator, the removed element will no longer
 * appear in either that iterator or any other iterator created from the same source iterable.
 * That is, this method behaves exactly as {@code Iterables.cycle(Lists.newArrayList(elements))}.
 * The iterator's {@code hasNext} method returns {@code true} until all of the original elements
 * have been removed.
 *
 * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You
 * should use an explicit {@code break} or be certain that you will eventually remove all the
 * elements.
 *
 * <p>To cycle over the elements {@code n} times, use the following: {@code
 * Iterables.concat(Collections.nCopies(n, Arrays.asList(elements)))}
 *
 * <p><b>Java 8 users:</b> If passing a single element {@code e}, the {@code Stream} equivalent of
 * this method is {@code Stream.generate(() -> e)}. Otherwise, put the elements in a collection
 * and use {@code Stream.generate(() -> collection).flatMap(Collection::stream)}.
 */
@SafeVarargs
@SuppressWarnings("varargs")
public static <T> Iterable<T> cycle(T... elements) {
  // Copy into a mutable list so remove() on the cycled iterator works.
  List<T> copy = Lists.newArrayList(elements);
  return cycle(copy);
}
// All concat overloads are thin delegates to FluentIterable.concat; per the
// javadoc below, the source iterators are not polled until necessary.
/**
 * Combines two iterables into a single iterable. The returned iterable has an iterator that
 * traverses the elements in {@code a}, followed by the elements in {@code b}. The source
 * iterators are not polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>Java 8 users:</b> The {@code Stream} equivalent of this method is {@code Stream.concat(a,
 * b)}.
 */
public static <T> Iterable<T> concat(Iterable<? extends T> a, Iterable<? extends T> b) {
  return FluentIterable.concat(a, b);
}
/**
 * Combines three iterables into a single iterable. The returned iterable has an iterator that
 * traverses the elements in {@code a}, followed by the elements in {@code b}, followed by the
 * elements in {@code c}. The source iterators are not polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>Java 8 users:</b> The {@code Stream} equivalent of this method is {@code
 * Streams.concat(a, b, c)}.
 */
public static <T> Iterable<T> concat(
    Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c) {
  return FluentIterable.concat(a, b, c);
}
/**
 * Combines four iterables into a single iterable. The returned iterable has an iterator that
 * traverses the elements in {@code a}, followed by the elements in {@code b}, followed by the
 * elements in {@code c}, followed by the elements in {@code d}. The source iterators are not
 * polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>Java 8 users:</b> The {@code Stream} equivalent of this method is {@code
 * Streams.concat(a, b, c, d)}.
 */
public static <T> Iterable<T> concat(
    Iterable<? extends T> a,
    Iterable<? extends T> b,
    Iterable<? extends T> c,
    Iterable<? extends T> d) {
  return FluentIterable.concat(a, b, c, d);
}
/**
 * Combines multiple iterables into a single iterable. The returned iterable has an iterator that
 * traverses the elements of each iterable in {@code inputs}. The input iterators are not polled
 * until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>Java 8 users:</b> The {@code Stream} equivalent of this method is {@code
 * Streams.concat(...)}.
 *
 * @throws NullPointerException if any of the provided iterables is null
 */
@SafeVarargs
@SuppressWarnings("varargs")
public static <T> Iterable<T> concat(Iterable<? extends T>... inputs) {
  return FluentIterable.concat(inputs);
}
/**
 * Combines multiple iterables into a single iterable. The returned iterable has an iterator that
 * traverses the elements of each iterable in {@code inputs}. The input iterators are not polled
 * until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it. The methods of the returned iterable may throw {@code
 * NullPointerException} if any of the input iterators is null.
 *
 * <p><b>Java 8 users:</b> The {@code Stream} equivalent of this method is {@code
 * streamOfStreams.flatMap(s -> s)}.
 */
public static <T> Iterable<T> concat(Iterable<? extends Iterable<? extends T>> inputs) {
  return FluentIterable.concat(inputs);
}
/**
 * Divides an iterable into unmodifiable sublists of the given size (the final iterable may be
 * smaller). For example, partitioning an iterable containing {@code [a, b, c, d, e]} with a
 * partition size of 3 yields {@code [[a, b, c], [d, e]]} -- an outer iterable containing two
 * inner lists of three and two elements, all in the original order.
 *
 * <p>Iterators returned by the returned iterable do not support the {@link Iterator#remove()}
 * method. The returned lists implement {@link RandomAccess}, whether or not the input list does.
 *
 * <p><b>Note:</b> if {@code iterable} is a {@link List}, use {@link Lists#partition(List, int)}
 * instead.
 *
 * @param iterable the iterable to return a partitioned view of
 * @param size the desired size of each partition (the last may be smaller)
 * @return an iterable of unmodifiable lists containing the elements of {@code iterable} divided
 *     into partitions
 * @throws IllegalArgumentException if {@code size} is nonpositive
 */
public static <T> Iterable<List<T>> partition(final Iterable<T> iterable, final int size) {
  checkNotNull(iterable);
  checkArgument(size > 0);
  // Lazy view: each call to iterator() re-partitions a fresh iterator.
  return new FluentIterable<List<T>>() {
    @Override
    public Iterator<List<T>> iterator() {
      return Iterators.partition(iterable.iterator(), size);
    }
  };
}
/**
 * Divides an iterable into unmodifiable sublists of the given size, padding the final iterable
 * with null values if necessary. For example, partitioning an iterable containing {@code [a, b,
 * c, d, e]} with a partition size of 3 yields {@code [[a, b, c], [d, e, null]]} -- an outer
 * iterable containing two inner lists of three elements each, all in the original order.
 *
 * <p>Iterators returned by the returned iterable do not support the {@link Iterator#remove()}
 * method.
 *
 * @param iterable the iterable to return a partitioned view of
 * @param size the desired size of each partition
 * @return an iterable of unmodifiable lists containing the elements of {@code iterable} divided
 *     into partitions (the final iterable may have trailing null elements)
 * @throws IllegalArgumentException if {@code size} is nonpositive
 */
public static <T> Iterable<List<T>> paddedPartition(final Iterable<T> iterable, final int size) {
  checkNotNull(iterable);
  checkArgument(size > 0);
  // Same lazy-view shape as partition(), but the last sublist is null-padded.
  return new FluentIterable<List<T>>() {
    @Override
    public Iterator<List<T>> iterator() {
      return Iterators.paddedPartition(iterable.iterator(), size);
    }
  };
}
/**
 * Returns a view of {@code unfiltered} containing all elements that satisfy the input predicate
 * {@code retainIfTrue}. The returned iterable's iterator does not support {@code remove()}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#filter}.
 */
public static <T> Iterable<T> filter(
    final Iterable<T> unfiltered, final Predicate<? super T> retainIfTrue) {
  checkNotNull(unfiltered);
  checkNotNull(retainIfTrue);
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      return Iterators.filter(unfiltered.iterator(), retainIfTrue);
    }
    @Override
    public void forEach(Consumer<? super T> action) {
      checkNotNull(action);
      // Test each element as it is visited; only matches reach the action.
      unfiltered.forEach(
          (T a) -> {
            if (retainIfTrue.test(a)) {
              action.accept(a);
            }
          });
    }
    @Override
    public Spliterator<T> spliterator() {
      return CollectSpliterators.filter(unfiltered.spliterator(), retainIfTrue);
    }
  };
}
/**
 * Returns a view of {@code unfiltered} containing all elements that are of the type {@code
 * desiredType}. The returned iterable's iterator does not support {@code remove()}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(type::isInstance).map(type::cast)}.
 * This does perform a little more work than necessary, so another option is to insert an
 * unchecked cast at some later point:
 *
 * <pre>
 * {@code @SuppressWarnings({"unchecked", "rawtypes"}) // safe because of ::isInstance check
 * ImmutableList<NewType> result =
 *     (ImmutableList) stream.filter(NewType.class::isInstance).collect(toImmutableList());}
 * </pre>
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public static <T> Iterable<T> filter(final Iterable<?> unfiltered, final Class<T> desiredType) {
  checkNotNull(unfiltered);
  checkNotNull(desiredType);
  // The instanceOf predicate guarantees every retained element is a T, so the cast is safe.
  return (Iterable<T>) filter(unfiltered, Predicates.instanceOf(desiredType));
}
// The following query methods all delegate to the equivalent Iterators methods.
/**
 * Returns {@code true} if any element in {@code iterable} satisfies the predicate.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#anyMatch}.
 */
public static <T> boolean any(Iterable<T> iterable, Predicate<? super T> predicate) {
  return Iterators.any(iterable.iterator(), predicate);
}
/**
 * Returns {@code true} if every element in {@code iterable} satisfies the predicate. If {@code
 * iterable} is empty, {@code true} is returned.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#allMatch}.
 */
public static <T> boolean all(Iterable<T> iterable, Predicate<? super T> predicate) {
  return Iterators.all(iterable.iterator(), predicate);
}
/**
 * Returns the first element in {@code iterable} that satisfies the given predicate; use this
 * method only when such an element is known to exist. If it is possible that <i>no</i> element
 * will match, use {@link #tryFind} or {@link #find(Iterable, Predicate, Object)} instead.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(predicate).findFirst().get()}
 *
 * @throws NoSuchElementException if no element in {@code iterable} matches the given predicate
 */
public static <T> T find(Iterable<T> iterable, Predicate<? super T> predicate) {
  return Iterators.find(iterable.iterator(), predicate);
}
/**
 * Returns the first element in {@code iterable} that satisfies the given predicate, or {@code
 * defaultValue} if none found. Note that this can usually be handled more naturally using {@code
 * tryFind(iterable, predicate).or(defaultValue)}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code
 * stream.filter(predicate).findFirst().orElse(defaultValue)}
 *
 * @since 7.0
 */
public static <T> T find(
    Iterable<? extends T> iterable,
    Predicate<? super T> predicate,
    T defaultValue) {
  return Iterators.find(iterable.iterator(), predicate, defaultValue);
}
/**
 * Returns an {@link Optional} containing the first element in {@code iterable} that satisfies the
 * given predicate, if such an element exists.
 *
 * <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code null}. If {@code null}
 * is matched in {@code iterable}, a NullPointerException will be thrown.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(predicate).findFirst()}
 *
 * @since 11.0
 */
public static <T> Optional<T> tryFind(Iterable<T> iterable, Predicate<? super T> predicate) {
  return Iterators.tryFind(iterable.iterator(), predicate);
}
/**
 * Returns the index in {@code iterable} of the first element that satisfies the provided {@code
 * predicate}, or {@code -1} if the Iterable has no such elements.
 *
 * <p>More formally, returns the lowest index {@code i} such that {@code
 * predicate.apply(Iterables.get(iterable, i))} returns {@code true}, or {@code -1} if there is no
 * such index.
 *
 * @since 2.0
 */
public static <T> int indexOf(Iterable<T> iterable, Predicate<? super T> predicate) {
  return Iterators.indexOf(iterable.iterator(), predicate);
}
/**
 * Returns a lazily transformed view of {@code fromIterable}, applying {@code function} to each
 * element as it is requested.
 *
 * <p>Iterators of the returned view support {@code remove()} whenever {@code fromIterable}'s
 * iterators do; a successful {@code remove()} removes the corresponding element from
 * {@code fromIterable}.
 *
 * <p>If the input {@code Iterable} is known to be a {@code List} or other {@code Collection},
 * consider {@link Lists#transform} and {@link Collections2#transform}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#map}
 */
public static <F, T> Iterable<T> transform(
    final Iterable<F> fromIterable, final Function<? super F, ? extends T> function) {
  checkNotNull(fromIterable);
  checkNotNull(function);
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      // Pull-style iteration: wrap the source iterator with the mapping function.
      return Iterators.transform(fromIterable.iterator(), function);
    }
    @Override
    public void forEach(Consumer<? super T> action) {
      checkNotNull(action);
      // Push-style iteration: map each source element before handing it to the action.
      fromIterable.forEach(input -> action.accept(function.apply(input)));
    }
    @Override
    public Spliterator<T> spliterator() {
      return CollectSpliterators.map(fromIterable.spliterator(), function);
    }
  };
}
/**
 * Returns the element at the specified position in an iterable.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.skip(position).findFirst().get()} (throws
 * {@code NoSuchElementException} if out of bounds)
 *
 * @param position position of the element to return
 * @return the element at the specified position in {@code iterable}
 * @throws IndexOutOfBoundsException if {@code position} is negative or greater than or equal to
 *     the size of {@code iterable}
 */
public static <T> T get(Iterable<T> iterable, int position) {
  checkNotNull(iterable);
  // Lists support random access directly; everything else pays a linear walk.
  if (iterable instanceof List) {
    return ((List<T>) iterable).get(position);
  }
  return Iterators.get(iterable.iterator(), position);
}
/**
 * Returns the element at the specified position in an iterable, or a default value if that
 * position does not exist.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code
 * stream.skip(position).findFirst().orElse(defaultValue)} (returns the default value if the index
 * is out of bounds)
 *
 * @param position position of the element to return
 * @param defaultValue the default value to return if {@code position} is greater than or equal to
 *     the size of the iterable
 * @return the element at the specified position in {@code iterable} or {@code defaultValue} if
 *     {@code iterable} contains fewer than {@code position + 1} elements.
 * @throws IndexOutOfBoundsException if {@code position} is negative
 * @since 4.0
 */
public static <T> T get(
    Iterable<? extends T> iterable, int position, T defaultValue) {
  checkNotNull(iterable);
  Iterators.checkNonnegative(position);
  // Non-list path: advance a fresh iterator and take whatever (if anything) remains.
  if (!(iterable instanceof List)) {
    Iterator<? extends T> iterator = iterable.iterator();
    Iterators.advance(iterator, position);
    return Iterators.getNext(iterator, defaultValue);
  }
  List<? extends T> list = Lists.cast(iterable);
  return (position < list.size()) ? list.get(position) : defaultValue;
}
/**
 * Returns the first element in {@code iterable}, or {@code defaultValue} if the iterable is
 * empty. The {@link Iterators} analog to this method is {@link Iterators#getNext}.
 *
 * <p>If no default value is desired (and the caller instead wants a {@link
 * NoSuchElementException} to be thrown), it is recommended that {@code
 * iterable.iterator().next()} is used instead.
 *
 * <p>To get the only element in a single-element {@code Iterable}, consider using {@link
 * #getOnlyElement(Iterable)} or {@link #getOnlyElement(Iterable, Object)} instead.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.findFirst().orElse(defaultValue)}
 *
 * @param defaultValue the default value to return if the iterable is empty
 * @return the first element of {@code iterable} or the default value
 * @since 7.0
 */
public static <T> T getFirst(Iterable<? extends T> iterable, T defaultValue) {
  Iterator<? extends T> elements = iterable.iterator();
  return Iterators.getNext(elements, defaultValue);
}
/**
 * Returns the last element of {@code iterable}. If {@code iterable} is a {@link List} with
 * {@link RandomAccess} support, this operation is guaranteed to be {@code O(1)}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Streams#findLast Streams.findLast(stream).get()}
 *
 * @return the last element of {@code iterable}
 * @throws NoSuchElementException if the iterable is empty
 */
public static <T> T getLast(Iterable<T> iterable) {
  // TODO(kevinb): Support a concurrently modified collection?
  if (!(iterable instanceof List)) {
    return Iterators.getLast(iterable.iterator());
  }
  List<T> list = (List<T>) iterable;
  if (list.isEmpty()) {
    throw new NoSuchElementException();
  }
  return getLastInNonemptyList(list);
}
/**
 * Returns the last element of {@code iterable}, or {@code defaultValue} if the iterable is
 * empty. If {@code iterable} is a {@link List} with {@link RandomAccess} support, this operation
 * is guaranteed to be {@code O(1)}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code Streams.findLast(stream).orElse(defaultValue)}
 *
 * @param defaultValue the value to return if {@code iterable} is empty
 * @return the last element of {@code iterable} or the default value
 * @since 3.0
 */
public static <T> T getLast(Iterable<? extends T> iterable, T defaultValue) {
  if (iterable instanceof Collection) {
    Collection<? extends T> collection = Collections2.cast(iterable);
    // An empty collection short-circuits without creating an iterator.
    if (collection.isEmpty()) {
      return defaultValue;
    }
    if (iterable instanceof List) {
      return getLastInNonemptyList(Lists.cast(iterable));
    }
  }
  return Iterators.getLast(iterable.iterator(), defaultValue);
}
// Caller guarantees the list is non-empty, so size() - 1 is always a valid index.
private static <T> T getLastInNonemptyList(List<T> list) {
  int lastIndex = list.size() - 1;
  return list.get(lastIndex);
}
/**
 * Returns a view of {@code iterable} that skips its first {@code numberToSkip} elements. If
 * {@code iterable} contains fewer than {@code numberToSkip} elements, the returned iterable skips
 * all of its elements.
 *
 * <p>Modifications to the underlying {@link Iterable} before a call to {@code iterator()} are
 * reflected in the returned iterator. That is, the iterator skips the first {@code numberToSkip}
 * elements that exist when the {@code Iterator} is created, not when {@code skip()} is called.
 *
 * <p>The returned iterable's iterator supports {@code remove()} if the iterator of the underlying
 * iterable supports it. Note that it is <i>not</i> possible to delete the last skipped element by
 * immediately calling {@code remove()} on that iterator, as the {@code Iterator} contract states
 * that a call to {@code remove()} before a call to {@code next()} will throw an {@link
 * IllegalStateException}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#skip}
 *
 * @since 3.0
 */
public static <T> Iterable<T> skip(final Iterable<T> iterable, final int numberToSkip) {
  checkNotNull(iterable);
  checkArgument(numberToSkip >= 0, "number to skip cannot be negative");
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      // Fast path: a List can skip via subList without touching the skipped elements.
      if (iterable instanceof List) {
        final List<T> list = (List<T>) iterable;
        int toSkip = Math.min(list.size(), numberToSkip);
        return list.subList(toSkip, list.size()).iterator();
      }
      // Slow path: eagerly advance past the skipped prefix at iterator-creation time.
      final Iterator<T> iterator = iterable.iterator();
      Iterators.advance(iterator, numberToSkip);
      /*
       * We can't just return the iterator because an immediate call to its
       * remove() method would remove one of the skipped elements instead of
       * throwing an IllegalStateException.
       */
      return new Iterator<T>() {
        // True until the first successful next(); guards remove() per the Iterator contract.
        boolean atStart = true;
        @Override
        public boolean hasNext() {
          return iterator.hasNext();
        }
        @Override
        public T next() {
          T result = iterator.next();
          atStart = false; // not called if next() fails
          return result;
        }
        @Override
        public void remove() {
          // Reject remove() before any next(); otherwise it would delete a skipped element.
          checkRemove(!atStart);
          iterator.remove();
        }
      };
    }
    @Override
    public Spliterator<T> spliterator() {
      // Mirrors iterator(): subList view for lists, Stream.skip for everything else.
      if (iterable instanceof List) {
        final List<T> list = (List<T>) iterable;
        int toSkip = Math.min(list.size(), numberToSkip);
        return list.subList(toSkip, list.size()).spliterator();
      } else {
        return Streams.stream(iterable).skip(numberToSkip).spliterator();
      }
    }
  };
}
/**
 * Returns a view of {@code iterable} truncated to its first {@code limitSize} elements. If
 * {@code iterable} holds fewer than {@code limitSize} elements, the view simply contains them
 * all. The returned iterable's iterator supports {@code remove()} if {@code iterable}'s
 * iterator does.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#limit}
 *
 * @param iterable the iterable to limit
 * @param limitSize the maximum number of elements in the returned iterable
 * @throws IllegalArgumentException if {@code limitSize} is negative
 * @since 3.0
 */
public static <T> Iterable<T> limit(final Iterable<T> iterable, final int limitSize) {
  checkNotNull(iterable);
  checkArgument(limitSize >= 0, "limit is negative");
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      // The element-count bookkeeping lives in the iterator-level helper.
      return Iterators.limit(iterable.iterator(), limitSize);
    }
    @Override
    public Spliterator<T> spliterator() {
      return Streams.stream(iterable).limit(limitSize).spliterator();
    }
  };
}
/**
 * Returns a view of the supplied iterable that wraps each generated {@link Iterator} through
 * {@link Iterators#consumingIterator(Iterator)}.
 *
 * <p>Note: If {@code iterable} is a {@link Queue}, the returned iterable will get entries from
 * {@link Queue#remove()} since {@link Queue}'s iteration order is undefined. Calling {@link
 * Iterator#hasNext()} on a generated iterator from the returned iterable may cause an item to be
 * immediately dequeued for return on a subsequent call to {@link Iterator#next()}.
 *
 * @param iterable the iterable to wrap
 * @return a view of the supplied iterable that wraps each generated iterator through {@link
 *     Iterators#consumingIterator(Iterator)}; for queues, an iterable that generates iterators
 *     that return and consume the queue's elements in queue order
 * @see Iterators#consumingIterator(Iterator)
 * @since 2.0
 */
public static <T> Iterable<T> consumingIterable(final Iterable<T> iterable) {
  checkNotNull(iterable);
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      if (iterable instanceof Queue) {
        // Queues have no defined iteration order, so consume via Queue.remove() instead.
        return new ConsumingQueueIterator<>((Queue<T>) iterable);
      }
      return Iterators.consumingIterator(iterable.iterator());
    }
    @Override
    public String toString() {
      return "Iterables.consumingIterable(...)";
    }
  };
}
// Methods only in Iterables, not in Iterators
/**
 * Determines whether the given iterable contains no elements.
 *
 * <p>There is no precise {@link Iterator} equivalent to this method, since one can only ask an
 * iterator whether it has any elements <i>remaining</i> (which one does using {@link
 * Iterator#hasNext}).
 *
 * <p><b>{@code Stream} equivalent:</b> {@code !stream.findAny().isPresent()}
 *
 * @return {@code true} if the iterable contains no elements
 */
public static boolean isEmpty(Iterable<?> iterable) {
  // Collections can answer in O(1); anything else requires creating one iterator.
  if (iterable instanceof Collection) {
    Collection<?> collection = (Collection<?>) iterable;
    return collection.isEmpty();
  }
  return !iterable.iterator().hasNext();
}
/**
 * Returns an iterable over the merged contents of all given {@code iterables}. Equivalent entries
 * will not be de-duplicated.
 *
 * <p>Callers must ensure that the source {@code iterables} are in non-descending order as this
 * method does not sort its input.
 *
 * <p>For any equivalent elements across all {@code iterables}, it is undefined which element is
 * returned first.
 *
 * @since 11.0
 */
public static <T> Iterable<T> mergeSorted(
    final Iterable<? extends Iterable<? extends T>> iterables,
    final Comparator<? super T> comparator) {
  checkNotNull(iterables, "iterables");
  checkNotNull(comparator, "comparator");
  // Each call to iterator() re-merges fresh iterators from the sources.
  FluentIterable<T> merged =
      new FluentIterable<T>() {
        @Override
        public Iterator<T> iterator() {
          return Iterators.mergeSorted(
              Iterables.transform(iterables, Iterables.<T>toIterator()), comparator);
        }
      };
  // Wrap so callers cannot remove() through the merged view.
  return new UnmodifiableIterable<>(merged);
}
// TODO(user): Is this the best place for this? Move to fluent functions?
// Useful as a public method?
/**
 * Returns a function that maps an {@code Iterable} to a fresh {@code Iterator} over it.
 * Used by {@link #mergeSorted} to turn each source iterable into an iterator on demand.
 */
static <T> Function<Iterable<? extends T>, Iterator<? extends T>> toIterator() {
  return new Function<Iterable<? extends T>, Iterator<? extends T>>() {
    @Override
    public Iterator<? extends T> apply(Iterable<? extends T> iterable) {
      return iterable.iterator();
    }
  };
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.ConfigurationResolver;
import com.google.devtools.build.lib.analysis.config.TransitionResolver;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.Info;
import com.google.devtools.build.lib.packages.NativeProvider;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.TriState;
import com.google.devtools.build.lib.skyframe.SkyframeExecutor;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
/**
 * Utility functions for use during analysis.
 *
 * <p>All members are static; the class cannot be instantiated.
 */
public final class AnalysisUtils {
  private AnalysisUtils() {
    throw new IllegalStateException(); // utility class
  }
  /**
   * Returns whether link stamping is enabled for a rule.
   *
   * <p>This returns false for unstampable rule classes and for rules in the
   * host configuration. Otherwise it returns the value of the stamp attribute,
   * or of the stamp option if the attribute value is -1.
   */
  public static boolean isStampingEnabled(RuleContext ruleContext, BuildConfiguration config) {
    // Host tools are never stamped; rules without a "stamp" attribute are unstampable.
    if (config.isHostConfiguration()
        || !ruleContext.attributes().has("stamp", BuildType.TRISTATE)) {
      return false;
    }
    TriState stamp = ruleContext.attributes().get("stamp", BuildType.TRISTATE);
    // AUTO defers to the configuration's stamping option.
    return stamp == TriState.YES || (stamp == TriState.AUTO && config.stampBinaries());
  }
  /** Convenience overload that reads the configuration from the rule context itself. */
  public static boolean isStampingEnabled(RuleContext ruleContext) {
    return isStampingEnabled(ruleContext, ruleContext.getConfiguration());
  }
  // TODO(bazel-team): These need Iterable<? extends TransitiveInfoCollection> because they need to
  // be called with Iterable<ConfiguredTarget>. Once the configured target lockdown is complete, we
  // can eliminate the "extends" clauses.
  /**
   * Returns the list of providers of the specified type from a set of transitive info
   * collections.
   *
   * <p>Prerequisites lacking the provider are silently skipped; input order is preserved.
   */
  public static <C extends TransitiveInfoProvider> Iterable<C> getProviders(
      Iterable<? extends TransitiveInfoCollection> prerequisites, Class<C> provider) {
    ImmutableList.Builder<C> result = ImmutableList.builder();
    for (TransitiveInfoCollection prerequisite : prerequisites) {
      C prerequisiteProvider = prerequisite.getProvider(provider);
      if (prerequisiteProvider != null) {
        result.add(prerequisiteProvider);
      }
    }
    return result.build();
  }
  /**
   * Returns the list of declared providers (native and Skylark) of the specified Skylark key from a
   * set of transitive info collections.
   *
   * <p>Prerequisites lacking the provider are silently skipped; input order is preserved.
   */
  public static <T extends Info> Iterable<T> getProviders(
      Iterable<? extends TransitiveInfoCollection> prerequisites,
      final NativeProvider<T> skylarkKey) {
    ImmutableList.Builder<T> result = ImmutableList.builder();
    for (TransitiveInfoCollection prerequisite : prerequisites) {
      T prerequisiteProvider = prerequisite.get(skylarkKey);
      if (prerequisiteProvider != null) {
        result.add(prerequisiteProvider);
      }
    }
    return result.build();
  }
  /**
   * Returns the iterable of collections that have the specified provider.
   */
  public static <S extends TransitiveInfoCollection, C extends TransitiveInfoProvider> Iterable<S>
      filterByProvider(Iterable<S> prerequisites, final Class<C> provider) {
    // Lazy view: the filter is applied on iteration, not eagerly.
    return Iterables.filter(prerequisites, target -> target.getProvider(provider) != null);
  }
  /** Returns the iterable of collections that have the specified provider. */
  public static <S extends TransitiveInfoCollection, C extends Info> Iterable<S> filterByProvider(
      Iterable<S> prerequisites, final NativeProvider<C> provider) {
    return Iterables.filter(prerequisites, target -> target.get(provider) != null);
  }
  /**
   * Returns the path of the associated manifest file for the path of a Fileset. Works for both
   * exec paths and root relative paths.
   */
  public static PathFragment getManifestPathFromFilesetPath(PathFragment filesetDir) {
    // The manifest lives in a sibling directory prefixed with "_", in a file named MANIFEST.
    PathFragment manifestDir = filesetDir.replaceName("_" + filesetDir.getBaseName());
    PathFragment outputManifestFrag = manifestDir.getRelative("MANIFEST");
    return outputManifestFrag;
  }
  /**
   * Returns the middleman artifact on the specified attribute of the specified rule for the
   * specified mode, or an empty set if it does not exist.
   */
  public static NestedSet<Artifact> getMiddlemanFor(RuleContext rule, String attribute, Mode mode) {
    TransitiveInfoCollection prereq = rule.getPrerequisite(attribute, mode);
    // Missing prerequisite or missing provider both degrade to an empty set.
    if (prereq == null) {
      return NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    }
    MiddlemanProvider provider = prereq.getProvider(MiddlemanProvider.class);
    if (provider == null) {
      return NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    }
    return provider.getMiddlemanArtifact();
  }
  /**
   * Returns a path fragment qualified by the rule name and unique fragment to
   * disambiguate artifacts produced from the source file appearing in
   * multiple rules.
   *
   * <p>For example "//pkg:target" -> "pkg/<fragment>/target.
   */
  public static PathFragment getUniqueDirectory(Label label, PathFragment fragment) {
    return label.getPackageIdentifier().getSourceRoot().getRelative(fragment)
        .getRelative(label.getName());
  }
  /**
   * Checks that the given provider class either refers to an interface or to a value class.
   *
   * @throws IllegalArgumentException if {@code clazz} is an @AutoValue-generated class; callers
   *     must use the hand-written superclass instead
   */
  public static <T extends TransitiveInfoProvider> void checkProvider(Class<T> clazz) {
    // Write this check in terms of getName() rather than getSimpleName(); the latter is expensive.
    if (!clazz.isInterface() && clazz.getName().contains(".AutoValue_")) {
      // We must have a superclass due to the generic bound above.
      throw new IllegalArgumentException(
          clazz + " is generated by @AutoValue; use " + clazz.getSuperclass() + " instead");
    }
  }
  /**
   * Given a set of *top-level* targets and a configuration collection, evaluate top level
   * transitions, resolve configurations and return the appropriate <Target, Configuration> pair for
   * each target.
   *
   * <p>Preserves the original input ordering.
   */
  public static List<TargetAndConfiguration> getTargetsWithConfigs(
      BuildConfigurationCollection configurations,
      Collection<Target> targets,
      ExtendedEventHandler eventHandler,
      ConfiguredRuleClassProvider ruleClassProvider,
      SkyframeExecutor skyframeExecutor)
      throws InterruptedException {
    // We use a hash set here to remove duplicate nodes; this can happen for input files and package
    // groups.
    LinkedHashSet<TargetAndConfiguration> nodes = new LinkedHashSet<>(targets.size());
    for (BuildConfiguration config : configurations.getTargetConfigurations()) {
      for (Target target : targets) {
        // Non-configurable targets (e.g. input files) get a null configuration.
        nodes.add(new TargetAndConfiguration(target, target.isConfigurable() ? config : null));
      }
    }
    // We'll get the configs from SkyframeExecutor#getConfigurations, which gets configurations
    // for deps including transitions. So to satisfy its API we resolve transitions and repackage
    // each target as a Dependency (with a NONE transition if necessary).
    Multimap<BuildConfiguration, Dependency> asDeps =
        AnalysisUtils.targetsToDeps(nodes, ruleClassProvider);
    return ImmutableList.copyOf(
        ConfigurationResolver.getConfigurationsFromExecutor(
            nodes, asDeps, eventHandler, skyframeExecutor));
  }
  /**
   * Repackages each configured node as a {@link Dependency} keyed by its configuration, applying
   * the top-level transition. Nodes with a null configuration are skipped.
   */
  public static Multimap<BuildConfiguration, Dependency> targetsToDeps(
      LinkedHashSet<TargetAndConfiguration> nodes, ConfiguredRuleClassProvider ruleClassProvider) {
    Multimap<BuildConfiguration, Dependency> asDeps =
        ArrayListMultimap.<BuildConfiguration, Dependency>create();
    for (TargetAndConfiguration targetAndConfig : nodes) {
      if (targetAndConfig.getConfiguration() != null) {
        asDeps.put(
            targetAndConfig.getConfiguration(),
            Dependency.withTransitionAndAspects(
                targetAndConfig.getLabel(),
                TransitionResolver.evaluateTopLevelTransition(
                    targetAndConfig, ruleClassProvider.getDynamicTransitionMapper()),
                // TODO(bazel-team): support top-level aspects
                AspectCollection.EMPTY));
      }
    }
    return asDeps;
  }
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.math;
import static com.google.common.math.MathTesting.ALL_LONG_CANDIDATES;
import static com.google.common.math.MathTesting.ALL_ROUNDING_MODES;
import static com.google.common.math.MathTesting.ALL_SAFE_ROUNDING_MODES;
import static com.google.common.math.MathTesting.EXPONENTS;
import static com.google.common.math.MathTesting.NEGATIVE_INTEGER_CANDIDATES;
import static com.google.common.math.MathTesting.NEGATIVE_LONG_CANDIDATES;
import static com.google.common.math.MathTesting.NONZERO_LONG_CANDIDATES;
import static com.google.common.math.MathTesting.POSITIVE_INTEGER_CANDIDATES;
import static com.google.common.math.MathTesting.POSITIVE_LONG_CANDIDATES;
import static java.math.BigInteger.valueOf;
import static java.math.RoundingMode.FLOOR;
import static java.math.RoundingMode.UNNECESSARY;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.testing.NullPointerTester;
import junit.framework.TestCase;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.RoundingMode;
/**
* Tests for LongMath.
*
* @author Louis Wasserman
*/
@GwtCompatible(emulated = true)
public class LongMathTest extends TestCase {
@GwtIncompatible("TODO")
public void testConstantMaxPowerOfSqrt2Unsigned() {
  // The constant must equal floor(sqrt(2^127)), computed here via BigInteger.
  long expected =
      BigIntegerMath.sqrt(BigInteger.ZERO.setBit(2 * Long.SIZE - 1), FLOOR).longValue();
  assertEquals(expected, LongMath.MAX_POWER_OF_SQRT2_UNSIGNED);
}
@GwtIncompatible("BigIntegerMath") // TODO(cpovirk): GWT-enable BigIntegerMath
public void testMaxLog10ForLeadingZeros() {
  // A long with `zeros` leading zeros is at most 2^(64 - zeros); its floor log10
  // bounds the corresponding table entry.
  for (int zeros = 0; zeros < Long.SIZE; zeros++) {
    int expected = BigIntegerMath.log10(BigInteger.ONE.shiftLeft(Long.SIZE - zeros), FLOOR);
    assertEquals(expected, LongMath.maxLog10ForLeadingZeros[zeros]);
  }
}
@GwtIncompatible("TODO")
public void testConstantsPowersOf10() {
  // Every table entry must equal 10^exponent...
  for (int exponent = 0; exponent < LongMath.powersOf10.length; exponent++) {
    assertEquals(LongMath.checkedPow(10, exponent), LongMath.powersOf10[exponent]);
  }
  // ...and the table must stop exactly where 10^exponent overflows a long.
  try {
    LongMath.checkedPow(10, LongMath.powersOf10.length);
    fail("Expected ArithmeticException");
  } catch (ArithmeticException expected) {
    // expected
  }
}
@GwtIncompatible("TODO")
public void testConstantsHalfPowersOf10() {
  // halfPowersOf10[i] should be floor(sqrt(10^(2i+1))), i.e. floor(10^(i + 0.5)).
  for (int index = 0; index < LongMath.halfPowersOf10.length; index++) {
    BigInteger expected = BigIntegerMath.sqrt(BigInteger.TEN.pow(2 * index + 1), FLOOR);
    assertEquals(expected, BigInteger.valueOf(LongMath.halfPowersOf10[index]));
  }
  // The entry just past the table end would not fit in a long.
  BigInteger firstOverflow =
      BigIntegerMath.sqrt(BigInteger.TEN.pow(2 * LongMath.halfPowersOf10.length + 1), FLOOR);
  assertTrue(firstOverflow.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0);
}
@GwtIncompatible("TODO")
public void testConstantsSqrtMaxLong() {
  // FLOOR_SQRT_MAX_LONG must agree with sqrt(Long.MAX_VALUE, FLOOR).
  long floorSqrt = LongMath.sqrt(Long.MAX_VALUE, FLOOR);
  assertEquals(floorSqrt, LongMath.FLOOR_SQRT_MAX_LONG);
}
@GwtIncompatible("TODO")
public void testConstantsFactorials() {
  // `expected` tracks i! incrementally: the loop-update multiplies by the freshly
  // incremented i, so entry i is checked against i! exactly.
  long expected = 1;
  for (int i = 0; i < LongMath.factorials.length; i++, expected *= i) {
    assertEquals(expected, LongMath.factorials[i]);
  }
  // One multiplication past the end of the table must overflow a long.
  try {
    LongMath.checkedMultiply(
        LongMath.factorials[LongMath.factorials.length - 1], LongMath.factorials.length);
    fail("Expected ArithmeticException");
  } catch (ArithmeticException expectedOverflow) {
    // expected (can't be named `expected`: it would shadow the local above)
  }
}
@GwtIncompatible("TODO")
public void testConstantsBiggestBinomials() {
  for (int k = 0; k < LongMath.biggestBinomials.length; k++) {
    // binomial(biggestBinomials[k], k) must fit in a long.
    assertTrue(fitsInLong(BigIntegerMath.binomial(LongMath.biggestBinomials[k], k)));
    assertTrue(LongMath.biggestBinomials[k] == Integer.MAX_VALUE
        || !fitsInLong(BigIntegerMath.binomial(LongMath.biggestBinomials[k] + 1, k)));
    // In the first case, any long is valid; in the second, we want to test that the next-bigger
    // long overflows.
  }
  int k = LongMath.biggestBinomials.length;
  assertFalse(fitsInLong(BigIntegerMath.binomial(2 * k, k)));
  // 2 * k is the smallest value for which we don't replace k with (n-k).
}
@GwtIncompatible("TODO")
public void testConstantsBiggestSimpleBinomials() {
  for (int k = 0; k < LongMath.biggestSimpleBinomials.length; k++) {
    // The "simple" bound can never exceed the general bound for the same k.
    assertTrue(LongMath.biggestSimpleBinomials[k] <= LongMath.biggestBinomials[k]);
    simpleBinomial(LongMath.biggestSimpleBinomials[k], k); // mustn't throw
    if (LongMath.biggestSimpleBinomials[k] < Integer.MAX_VALUE) {
      // unless all n are fair game with this k
      try {
        simpleBinomial(LongMath.biggestSimpleBinomials[k] + 1, k);
        fail("Expected ArithmeticException");
      } catch (ArithmeticException expected) {}
    }
  }
  // Just past the table, even the smallest case must overflow.
  try {
    int k = LongMath.biggestSimpleBinomials.length;
    simpleBinomial(2 * k, k);
    // 2 * k is the smallest value for which we don't replace k with (n-k).
    fail("Expected ArithmeticException");
  } catch (ArithmeticException expected) {}
}
public void testLessThanBranchFree() {
  for (long left : ALL_LONG_CANDIDATES) {
    for (long right : ALL_LONG_CANDIDATES) {
      // Only check pairs whose difference fits in a long; per this test's precondition,
      // the branch-free comparison relies on (left - right) not overflowing.
      BigInteger difference = BigInteger.valueOf(left).subtract(BigInteger.valueOf(right));
      if (fitsInLong(difference)) {
        int expected = (left < right) ? 1 : 0;
        int actual = LongMath.lessThanBranchFree(left, right);
        assertEquals(expected, actual);
      }
    }
  }
}
// Throws an ArithmeticException if "the simple implementation" of binomial coefficients overflows
@GwtIncompatible("TODO")
private long simpleBinomial(int n, int k) {
  long result = 1;
  for (int i = 0; i < k; i++) {
    // Multiply before dividing; the running product of i + 1 consecutive integers is
    // divisible by (i + 1)!, so each stepwise division is exact.
    result = LongMath.checkedMultiply(result, n - i);
    result /= i + 1;
  }
  return result;
}
@GwtIncompatible("java.math.BigInteger")
public void testIsPowerOfTwo() {
  for (long x : ALL_LONG_CANDIDATES) {
    // Checks for a single bit set.
    // Reference definition: a power of two is strictly positive with exactly one set bit.
    BigInteger bigX = BigInteger.valueOf(x);
    boolean expected = (bigX.signum() > 0) && (bigX.bitCount() == 1);
    assertEquals(expected, LongMath.isPowerOfTwo(x));
  }
}
// log2(0) is undefined, so every rounding mode must reject it.
public void testLog2ZeroAlwaysThrows() {
  for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
    try {
      LongMath.log2(0L, roundingMode);
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // expected
    }
  }
}
// log2 of a negative value is undefined, regardless of rounding mode.
public void testLog2NegativeAlwaysThrows() {
  for (long value : NEGATIVE_LONG_CANDIDATES) {
    for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
      try {
        LongMath.log2(value, roundingMode);
        fail("Expected IllegalArgumentException");
      } catch (IllegalArgumentException expected) {
        // expected
      }
    }
  }
}
/* Relies on the correctness of BigIntegerMath.log2 for all modes except UNNECESSARY. */
public void testLog2MatchesBigInteger() {
  for (long x : POSITIVE_LONG_CANDIDATES) {
    for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) {
      // The BigInteger implementation is tested separately, use it as the reference.
      assertEquals(BigIntegerMath.log2(valueOf(x), mode), LongMath.log2(x, mode));
    }
  }
}
/* Relies on the correctness of isPowerOfTwo(long). */
public void testLog2Exact() {
  for (long x : POSITIVE_LONG_CANDIDATES) {
    // UNNECESSARY must succeed exactly when x is a power of two, and then round-trip.
    boolean isPowerOfTwo = LongMath.isPowerOfTwo(x);
    try {
      assertEquals(x, 1L << LongMath.log2(x, UNNECESSARY));
      assertTrue(isPowerOfTwo);
    } catch (ArithmeticException e) {
      assertFalse(isPowerOfTwo);
    }
  }
}
@GwtIncompatible("TODO")
// log10(0) is undefined, so every rounding mode must reject it.
public void testLog10ZeroAlwaysThrows() {
  for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
    try {
      LongMath.log10(0L, roundingMode);
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // expected
    }
  }
}
@GwtIncompatible("TODO")
// log10 of a negative value is undefined, regardless of rounding mode.
public void testLog10NegativeAlwaysThrows() {
  for (long value : NEGATIVE_LONG_CANDIDATES) {
    for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
      try {
        LongMath.log10(value, roundingMode);
        fail("Expected IllegalArgumentException");
      } catch (IllegalArgumentException expected) {
        // expected
      }
    }
  }
}
// Relies on the correctness of BigIntegerMath.log10 for all modes except UNNECESSARY.
@GwtIncompatible("TODO")
public void testLog10MatchesBigInteger() {
  for (long x : POSITIVE_LONG_CANDIDATES) {
    for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) {
      // The BigInteger implementation serves as the reference for every safe mode.
      assertEquals(BigIntegerMath.log10(valueOf(x), mode), LongMath.log10(x, mode));
    }
  }
}
// Relies on the correctness of log10(long, FLOOR) and of pow(long, int).
@GwtIncompatible("TODO")
public void testLog10Exact() {
  for (long x : POSITIVE_LONG_CANDIDATES) {
    // UNNECESSARY must succeed exactly when x is an exact power of ten.
    int floorLog = LongMath.log10(x, FLOOR);
    boolean expectedSuccess = LongMath.pow(10, floorLog) == x;
    try {
      assertEquals(floorLog, LongMath.log10(x, UNNECESSARY));
      assertTrue(expectedSuccess);
    } catch (ArithmeticException e) {
      if (expectedSuccess) {
        failFormat("expected log10(%s, UNNECESSARY) = %s; got ArithmeticException", x, floorLog);
      }
    }
  }
}
@GwtIncompatible("TODO")
public void testLog10TrivialOnPowerOf10() {
  // For an exact power of ten (10^12 here) every rounding mode agrees on the answer.
  long powerOfTen = 1000000000000L;
  for (RoundingMode mode : ALL_ROUNDING_MODES) {
    assertEquals(12, LongMath.log10(powerOfTen, mode));
  }
}
@GwtIncompatible("TODO")
// sqrt of a negative value is undefined, regardless of rounding mode.
public void testSqrtNegativeAlwaysThrows() {
  for (long value : NEGATIVE_LONG_CANDIDATES) {
    for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
      try {
        LongMath.sqrt(value, roundingMode);
        fail("Expected IllegalArgumentException");
      } catch (IllegalArgumentException expected) {
        // expected
      }
    }
  }
}
// Relies on the correctness of BigIntegerMath.sqrt for all modes except UNNECESSARY.
@GwtIncompatible("TODO")
public void testSqrtMatchesBigInteger() {
  for (long x : POSITIVE_LONG_CANDIDATES) {
    for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) {
      // Promote the long value (rather than using longValue() on the expected value) to avoid
      // any risk of truncation which could lead to a false positive.
      assertEquals(BigIntegerMath.sqrt(valueOf(x), mode), valueOf(LongMath.sqrt(x, mode)));
    }
  }
}
/* Relies on the correctness of sqrt(long, FLOOR). */
@GwtIncompatible("TODO")
public void testSqrtExactMatchesFloorOrThrows() {
  for (long x : POSITIVE_LONG_CANDIDATES) {
    long floorSqrt = LongMath.sqrt(x, FLOOR);
    // UNNECESSARY must succeed exactly when x is a perfect square, and then
    // agree with the FLOOR result.
    boolean isPerfectSquare = (floorSqrt * floorSqrt == x);
    try {
      assertEquals(floorSqrt, LongMath.sqrt(x, UNNECESSARY));
      assertTrue(isPerfectSquare);
    } catch (ArithmeticException e) {
      assertFalse(isPerfectSquare);
    }
  }
}
@GwtIncompatible("TODO")
public void testPow() {
  // Reference: BigInteger.pow truncated to 64 bits via longValue(); the comparison
  // implies LongMath.pow is expected to match that truncation on overflow.
  for (long i : ALL_LONG_CANDIDATES) {
    for (int exp : EXPONENTS) {
      // JUnit convention: the expected (reference) value comes first, the value under
      // test second — the original had them reversed, which garbles failure messages.
      assertEquals(valueOf(i).pow(exp).longValue(), LongMath.pow(i, exp));
    }
  }
}
@GwtIncompatible("TODO")
@SuppressUnderAndroid // TODO(cpovirk): File BigDecimal.divide() rounding bug.
public void testDivNonZero() {
  for (long p : NONZERO_LONG_CANDIDATES) {
    for (long q : NONZERO_LONG_CANDIDATES) {
      for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) {
        // BigDecimal.divide with scale 0 is the rounding reference for each mode.
        long expected =
            new BigDecimal(valueOf(p)).divide(new BigDecimal(valueOf(q)), 0, mode).longValue();
        long actual = LongMath.divide(p, q, mode);
        if (expected != actual) {
          failFormat("expected divide(%s, %s, %s) = %s; got %s", p, q, mode, expected, actual);
        }
      }
    }
  }
}
@GwtIncompatible("TODO")
@SuppressUnderAndroid // TODO(cpovirk): Problem with LongMath.divide on Android?
public void testDivNonZeroExact() {
    for (long dividend : NONZERO_LONG_CANDIDATES) {
        for (long divisor : NONZERO_LONG_CANDIDATES) {
            // UNNECESSARY must succeed exactly when the division is exact.
            boolean divisible = dividend % divisor == 0L;
            long quotient;
            try {
                quotient = LongMath.divide(dividend, divisor, UNNECESSARY);
            } catch (ArithmeticException e) {
                if (divisible) {
                    failFormat(
                            "expected divide(%s, %s, UNNECESSARY) to succeed; got ArithmeticException",
                            dividend, divisor);
                }
                continue;
            }
            assertEquals(dividend, quotient * divisor);
            assertTrue(divisible);
        }
    }
}
@GwtIncompatible("TODO")
public void testZeroDivIsAlwaysZero() {
    // 0 divided by anything nonzero is exactly 0 — no rounding ever needed.
    for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
        for (long divisor : NONZERO_LONG_CANDIDATES) {
            assertEquals(0L, LongMath.divide(0L, divisor, roundingMode));
        }
    }
}
@GwtIncompatible("TODO")
public void testDivByZeroAlwaysFails() {
    // Division by zero must throw for every dividend and every rounding mode.
    for (RoundingMode roundingMode : ALL_ROUNDING_MODES) {
        for (long dividend : ALL_LONG_CANDIDATES) {
            try {
                LongMath.divide(dividend, 0L, roundingMode);
                fail("Expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected: divide-by-zero rejected
            }
        }
    }
}
@GwtIncompatible("TODO")
public void testIntMod() {
    // mod(long, int) must agree with BigInteger.mod (always non-negative result).
    for (long value : ALL_LONG_CANDIDATES) {
        BigInteger bigValue = valueOf(value);
        for (int modulus : POSITIVE_INTEGER_CANDIDATES) {
            int expected = bigValue.mod(valueOf(modulus)).intValue();
            assertEquals(expected, LongMath.mod(value, modulus));
        }
    }
}
@GwtIncompatible("TODO")
public void testIntModNegativeModulusFails() {
    // A negative modulus is rejected for every dividend.
    for (int negativeModulus : NEGATIVE_INTEGER_CANDIDATES) {
        for (long value : ALL_LONG_CANDIDATES) {
            try {
                LongMath.mod(value, negativeModulus);
                fail("Expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected: negative modulus rejected
            }
        }
    }
}
@GwtIncompatible("TODO")
public void testIntModZeroModulusFails() {
    // Modulus zero is rejected for every dividend.
    for (long value : ALL_LONG_CANDIDATES) {
        try {
            LongMath.mod(value, 0);
            fail("Expected AE");
        } catch (ArithmeticException expected) {
            // expected: zero modulus rejected
        }
    }
}
@GwtIncompatible("TODO")
public void testMod() {
    // mod(long, long) must agree with BigInteger.mod (always non-negative result).
    for (long value : ALL_LONG_CANDIDATES) {
        BigInteger bigValue = valueOf(value);
        for (long modulus : POSITIVE_LONG_CANDIDATES) {
            long expected = bigValue.mod(valueOf(modulus)).longValue();
            assertEquals(expected, LongMath.mod(value, modulus));
        }
    }
}
@GwtIncompatible("TODO")
public void testModNegativeModulusFails() {
    // A negative long modulus is rejected for every dividend.
    for (long negativeModulus : NEGATIVE_LONG_CANDIDATES) {
        for (long value : ALL_LONG_CANDIDATES) {
            try {
                LongMath.mod(value, negativeModulus);
                fail("Expected ArithmeticException");
            } catch (ArithmeticException expected) {
                // expected: negative modulus rejected
            }
        }
    }
}
public void testGCDExhaustive() {
    // gcd must agree with BigInteger.gcd for every pair of positive candidates.
    for (long a : POSITIVE_LONG_CANDIDATES) {
        BigInteger bigA = valueOf(a);
        for (long b : POSITIVE_LONG_CANDIDATES) {
            assertEquals(bigA.gcd(valueOf(b)), valueOf(LongMath.gcd(a, b)));
        }
    }
}
@GwtIncompatible("TODO")
public void testGCDZero() {
    // gcd(a, 0) == gcd(0, a) == a, and gcd(0, 0) == 0 by convention.
    for (long candidate : POSITIVE_LONG_CANDIDATES) {
        assertEquals(candidate, LongMath.gcd(candidate, 0));
        assertEquals(candidate, LongMath.gcd(0, candidate));
    }
    assertEquals(0, LongMath.gcd(0, 0));
}
@GwtIncompatible("TODO")
public void testGCDNegativePositiveThrows() {
    // A negative argument is rejected whether it is first or second.
    for (long negative : NEGATIVE_LONG_CANDIDATES) {
        try {
            LongMath.gcd(negative, 3);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: negative first argument rejected
        }
        try {
            LongMath.gcd(3, negative);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: negative second argument rejected
        }
    }
}
@GwtIncompatible("TODO")
public void testGCDNegativeZeroThrows() {
    // Pairing a negative argument with zero is rejected in either order.
    for (long negative : NEGATIVE_LONG_CANDIDATES) {
        try {
            LongMath.gcd(negative, 0);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: negative first argument rejected
        }
        try {
            LongMath.gcd(0, negative);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: negative second argument rejected
        }
    }
}
@GwtIncompatible("TODO")
public void testCheckedAdd() {
    for (long a : ALL_LONG_CANDIDATES) {
        for (long b : ALL_LONG_CANDIDATES) {
            // checkedAdd throws exactly when the true sum overflows a long.
            BigInteger bigSum = valueOf(a).add(valueOf(b));
            boolean fits = fitsInLong(bigSum);
            long checkedSum;
            try {
                checkedSum = LongMath.checkedAdd(a, b);
            } catch (ArithmeticException e) {
                if (fits) {
                    failFormat(
                            "expected checkedAdd(%s, %s) = %s; got ArithmeticException", a, b, bigSum);
                }
                continue;
            }
            assertEquals(a + b, checkedSum);
            assertTrue(fits);
        }
    }
}
@GwtIncompatible("TODO")
public void testCheckedSubtract() {
    for (long a : ALL_LONG_CANDIDATES) {
        for (long b : ALL_LONG_CANDIDATES) {
            // checkedSubtract throws exactly when the true difference overflows a long.
            BigInteger bigDifference = valueOf(a).subtract(valueOf(b));
            boolean fits = fitsInLong(bigDifference);
            long checkedDifference;
            try {
                checkedDifference = LongMath.checkedSubtract(a, b);
            } catch (ArithmeticException e) {
                if (fits) {
                    failFormat(
                            "expected checkedSubtract(%s, %s) = %s; got ArithmeticException",
                            a,
                            b,
                            bigDifference);
                }
                continue;
            }
            assertEquals(a - b, checkedDifference);
            assertTrue(fits);
        }
    }
}
@GwtIncompatible("TODO")
@SuppressUnderAndroid // TODO(cpovirk): Problem with LongMath.checkedMultiply on Android?
public void testCheckedMultiply() {
    for (long a : ALL_LONG_CANDIDATES) {
        for (long b : ALL_LONG_CANDIDATES) {
            // checkedMultiply throws exactly when the true product overflows a long.
            BigInteger bigProduct = valueOf(a).multiply(valueOf(b));
            boolean fits = fitsInLong(bigProduct);
            long checkedProduct;
            try {
                checkedProduct = LongMath.checkedMultiply(a, b);
            } catch (ArithmeticException e) {
                if (fits) {
                    failFormat(
                            "expected checkedMultiply(%s, %s) = %s; got ArithmeticException",
                            a,
                            b,
                            bigProduct);
                }
                continue;
            }
            assertEquals(a * b, checkedProduct);
            assertTrue(fits);
        }
    }
}
@GwtIncompatible("TODO")
public void testCheckedPow() {
    for (long base : ALL_LONG_CANDIDATES) {
        for (int exponent : EXPONENTS) {
            // checkedPow throws exactly when the true power overflows a long.
            BigInteger bigPower = valueOf(base).pow(exponent);
            boolean fits = fitsInLong(bigPower);
            long checkedPower;
            try {
                checkedPower = LongMath.checkedPow(base, exponent);
            } catch (ArithmeticException e) {
                if (fits) {
                    failFormat(
                            "expected checkedPow(%s, %s) = %s; got ArithmeticException",
                            base,
                            exponent,
                            bigPower);
                }
                continue;
            }
            assertEquals(bigPower.longValue(), checkedPower);
            assertTrue(fits);
        }
    }
}
// Depends on the correctness of BigIntegerMath.factorial.
@GwtIncompatible("TODO")
public void testFactorial() {
    // Once n! no longer fits in a long, LongMath.factorial saturates at Long.MAX_VALUE.
    for (int n = 0; n <= 50; n++) {
        BigInteger bigFactorial = BigIntegerMath.factorial(n);
        long expected = fitsInLong(bigFactorial) ? bigFactorial.longValue() : Long.MAX_VALUE;
        assertEquals(expected, LongMath.factorial(n));
    }
}
@GwtIncompatible("TODO")
public void testFactorialNegative() {
    // Factorial is undefined for negative arguments.
    for (int negative : NEGATIVE_INTEGER_CANDIDATES) {
        try {
            LongMath.factorial(negative);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: negative argument rejected
        }
    }
}
// Depends on the correctness of BigIntegerMath.binomial.
public void testBinomial() {
    // Once C(n, k) no longer fits in a long, LongMath.binomial saturates at Long.MAX_VALUE.
    for (int n = 0; n <= 70; n++) {
        for (int k = 0; k <= n; k++) {
            BigInteger bigBinomial = BigIntegerMath.binomial(n, k);
            long expected = fitsInLong(bigBinomial) ? bigBinomial.longValue() : Long.MAX_VALUE;
            assertEquals(expected, LongMath.binomial(n, k));
        }
    }
}
@GwtIncompatible("Slow")
public void testBinomial_exhaustiveNotOverflowing() {
    // Tests all of the inputs to LongMath.binomial that won't cause it to overflow, that weren't
    // tested in the previous method, for k >= 3.
    for (int k = 3; k < LongMath.biggestBinomials.length; k++) {
        int largestNonOverflowingN = LongMath.biggestBinomials[k];
        for (int n = 70; n <= largestNonOverflowingN; n++) {
            assertEquals(BigIntegerMath.binomial(n, k).longValue(), LongMath.binomial(n, k));
        }
    }
}
public void testBinomialOutside() {
    // k must lie in [0, n]; both k = -1 and k = n + 1 are rejected.
    for (int n = 0; n <= 50; n++) {
        try {
            LongMath.binomial(n, -1);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: k below range rejected
        }
        try {
            LongMath.binomial(n, n + 1);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: k above range rejected
        }
    }
}
public void testBinomialNegative() {
    // Binomial coefficients are undefined for negative n.
    for (int negativeN : NEGATIVE_INTEGER_CANDIDATES) {
        try {
            LongMath.binomial(negativeN, 0);
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException expected) {
            // expected: negative n rejected
        }
    }
}
@GwtIncompatible("far too slow")
public void testSqrtOfPerfectSquareAsDoubleIsPerfect() {
    // This takes just over a minute on my machine.
    // Checks that Math.sqrt(n * n), truncated to long, recovers n exactly for
    // every n whose square fits in a long.
    for (long n = 0; n <= LongMath.FLOOR_SQRT_MAX_LONG; n++) {
        long recovered = (long) Math.sqrt(n * n);
        assertTrue(recovered == n);
    }
}
public void testSqrtOfLongIsAtMostFloorSqrtMaxLong() {
    // The double-based sqrt of Long.MAX_VALUE must not exceed the precomputed bound.
    long doubleBasedSqrt = (long) Math.sqrt(Long.MAX_VALUE);
    assertTrue(doubleBasedSqrt <= LongMath.FLOOR_SQRT_MAX_LONG);
}
@GwtIncompatible("java.math.BigInteger")
public void testMean() {
    // Odd-sized ranges have an obvious mean
    assertMean(2, 1, 3);
    assertMean(-2, -3, -1);
    assertMean(0, -1, 1);
    assertMean(1, -1, 3);
    assertMean((1L << 62) - 1, -1, Long.MAX_VALUE);
    // Even-sized ranges should prefer the lower mean
    assertMean(2, 1, 4);
    assertMean(-3, -4, -1);
    assertMean(0, -1, 2);
    assertMean(0, Long.MIN_VALUE + 2, Long.MAX_VALUE);
    assertMean(0, 0, 1);
    assertMean(-1, -1, 0);
    assertMean(-1, Long.MIN_VALUE, Long.MAX_VALUE);
    // x == y == mean
    assertMean(1, 1, 1);
    assertMean(0, 0, 0);
    assertMean(-1, -1, -1);
    assertMean(Long.MIN_VALUE, Long.MIN_VALUE, Long.MIN_VALUE);
    assertMean(Long.MAX_VALUE, Long.MAX_VALUE, Long.MAX_VALUE);
    // Exhaustive checks: every ordered pair of candidates, compared against
    // the BigInteger-based reference implementation.
    for (long first : ALL_LONG_CANDIDATES) {
        for (long second : ALL_LONG_CANDIDATES) {
            assertMean(first, second);
        }
    }
}
/**
 * Asserts that the arithmetic mean of {@code x} and {@code y} equals
 * {@code expectedMean}, and that the implementation agrees with it.
 */
private static void assertMean(long expectedMean, long x, long y) {
    // First validate the caller-supplied expectation against the reference
    // implementation, then delegate to the two-argument overload.
    assertEquals("The expectedMean should be the same as computeMeanSafely",
            expectedMean, computeMeanSafely(x, y));
    assertMean(x, y);
}
/**
 * Asserts that {@code LongMath.mean(x, y)} matches the overflow-safe reference
 * implementation, and that mean is symmetric in its arguments.
 */
private static void assertMean(long x, long y) {
    long referenceMean = computeMeanSafely(x, y);
    assertEquals(referenceMean, LongMath.mean(x, y));
    assertEquals("The mean of x and y should equal the mean of y and x",
            referenceMean, LongMath.mean(y, x));
}
/**
 * Computes the floor of the arithmetic mean of {@code x} and {@code y} in a way
 * that is obvious and resilient to overflow, by using BigInteger arithmetic.
 */
private static long computeMeanSafely(long x, long y) {
    BigInteger bigX = BigInteger.valueOf(x);
    BigInteger bigY = BigInteger.valueOf(y);
    // RoundingMode.FLOOR replaces the deprecated int constant BigDecimal.ROUND_FLOOR;
    // the resulting value is identical.
    BigDecimal bigMean = new BigDecimal(bigX.add(bigY))
            .divide(BigDecimal.valueOf(2), RoundingMode.FLOOR);
    // parseLong blows up on overflow, as opposed to longValue() which silently truncates.
    return Long.parseLong(bigMean.toString());
}
/** Returns whether {@code big} is representable as a {@code long} without truncation. */
private static boolean fitsInLong(BigInteger big) {
    // Equivalent to checking Long.MIN_VALUE <= big <= Long.MAX_VALUE:
    // a value fits iff its two's-complement magnitude needs at most 63 bits.
    return big.bitLength() <= 63;
}
@GwtIncompatible("NullPointerTester")
public void testNullPointers() {
    // Reflectively probe every public static method of LongMath with nulls,
    // supplying benign defaults for primitive parameters.
    NullPointerTester npeTester = new NullPointerTester();
    npeTester.setDefault(int.class, 1);
    npeTester.setDefault(long.class, 1L);
    npeTester.testAllPublicStaticMethods(LongMath.class);
}
// Fails the current test with a String.format-rendered message.
// Kept GWT-incompatible because GWT does not emulate String.format.
@GwtIncompatible("String.format")
private static void failFormat(String template, Object... args) {
    fail(String.format(template, args));
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateBillingGroupRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the billing group. */
    private String billingGroupName;

    /** The properties of the billing group. */
    private BillingGroupProperties billingGroupProperties;

    /**
     * The expected version of the billing group. If the version of the billing group does not match
     * the expected version specified in the request, the <code>UpdateBillingGroup</code> request is
     * rejected with a <code>VersionConflictException</code>.
     */
    private Long expectedVersion;

    /**
     * Sets the name of the billing group.
     *
     * @param billingGroupName
     *        The name of the billing group.
     */
    public void setBillingGroupName(String billingGroupName) {
        this.billingGroupName = billingGroupName;
    }

    /**
     * Returns the name of the billing group.
     *
     * @return The name of the billing group.
     */
    public String getBillingGroupName() {
        return this.billingGroupName;
    }

    /**
     * Fluent setter for the name of the billing group.
     *
     * @param billingGroupName
     *        The name of the billing group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateBillingGroupRequest withBillingGroupName(String billingGroupName) {
        setBillingGroupName(billingGroupName);
        return this;
    }

    /**
     * Sets the properties of the billing group.
     *
     * @param billingGroupProperties
     *        The properties of the billing group.
     */
    public void setBillingGroupProperties(BillingGroupProperties billingGroupProperties) {
        this.billingGroupProperties = billingGroupProperties;
    }

    /**
     * Returns the properties of the billing group.
     *
     * @return The properties of the billing group.
     */
    public BillingGroupProperties getBillingGroupProperties() {
        return this.billingGroupProperties;
    }

    /**
     * Fluent setter for the properties of the billing group.
     *
     * @param billingGroupProperties
     *        The properties of the billing group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateBillingGroupRequest withBillingGroupProperties(BillingGroupProperties billingGroupProperties) {
        setBillingGroupProperties(billingGroupProperties);
        return this;
    }

    /**
     * Sets the expected version of the billing group. If the version of the billing group does not
     * match the expected version specified in the request, the <code>UpdateBillingGroup</code>
     * request is rejected with a <code>VersionConflictException</code>.
     *
     * @param expectedVersion
     *        The expected version of the billing group.
     */
    public void setExpectedVersion(Long expectedVersion) {
        this.expectedVersion = expectedVersion;
    }

    /**
     * Returns the expected version of the billing group. If the version of the billing group does
     * not match the expected version specified in the request, the <code>UpdateBillingGroup</code>
     * request is rejected with a <code>VersionConflictException</code>.
     *
     * @return The expected version of the billing group.
     */
    public Long getExpectedVersion() {
        return this.expectedVersion;
    }

    /**
     * Fluent setter for the expected version of the billing group.
     *
     * @param expectedVersion
     *        The expected version of the billing group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateBillingGroupRequest withExpectedVersion(Long expectedVersion) {
        setExpectedVersion(expectedVersion);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging.
     * Sensitive data will be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null members are rendered; output format matches the generated SDK style.
        StringBuilder rendered = new StringBuilder("{");
        if (getBillingGroupName() != null) {
            rendered.append("BillingGroupName: ").append(getBillingGroupName()).append(",");
        }
        if (getBillingGroupProperties() != null) {
            rendered.append("BillingGroupProperties: ").append(getBillingGroupProperties()).append(",");
        }
        if (getExpectedVersion() != null) {
            rendered.append("ExpectedVersion: ").append(getExpectedVersion());
        }
        rendered.append("}");
        return rendered.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rules out null.
        if (!(obj instanceof UpdateBillingGroupRequest)) {
            return false;
        }
        UpdateBillingGroupRequest that = (UpdateBillingGroupRequest) obj;
        // Objects.equals reproduces the generated null-xor-then-equals checks exactly.
        return java.util.Objects.equals(getBillingGroupName(), that.getBillingGroupName())
                && java.util.Objects.equals(getBillingGroupProperties(), that.getBillingGroupProperties())
                && java.util.Objects.equals(getExpectedVersion(), that.getExpectedVersion());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as the
        // generated code, so hash values are unchanged.
        return java.util.Objects.hash(getBillingGroupName(), getBillingGroupProperties(), getExpectedVersion());
    }

    @Override
    public UpdateBillingGroupRequest clone() {
        return (UpdateBillingGroupRequest) super.clone();
    }
}
| |
package vg.civcraft.mc.citadel.reinforcement;
import java.sql.Timestamp;
import java.text.DecimalFormat;
import java.util.logging.Level;
import org.bukkit.Location;
import org.bukkit.block.Block;
import org.bukkit.entity.Player;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.Openable;
import vg.civcraft.mc.citadel.Citadel;
import vg.civcraft.mc.citadel.CitadelConfigManager;
import vg.civcraft.mc.citadel.Utility;
import vg.civcraft.mc.citadel.reinforcementtypes.ReinforcementType;
import vg.civcraft.mc.namelayer.GroupManager;
import vg.civcraft.mc.namelayer.NameAPI;
import vg.civcraft.mc.namelayer.group.Group;
import vg.civcraft.mc.namelayer.permission.PermissionType;
/**
 * A reinforcement owned by a NameLayer group. Access checks are delegated to the
 * group's permission set; decay over time scales incoming damage.
 */
public class PlayerReinforcement extends Reinforcement{
    // Group id this reinforcement was created with; kept so we can still report
    // something meaningful after the group object itself is gone.
    private transient int gid;
    private Group g;
    private static GroupManager gm;
    private boolean isInsecure = false;
    private ItemStack stack;

    public PlayerReinforcement(Location loc, int health,
            int creation, int acid, Group g, ItemStack stack) {
        super(loc, stack.getType(), health, creation, acid);
        this.g = g;
        this.stack = stack;
        if (gm == null) {
            gm = NameAPI.getGroupManager();
        }
        this.gid = g.getGroupId();
    }

    /**
     * Shared access check: revalidates the group, then asks the GroupManager
     * whether the player holds the named permission on it.
     */
    private boolean hasPermission(Player p, String permissionName) {
        checkValid();
        if (g == null) {
            return false;
        }
        return gm.hasAccess(g.getName(), p.getUniqueId(), PermissionType.getPermission(permissionName));
    }

    public boolean canBypass(Player p) {
        return hasPermission(p, "BYPASS_REINFORCEMENT");
    }

    public boolean canAccessCrops(Player p) {
        return hasPermission(p, "CROPS");
    }

    public boolean canAccessChests(Player p) {
        return hasPermission(p, "CHESTS");
    }

    public boolean canAccessDoors(Player p) {
        return hasPermission(p, "DOORS");
    }

    public boolean canViewInformation(Player p) {
        return hasPermission(p, "REINFORCEMENT_INFO");
    }

    public boolean canMakeInsecure(Player p) {
        return hasPermission(p, "INSECURE_REINFORCEMENT");
    }

    public boolean canAcid(Player p) {
        return hasPermission(p, "ACIDBLOCK");
    }

    /** @return true if the block can be toggled secure/insecure (containers and doors). */
    public boolean isSecurable() {
        return (isContainer() || isDoor());
    }

    /**
     * Computes the damage multiplier from group age: it doubles for every
     * configured day-multiplier period since the group's timestamp.
     * @return a power-of-two multiplier, at least 1.
     */
    public int getDamageMultiplier(){
        if (g == null){
            return 1;
        }
        Timestamp ts = NameAPI.getGroupManager().getTimestamp(g.getName());
        long shiftMultiplier = ((System.currentTimeMillis() - ts.getTime()) / (long)86400000) / (long)Citadel.getReinforcementManager().getDayMultiplier();
        if (shiftMultiplier > 0) {
            // Cap the shift at 30: Java's int shift uses only the low 5 bits of the
            // count, so an uncapped "1 << 32" would silently wrap back to small
            // multipliers for very old groups. 1 << 30 keeps the maximum decay tier.
            return 1 << (int) Math.min(shiftMultiplier, 30L);
        }
        return 1;
    }

    /**
     * @return return false if it is secure, return true if it is insecure.
     */
    public boolean isInsecure(){
        return isInsecure;
    }

    /**
     * Toggles whether or not the block is insecure.
     */
    public void toggleInsecure() {
        this.setInsecure(!this.isInsecure());
    }

    /**
     * Set the status of either insecure or secure.
     * Mostly used with hoppers to allow or disallow the transfer of items
     * from one hopper to another if they are on separate groups.
     * @param bool the new insecure flag
     */
    public void setInsecure(boolean bool){
        isInsecure = bool;
        isDirty = true;
    }

    /**
     * @return Returns the remaining durability of the reinforcement as a
     *         fraction of its type's full hit points (0.0 to 1.0).
     */
    public double getHealth() {
        return (double)getDurability() / ((double)ReinforcementType.
                getReinforcementType(stack).getHitPoints());
    }

    /**
     * @return Returns in textual form the health of the reinforcement, either as
     *         a percentage or as a coarse adverb depending on configuration.
     */
    public String getHealthText() {
        double health = getHealth();
        if (CitadelConfigManager.showHealthAsPercent()) {
            DecimalFormat df = new DecimalFormat("#.##");
            return df.format(health * 100) + "%";
        } else {
            if (health > 0.75) {
                return "excellently";
            } else if (health > 0.50) {
                return "well";
            } else if (health > 0.25) {
                return "decently";
            } else {
                return "poorly";
            }
        }
    }

    /**
     * Returns true if the block has an inventory or can be opened.
     * NOTE(review): the name "rable" looks truncated (openable/securable?), but it
     * is public API so it is kept unchanged.
     * @return boolean
     */
    public boolean rable() {
        Block block = getLocation().getBlock();
        return block.getState() instanceof InventoryHolder
                || block.getState().getData() instanceof Openable || Utility.doorTypes.contains(block.getType());
    }

    /**
     * @return True if the reinforced block is a door/trapdoor etc. or part of one
     */
    public boolean isDoor() {
        Block block = getLocation().getBlock();
        return Utility.doorTypes.contains(block.getType()) || block.getState().getData() instanceof Openable;
    }

    /** @return True if the reinforced block holds an inventory. */
    public boolean isContainer() {
        return getLocation().getBlock().getState() instanceof InventoryHolder;
    }

    /**
     * Returns the group this PlayerReinforcement is associated with.
     * @return group, possibly null if the group has been deleted
     */
    public Group getGroup(){
        checkValid();
        return g;
    }

    /**
     * Sets the Group for this reinforcement and marks it dirty for persistence.
     * @param g the new owning group
     */
    public void setGroup(Group g){
        this.g = g;
        this.gid = g.getGroupId();
        isDirty = true;
    }

    /**
     * @return ItemStack associated with this reinforcement
     */
    public ItemStack getStackRepresentation(){
        return stack;
    }

    /**
     * @return A string representation of a reinforcement's health, material, ect.
     */
    public String getStatus() {
        String verb;
        if (isSecurable()) {
            verb = "Locked";
        } else {
            verb = "Reinforced";
        }
        return String.format("%s %s with %s", verb, getHealthText(), getMaterial().name());
    }

    /**
     * Refreshes the group reference if it was invalidated by a merge/delete.
     * May leave {@code g} null when the group no longer exists.
     */
    private void checkValid(){
        if (g == null) {
            Citadel.getInstance().getLogger().log(Level.WARNING, "CheckValid was called but the underlying group " + gid + " is gone for " + this.getLocation() + "!");
            return;
        }
        if (!g.isValid()){ // incase it was recently merged/ deleted.
            // Capture the id BEFORE reassigning: if the lookup returns null, the
            // old code dereferenced the now-null field and threw an NPE here.
            int staleId = g.getGroupId();
            g = GroupManager.getGroup(staleId);
            if (g == null) {
                Citadel.getInstance().getLogger().log(Level.INFO, "Group " + staleId + " was deleted or merged but not marked invalid!");
            }
            isDirty = true;
        }
    }

    /**
     * Note this is different to the group id of a Group.
     * If a reinforcement is made with Group 1 and then is merged into Group 0 then this will return the group_id
     * of Group 1.
     * @return Returns the value of the group_id from the group it was created with.
     */
    public int getGroupId(){
        if (g == null) return gid;
        return g.getGroupId();
    }

    /**
     * @return A coarse textual description of how decayed this reinforcement is,
     *         based on the power-of-two damage multiplier.
     */
    public String getAgeStatus() {
        int d = this.getDamageMultiplier();
        if (d < 2) {
            return "not decayed";
        } else if (d < 16) {
            return "partially decayed";
        } else if (d < 256) {
            return "highly decayed";
        } else if (d < 2048) {
            return "heavily decayed";
        } else {
            // d >= 2048; the original trailing "" branch was unreachable.
            return "completely decayed";
        }
    }
}
| |
// ========================================================================
// Copyright (c) 2009 Intalio, Inc.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.osgi.boot.utils.internal;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.service.packageadmin.PackageAdmin;
import org.osgi.service.startlevel.StartLevel;
/**
* When the PackageAdmin service is activated we can look for the fragments
* attached to this bundle and "activate" them.
*/
public class PackageAdminServiceTracker implements ServiceListener
{
private BundleContext _context;
private List<BundleActivator> _activatedFragments = new ArrayList<BundleActivator>();
private boolean _fragmentsWereActivated = false;
//Use the deprecated StartLevel to stay compatible with older versions of OSGi.
private StartLevel _startLevel;
private int _maxStartLevel = 6;
public static PackageAdminServiceTracker INSTANCE = null;
public PackageAdminServiceTracker(BundleContext context)
{
INSTANCE = this;
_context = context;
if (!setup())
{
try
{
_context.addServiceListener(this,"(objectclass=" + PackageAdmin.class.getName() + ")");
}
catch (InvalidSyntaxException e)
{
e.printStackTrace(); // won't happen
}
}
}
/**
* @return true if the fragments were activated by this method.
*/
private boolean setup()
{
ServiceReference sr = _context.getServiceReference(PackageAdmin.class.getName());
_fragmentsWereActivated = sr != null;
if (sr != null)
invokeFragmentActivators(sr);
sr = _context.getServiceReference(StartLevel.class.getName());
if (sr != null)
{
_startLevel = (StartLevel)_context.getService(sr);
try
{
_maxStartLevel = Integer.parseInt(System.getProperty("osgi.startLevel","6"));
}
catch (Exception e)
{
//nevermind default on the usual.
_maxStartLevel = 6;
}
}
return _fragmentsWereActivated;
}
/**
* Invokes the optional BundleActivator in each fragment. By convention the
* bundle activator for a fragment must be in the package that is defined by
* the symbolic name of the fragment and the name of the class must be
* 'FragmentActivator'.
*
* @param event
* The <code>ServiceEvent</code> object.
*/
public void serviceChanged(ServiceEvent event)
{
if (event.getType() == ServiceEvent.REGISTERED)
{
invokeFragmentActivators(event.getServiceReference());
}
}
/**
* Helper to access the PackageAdmin and return the fragments hosted by a bundle.
* when we drop the support for the older versions of OSGi, we will stop using the PackageAdmin
* service.
* @param bundle
* @return
*/
public Bundle[] getFragments(Bundle bundle)
{
ServiceReference sr = _context.getServiceReference(PackageAdmin.class.getName());
if (sr == null)
{//we should never be here really.
return null;
}
PackageAdmin admin = (PackageAdmin)_context.getService(sr);
return admin.getFragments(bundle);
}
/**
* Returns the fragments and the required-bundles of a bundle.
* Recursively collect the required-bundles and fragment when the directive visibility:=reexport
* is added to a required-bundle.
* @param bundle
* @param webFragOrAnnotationOrResources
* @return
*/
public Bundle[] getFragmentsAndRequiredBundles(Bundle bundle)
{
ServiceReference sr = _context.getServiceReference(PackageAdmin.class.getName());
if (sr == null)
{//we should never be here really.
return null;
}
PackageAdmin admin = (PackageAdmin)_context.getService(sr);
LinkedHashMap<String,Bundle> deps = new LinkedHashMap<String,Bundle>();
collectFragmentsAndRequiredBundles(bundle, admin, deps, false);
return deps.values().toArray(new Bundle[deps.size()]);
}
/**
* Returns the fragments and the required-bundles. Collects them transitively when the directive 'visibility:=reexport'
* is added to a required-bundle.
* @param bundle
* @param webFragOrAnnotationOrResources
* @return
*/
protected void collectFragmentsAndRequiredBundles(Bundle bundle, PackageAdmin admin, Map<String,Bundle> deps, boolean onlyReexport)
{
Bundle[] fragments = admin.getFragments(bundle);
if (fragments != null)
{
//Also add the bundles required by the fragments.
//this way we can inject onto an existing web-bundle a set of bundles that extend it
for (Bundle f : fragments)
{
if (!deps.keySet().contains(f.getSymbolicName()))
{
deps.put(f.getSymbolicName(), f);
collectRequiredBundles(f, admin, deps, onlyReexport);
}
}
}
collectRequiredBundles(bundle, admin, deps, onlyReexport);
}
/**
* A simplistic but good enough parser for the Require-Bundle header.
* Parses the version range attribute and the visibility directive.
*
* @param onlyReexport true to collect resources and web-fragments transitively if and only if the directive visibility is reexport.
* @param bundle
* @return The map of required bundles associated to the value of the jetty-web attribute.
*/
protected void collectRequiredBundles(Bundle bundle, PackageAdmin admin, Map<String,Bundle> deps, boolean onlyReexport)
{
String requiredBundleHeader = (String)bundle.getHeaders().get("Require-Bundle");
if (requiredBundleHeader == null)
{
return;
}
StringTokenizer tokenizer = new ManifestTokenizer(requiredBundleHeader);
while (tokenizer.hasMoreTokens())
{
String tok = tokenizer.nextToken().trim();
StringTokenizer tokenizer2 = new StringTokenizer(tok, ";");
String symbolicName = tokenizer2.nextToken().trim();
if (deps.keySet().contains(symbolicName))
{
//was already added. 2 dependencies pointing at the same bundle.
continue;
}
String versionRange = null;
boolean reexport = false;
while (tokenizer2.hasMoreTokens())
{
String next = tokenizer2.nextToken().trim();
if (next.startsWith("bundle-version="))
{
if (next.startsWith("bundle-version=\"") || next.startsWith("bundle-version='"))
{
versionRange = next.substring("bundle-version=\"".length(), next.length()-1);
}
else
{
versionRange = next.substring("bundle-version=".length());
}
}
else if (next.equals("visibility:=reexport"))
{
reexport = true;
}
}
if (!reexport && onlyReexport)
{
return;
}
Bundle[] reqBundles = admin.getBundles(symbolicName, versionRange);
if (reqBundles != null && reqBundles.length != 0)
{
Bundle reqBundle = null;
for (Bundle b : reqBundles)
{
if (b.getState() == Bundle.ACTIVE || b.getState() == Bundle.STARTING)
{
reqBundle = b;
break;
}
}
if (reqBundle == null)
{
//strange? in OSGi with Require-Bundle,
//the dependent bundle is supposed to be active already
reqBundle = reqBundles[0];
}
deps.put(reqBundle.getSymbolicName(),reqBundle);
collectFragmentsAndRequiredBundles(reqBundle, admin, deps, true);
}
}
}
/**
 * Looks for a class named {@code <fragment-symbolic-name>.FragmentActivator}
 * in every fragment attached to this bundle and, when found, instantiates it
 * and invokes its {@code start} method. Started activators are remembered so
 * they can be stopped later.
 *
 * @param sr the service reference of the PackageAdmin service
 */
private void invokeFragmentActivators(ServiceReference sr)
{
    PackageAdmin admin = (PackageAdmin)_context.getService(sr);
    Bundle[] fragments = admin.getFragments(_context.getBundle());
    if (fragments == null)
    {
        return;
    }
    for (Bundle frag : fragments)
    {
        // find a convention to look for a class inside the fragment.
        try
        {
            String fragmentActivator = frag.getSymbolicName() + ".FragmentActivator";
            Class<?> c = Class.forName(fragmentActivator);
            if (c != null)
            {
                BundleActivator bActivator = (BundleActivator)c.newInstance();
                bActivator.start(_context);
                _activatedFragments.add(bActivator);
            }
        }
        catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NullPointerException ignored)
        {
            // Best effort: a fragment without a FragmentActivator class (or
            // one we cannot instantiate) is simply skipped.
        }
        catch (Exception e)
        {
            // Unexpected failure while starting the activator.
            e.printStackTrace();
        }
    }
}
/**
 * Shuts this tracker down: clears the singleton reference and stops every
 * fragment activator that was started. A failure in one activator must not
 * prevent the remaining ones from being stopped.
 */
public void stop()
{
    INSTANCE = null;
    for (BundleActivator activator : _activatedFragments)
    {
        try
        {
            activator.stop(_context);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }
}
/**
 * Indicates whether the OSGi framework has finished its autostart phase.
 *
 * @return true if there is no StartLevel service, or if the framework has
 *         reached (or passed) the highest known start level.
 */
public boolean frameworkHasCompletedAutostarts()
{
    if (_startLevel == null)
    {
        return true;
    }
    return _startLevel.getStartLevel() >= _maxStartLevel;
}
/**
 * Splits a manifest header on commas while keeping commas that occur inside
 * a quoted section (e.g. {@code bundle-version="[1.0,2.0)"}) within a single
 * token.
 */
private static class ManifestTokenizer extends StringTokenizer {

    public ManifestTokenizer(String header) {
        super(header, ",");
    }

    /**
     * Returns the next manifest clause, re-joining raw tokens whenever a
     * comma fell inside an unterminated quoted section.
     */
    @Override
    public String nextToken() {
        StringBuilder clause = new StringBuilder(super.nextToken());
        while (hasOpenQuote(clause.toString()) && hasMoreTokens()) {
            clause.append(',').append(super.nextToken());
        }
        return clause.toString();
    }

    /** Returns true if the token contains a quote that is never closed. */
    private boolean hasOpenQuote(String token) {
        int from = 0;
        while (true) {
            int quote = getQuote(token, from);
            if (quote < 0) {
                // no quote character left: nothing can be open
                return false;
            }
            int open = token.indexOf(quote, from);
            int close = token.indexOf(quote, open + 1);
            if (close < 0) {
                // opening quote without a matching closing quote
                return true;
            }
            from = close + 1;
        }
    }

    /**
     * Returns the first quote character (single or double) found at or after
     * the given offset, or -1 if there is none.
     */
    private int getQuote(String token, int offset) {
        int dq = token.indexOf('"', offset);
        int sq = token.indexOf('\'', offset);
        if (dq < 0 && sq < 0) {
            return -1;
        }
        if (dq < 0) {
            return '\'';
        }
        if (sq < 0) {
            return '"';
        }
        return dq < sq ? '"' : '\'';
    }
}
}
| |
/*
* Copyright 2014 MIR@MU.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cz.muni.fi.mir.db.service.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.hibernate.Hibernate;
import org.joda.time.DateTime;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import cz.muni.fi.mir.db.dao.AnnotationDAO;
import cz.muni.fi.mir.db.dao.ApplicationRunDAO;
import cz.muni.fi.mir.db.dao.ElementDAO;
import cz.muni.fi.mir.db.dao.FormulaDAO;
import cz.muni.fi.mir.db.domain.Annotation;
import cz.muni.fi.mir.db.domain.ApplicationRun;
import cz.muni.fi.mir.db.domain.CanonicOutput;
import cz.muni.fi.mir.db.domain.Configuration;
import cz.muni.fi.mir.db.domain.Element;
import cz.muni.fi.mir.db.domain.Formula;
import cz.muni.fi.mir.db.domain.FormulaSearchRequest;
import cz.muni.fi.mir.db.domain.Pagination;
import cz.muni.fi.mir.db.domain.Program;
import cz.muni.fi.mir.db.domain.Revision;
import cz.muni.fi.mir.db.domain.SearchResponse;
import cz.muni.fi.mir.db.domain.SourceDocument;
import cz.muni.fi.mir.db.domain.User;
import cz.muni.fi.mir.db.service.ApplicationRunService;
import cz.muni.fi.mir.db.service.ElementService;
import cz.muni.fi.mir.db.service.FormulaService;
import cz.muni.fi.mir.scheduling.FormulaImportTask;
import cz.muni.fi.mir.scheduling.LongRunningTaskFactory;
import cz.muni.fi.mir.services.MathCanonicalizerLoader;
import cz.muni.fi.mir.services.TaskService;
import cz.muni.fi.mir.tools.EntityFactory;
import cz.muni.fi.mir.tools.IndexTools;
import cz.muni.fi.mir.tools.Tools;
/**
*
* @author Dominik Szalai - emptulik at gmail.com
* @author Rober Siska - xsiska2 at mail.muni.cz
*/
@Service(value = "formulaService")
public class FormulaServiceImpl implements FormulaService
{
    private static final Logger logger = Logger.getLogger(FormulaServiceImpl.class);

    @Autowired
    private FormulaDAO formulaDAO;
    @Autowired
    private ElementDAO elementDAO;
    @Autowired
    private ApplicationRunDAO applicationRunDAO;
    @Autowired
    private ApplicationRunService applicationRunService;
    @Autowired
    private MathCanonicalizerLoader mathCanonicalizerLoader;
    @Autowired
    private AnnotationDAO annotationDAO;
    @Autowired
    private LongRunningTaskFactory taskFactory;
    @Autowired
    private TaskService taskService;
    @Autowired
    private ElementService elementService;
    @Autowired
    private IndexTools indexTools;

    /** Persists the given formula. */
    @Override
    @Transactional(readOnly = false)
    public void createFormula(Formula formula) throws IllegalArgumentException
    {
        if (formula == null)
        {
            throw new IllegalArgumentException("Given input formula is null.");
        }
        formulaDAO.create(formula);
    }

    /** Updates the given formula. */
    @Override
    @Transactional(readOnly = false)
    public void updateFormula(Formula formula) throws IllegalArgumentException
    {
        InputChecker.checkInput(formula);
        formulaDAO.update(formula);
    }

    /** Deletes the given formula; the formula must carry a valid persisted ID. */
    @Override
    @Transactional(readOnly = false)
    public void deleteFormula(Formula formula) throws IllegalArgumentException
    {
        InputChecker.checkInput(formula);
        // a persisted entity always has an ID of at least 1
        if (formula.getId() == null || formula.getId() < 1L)
        {
            throw new IllegalArgumentException("Given formula does not have valid ID [" + formula.getId() + "].");
        }
        formulaDAO.deleteFormula(formula);
    }

    /** Loads a formula by its positive database ID. */
    @Override
    @Transactional(readOnly = true)
    public Formula getFormulaByID(Long id) throws IllegalArgumentException
    {
        if (id == null || id <= 0L)
        {
            throw new IllegalArgumentException("Given ID is not valid [" + id + "].");
        }
        return formulaDAO.getFormulaByID(id);
    }

    /** Finds the formula owning the given annotation. */
    @Override
    @Transactional(readOnly = true)
    public Formula getFormulaByAnnotation(Annotation annotation) throws IllegalArgumentException
    {
        InputChecker.checkInput(annotation);
        return formulaDAO.getFormulaByAnnotation(annotation);
    }

    /** Returns one page of formulas. */
    @Override
    @Transactional(readOnly = true)
    public List<Formula> getAllFormulas(Pagination pagination) throws IllegalArgumentException
    {
        InputChecker.checkInput(pagination);
        return formulaDAO.getAllFormulas(pagination);
    }

    /** Returns every formula in the database. */
    @Override
    @Transactional(readOnly = true)
    public List<Formula> getAllFormulas() throws IllegalArgumentException
    {
        return formulaDAO.getAllFormulas();
    }

    /**
     * Schedules an asynchronous import of all formulas found under the given
     * path; the actual work happens in a {@link FormulaImportTask}.
     */
    @Override
    @Transactional(readOnly = false)
    public void massFormulaImport(String path, String filter, Revision revision,
            Configuration configuration, Program program,
            SourceDocument sourceDocument, User user) throws IllegalArgumentException
    {
        InputChecker.checkInput(configuration);
        InputChecker.checkInput(revision);
        InputChecker.checkInput(program);
        InputChecker.checkInput(sourceDocument);
        InputChecker.checkInput(user);
        if (path == null || path.length() < 1)
        {
            throw new IllegalArgumentException("Empty path was passed for import.");
        }
        FormulaImportTask task = taskFactory.createImportTask();
        task.setDependencies(path, filter, revision, configuration, program, sourceDocument, user);
        taskService.submitTask(task);
    }

    /** Returns the total number of formula records. */
    @Override
    @Transactional(readOnly = true)
    public int getNumberOfRecords()
    {
        return formulaDAO.getNumberOfRecords();
    }

    /**
     * Imports a single formula from its XML content. The formula is only
     * stored and canonicalized when no formula with the same SHA1 hash
     * already exists.
     */
    @Override
    @Transactional(readOnly = false)
    public void simpleFormulaImport(String formulaXmlContent, Revision revision,
            Configuration configuration, Program program,
            SourceDocument sourceDocument, User user) throws IllegalArgumentException
    {
        InputChecker.checkInput(configuration);
        InputChecker.checkInput(revision);
        InputChecker.checkInput(program);
        InputChecker.checkInput(sourceDocument);
        InputChecker.checkInput(user);
        ApplicationRun appRun = EntityFactory.createApplicationRun();
        appRun.setUser(user);
        appRun.setRevision(revision);
        appRun.setConfiguration(configuration);
        applicationRunDAO.create(appRun);

        Formula f = EntityFactory.createFormula();
        f.setOutputs(new ArrayList<CanonicOutput>());
        f.setXml(formulaXmlContent);
        f.setInsertTime(DateTime.now());
        f.setUser(user);
        // the hash is used for duplicate detection below
        f.setHashValue(Tools.getInstance().SHA1(f.getXml()));
        f.setElements(elementService.extractElements(f));
        if (null == formulaDAO.getFormulaByHash(f.getHashValue()))
        {
            attachElements(f);
            formulaDAO.create(f);
            mathCanonicalizerLoader.execute(Arrays.asList(f), appRun);
        }
        else
        {
            logger.info("Formula with hash [" + f.getHashValue() + "] is already in database.");
        }
    }

    /** Rebuilds and optimizes the fulltext index for formulas. */
    @Override
    @Transactional
    public void reindexAndOptimize()
    {
        indexTools.reIndexClass(Formula.class);
        indexTools.optimize(Formula.class);
    }

    /** Searches for formulas similar to the given one. */
    @Override
    @Transactional(readOnly = false)
    public SearchResponse<Formula> findSimilar(Formula formula,
            Map<String,String> properties, boolean override,
            boolean directWrite, Pagination pagination) throws IllegalArgumentException
    {
        InputChecker.checkInput(pagination);
        InputChecker.checkInput(formula);
        if (properties == null || properties.isEmpty())
        {
            throw new IllegalArgumentException("Given input map of properties is empty.");
        }
        return formulaDAO.findSimilar(formula, properties, override, directWrite, pagination);
    }

    /**
     * Attaches the formulas identified by the given IDs as "similar" to the
     * given formula. When {@code override} is false the previously attached
     * similar formulas are kept.
     */
    @Override
    @Transactional(readOnly = false)
    public void attachSimilarFormulas(Formula formula, Long[] similarIDs, boolean override) throws IllegalArgumentException
    {
        InputChecker.checkInput(formula);
        if (similarIDs == null || similarIDs.length == 0)
        {
            throw new IllegalArgumentException("Input similarIDs is empty.");
        }
        d("Size is > 0. Starting attaching.");
        List<Formula> similarsToAdd = new ArrayList<>();
        if (!override && formula.getSimilarFormulas() != null)
        {
            d("Override disabled, adding previous similar forms.");
            similarsToAdd.addAll(formula.getSimilarFormulas());
        }
        for (Long id : similarIDs)
        {
            // because similar formulas are set as cascade refresh,
            // hibernate needs only IDs
            Formula f = EntityFactory.createFormula(id);
            d("Adding following formula: " + f);
            similarsToAdd.add(f);
        }
        formula.setSimilarFormulas(similarsToAdd);
        d("Task done with following output to be set:"
                + formula.getSimilarFormulas());
        //todo x sublist addition
        //so if A{w,x,y} where w,x,y are similar then set
        // x{w,y,A} w{A,x,y} and y{w,x,A} as similar
        formulaDAO.update(formula);
    }

    /** Deletes every formula in the given list. */
    @Override
    @Transactional(readOnly = false)
    public void massRemove(List<Formula> toBeRemoved) throws IllegalArgumentException
    {
        if (toBeRemoved == null || toBeRemoved.isEmpty())
        {
            throw new IllegalArgumentException("Formulas marked for input are empty.");
        }
        for (Formula f : toBeRemoved)
        {
            InputChecker.checkInput(f);
            formulaDAO.deleteFormula(f);
        }
    }

    /** Persists the annotation and appends it to the formula's annotations. */
    @Override
    @Transactional(readOnly = false)
    public void annotateFormula(Formula formula, Annotation annotation) throws IllegalArgumentException
    {
        InputChecker.checkInput(formula);
        if (annotation == null)
        {
            throw new IllegalArgumentException("Given annotation is null");
        }
        annotationDAO.create(annotation);
        List<Annotation> current = new ArrayList<>();
        if (formula.getAnnotations() != null && !formula.getAnnotations().isEmpty())
        {
            current.addAll(formula.getAnnotations());
        }
        current.add(annotation);
        formula.setAnnotations(current);
        formulaDAO.update(formula);
    }

    /** Detaches the annotation from the formula and deletes it. */
    @Override
    @Transactional(readOnly = false)
    public void deleteAnnotationFromFormula(Formula formula, Annotation annotation) throws IllegalArgumentException
    {
        InputChecker.checkInput(formula);
        InputChecker.checkInput(annotation);
        List<Annotation> temp = new ArrayList<>(formula.getAnnotations());
        temp.remove(annotation);
        formula.setAnnotations(temp);
        formulaDAO.update(formula);
        annotationDAO.delete(annotation.getId());
    }

    /** Runs a paginated formula search. */
    @Override
    @Transactional(readOnly = true)
    public SearchResponse<Formula> findFormulas(FormulaSearchRequest formulaSearchRequest, Pagination pagination) throws IllegalArgumentException
    {
        InputChecker.checkInput(pagination);
        if (formulaSearchRequest == null)
        {
            throw new IllegalArgumentException("Given request is empty.");
        }
        return formulaDAO.findFormulas(formulaSearchRequest, pagination);
    }

    /** Runs a formula search without pagination. */
    @Override
    @Transactional(readOnly = true)
    public SearchResponse<Formula> findFormulas(FormulaSearchRequest formulaSearchRequest) throws IllegalArgumentException
    {
        if (formulaSearchRequest == null)
        {
            throw new IllegalArgumentException("Given request is empty.");
        }
        return formulaDAO.findFormulas(formulaSearchRequest);
    }

    /**
     * Canonicalizes the formulas identified by the given IDs under a fresh
     * ApplicationRun. NOTE(review): intentionally not annotated
     * with @Transactional in the original — it appears to rely on an open
    * session from the caller; confirm before adding one.
     */
    @Override
    public void massCanonicalize(List<Long> listOfIds, Revision revision, Configuration configuration, User user) throws IllegalArgumentException
    {
        InputChecker.checkInput(configuration);
        InputChecker.checkInput(revision);
        InputChecker.checkInput(user);
        if (listOfIds == null || listOfIds.isEmpty())
        {
            throw new IllegalArgumentException("Given input list marked for canonicalization is empty, or null.");
        }
        ApplicationRun applicationRun = EntityFactory.createApplicationRun();
        applicationRun.setUser(user);
        applicationRun.setRevision(revision);
        applicationRun.setConfiguration(configuration);
        List<Formula> toCanonicalize = new ArrayList<>();
        for (Long formulaID : listOfIds)
        {
            Formula formula = formulaDAO.getFormulaByID(formulaID);
            // for some reason, the session is already closed in the task,
            // so we need to fetch the lazy collection while we have it...
            Hibernate.initialize(formula.getOutputs());
            toCanonicalize.add(formula);
        }
        if (!toCanonicalize.isEmpty())
        {
            // was logger.fatal: leftover debugging, downgraded to debug
            logger.debug("Attempt to create Application Run with flush mode to ensure its persisted.");
            applicationRunService.createApplicationRun(applicationRun, true);
            logger.debug("Operation withFlush called.");
            mathCanonicalizerLoader.execute(toCanonicalize, applicationRun);
        }
    }

    /** Returns all formulas extracted from the given source document. */
    @Override
    @Transactional(readOnly = true)
    public List<Formula> getFormulasBySourceDocument(SourceDocument sourceDocument) throws IllegalArgumentException
    {
        InputChecker.checkInput(sourceDocument);
        return formulaDAO.getFormulasBySourceDocument(sourceDocument);
    }

    /** Returns all formulas processed by the given program. */
    @Override
    @Transactional(readOnly = true)
    public List<Formula> getFormulasByProgram(Program program) throws IllegalArgumentException
    {
        InputChecker.checkInput(program);
        return formulaDAO.getFormulasByProgram(program);
    }

    /** Returns all formulas created by the given user. */
    @Override
    @Transactional(readOnly = true)
    public List<Formula> getFormulasByUser(User user) throws IllegalArgumentException
    {
        InputChecker.checkInput(user);
        return formulaDAO.getFormulasByUser(user);
    }

    /**
     * Method logs input with debug level into logger
     * @param s to be logged
     */
    private void d(String s)
    {
        logger.debug(s);
    }

    /**
     * Method takes elements from formula and matches them against already
     * persisted list of elements. If element already exist then it has id in
     * obtained list (from database) and id for element in formula is set.
     * Otherwise we check temp list which contains newly created elements. If
     * there is no match then new element is created and stored in temp list.
     * Equals method somehow fails on CascadeType.ALL, so this is reason why we
     * have to do manually. TODO redo in future. Possible solution would be to
     * have all possible elements already stored inside database.
     *
     * @param f formula of which we attach elements.
     */
    private void attachElements(Formula f)
    {
        if (f.getElements() != null && !f.getElements().isEmpty())
        {
            List<Element> list = elementDAO.getAllElements();
            List<Element> newList = new ArrayList<>();
            for (Element e : f.getElements())
            {
                int index = list.indexOf(e);
                if (index == -1)
                {
                    // not in the database: maybe we created it in this run
                    int index2 = newList.indexOf(e);
                    if (index2 == -1)
                    {
                        elementDAO.create(e);
                        newList.add(e);
                    }
                    else
                    {
                        e.setId(newList.get(index2).getId());
                    }
                }
                else
                {
                    // already persisted: reuse its ID
                    e.setId(list.get(index).getId());
                }
            }
        }
    }

    /** Returns all formulas whose canonic output matches the given SHA1 hash. */
    @Override
    @Transactional(readOnly = true)
    public List<Formula> getFormulasByCanonicOutputHash(String hash) throws IllegalArgumentException
    {
        if (hash == null || hash.length() != 40)
        {
            throw new IllegalArgumentException("Wrong sha1 fingerprint. Length should be 40 but was [" + hash + "]");
        }
        return formulaDAO.getFormulasByCanonicOutputHash(hash);
    }
}
| |
package apoc.load;
import apoc.export.util.FileUtils;
import apoc.result.MapResult;
import org.apache.commons.lang3.StringUtils;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.procedure.Context;
import org.neo4j.procedure.Description;
import org.neo4j.procedure.Name;
import org.neo4j.procedure.Procedure;
import org.w3c.dom.CharacterData;
import org.w3c.dom.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.util.*;
import java.util.stream.Stream;
import static apoc.util.Util.cleanUrl;
import static javax.xml.stream.XMLStreamConstants.*;
public class Xml {

    public static final XMLInputFactory FACTORY = XMLInputFactory.newFactory();

    @Context
    public GraphDatabaseService db;

    @Procedure
    @Description("apoc.load.xml('http://example.com/test.xml', 'xPath',config, false) YIELD value as doc CREATE (p:Person) SET p.name = doc.name load from XML URL (e.g. web-api) to import XML as single nested map with attributes and _type, _text and _children fields.")
    public Stream<MapResult> xml(@Name("url") String url, @Name(value = "path", defaultValue = "/") String path, @Name(value = "config",defaultValue = "{}") Map<String, Object> config, @Name(value = "simple", defaultValue = "false") boolean simpleMode) throws Exception {
        return xmlXpathToMapResult(url, simpleMode, path, config);
    }

    @Procedure(deprecatedBy = "apoc.load.xml")
    @Deprecated
    @Description("apoc.load.xmlSimple('http://example.com/test.xml') YIELD value as doc CREATE (p:Person) SET p.name = doc.name load from XML URL (e.g. web-api) to import XML as single nested map with attributes and _type, _text and _children fields. This method does intentionally not work with XML mixed content.")
    public Stream<MapResult> xmlSimple(@Name("url") String url) throws Exception {
        return xmlToMapResult(url, true);
    }

    /**
     * Parses the document at the given URL with DOM, selects nodes via the
     * given XPath expression and converts each selected node to a map.
     */
    private Stream<MapResult> xmlXpathToMapResult(@Name("url") String url, boolean simpleMode, String path, Map<String, Object> config) throws Exception {
        if (config == null) config = Collections.emptyMap();
        boolean failOnError = (boolean) config.getOrDefault("failOnError", true);
        List<MapResult> result = new ArrayList<>();
        try {
            DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
            documentBuilderFactory.setNamespaceAware(true);
            documentBuilderFactory.setIgnoringElementContentWhitespace(true);
            DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();

            FileUtils.checkReadAllowed(url);
            URLConnection urlConnection = new URL(url).openConnection();
            Document doc = documentBuilder.parse(urlConnection.getInputStream());

            XPathFactory xPathFactory = XPathFactory.newInstance();
            XPath xPath = xPathFactory.newXPath();
            path = StringUtils.isEmpty(path) ? "/" : path;
            XPathExpression xPathExpression = xPath.compile(path);
            NodeList nodeList = (NodeList) xPathExpression.evaluate(doc, XPathConstants.NODESET);
            for (int i = 0; i < nodeList.getLength(); i++) {
                final Deque<Map<String, Object>> stack = new LinkedList<>();
                handleNode(stack, nodeList.item(i), simpleMode);
                // Drain the whole stack. pollFirst() shrinks the deque, so an
                // index loop bounded by stack.size() (as before) only drained
                // half of the collected maps.
                while (!stack.isEmpty()) {
                    result.add(new MapResult(stack.pollFirst()));
                }
            }
        }
        catch (FileNotFoundException e) {
            if (!failOnError)
                return Stream.of(new MapResult(Collections.emptyMap()));
            else
                // rethrow as-is instead of re-wrapping, preserving the stack trace
                throw e;
        }
        catch (Exception e) {
            if (!failOnError)
                return Stream.of(new MapResult(Collections.emptyMap()));
            else
                // rethrow as-is instead of wrapping in a bare Exception
                throw e;
        }
        return result.stream();
    }

    /**
     * Streams the document at the given URL with StAX and converts it to a
     * single nested map.
     */
    private Stream<MapResult> xmlToMapResult(@Name("url") String url, boolean simpleMode) {
        try {
            XMLStreamReader reader = getXMLStreamReaderFromUrl(url);
            final Deque<Map<String, Object>> stack = new LinkedList<>();
            do {
                handleXmlEvent(stack, reader, simpleMode);
            } while (proceedReader(reader));

            return Stream.of(new MapResult(stack.getFirst()));
        } catch (IOException | XMLStreamException e) {
            throw new RuntimeException("Can't read url " + cleanUrl(url) + " as XML", e);
        }
    }

    private XMLStreamReader getXMLStreamReaderFromUrl(@Name("url") String url) throws IOException, XMLStreamException {
        FileUtils.checkReadAllowed(url);
        URLConnection urlConnection = new URL(url).openConnection();
        // NOTE(review): mutating the shared static FACTORY per call is not
        // thread-safe — confirm whether concurrent procedure calls are possible.
        FACTORY.setProperty("javax.xml.stream.isCoalescing", true);
        return FACTORY.createXMLStreamReader(urlConnection.getInputStream());
    }

    /**
     * Advances the reader past whitespace-only events.
     *
     * @return true if another event is available, false at end of document.
     */
    private boolean proceedReader(XMLStreamReader reader) throws XMLStreamException {
        if (reader.hasNext()) {
            do {
                reader.next();
            } while (reader.isWhiteSpace());
            return true;
        } else {
            return false;
        }
    }

    /** Converts one StAX event into the nested-map representation on the stack. */
    private void handleXmlEvent(Deque<Map<String, Object>> stack, XMLStreamReader reader, boolean simpleMode) throws XMLStreamException {
        Map<String, Object> elementMap;
        switch (reader.getEventType()) {
            case START_DOCUMENT:
            case END_DOCUMENT:
                // intentionally empty
                break;
            case START_ELEMENT:
                int attributes = reader.getAttributeCount();
                elementMap = new LinkedHashMap<>(attributes + 3);
                elementMap.put("_type", reader.getLocalName());
                for (int a = 0; a < attributes; a++) {
                    elementMap.put(reader.getAttributeLocalName(a), reader.getAttributeValue(a));
                }
                // link this element into its parent before pushing it
                if (!stack.isEmpty()) {
                    final Map<String, Object> last = stack.getLast();
                    String key = simpleMode ? "_" + reader.getLocalName() : "_children";
                    amendToList(last, key, elementMap);
                }

                stack.addLast(elementMap);
                break;
            case END_ELEMENT:
                elementMap = stack.size() > 1 ? stack.removeLast() : stack.getLast();
                // maintain compatibility with previous implementation:
                // if we only have text childs, return them in "_text" and not in "_children"
                Object children = elementMap.get("_children");
                if (children != null) {
                    if ((children instanceof String) || collectionIsAllStrings(children)) {
                        elementMap.put("_text", children);
                        elementMap.remove("_children");
                    }
                }
                break;
            case CHARACTERS:
                final String text = reader.getText().trim();
                if (!text.isEmpty()) {
                    Map<String, Object> map = stack.getLast();
                    amendToList(map, "_children", text);
                }
                break;
            default:
                throw new RuntimeException("dunno know how to handle xml event type " + reader.getEventType());
        }
    }

    /** Recursively converts a DOM node into the nested-map representation. */
    private void handleNode(Deque<Map<String, Object>> stack, Node node, boolean simpleMode) {
        // Handle document node: descend into the first named child and stop —
        // a document has exactly one element root.
        if (node.getNodeType() == Node.DOCUMENT_NODE) {
            NodeList children = node.getChildNodes();
            for (int i = 0; i < children.getLength(); i++) {
                if (children.item(i).getLocalName() != null) {
                    handleNode(stack, children.item(i), simpleMode);
                    return;
                }
            }
        }

        Map<String, Object> elementMap = new LinkedHashMap<>();
        handleTypeAndAttributes(node, elementMap);

        // Set children
        NodeList children = node.getChildNodes();
        int count = 0;
        for (int i = 0; i < children.getLength(); i++) {
            Node child = children.item(i);

            // This is to deal with text between xml tags for example new line characters
            if (child.getNodeType() != Node.TEXT_NODE && child.getNodeType() != Node.CDATA_SECTION_NODE) {
                handleNode(stack, child, simpleMode);
                count++;
            } else {
                // Deal with text nodes
                handleTextNode(child, elementMap);
            }
        }

        if (children.getLength() > 1) {
            if (!stack.isEmpty()) {
                // pop the children this call pushed and restore document order
                List<Object> nodeChildren = new ArrayList<>();
                for (int i = 0; i < count; i++) {
                    nodeChildren.add(stack.pollLast());
                }
                String key = simpleMode ? "_" + node.getLocalName() : "_children";
                Collections.reverse(nodeChildren);
                if (nodeChildren.size() > 0) {
                    // Before adding the children we need to handle mixed text
                    Object text = elementMap.get("_text");
                    if (text instanceof List) {
                        for (Object element : (List) text) {
                            nodeChildren.add(element);
                        }
                        elementMap.remove("_text");
                    }

                    elementMap.put(key, nodeChildren);
                }
            }
        }

        if (!elementMap.isEmpty()) {
            stack.addLast(elementMap);
        }
    }

    /**
     * Collects type and attributes for the node
     *
     * @param node
     * @param elementMap
     */
    private void handleTypeAndAttributes(Node node, Map<String, Object> elementMap) {
        // Set type
        if (node.getLocalName() != null) {
            elementMap.put("_type", node.getLocalName());
        }

        // Set the attributes
        if (node.getAttributes() != null) {
            NamedNodeMap attributeMap = node.getAttributes();
            for (int i = 0; i < attributeMap.getLength(); i++) {
                Node attribute = attributeMap.item(i);
                elementMap.put(attribute.getNodeName(), attribute.getNodeValue());
            }
        }
    }

    /**
     * Handle TEXT nodes and CDATA nodes
     *
     * @param node
     * @param elementMap
     */
    private void handleTextNode(Node node, Map<String, Object> elementMap) {
        Object text = "";
        int nodeType = node.getNodeType();
        switch (nodeType) {
            case Node.TEXT_NODE:
                text = normalizeText(node.getNodeValue());
                break;
            case Node.CDATA_SECTION_NODE:
                text = normalizeText(((CharacterData) node).getData());
                break;
            default:
                break;
        }

        // If the text is valid ...
        if (!StringUtils.isEmpty(text.toString())) {
            // We check if we have already collected some text previously
            Object previousText = elementMap.get("_text");
            if (previousText != null) {
                // If we just have a "_text" key than we need to collect to a List
                text = Arrays.asList(previousText.toString(), text);
            }
            elementMap.put("_text", text);
        }
    }

    /**
     * Remove trailing whitespaces and new line characters
     *
     * @param text
     * @return
     */
    private String normalizeText(String text) {
        String[] tokens = StringUtils.split(text, "\n");
        for (int i = 0; i < tokens.length; i++) {
            tokens[i] = tokens[i].trim();
        }

        return StringUtils.join(tokens, " ").trim();
    }

    /** Returns true only if the argument is a Collection containing nothing but Strings. */
    private boolean collectionIsAllStrings(Object collection) {
        if (collection instanceof Collection) {
            // wildcard cast avoids an unchecked-cast warning; we only read elements
            return ((Collection<?>) collection).stream().allMatch(o -> o instanceof String);
        } else {
            return false;
        }
    }

    /**
     * Adds the value under the key: absent key stores the plain value, a
     * second value promotes the entry to a list, further values append.
     */
    @SuppressWarnings("unchecked")
    private void amendToList(Map<String, Object> map, String key, Object value) {
        final Object existing = map.get(key);
        if (existing == null) {
            map.put(key, value);
        } else if (existing instanceof List) {
            ((List<Object>) existing).add(value);
        } else {
            List<Object> list = new LinkedList<>();
            list.add(existing);
            list.add(value);
            map.put(key, list);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.fontbox.cff;
/**
* This is specialized CFFCharset. It's used if the CharsetId of a font is set to 0.
*
* @author Villu Ruusmann
*/
public final class CFFISOAdobeCharset extends CFFCharsetType1
{
    // Indices into each row of the charset table below.
    private static final int CHAR_CODE = 0;
    private static final int CHAR_NAME = 1;

    /**
     * Private constructor: this charset is a stateless singleton, obtained
     * via {@link #getInstance()}. Registers the full ISOAdobe charset, in
     * which the GID of each glyph equals its character code (0-228).
     */
    private CFFISOAdobeCharset()
    {
        // Table of character codes and their corresponding names.
        Object[][] cffIsoAdobeCharsetTable = {
            {0, ".notdef"},
            {1, "space"},
            {2, "exclam"},
            {3, "quotedbl"},
            {4, "numbersign"},
            {5, "dollar"},
            {6, "percent"},
            {7, "ampersand"},
            {8, "quoteright"},
            {9, "parenleft"},
            {10, "parenright"},
            {11, "asterisk"},
            {12, "plus"},
            {13, "comma"},
            {14, "hyphen"},
            {15, "period"},
            {16, "slash"},
            {17, "zero"},
            {18, "one"},
            {19, "two"},
            {20, "three"},
            {21, "four"},
            {22, "five"},
            {23, "six"},
            {24, "seven"},
            {25, "eight"},
            {26, "nine"},
            {27, "colon"},
            {28, "semicolon"},
            {29, "less"},
            {30, "equal"},
            {31, "greater"},
            {32, "question"},
            {33, "at"},
            {34, "A"},
            {35, "B"},
            {36, "C"},
            {37, "D"},
            {38, "E"},
            {39, "F"},
            {40, "G"},
            {41, "H"},
            {42, "I"},
            {43, "J"},
            {44, "K"},
            {45, "L"},
            {46, "M"},
            {47, "N"},
            {48, "O"},
            {49, "P"},
            {50, "Q"},
            {51, "R"},
            {52, "S"},
            {53, "T"},
            {54, "U"},
            {55, "V"},
            {56, "W"},
            {57, "X"},
            {58, "Y"},
            {59, "Z"},
            {60, "bracketleft"},
            {61, "backslash"},
            {62, "bracketright"},
            {63, "asciicircum"},
            {64, "underscore"},
            {65, "quoteleft"},
            {66, "a"},
            {67, "b"},
            {68, "c"},
            {69, "d"},
            {70, "e"},
            {71, "f"},
            {72, "g"},
            {73, "h"},
            {74, "i"},
            {75, "j"},
            {76, "k"},
            {77, "l"},
            {78, "m"},
            {79, "n"},
            {80, "o"},
            {81, "p"},
            {82, "q"},
            {83, "r"},
            {84, "s"},
            {85, "t"},
            {86, "u"},
            {87, "v"},
            {88, "w"},
            {89, "x"},
            {90, "y"},
            {91, "z"},
            {92, "braceleft"},
            {93, "bar"},
            {94, "braceright"},
            {95, "asciitilde"},
            {96, "exclamdown"},
            {97, "cent"},
            {98, "sterling"},
            {99, "fraction"},
            {100, "yen"},
            {101, "florin"},
            {102, "section"},
            {103, "currency"},
            {104, "quotesingle"},
            {105, "quotedblleft"},
            {106, "guillemotleft"},
            {107, "guilsinglleft"},
            {108, "guilsinglright"},
            {109, "fi"},
            {110, "fl"},
            {111, "endash"},
            {112, "dagger"},
            {113, "daggerdbl"},
            {114, "periodcentered"},
            {115, "paragraph"},
            {116, "bullet"},
            {117, "quotesinglbase"},
            {118, "quotedblbase"},
            {119, "quotedblright"},
            {120, "guillemotright"},
            {121, "ellipsis"},
            {122, "perthousand"},
            {123, "questiondown"},
            {124, "grave"},
            {125, "acute"},
            {126, "circumflex"},
            {127, "tilde"},
            {128, "macron"},
            {129, "breve"},
            {130, "dotaccent"},
            {131, "dieresis"},
            {132, "ring"},
            {133, "cedilla"},
            {134, "hungarumlaut"},
            {135, "ogonek"},
            {136, "caron"},
            {137, "emdash"},
            {138, "AE"},
            {139, "ordfeminine"},
            {140, "Lslash"},
            {141, "Oslash"},
            {142, "OE"},
            {143, "ordmasculine"},
            {144, "ae"},
            {145, "dotlessi"},
            {146, "lslash"},
            {147, "oslash"},
            {148, "oe"},
            {149, "germandbls"},
            {150, "onesuperior"},
            {151, "logicalnot"},
            {152, "mu"},
            {153, "trademark"},
            {154, "Eth"},
            {155, "onehalf"},
            {156, "plusminus"},
            {157, "Thorn"},
            {158, "onequarter"},
            {159, "divide"},
            {160, "brokenbar"},
            {161, "degree"},
            {162, "thorn"},
            {163, "threequarters"},
            {164, "twosuperior"},
            {165, "registered"},
            {166, "minus"},
            {167, "eth"},
            {168, "multiply"},
            {169, "threesuperior"},
            {170, "copyright"},
            {171, "Aacute"},
            {172, "Acircumflex"},
            {173, "Adieresis"},
            {174, "Agrave"},
            {175, "Aring"},
            {176, "Atilde"},
            {177, "Ccedilla"},
            {178, "Eacute"},
            {179, "Ecircumflex"},
            {180, "Edieresis"},
            {181, "Egrave"},
            {182, "Iacute"},
            {183, "Icircumflex"},
            {184, "Idieresis"},
            {185, "Igrave"},
            {186, "Ntilde"},
            {187, "Oacute"},
            {188, "Ocircumflex"},
            {189, "Odieresis"},
            {190, "Ograve"},
            {191, "Otilde"},
            {192, "Scaron"},
            {193, "Uacute"},
            {194, "Ucircumflex"},
            {195, "Udieresis"},
            {196, "Ugrave"},
            {197, "Yacute"},
            {198, "Ydieresis"},
            {199, "Zcaron"},
            {200, "aacute"},
            {201, "acircumflex"},
            {202, "adieresis"},
            {203, "agrave"},
            {204, "aring"},
            {205, "atilde"},
            {206, "ccedilla"},
            {207, "eacute"},
            {208, "ecircumflex"},
            {209, "edieresis"},
            {210, "egrave"},
            {211, "iacute"},
            {212, "icircumflex"},
            {213, "idieresis"},
            {214, "igrave"},
            {215, "ntilde"},
            {216, "oacute"},
            {217, "ocircumflex"},
            {218, "odieresis"},
            {219, "ograve"},
            {220, "otilde"},
            {221, "scaron"},
            {222, "uacute"},
            {223, "ucircumflex"},
            {224, "udieresis"},
            {225, "ugrave"},
            {226, "yacute"},
            {227, "ydieresis"},
            {228, "zcaron"}
        };

        // GIDs are assigned sequentially, matching the character codes above.
        int gid = 0;
        for (Object[] charsetEntry : cffIsoAdobeCharsetTable)
        {
            addSID(gid++, (Integer) charsetEntry[CHAR_CODE], charsetEntry[CHAR_NAME].toString());
        }
    }

    /**
     * Returns the singleton instance of the CFFISOAdobeCharset class.
     * (The previous javadoc incorrectly referred to CFFExpertSubsetCharset.)
     *
     * @return the single CFFISOAdobeCharset instance
     */
    public static CFFISOAdobeCharset getInstance()
    {
        return CFFISOAdobeCharset.INSTANCE;
    }

    // Eagerly-created singleton; the class carries no mutable state.
    private static final CFFISOAdobeCharset INSTANCE = new CFFISOAdobeCharset();
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.openxr;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Pose action metadata.
*
* <h5>Description</h5>
*
* <p>A pose action <b>must</b> not be bound to multiple input sources, according to <a target="_blank" href="https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#multiple_inputs">the previously defined rule</a>.</p>
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code type} <b>must</b> be {@link XR10#XR_TYPE_ACTION_STATE_POSE TYPE_ACTION_STATE_POSE}</li>
* <li>{@code next} <b>must</b> be {@code NULL} or a valid pointer to the <a target="_blank" href="https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#valid-usage-for-structure-pointer-chains">next structure in a structure chain</a></li>
* </ul>
*
* <h5>See Also</h5>
*
* <p>{@link XR10#xrGetActionStatePose GetActionStatePose}</p>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct XrActionStatePose {
* XrStructureType {@link #type};
* void * {@link #next};
* XrBool32 {@link #isActive};
* }</code></pre>
*/
public class XrActionStatePose extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        TYPE,
        NEXT,
        ISACTIVE;

    static {
        // Native layout mirrors `struct XrActionStatePose`:
        // XrStructureType type (4 bytes), void *next (pointer-sized), XrBool32 isActive (4 bytes).
        // Offsets and total size/alignment are computed for the host ABI at class-load time.
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(4)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        TYPE = layout.offsetof(0);
        NEXT = layout.offsetof(1);
        ISACTIVE = layout.offsetof(2);
    }

    /**
     * Creates a {@code XrActionStatePose} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public XrActionStatePose(ByteBuffer container) {
        // __checkContainer verifies the buffer has at least SIZEOF bytes remaining.
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the {@code XrStructureType} of this structure. */
    @NativeType("XrStructureType")
    public int type() { return ntype(address()); }
    /** {@code NULL} or a pointer to the next structure in a structure chain. No such structures are defined in core OpenXR. */
    @NativeType("void *")
    public long next() { return nnext(address()); }
    /** {@link XR10#XR_TRUE TRUE} if and only if there exists an input source that is being tracked by this pose action. */
    @NativeType("XrBool32")
    public boolean isActive() { return nisActive(address()) != 0; }

    /** Sets the specified value to the {@link #type} field. */
    public XrActionStatePose type(@NativeType("XrStructureType") int value) { ntype(address(), value); return this; }
    /** Sets the {@link XR10#XR_TYPE_ACTION_STATE_POSE TYPE_ACTION_STATE_POSE} value to the {@link #type} field. */
    public XrActionStatePose type$Default() { return type(XR10.XR_TYPE_ACTION_STATE_POSE); }
    /** Sets the specified value to the {@link #next} field. */
    public XrActionStatePose next(@NativeType("void *") long value) { nnext(address(), value); return this; }
    /** Sets the specified value to the {@link #isActive} field. */
    public XrActionStatePose isActive(@NativeType("XrBool32") boolean value) { nisActive(address(), value ? 1 : 0); return this; }

    /** Initializes this struct with the specified values. */
    public XrActionStatePose set(
        int type,
        long next,
        boolean isActive
    ) {
        type(type);
        next(next);
        isActive(isActive);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public XrActionStatePose set(XrActionStatePose src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code XrActionStatePose} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static XrActionStatePose malloc() {
        return wrap(XrActionStatePose.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code XrActionStatePose} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static XrActionStatePose calloc() {
        return wrap(XrActionStatePose.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code XrActionStatePose} instance allocated with {@link BufferUtils}. */
    public static XrActionStatePose create() {
        // Backed by a garbage-collected NIO buffer; no explicit free required.
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(XrActionStatePose.class, memAddress(container), container);
    }

    /** Returns a new {@code XrActionStatePose} instance for the specified memory address. */
    public static XrActionStatePose create(long address) {
        return wrap(XrActionStatePose.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static XrActionStatePose createSafe(long address) {
        return address == NULL ? null : wrap(XrActionStatePose.class, address);
    }

    /**
     * Returns a new {@link XrActionStatePose.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static XrActionStatePose.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link XrActionStatePose.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static XrActionStatePose.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link XrActionStatePose.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static XrActionStatePose.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link XrActionStatePose.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static XrActionStatePose.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static XrActionStatePose.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    /**
     * Returns a new {@code XrActionStatePose} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static XrActionStatePose malloc(MemoryStack stack) {
        return wrap(XrActionStatePose.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code XrActionStatePose} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static XrActionStatePose calloc(MemoryStack stack) {
        return wrap(XrActionStatePose.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link XrActionStatePose.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static XrActionStatePose.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link XrActionStatePose.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static XrActionStatePose.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------
    // Unsafe accessors: operate directly on a raw struct address, no bounds checks.

    /** Unsafe version of {@link #type}. */
    public static int ntype(long struct) { return UNSAFE.getInt(null, struct + XrActionStatePose.TYPE); }
    /** Unsafe version of {@link #next}. */
    public static long nnext(long struct) { return memGetAddress(struct + XrActionStatePose.NEXT); }
    /** Unsafe version of {@link #isActive}. */
    public static int nisActive(long struct) { return UNSAFE.getInt(null, struct + XrActionStatePose.ISACTIVE); }

    /** Unsafe version of {@link #type(int) type}. */
    public static void ntype(long struct, int value) { UNSAFE.putInt(null, struct + XrActionStatePose.TYPE, value); }
    /** Unsafe version of {@link #next(long) next}. */
    public static void nnext(long struct, long value) { memPutAddress(struct + XrActionStatePose.NEXT, value); }
    /** Unsafe version of {@link #isActive(boolean) isActive}. */
    public static void nisActive(long struct, int value) { UNSAFE.putInt(null, struct + XrActionStatePose.ISACTIVE, value); }

    // -----------------------------------

    /** An array of {@link XrActionStatePose} structs. */
    public static class Buffer extends StructBuffer<XrActionStatePose, Buffer> implements NativeResource {

        // Shared factory element; created at address -1 (never dereferenced directly).
        private static final XrActionStatePose ELEMENT_FACTORY = XrActionStatePose.create(-1L);

        /**
         * Creates a new {@code XrActionStatePose.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link XrActionStatePose#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected XrActionStatePose getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link XrActionStatePose#type} field. */
        @NativeType("XrStructureType")
        public int type() { return XrActionStatePose.ntype(address()); }
        /** @return the value of the {@link XrActionStatePose#next} field. */
        @NativeType("void *")
        public long next() { return XrActionStatePose.nnext(address()); }
        /** @return the value of the {@link XrActionStatePose#isActive} field. */
        @NativeType("XrBool32")
        public boolean isActive() { return XrActionStatePose.nisActive(address()) != 0; }

        /** Sets the specified value to the {@link XrActionStatePose#type} field. */
        public XrActionStatePose.Buffer type(@NativeType("XrStructureType") int value) { XrActionStatePose.ntype(address(), value); return this; }
        /** Sets the {@link XR10#XR_TYPE_ACTION_STATE_POSE TYPE_ACTION_STATE_POSE} value to the {@link XrActionStatePose#type} field. */
        public XrActionStatePose.Buffer type$Default() { return type(XR10.XR_TYPE_ACTION_STATE_POSE); }
        /** Sets the specified value to the {@link XrActionStatePose#next} field. */
        public XrActionStatePose.Buffer next(@NativeType("void *") long value) { XrActionStatePose.nnext(address(), value); return this; }
        /** Sets the specified value to the {@link XrActionStatePose#isActive} field. */
        public XrActionStatePose.Buffer isActive(@NativeType("XrBool32") boolean value) { XrActionStatePose.nisActive(address(), value ? 1 : 0); return this; }

    }

}
| |
/**
* Date Modified: $Date: 2010-11-15 13:38:09 +1100 (Mon, 15 Nov 2010) $
* Version: $Revision: 559 $
*
* Copyright 2008 The Australian National University (ANU)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.edu.apsr.pids.util;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.Transformer;
import net.handle.hdllib.HandleValue;
import au.edu.apsr.pids.servlet.MintServlet;
import au.edu.apsr.pids.to.Handle;
import au.edu.apsr.pids.util.Constants;
import au.edu.apsr.pids.to.TrustedClient;
/**
* Utility methods for XML processing
*
* @author Scott Yeadon, ANU
*/
public class XMLSupport
{
    /** Value of XML RESPONSE_TYPE_ATTRIBUTE to indicate a failed request */
    public static final String RESPONSE_TYPE_FAILURE = "failure";

    /** Value of XML RESPONSE_TYPE_ATTRIBUTE to indicate a successful request */
    public static final String RESPONSE_TYPE_SUCCESS = "success";

    /** Name of XML root element of service response */
    public static final String RESPONSE_ELEMENT = "response";

    /** Name of XML timestamp element */
    public static final String RESPONSE_TIMESTAMP_ELEMENT = "timestamp";

    /** Name of XML response type attribute */
    public static final String RESPONSE_TYPE_ATTRIBUTE = "type";

    /** Name of XML message element */
    public static final String RESPONSE_MESSAGE_ELEMENT = "message";

    /** Name of XML properties element */
    public static final String RESPONSE_PROPERTIES_ELEMENT = "properties";

    /** Name of XML property element */
    public static final String RESPONSE_PROPERTY_ELEMENT = "property";

    /** Name of XML property property name attribute */
    public static final String RESPONSE_NAME_ATTRIBUTE = "name";

    /** Name of XML response property value attribute */
    public static final String RESPONSE_VALUE_ATTRIBUTE = "value";

    /** Name of XML identifiers element */
    public static final String RESPONSE_IDENTIFIERS_ELEMENT = "identifiers";

    /** Name of XML identifier element */
    public static final String RESPONSE_IDENTIFIER_ELEMENT = "identifier";

    /** Name of XML trustedclients element */
    public static final String RESPONSE_TRUSTEDCLIENTS_ELEMENT = "trustedclients";

    /** Name of XML client element */
    public static final String RESPONSE_CLIENT_ELEMENT = "client";

    /** Name of XML appId attribute */
    public static final String RESPONSE_APPID_ATTRIBUTE = "appId";

    /** Name of XML IP attribute */
    public static final String RESPONSE_IP_ATTRIBUTE = "ip";

    /** Name of XML DESCRIPTION attribute */
    public static final String RESPONSE_DESCRIPTION_ATTRIBUTE = "desc";

    /** Name of XML handle attribute of an identifier element */
    public static final String RESPONSE_HANDLE_ATTRIBUTE = "handle";

    /** Java Date Format for UTC dates used within the harvester application */
    public static final String TIMESTAMP_UTC_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'";


    /**
     * create an XML document representing a failed or successful
     * service call.
     *
     * @param type
     *      'success' or 'failure'
     * @param messageString
     *      a message string to form part of the XML response
     * @param messageCategory
     *      the class of message
     * @param clientMap
     *      a map containing appId, IP and description of clients
     *      part of the XML response (can be null)
     *
     * @return String
     *      an XML string
     */
    public static String getXMLResponse(String type,
                                        String messageString,
                                        String messageCategory,
                                        ArrayList<TrustedClient> clientMap) throws Exception
    {
        try
        {
            Document doc = createResponseDocument(type);
            Element root = doc.getDocumentElement();

            if (clientMap != null)
            {
                Element clients = doc.createElement(RESPONSE_TRUSTEDCLIENTS_ELEMENT);
                for (TrustedClient tc : clientMap)
                {
                    Element client = doc.createElement(RESPONSE_CLIENT_ELEMENT);
                    client.setAttribute(RESPONSE_APPID_ATTRIBUTE, tc.getAppId());
                    client.setAttribute(RESPONSE_IP_ATTRIBUTE, tc.getIP());
                    client.setAttribute(RESPONSE_DESCRIPTION_ATTRIBUTE, tc.getDescription());
                    clients.appendChild(client);
                }
                root.appendChild(clients);
            }

            appendTimestampAndMessage(doc, root, messageString, messageCategory);
            return serialize(doc);
        }
        catch (Exception e)
        {
            throw new Exception(e);
        }
    }


    /**
     * create an XML document representing a failed or successful
     * service call.
     *
     * @param type
     *      'success' or 'failure'
     * @param messageString
     *      a message string to form part of the XML response
     * @param messageCategory
     *      the class of message
     * @param propertyMap
     *      a map containing name value pairs which will form
     *      part of the XML response (can be null)
     *
     * @return String
     *      an XML string
     */
    public static String getXMLResponse(String type,
                                        String messageString,
                                        String messageCategory,
                                        Map<String,String> propertyMap) throws Exception
    {
        try
        {
            Document doc = createResponseDocument(type);
            Element root = doc.getDocumentElement();

            if (propertyMap != null)
            {
                Element properties = doc.createElement(RESPONSE_PROPERTIES_ELEMENT);
                for (Map.Entry<String,String> entry : propertyMap.entrySet())
                {
                    Element property = doc.createElement(RESPONSE_PROPERTY_ELEMENT);
                    property.setAttribute(RESPONSE_NAME_ATTRIBUTE, entry.getKey());
                    property.setAttribute(RESPONSE_VALUE_ATTRIBUTE, entry.getValue());
                    properties.appendChild(property);
                }
                root.appendChild(properties);
            }

            appendTimestampAndMessage(doc, root, messageString, messageCategory);
            return serialize(doc);
        }
        catch (Exception e)
        {
            throw new Exception(e);
        }
    }


    /**
     * create an XML document representing a failed or successful
     * listHandles service call
     *
     * @param type
     *      'success' or 'failure'
     * @param messageString
     *      a message string to form part of the XML response
     * @param messageCategory
     *      the class of message
     * @param handleList
     *      a list of handle strings
     *
     * @return String
     *      an XML string
     */
    public static String getXMLListStringsResponse(String type,
                                                   String messageString,
                                                   String messageCategory,
                                                   List<String> handleList) throws Exception
    {
        try
        {
            Document doc = createResponseDocument(type);
            Element root = doc.getDocumentElement();

            if (!handleList.isEmpty())
            {
                Element identifiers = doc.createElement(RESPONSE_IDENTIFIERS_ELEMENT);
                for (String handle : handleList)
                {
                    Element identifier = doc.createElement(RESPONSE_IDENTIFIER_ELEMENT);
                    identifier.setAttribute(RESPONSE_HANDLE_ATTRIBUTE, handle);
                    identifiers.appendChild(identifier);
                }
                root.appendChild(identifiers);
            }

            appendTimestampAndMessage(doc, root, messageString, messageCategory);
            return serialize(doc);
        }
        catch (Exception e)
        {
            throw new Exception(e);
        }
    }


    /**
     * create an XML document representing a failed or successful
     * listHandles service call
     *
     * @param type
     *      'success' or 'failure'
     * @param messageString
     *      a message string to form part of the XML response
     * @param messageCategory
     *      the class of message
     * @param handleList
     *      a list of handle objects
     *
     * @return String
     *      an XML string
     */
    public static String getXMLListHandlesResponse(String type,
                                                   String messageString,
                                                   String messageCategory,
                                                   List<Handle> handleList) throws Exception
    {
        try
        {
            Document doc = createResponseDocument(type);
            Element root = doc.getDocumentElement();

            if (!handleList.isEmpty())
            {
                Element identifiers = doc.createElement(RESPONSE_IDENTIFIERS_ELEMENT);
                for (Handle handle : handleList)
                {
                    Element identifier = doc.createElement(RESPONSE_IDENTIFIER_ELEMENT);
                    identifier.setAttribute(RESPONSE_HANDLE_ATTRIBUTE, handle.getHandle());
                    identifiers.appendChild(identifier);
                }
                root.appendChild(identifiers);
            }

            appendTimestampAndMessage(doc, root, messageString, messageCategory);
            return serialize(doc);
        }
        catch (Exception e)
        {
            throw new Exception(e);
        }
    }


    /**
     * create an XML document representing a failed or successful
     * getHandle service call
     *
     * @param type
     *      'success' or 'failure'
     * @param messageString
     *      a message string to form part of the XML response
     * @param messageCategory
     *      the class of message
     * @param handleList
     *      a list of handle objects; only the first handle is reported
     *
     * @return String
     *      an XML string
     */
    public static String getXMLGetHandleResponse(String type,
                                                 String messageString,
                                                 String messageCategory,
                                                 List<Handle> handleList) throws Exception
    {
        try
        {
            Document doc = createResponseDocument(type);
            Element root = doc.getDocumentElement();

            if (!handleList.isEmpty())
            {
                Element identifier = doc.createElement(RESPONSE_IDENTIFIER_ELEMENT);
                Handle handle = handleList.iterator().next();
                identifier.setAttribute(RESPONSE_HANDLE_ATTRIBUTE, handle.getHandle());

                // Only URL and description handle values are exposed in the response
                String[] types = {Constants.STD_TYPE_URL_STRING, Constants.XT_TYPE_DESC_STRING};
                HandleValue[] hv = handle.getValues(types);
                for (int j = 0; j < hv.length; j++)
                {
                    Element property = doc.createElement(RESPONSE_PROPERTY_ELEMENT);
                    property.setAttribute("index", String.valueOf(hv[j].getIndex()));
                    property.setAttribute(RESPONSE_TYPE_ATTRIBUTE, String.valueOf(hv[j].getTypeAsString()));
                    property.setAttribute(RESPONSE_VALUE_ATTRIBUTE, String.valueOf(hv[j].getDataAsString()));
                    identifier.appendChild(property);
                }
                root.appendChild(identifier);
            }

            appendTimestampAndMessage(doc, root, messageString, messageCategory);
            return serialize(doc);
        }
        catch (Exception e)
        {
            throw new Exception(e);
        }
    }


    /**
     * Create an empty response document whose root element carries the
     * given response type ('success' or 'failure').
     *
     * @param type
     *      the value for the root element's type attribute
     *
     * @return Document
     *      a new document with the root element already attached
     */
    private static Document createResponseDocument(String type) throws Exception
    {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document doc = builder.newDocument();

        Element root = doc.createElement(RESPONSE_ELEMENT);
        root.setAttribute(RESPONSE_TYPE_ATTRIBUTE, type);
        doc.appendChild(root);
        return doc;
    }


    /**
     * Append the UTC timestamp element and the message element to the
     * response root. Called after any payload elements so the element
     * order matches the original response layout.
     *
     * @param doc
     *      the response document
     * @param root
     *      the response root element
     * @param messageString
     *      the message text
     * @param messageCategory
     *      the class of message, written to the message type attribute
     */
    private static void appendTimestampAndMessage(Document doc,
                                                  Element root,
                                                  String messageString,
                                                  String messageCategory)
    {
        Element timestamp = doc.createElement(RESPONSE_TIMESTAMP_ELEMENT);
        // SimpleDateFormat is not thread-safe, so a fresh instance is used per call
        SimpleDateFormat sdf = new SimpleDateFormat(TIMESTAMP_UTC_FORMAT);
        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        sdf.setCalendar(cal);
        timestamp.setTextContent(sdf.format(cal.getTime()));

        Element message = doc.createElement(RESPONSE_MESSAGE_ELEMENT);
        message.setAttribute(RESPONSE_TYPE_ATTRIBUTE, messageCategory);
        message.setTextContent(messageString);

        root.appendChild(timestamp);
        root.appendChild(message);
    }


    /**
     * Serialize a DOM document to an indented XML string.
     *
     * @param doc
     *      the document to serialize
     *
     * @return String
     *      the serialized XML
     */
    private static String serialize(Document doc) throws Exception
    {
        DOMSource domSource = new DOMSource(doc);
        StringWriter stringWriter = new StringWriter();
        StreamResult result = new StreamResult(stringWriter);

        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer transformer = tf.newTransformer();
        transformer.setOutputProperty("indent","yes");
        transformer.transform(domSource, result);
        return stringWriter.toString();
    }
}
| |
/*
* Copyright (C) 2016 David Vittor http://digitalspider.com.au
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalspider.jspwiki.plugin;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
import java.util.Properties;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.wiki.WikiContext;
import org.apache.wiki.WikiEngine;
import org.apache.wiki.api.engine.PluginManager;
import org.apache.wiki.api.exceptions.PluginException;
import org.apache.wiki.api.plugin.WikiPlugin;
public class CSVPlugin implements WikiPlugin {
private final Logger log = Logger.getLogger(CSVPluginTest.class);
/**
 * Supported JDBC database flavours, each carrying its driver class name,
 * the JDBC URL prefix it expects, and an example URL template.
 */
public enum SQLType {
    MYSQL("com.mysql.jdbc.Driver", "jdbc:mysql:", "jdbc:mysql://hostname:portNumber/databaseName"),
    MSSQL("com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbc:sqlserver:", "jdbc:sqlserver://serverName\\instanceName:portNumber"),
    POSTGRESQL("org.postgresql.Driver", "jdbc:postgresql:", "jdbc:postgresql://hostname:portNumber/databaseName"),
    ORACLE("oracle.jdbc.driver.OracleDriver", "jdbc:oracle:", "jdbc:oracle:thin:@hostname:portNumber:databaseName"),
    DB2("COM.ibm.db2.jdbc.net.DB2Driver", "jdbc:db2:", "jdbc:db2:hostname:portNumber/databaseName"),
    SYBASE("com.sybase.jdbc.SybDriver", "jdbc:sybase:", "jdbc:sybase:Tds:hostname:portNumber/databaseName");

    private String driverClass;   // fully-qualified JDBC driver class
    private String startsWith;    // expected jdbc URL prefix for this flavour
    private String urlDefaultPath; // example URL showing the expected shape

    SQLType(String driverClass, String startsWith, String urlDefaultPath) {
        this.driverClass = driverClass;
        this.startsWith = startsWith;
        this.urlDefaultPath = urlDefaultPath;
    }

    /**
     * Resolve a SQLType from either its enum name or its driver class name,
     * case-insensitively.
     *
     * @param input the enum name (e.g. "mysql") or driver class name
     * @return the matching SQLType
     * @throws Exception if no flavour matches the input
     */
    public static SQLType parse(String input) throws Exception {
        for (SQLType candidate : values()) {
            boolean matchesName = candidate.name().equalsIgnoreCase(input);
            boolean matchesDriver = candidate.driverClass.equalsIgnoreCase(input);
            if (matchesName || matchesDriver) {
                return candidate;
            }
        }
        throw new Exception("Could not find SQLType of value: "+input);
    }
}
public static final SQLType DEFAULT_TYPE = SQLType.MYSQL;
public static final String DEFAULT_URL = "";
public static final String DEFAULT_USER = "";
public static final String DEFAULT_PASSWORD = "";
public static final Integer DEFAULT_MAXRESULTS = 50;
public static final String DEFAULT_CLASS = "sql-table";
public static final String DEFAULT_SQL = "select 1";
public static final Boolean DEFAULT_HEADER = true;
public static final String DEFAULT_SOURCE = null;
private static final String PROP_DRIVER = "jdbc.driver";
private static final String PROP_URL = "jdbc.url";
private static final String PROP_USER = "jdbc.user";
private static final String PROP_PASSWORD = "jdbc.password";
private static final String PROP_MAXRESULTS = "jdbc.maxresults";
private static final String PARAM_CLASS = "class";
private static final String PARAM_SQL = "sql";
private static final String PARAM_HEADER = "header";
private static final String PARAM_SOURCE = "src";
private SQLType sqlType = DEFAULT_TYPE;
private String dbUrl = DEFAULT_URL;
private String dbUser = DEFAULT_USER;
private String dbPassword = DEFAULT_PASSWORD;
private Integer maxResults = DEFAULT_MAXRESULTS;
private String className = DEFAULT_CLASS;
private String sql = DEFAULT_SQL;
private Boolean header = DEFAULT_HEADER;
private String source = DEFAULT_SOURCE;
private DataSource ds = null;
/**
 * Plugin entry point: runs the configured SQL query (capped at
 * {@code maxResults} rows) and renders the result set as a JSPWiki table
 * wrapped in a div carrying the configured CSS class.
 *
 * <p>Fix: the original leaked the JDBC {@link Statement} and
 * {@link ResultSet} — only the connection was closed. All three resources
 * are now released in the {@code finally} block. The loop-local column
 * label no longer shadows the {@code header} field.</p>
 *
 * @param wikiContext the rendering context supplied by JSPWiki
 * @param params      plugin parameters (validated by {@code validateParams})
 * @return the rendered HTML table
 * @throws PluginException if validation, connection or query execution fails
 */
@Override
public String execute(WikiContext wikiContext, Map<String, String> params) throws PluginException {
    setLogForDebug(params.get(PluginManager.PARAM_DEBUG));
    log.info("STARTED");
    String result = "";
    StringBuffer buffer = new StringBuffer();
    WikiEngine engine = wikiContext.getEngine();
    Properties props = engine.getWikiProperties();
    // Validate all parameters
    validateParams(props, params);
    Connection conn = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        if (ds == null) {
            // No DataSource configured: fall back to a direct DriverManager connection
            if (StringUtils.isBlank(dbUser) && StringUtils.isBlank(dbPassword)) {
                conn = DriverManager.getConnection(dbUrl);
            } else {
                conn = DriverManager.getConnection(dbUrl, dbUser, dbPassword);
            }
            if (conn == null) {
                throw new Exception("Could not create connection for url=" + dbUrl + " user=" + dbUser);
            }
        } else {
            conn = ds.getConnection();
        }
        // Cap the result size before executing
        sql = addLimits(sqlType,sql,maxResults);
        stmt = conn.createStatement();
        rs = stmt.executeQuery(sql);
        ResultSetMetaData md = rs.getMetaData();
        if (header) {
            // "|| label" is JSPWiki markup for a table header cell
            for (int i = 0; i < md.getColumnCount(); i++) {
                String label = md.getColumnLabel(i + 1);
                buffer.append("|| " + label);
            }
            buffer.append("\n");
        }
        while (rs.next()) {
            // "| value" is JSPWiki markup for a table data cell
            for (int i = 0; i < md.getColumnCount(); i++) {
                String value = rs.getString(i + 1);
                buffer.append("| " + value);
            }
            buffer.append("\n");
        }
        log.info("result=" + buffer.toString());
        result = engine.textToHTML(wikiContext, buffer.toString());
        result = "<div class='" + className + "'>" + result + "</div>";
    } catch (Exception e) {
        log.error("ERROR. " + e.getMessage() + ". sql=" + sql, e);
        throw new PluginException(e.getMessage());
    } finally {
        // Release JDBC resources in reverse order of acquisition; the
        // original closed only the connection and leaked stmt/rs.
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                // ignore — best-effort cleanup
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                // ignore — best-effort cleanup
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                // ignore — best-effort cleanup
            }
        }
    }
    return result;
}
/**
 * Reads and validates all configuration for this invocation, populating
 * the instance fields. Connection settings come from the wiki properties
 * (jdbc.* keys, optionally suffixed with ".<source>", see getPropKey());
 * presentation settings come from the plugin parameters.
 *
 * @param props  wiki engine properties
 * @param params plugin invocation parameters
 * @throws PluginException if a value is malformed or the driver / JNDI
 *         data source cannot be resolved
 */
protected void validateParams(Properties props, Map<String, String> params) throws PluginException {
    String paramName;
    String param;
    log.info("validateParams() START");
    // Optional "src" parameter selects a named data-source configuration.
    paramName = PARAM_SOURCE;
    param = params.get(paramName);
    if (StringUtils.isNotBlank(param)) {
        log.info(paramName + "=" + param);
        if (!StringUtils.isAsciiPrintable(param)) {
            throw new PluginException(paramName + " parameter is not a valid value");
        }
        source = param;
    }
    // jdbc.driver: when configured, load the JDBC driver directly;
    // otherwise fall back to a container-managed JNDI data source.
    paramName = getPropKey(PROP_DRIVER, source);
    param = props.getProperty(paramName);
    if (StringUtils.isNotBlank(param)) {
        log.info(paramName + "=" + param);
        try {
            sqlType = SQLType.parse(param);
        } catch (Exception e) {
            throw new PluginException(paramName + " property is not a valid value. " + param);
        }
        try {
            Class.forName(param).newInstance();
        } catch (ClassNotFoundException e) {
            log.error("Error: unable to load driver class " + param + "!", e);
            throw new PluginException("Error: unable to load driver class " + param + "!");
        } catch (IllegalAccessException e) {
            log.error("Error: access problem while loading " + param + "!", e);
            throw new PluginException("Error: access problem while loading " + param + "!");
        } catch (InstantiationException e) {
            log.error("Error: unable to instantiate driver " + param + "!", e);
            throw new PluginException("Error: unable to instantiate driver " + param + "!");
        } catch (Exception e) {
            log.error("Error: unable to load driver " + param + "!", e);
            throw new PluginException("Error: unable to load driver " + param + "! " + e.getMessage());
        }
    } else {
        try {
            Context ctx = new InitialContext();
            ds = (DataSource) ctx.lookup("java:/comp/env/jdbc/" + source);
        } catch (NamingException e) {
            // Message fix: the original read "Neither ... or ... has not
            // been configured", a garbled double negative.
            log.error("Neither jspwiki-custom.properties nor conf/context.xml has been configured for " + source + "!");
            throw new PluginException("Neither jspwiki-custom.properties nor conf/context.xml has been configured for " + source + "!");
        }
    }
    // Direct-connection settings are only relevant without a JNDI data source.
    if (ds == null) {
        paramName = getPropKey(PROP_URL, source);
        param = props.getProperty(paramName);
        if (StringUtils.isNotBlank(param)) {
            log.info(paramName + "=" + param);
            if (!StringUtils.isAsciiPrintable(param)) {
                throw new PluginException(paramName + " property is not a valid value");
            }
            // The URL prefix must match the configured driver type.
            if (!param.trim().startsWith(sqlType.startsWith)) {
                throw new PluginException("Error: " + paramName + " property has value " + param + ". " +
                        "Expected: " + sqlType.urlDefaultPath);
            }
            dbUrl = param;
        }
        paramName = getPropKey(PROP_USER, source);
        param = props.getProperty(paramName);
        if (StringUtils.isNotBlank(param)) {
            log.info(paramName + "=" + param);
            if (!StringUtils.isAsciiPrintable(param)) {
                throw new PluginException(paramName + " property is not a valid value");
            }
            dbUser = param;
        }
        paramName = getPropKey(PROP_PASSWORD, source);
        param = props.getProperty(paramName);
        if (StringUtils.isNotBlank(param)) {
            // Security fix: never write the password itself to the log.
            log.info(paramName + "=********");
            if (!StringUtils.isAsciiPrintable(param)) {
                throw new PluginException(paramName + " property is not a valid value");
            }
            dbPassword = param;
        }
    }
    paramName = getPropKey(PROP_MAXRESULTS, source);
    param = props.getProperty(paramName);
    if (StringUtils.isNotBlank(param)) {
        log.info(paramName + "=" + param);
        if (!StringUtils.isNumeric(param)) {
            throw new PluginException(paramName + " property is not a valid value");
        }
        maxResults = Integer.parseInt(param);
    }
    paramName = PARAM_CLASS;
    param = params.get(paramName);
    if (StringUtils.isNotBlank(param)) {
        log.info(paramName + "=" + param);
        if (!StringUtils.isAsciiPrintable(param)) {
            throw new PluginException(paramName + " parameter is not a valid value");
        }
        className = param;
    }
    paramName = PARAM_SQL;
    param = params.get(paramName);
    if (StringUtils.isNotBlank(param)) {
        log.info(paramName + "=" + param);
        if (!StringUtils.isAsciiPrintable(param)) {
            throw new PluginException(paramName + " parameter is not a valid value");
        }
        // Bug fix: validate the incoming statement ("param"), not the "sql"
        // field, which still holds the previous/default value at this point
        // and therefore always passed the check.
        if (!param.toLowerCase().startsWith("select")) {
            throw new PluginException(paramName + " parameter needs to start with 'SELECT'.");
        }
        sql = param;
    }
    paramName = PARAM_HEADER;
    param = params.get(paramName);
    if (StringUtils.isNotBlank(param)) {
        log.info(paramName + "=" + param);
        if (!param.equalsIgnoreCase("true") && !param.equalsIgnoreCase("false")
                && !param.equals("0") && !param.equals("1")) {
            throw new PluginException(paramName + " parameter is not a valid boolean");
        }
        // Note: Boolean.parseBoolean("1") is false; accepted "0"/"1" inputs
        // therefore both yield false, matching the original behavior.
        header = Boolean.parseBoolean(param);
    }
}
/**
 * Appends a database-specific row-limit clause to the statement when it
 * does not already contain one, capping output at {@code maxResults}.
 *
 * @param sqlType    database dialect to generate the limit for
 * @param sql        SELECT statement to rewrite
 * @param maxResults maximum number of rows to return
 * @return the (possibly rewritten) statement
 */
private String addLimits(SQLType sqlType, String sql, Integer maxResults) {
    String result = sql;
    if (StringUtils.isNotBlank(sql)) {
        result = sql.trim();
        // Strip a trailing ";" so a limit clause can be appended.
        // Bug fix: the original called substring(result.length()-1), which
        // kept only the LAST character of the statement.
        if (result.endsWith(";")) {
            result = result.substring(0, result.length() - 1);
        }
        switch (sqlType) {
            case MSSQL:
                if (!result.toLowerCase().contains(" top")) {
                    // Bug fix: rewrite the trimmed statement ("result"), not
                    // the raw input ("sql"), so the ";" stripping is kept.
                    result = result.replace("select", "select top " + maxResults);
                    result += ";";
                }
                break;
            case MYSQL:
                if (!result.toLowerCase().contains(" limit ")) {
                    result = result + " limit " + maxResults + ";";
                }
                break;
            case ORACLE:
                if (!result.toLowerCase().contains("rownum")) {
                    result = "select * from ( " + result + " ) where ROWNUM <= " + maxResults + ";";
                }
                break;
            case POSTGRESQL:
                if (!result.toLowerCase().contains(" limit ")) {
                    result = result + " limit " + maxResults + ";";
                }
                break;
            case DB2:
                if (!result.toLowerCase().contains(" fetch")) {
                    result = result + " FETCH FIRST " + maxResults + " ROWS ONLY;";
                }
                break;
            case SYBASE:
                if (!result.toLowerCase().contains(" top")) {
                    result = result.replace("select", "select top " + maxResults);
                    result += ";";
                }
                break;
        }
    }
    return result;
}
/**
 * Builds the effective property key for a data source: the base key,
 * suffixed with "." plus the source name when a source is configured.
 *
 * @param currentKey base property key (e.g. "jdbc.url")
 * @param source     optional data-source name; may be blank
 * @return the key to look up in the wiki properties
 */
private String getPropKey(String currentKey, String source) {
    if (StringUtils.isBlank(source)) {
        return currentKey;
    }
    return currentKey + "." + source;
}
/**
 * Raises the log level to INFO when the standard plugin debug parameter
 * is set to "true" or "1"; otherwise leaves logging untouched.
 *
 * @param value raw value of the debug parameter; may be null or blank
 */
private void setLogForDebug(String value) {
    boolean debugRequested = StringUtils.isNotBlank(value)
            && ("true".equalsIgnoreCase(value) || "1".equals(value));
    if (debugRequested) {
        log.setLevel(Level.INFO);
    }
}
}
| |
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2015 Relaxed Complexity, LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*
*******************************************************************************/
package com.relaxedcomplexity.sounder;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Logger;
/**
* Maintains the state of the sound system and contains methods that allow
* the sound to be manipulated by starting and stopping it, as well as changing
* volume and pitch.
* <p>
* SoundPlayer runs on the main thread and facilitates communication between
* the mouse event handler and the sound system (MouseCntl and SineSynth).
*
* @author Jim Medlock
*
*/
public class SoundPlayer {

  private static final Logger logger = Logger.getLogger("com.relaxedcomplexity.sounder");

  // Single worker thread on which the audio loop runs (see toggleSound()).
  private static final ExecutorService executor = Executors.newSingleThreadExecutor();

  // Serializes compound updates of pitch, volume and soundPlaying.
  private final ReentrantLock lock = new ReentrantLock();

  // Bug fix: this was a static field assigned in the constructor, so a
  // second SoundPlayer instance would silently replace the synth shared by
  // all instances. It is private, so the change is invisible to callers.
  private SineSynth sineSynth = null;

  // volatile: read by the audio thread (toggleSound's loop) and the getters
  // without holding the lock; guarantees visibility of updates.
  private volatile double pitch = 0d;
  public static final double STARTINGPITCH = 400;
  public static final double ENDINGPITCH = 800;
  public static final double PITCHDELTA = 20;

  private volatile float volume = 0f;
  public static final float STARTINGVOLUME = 0.5f;
  public static final float ENDINGVOLUME = 1.0f;
  public static final float VOLUMEDELTA = 0.1f;

  private volatile boolean soundPlaying = false;

  /**
   * Creates a player with the starting pitch and volume and its own
   * synthesizer instance.
   */
  public SoundPlayer() {
    pitch = STARTINGPITCH;
    volume = STARTINGVOLUME;
    sineSynth = new SineSynth();
  }

  // -------------------------------------------------------------------------
  // Sound Manipulation Methods
  // -------------------------------------------------------------------------

  /**
   * Modify volume/pitch of the sound: LEFT/RIGHT adjust pitch down/up,
   * UP/DOWN adjust volume up/down. Unknown directions are logged.
   *
   * @param direction Direction enum value
   */
  public void modifySound(MouseCntl.Direction direction) {
    logger.entering(SoundPlayer.class.getSimpleName(), "modifySound");
    switch (direction) {
      case LEFT:
        decrPitch();
        break;
      case RIGHT:
        incrPitch();
        break;
      case UP:
        incrVolume();
        break;
      case DOWN:
        decrVolume();
        break;
      default:
        logger.severe("Invalid direction passed to modifySound. direction=" + direction);
    }
    logger.exiting(SoundPlayer.class.getSimpleName(), null);
  }

  // -------------------------------------------------------------------------
  // Pitch Methods
  // -------------------------------------------------------------------------

  /**
   * Decrement the pitch by PITCHDELTA.
   * NOTE(review): no lower bound is applied here, although setPitch
   * rejects values <= 0 — confirm whether a floor is intended.
   */
  public void decrPitch() {
    lock.lock();
    try {
      pitch -= PITCHDELTA;
    } finally {
      lock.unlock();
    }
  }

  /**
   * Get current pitch value.
   */
  public double getPitch() {
    return pitch;
  }

  /**
   * Increment the pitch by PITCHDELTA.
   */
  public void incrPitch() {
    lock.lock();
    try {
      pitch += PITCHDELTA;
    } finally {
      lock.unlock();
    }
  }

  /**
   * Set current pitch value.
   *
   * @param newPitch New pitch value; must be positive
   * @throws IllegalArgumentException if newPitch is not positive
   */
  public void setPitch(double newPitch) {
    // Test preconditions
    if (newPitch <= 0) {
      throw new IllegalArgumentException("Invalid newPitch of " + newPitch + " passed.");
    }
    // Set the pitch
    lock.lock();
    try {
      pitch = newPitch;
    } finally {
      lock.unlock();
    }
  }

  // -------------------------------------------------------------------------
  // Volume Methods (gain)
  // -------------------------------------------------------------------------

  /**
   * Decrement the volume level by VOLUMEDELTA and push the new level to
   * the synthesizer. The level is floored at STARTINGVOLUME (not 0) —
   * preserved from the original implementation.
   */
  public void decrVolume() {
    lock.lock();
    try {
      volume -= VOLUMEDELTA;
      if (volume < STARTINGVOLUME) {
        volume = STARTINGVOLUME;
      }
    } finally {
      lock.unlock();
    }
    sineSynth.adjustVolume(getVolume());
  }

  /**
   * Get current volume level.
   */
  public float getVolume() {
    return volume;
  }

  /**
   * Increment the volume level by VOLUMEDELTA, capped at ENDINGVOLUME,
   * and push the new level to the synthesizer.
   */
  public void incrVolume() {
    lock.lock();
    try {
      volume += VOLUMEDELTA;
      // Consistency fix: use the ENDINGVOLUME constant (same value as the
      // previous hard-coded 1.0f literal).
      if (volume > ENDINGVOLUME) {
        volume = ENDINGVOLUME;
      }
    } finally {
      lock.unlock();
    }
    sineSynth.adjustVolume(getVolume());
  }

  /**
   * Set current volume level and push it to the synthesizer.
   *
   * @param newVolume New volume level in the range 0.0-ENDINGVOLUME
   * @throws IllegalArgumentException if newVolume is out of range
   */
  public void setVolume(float newVolume) {
    // Test preconditions
    if (newVolume < 0.0f || newVolume > ENDINGVOLUME) {
      throw new IllegalArgumentException("newVolume not in range 0.0-1.0");
    }
    // Set the volume level
    lock.lock();
    try {
      volume = newVolume;
    } finally {
      lock.unlock();
    }
    sineSynth.adjustVolume(getVolume());
  }

  // -------------------------------------------------------------------------
  // Sound Playing State Methods
  // -------------------------------------------------------------------------

  /**
   * Get sound state (i.e. currently playing or not playing).
   */
  public boolean isSoundPlaying() {
    return soundPlaying;
  }

  /**
   * Set the sound indicator on/off.
   *
   * @param soundIndicator boolean indicating if sound is on or off
   */
  public void setSoundPlaying(boolean soundIndicator) {
    lock.lock();
    try {
      soundPlaying = soundIndicator;
    } finally {
      lock.unlock();
    }
  }

  /**
   * Toggle sound on/off.
   *
   * Since playing sound is an operation that's concurrent with event
   * processing, the audio loop runs on the shared single-thread executor
   * and exits as soon as soundPlaying is cleared.
   */
  public void toggleSound() {
    if (isSoundPlaying()) {
      setSoundPlaying(false);
    } else {
      setSoundPlaying(true);
      executor.submit(() -> {
        while (isSoundPlaying()) {
          sineSynth.playAudio(this);
        }
      });
    }
  }
}
| |
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.portlet.tck.portlets;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Map;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.ActionURL;
import javax.portlet.HeaderPortlet;
import javax.portlet.HeaderRequest;
import javax.portlet.HeaderResponse;
import javax.portlet.Portlet;
import javax.portlet.PortletConfig;
import javax.portlet.PortletException;
import javax.portlet.PortletURL;
import javax.portlet.RenderParameters;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.ResourceRequest;
import javax.portlet.ResourceResponse;
import javax.portlet.ResourceServingPortlet;
import javax.portlet.ResourceURL;
import javax.portlet.annotations.PortletApplication;
import javax.portlet.annotations.PortletConfiguration;
import javax.portlet.annotations.PortletQName;
import javax.portlet.annotations.PublicRenderParameterDefinition;
import javax.portlet.annotations.Supports;
import javax.portlet.tck.beans.TestButton;
import javax.portlet.tck.beans.TestResult;
import javax.portlet.tck.beans.TestSetupLink;
import javax.portlet.tck.util.ModuleTestCaseDetails;
import javax.servlet.http.Cookie;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS10;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS15;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE9;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE10;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS13;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS2;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS6;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS15;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS16;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS13A;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES1;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES2;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES3;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES4;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE11;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_CHARACTERENCODING4;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_CONTENTTYPE5;
import static javax.portlet.tck.util.ModuleTestCaseDetails.V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE8;
import static javax.portlet.tck.constants.Constants.RESULT_ATTR_PREFIX;
import static javax.portlet.ResourceURL.PAGE;
import static javax.portlet.tck.constants.Constants.THREADID_ATTR;
/**
* This portlet implements several test cases for the JSR 362 TCK. The test case
* names are defined in the /src/main/resources/xml-resources/additionalTCs.xml
* file. The build process will integrate the test case names defined in the
* additionalTCs.xml file into the complete list of test case names for
* execution by the driver.
*
*/
@PortletApplication(publicParams = {
@PublicRenderParameterDefinition(identifier = "tckPRP3", qname = @PortletQName(localPart = "tckPRP3", namespaceURI = "")),
@PublicRenderParameterDefinition(identifier = "tr1_ready", qname = @PortletQName(localPart = "tr1_ready", namespaceURI = "")) })
@PortletConfiguration(portletName = "HeaderPortletTests_SPEC15_Header", publicParams = {
"tckPRP3",
"tr1_ready" }, supports = { @Supports(mimeType = "text/html") })
public class HeaderPortletTests_SPEC15_Header
implements Portlet, HeaderPortlet, ResourceServingPortlet {
@Override
public void init(PortletConfig config) throws PortletException {
// Intentionally empty: this test portlet needs no initialization.
}
@Override
public void destroy() {
// Intentionally empty: no resources are held that need releasing.
}
/**
 * Handles action requests for the parameter- and cookie-related test
 * cases, setting follow-up render parameters that the render/header
 * phases later inspect.
 */
@Override
public void processAction(ActionRequest actionRequest,
    ActionResponse actionResponse) throws PortletException, IOException {
  String action = actionRequest.getParameter("inputval");
  if (action == null) {
    return;
  }
  boolean tr0Requested =
      V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS10.equals(action)
          && "true".equals(actionRequest.getParameter("actionURLTr0"));
  boolean tr3Requested =
      V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS15.equals(action)
          && "true".equals(actionRequest.getParameter("tr3a"));
  if (tr0Requested) {
    /* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters10 */
    /*
     * Details: "The portlet-container must not propagate parameters
     * received in an action or event request to subsequent render
     * requests of the portlet"
     */
    actionResponse.setRenderParameter("tr0", "true");
  } else if (tr3Requested) {
    /* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters15 */
    /*
     * Details: "Render parameters get automatically cleared if the
     * portlet receives a processAction or processEvent call"
     */
    actionResponse.setRenderParameter("tr3b", "true");
  } else if (V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE9.equals(action)) {
    /* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie9 */
    /*
     * Details: "Cookies set during the Header phase should be available
     * to the portlet during a subsequent Action phase"
     */
    for (Cookie cookie : actionRequest.getCookies()) {
      boolean isHeaderMarker = "header_tr1_cookie".equals(cookie.getName())
          && "true".equals(cookie.getValue());
      if (isHeaderMarker) {
        // Expire the marker cookie and flag success for the render phase.
        cookie.setMaxAge(0);
        cookie.setValue("");
        actionResponse.setRenderParameter("trCookie1", "true");
      }
    }
  }
}
/**
 * Render phase: emits a placeholder div plus an XHR snippet that fetches
 * the resource phase output (cookie8 check), then evaluates the cookie10
 * test against the request cookies and echoes any result attribute left
 * by an earlier phase.
 */
@Override
public void render(RenderRequest renderRequest, RenderResponse renderResponse)
throws PortletException, IOException {
ModuleTestCaseDetails tcd = new ModuleTestCaseDetails();
PrintWriter writer = renderResponse.getWriter();
/* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie8 */
/*
* Details: "Cookies set during the Header phase should be available to
* the portlet during the Resource phase"
*/
// Placeholder div; its content is replaced by the XHR response below.
writer.write(
"<div id=\"V3HeaderPortletTests_SPEC15_Header\">no resource output.</div>\n");
ResourceURL resurl = renderResponse.createResourceURL();
// PAGE cacheability so the resource URL carries the full page state.
resurl.setCacheability(PAGE);
// Inline script: asynchronously fetch serveResource output into the div.
writer.write("<script>\n");
writer.write("(function () {\n");
writer.write("   var xhr = new XMLHttpRequest();\n");
writer.write("   xhr.onreadystatechange=function() {\n");
writer.write("      if (xhr.readyState==4 && xhr.status==200) {\n");
writer.write(
"         document.getElementById(\"V3HeaderPortletTests_SPEC15_Header\").innerHTML=xhr.responseText;\n");
writer.write("      }\n");
writer.write("   };\n");
writer.write(
"   xhr.open(\"GET\",\"" + resurl.toString() + "\",true);\n");
writer.write("   xhr.send();\n");
writer.write("})();\n");
writer.write("</script>\n");
/* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie10 */
/*
* Details: "Cookies set during the Header phase should be available to
* the portlet during a subsequent Render phase"
*/
Cookie[] cookies = renderRequest.getCookies();
StringBuilder txt = new StringBuilder(128);
txt.append("<p>Debug info:");
txt.append("<br>");
txt.append("# Cookies: ").append(cookies.length).append("<br>");
// cookie10 starts failed and is flipped to success if the marker cookie
// set in the Header phase is present.
TestResult tr2 = tcd
.getTestResultFailed(V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE10);
for (Cookie c : cookies) {
txt.append("Name: ").append(c.getName());
txt.append(", Value: ").append(c.getValue()).append("<br>");
if (c.getName().equals("header_tr2_cookie")
&& c.getValue().equals("true")) {
txt.append("<br>").append("Found my cookie!").append("<br>");
// Expire the marker cookie so the test is single-shot.
c.setMaxAge(0);
c.setValue("");
tr2.setTcSuccess(true);
}
}
tr2.writeTo(writer);
txt.append("</p>");
writer.append(txt.toString());
// Echo (and clear) any result message left by an earlier phase.
String msg = (String) renderRequest.getAttribute(
RESULT_ATTR_PREFIX + "HeaderPortletTests_SPEC15_Header");
writer.write("<p>" + msg + "</p>");
renderRequest.removeAttribute(
RESULT_ATTR_PREFIX + "HeaderPortletTests_SPEC15_Header");
}
@Override
public void renderHeaders(HeaderRequest headerRequest,
HeaderResponse headerResponse) throws PortletException, IOException {
ModuleTestCaseDetails tcd = new ModuleTestCaseDetails();
StringWriter writer = new StringWriter();
RenderParameters renderParams = headerRequest.getRenderParameters();
String action = headerRequest.getParameter("inputval");
Boolean successTr2 = false, successTr5 = false, successTr6 = false;
Boolean successTr7 = false, successTr8 = false, successTr13 = false;
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters10 */
/*
* Details: "The portlet-container must not propagate parameters received
* in an action or event request to subsequent header requests of the
* portlet"
*/
if (renderParams.getValue("actionURLTr0") == null
&& renderParams.getValue("tr0") != null
&& "true".equals(renderParams.getValue("tr0"))) {
TestResult tr0 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS10);
tr0.setTcSuccess(true);
tr0.writeTo(writer);
} else {
ActionURL aurl = headerResponse.createActionURL();
aurl.setParameter("actionURLTr0", "true");
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS10, aurl);
tb.writeTo(writer);
}
if (action != null) {
if (action.equals(V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS13)) {
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters13 */
/*
* Details: "If a portlet receives a render request that is the
* result of invoking a render URL targeting this portlet the render
* parameters received with the render request must be the
* parameters set on the render URL"
*/
TestResult tr2 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS13);
if (headerRequest.getParameter("renderURLTr2") != null
&& headerRequest.getParameter("tr2") != null
&& (headerRequest.getParameter("renderURLTr2").contains("tr2:" + headerRequest.getParameter("tr2")) ||
headerRequest.getParameter("renderURLTr2").contains("tr2=" + headerRequest.getParameter("tr2")))) {
tr2.setTcSuccess(true);
successTr2 = true;
} else {
tr2.appendTcDetail(
"Parameter renderURLTr2 is missing or does not contain tr2 parameter value.");
}
tr2.writeTo(writer);
} else if (action
.equals(V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS2)) {
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters2 */
/* Details: "The parameters the request object returns must be */
/* \"x-www-form-urlencoded\" decoded" */
TestResult tr5 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS2);
if (headerRequest.getParameter("tr5") != null
&& headerRequest.getParameter("tr5").equals("true&<>'")) {
tr5.setTcSuccess(true);
successTr5 = true;
}
tr5.writeTo(writer);
} else if (action
.equals(V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS6)) {
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters6 */
/*
* Details: "The getParameterMap method must return an unmodifiable
* Map object"
*/
TestResult tr6 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS6);
if (headerRequest.getParameterMap().containsKey("inputval")
&& V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS6.equals(
headerRequest.getParameterMap().get("inputval")[0])) {
String tr6TestStringArray[] = { "Modified Value" };
try {
headerRequest.getParameterMap().put(
"inputval", tr6TestStringArray);
if (V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS6
.equals(
headerRequest.getParameterMap().get("inputval")[0])) {
tr6.setTcSuccess(true);
successTr6 = true;
}
}
catch (UnsupportedOperationException e) {
tr6.setTcSuccess(true);
successTr6 = true;
}
}
tr6.writeTo(writer);
} else if (action.equals(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS15)) {
/*
* TestCase:
* V3HeaderPortletTests_SPEC15_Header_publicRenderParameters15
*/
/*
* Details: "A map of private parameters can be obtained through the
* getPrivateParameterMap method"
*/
TestResult tr7 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS15);
Map<String, String[]> privateParamMap = headerRequest
.getPrivateParameterMap();
if (privateParamMap != null && privateParamMap.containsKey("tr7")
&& privateParamMap.get("tr7")[0].equals("true")) {
tr7.setTcSuccess(true);
successTr7 = true;
}
tr7.writeTo(writer);
} else if (action.equals(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS16)) {
/*
* TestCase:
* V3HeaderPortletTests_SPEC15_Header_publicRenderParameters16
*/
/*
* Details: "A map of public parameters can be obtained through the
* getPublicParameterMap method"
*/
TestResult tr8 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS16);
if (headerRequest.getPublicParameterMap() != null && headerRequest
.getPublicParameterMap().containsKey("tckPRP3")) {
tr8.setTcSuccess(true);
successTr8 = true;
} else {
tr8.appendTcDetail("No public render parameter found.");
}
tr8.writeTo(writer);
} else if (action.equals(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS13A)) {
/*
* TestCase:
* V3HeaderPortletTests_SPEC15_Header_publicRenderParameters13a
*/
/* Details: "A public render parameter can be deleted using the */
/* removePublicRenderParameter method on the PortletURL" */
TestResult tr13 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS13A);
if (headerRequest.getPublicParameterMap() != null && !headerRequest
.getPublicParameterMap().containsKey("tckPRP3")) {
tr13.setTcSuccess(true);
successTr13 = true;
} else {
tr13.appendTcDetail("Render parameter tckPRP3 is not removed.");
}
tr13.writeTo(writer);
}
}
if (!successTr2) {
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters13 */
/*
* Details: "If a portlet receives a render request that is the result
* of invoking a render URL targeting this portlet the render
* parameters received with the render request must be the parameters
* set on the render URL"
*/
PortletURL rurl = headerResponse.createRenderURL();
rurl.setParameters(headerRequest.getPrivateParameterMap());
rurl.setParameter("tr2", "true");
rurl.setParameter("renderURLTr2", rurl.toString());
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS13, rurl);
tb.writeTo(writer);
}
if (!successTr5) {
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters2 */
/* Details: "The parameters the request object returns must be */
/* \"x-www-form-urlencoded\" decoded" */
PortletURL purl = headerResponse.createRenderURL();
purl.setParameter("tr5", "true&<>'");
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS2, purl);
tb.writeTo(writer);
}
if (!successTr6) {
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters6 */
/*
* Details: "The getParameterMap method must return an unmodifiable Map
* object"
*/
PortletURL purl = headerResponse.createRenderURL();
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS6, purl);
tb.writeTo(writer);
}
if (!successTr7) {
/*
* TestCase:
* V3HeaderPortletTests_SPEC15_Header_publicRenderParameters15
*/
/* Details: "A map of private parameters can be obtained through the */
/* getPrivateParameterMap method" */
PortletURL purl = headerResponse.createRenderURL();
purl.setParameter("tr7", "true");
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS15,
purl);
tb.writeTo(writer);
}
if (!successTr8) {
/*
* TestCase:
* V3HeaderPortletTests_SPEC15_Header_publicRenderParameters16
*/
/* Details: "A map of public parameters can be obtained through the */
/* getPublicParameterMap method" */
if (headerRequest.getParameter("tckPRP3") == null) {
PortletURL purl = headerResponse.createRenderURL();
purl.setParameter("tckPRP3", "true");
TestSetupLink tl = new TestSetupLink(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS16,
purl);
tl.writeTo(writer);
} else {
PortletURL aurl = headerResponse.createRenderURL();
aurl.setParameters(headerRequest.getPrivateParameterMap());
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS16,
aurl);
tb.writeTo(writer);
}
}
if (!successTr13) {
/*
* TestCase:
* V3HeaderPortletTests_SPEC15_Header_publicRenderParameters13a
*/
/* Details: "A public render parameter can be deleted using the */
/* removePublicRenderParameter method on the PortletURL" */
if (headerRequest.getParameter("tckPRP3") == null) {
PortletURL purl = headerResponse.createRenderURL();
purl.setParameter("tckPRP3", "true");
TestSetupLink tl = new TestSetupLink(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS13A,
purl);
tl.writeTo(writer);
} else {
PortletURL purl = headerResponse.createRenderURL();
purl.setParameters(headerRequest.getPrivateParameterMap());
purl.removePublicRenderParameter("tckPRP3");
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PUBLICRENDERPARAMETERS13A,
purl);
tb.writeTo(writer);
}
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_parameters15 */
/* Details: "Render parameters get automatically cleared if the portlet */
/* receives a processAction or processEvent call" */
if (headerRequest.getParameter("tr3a") != null) {
PortletURL aurl = headerResponse.createActionURL();
aurl.setParameter("tr3a", "true");
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS15, aurl);
tb.writeTo(writer);
} else {
if (headerRequest.getParameter("tr3b") != null
&& headerRequest.getParameter("tr3b").equals("true")) {
TestResult tr3 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS15);
tr3.setTcSuccess(true);
tr3.writeTo(writer);
} else {
PortletURL purl = headerResponse.createRenderURL();
purl.setParameter("tr3a", "true");
TestSetupLink tl = new TestSetupLink(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PARAMETERS15, purl);
tl.writeTo(writer);
}
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_properties1 */
/*
* Details: "The portlet can use the getProperty method to access single
* portal property and optionally-available HTTP header values"
*/
{
TestResult result = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES1);
if (headerRequest.getProperty("Accept") != null) {
result.setTcSuccess(true);
} else {
result.appendTcDetail(
"Failed because Accept header is not found in request headers.");
}
result.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_properties2 */
/*
* Details: "The portlet can use the getProperties method to access
* multiple portal property and optionally-available HTTP header values by
* the same property name"
*/
{
TestResult result = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES2);
if (headerRequest.getProperties("Accept").hasMoreElements()) {
result.setTcSuccess(true);
} else {
result.appendTcDetail(
"Failed because Accept header is not found in request headers.");
}
result.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_properties3 */
/*
* Details: "The portlet can use the getPropertyNames method to obtain an
* Enumeration of all available property names"
*/
{
TestResult result = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES3);
if (headerRequest.getPropertyNames().hasMoreElements()) {
result.setTcSuccess(true);
} else {
result.appendTcDetail(
"Failed because no header is not found in request headers.");
}
result.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_properties4 */
/*
* Details: "The portlet can access cookies provided by the current
* request using the getCookies method"
*/
{
TestResult result = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_PROPERTIES4);
if (headerRequest.getCookies().length > 0) {
result.setTcSuccess(true);
} else {
result.appendTcDetail(
"Failed because no cookies are found in HeaderRequest object");
}
result.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie8 */
/*
* Details: "Cookies set during the Header phase should be available to
* the portlet during the Resource phase"
*/
{
Cookie c = new Cookie("header_tr0_cookie", "true");
c.setMaxAge(100);
c.setPath("/");
headerResponse.addProperty(c);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie9 */
/*
* Details: "Cookies set during the Header phase should be available to
* the portlet during a subsequent Action phase"
*/
if (headerRequest.getParameter("trCookie1") != null
&& headerRequest.getParameter("trCookie1").equals("true")) {
TestResult tr1 = tcd
.getTestResultFailed(V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE9);
tr1.setTcSuccess(true);
tr1.writeTo(writer);
} else {
Cookie c = new Cookie("header_tr1_cookie", "true");
c.setMaxAge(100);
c.setPath("/");
headerResponse.addProperty(c);
PortletURL aurl = headerResponse.createActionURL();
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE9, aurl);
tb.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie10 */
/*
* Details: "Cookies set during the Header phase should be available to
* the portlet during a subsequent Render phase"
*/
{
Cookie c = new Cookie("header_tr2_cookie", "true");
c.setMaxAge(100);
c.setPath("/");
headerResponse.addProperty(c);
PortletURL rurl = headerResponse.createRenderURL();
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE10, rurl);
tb.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie11 */
/*
* Details: "Cookies set during the Header phase should be available to
* the portlet during a subsequent request triggered by a URL"
*/
if (headerRequest.getParameter("tr3") != null
&& headerRequest.getParameter("tr3").equals("true")) {
Cookie[] cookies = headerRequest.getCookies();
StringBuilder txt = new StringBuilder(128);
txt.append("<p>Debug info:");
txt.append("<br>");
txt.append("# Cookies: ").append(cookies.length).append("<br>");
TestResult tr2 = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE11);
for (Cookie c : cookies) {
txt.append("Name: ").append(c.getName());
txt.append(", Value: ").append(c.getValue()).append("<br>");
if (c.getName().equals("header_tr3_cookie")
&& c.getValue().equals("true")) {
txt.append("<br>").append("Found my cookie!").append("<br>");
c.setMaxAge(0);
c.setValue("");
tr2.setTcSuccess(true);
}
}
tr2.writeTo(writer);
txt.append("</p>");
writer.append(txt.toString());
} else {
Cookie c = new Cookie("header_tr3_cookie", "true");
c.setMaxAge(100);
c.setPath("/");
headerResponse.addProperty(c);
PortletURL rurl = headerResponse.createRenderURL();
rurl.setParameter("tr3", "true");
TestButton tb = new TestButton(
V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE11, rurl);
tb.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_contentType5 */
/*
* Details: "If the setContentType method is not called before the
* getWriter or getPortletOutputStream method is used, the portlet
* container uses the content type returned by getResponseContentType"
*/
{
TestResult result = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_CONTENTTYPE5);
if (headerRequest.getResponseContentType() != null) {
result.setTcSuccess(true);
result.appendTcDetail(
"Content type is - " + headerRequest.getResponseContentType());
} else {
result.appendTcDetail(
"Failed because getResponseContentType() method returned null");
}
result.writeTo(writer);
}
/* TestCase: V3HeaderPortletTests_SPEC15_Header_characterEncoding4 */
/*
* Details: "If the portlet does not set the character encoding, the
* portlet container uses UTF-8 as the default character encoding"
*/
{
TestResult result = tcd.getTestResultFailed(
V3HEADERPORTLETTESTS_SPEC15_HEADER_CHARACTERENCODING4);
if (headerResponse.getCharacterEncoding().equals("UTF-8")) {
result.setTcSuccess(true);
} else {
result.appendTcDetail(
"Failed because default character encoding is not UTF-8 but "
+ headerResponse.getCharacterEncoding());
}
result.writeTo(writer);
}
headerRequest.setAttribute(
RESULT_ATTR_PREFIX + "HeaderPortletTests_SPEC15_Header",
writer.toString());
}
@Override
public void serveResource(ResourceRequest resourceRequest,
    ResourceResponse resourceResponse) throws PortletException, IOException {
  ModuleTestCaseDetails tcd = new ModuleTestCaseDetails();
  long tid = Thread.currentThread().getId();
  resourceRequest.setAttribute(THREADID_ATTR, tid);
  PrintWriter writer = resourceResponse.getWriter();
  /* TestCase: V3HeaderPortletTests_SPEC15_Header_cookie8 */
  /*
   * Details: "Cookies set during the Header phase should be available to
   * the portlet during the Resource phase"
   */
  // Per the portlet spec, getCookies() returns null rather than an empty
  // array when the request carries no cookies; guard so the debug output
  // below does not NPE on cookies.length.
  Cookie[] cookies = resourceRequest.getCookies();
  if (cookies == null) {
    cookies = new Cookie[0];
  }
  StringBuilder txt = new StringBuilder(128);
  txt.append("<p>Debug info:");
  txt.append("<br>");
  txt.append("# Cookies: ").append(cookies.length).append("<br>");
  TestResult tr1 = tcd
      .getTestResultFailed(V3HEADERPORTLETTESTS_SPEC15_HEADER_COOKIE8);
  for (Cookie c : cookies) {
    txt.append("Name: ").append(c.getName());
    txt.append(", Value: ").append(c.getValue()).append("<br>");
    if (c.getName().equals("header_tr0_cookie")
        && c.getValue().equals("true")) {
      txt.append("<br>").append("Found my cookie!").append("<br>");
      // Expire and blank the marker cookie so later runs start clean.
      c.setMaxAge(0);
      c.setValue("");
      tr1.setTcSuccess(true);
    }
  }
  tr1.writeTo(writer);
  txt.append("</p>");
  writer.append(txt.toString());
}
}
| |
/*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.gwt.client;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
import java.util.AbstractList;
import java.util.List;
/**
* GWT/J2CL utilities to abstract out JS interop.
*/
public class Util {
/** Wraps native RegExp. */
/**
 * Wraps native RegExp.
 *
 * <p>Native JsInterop type: method bodies are supplied by the browser's
 * built-in {@code RegExp}; the Java declarations only describe the shape.
 */
@JsType(isNative = true, name = "RegExp", namespace = JsPackage.GLOBAL)
public static class JsRegExp {
public JsRegExp(String regex, String flags) {}
// Index at which the next exec/test starts (advances with the 'g' flag).
@JsProperty
public native int getLastIndex();
// True when the regex was created with the 'i' flag.
@JsProperty
public native boolean getIgnoreCase();
// True when the regex was created with the 'g' flag.
@JsProperty
public native boolean getGlobal();
// True when the regex was created with the 'm' flag.
@JsProperty
public native boolean getMultiline();
// The source pattern text, without slashes or flags.
@JsProperty
public native String getSource();
public native boolean test(String str);
// Returns the match (group 0 plus capture groups) or null on no match.
public native Match exec(String str);
/** Return type for RegExp.prototype.exec. */
@JsType(isNative = true)
public static class Match extends JsArray<String> {
protected Match() {}
// Zero-based offset of the match within the input string.
@JsProperty
public native int getIndex();
// The original string that was matched against.
@JsProperty
public native String getInput();
}
}
/**
 * Wraps native Object.
 *
 * <p>The overlay methods delegate to the hand-written JS {@code util}
 * helpers below, since native members cannot have Java bodies.
 */
@JsType(isNative = true, name = "Object", namespace = JsPackage.GLOBAL)
public static class JsObject<T> {
public JsObject() {}
// Reads this[key].
@JsOverlay
public final T get(String key) {
return objectGet(this, key);
}
// Writes this[key] = value; returns this to allow chaining.
@JsOverlay
public final JsObject<T> set(String key, T value) {
objectSet(this, key, value);
return this;
}
}
/** Wraps native Array. */
@JsType(isNative = true, name = "Array", namespace = JsPackage.GLOBAL)
public static class JsArray<T> extends JsObject<T> {
  public JsArray() {}

  /** Native {@code Array.prototype.length}. */
  @JsProperty
  public native int getLength();

  /** Assigning the length truncates or extends the native array. */
  @JsProperty
  public native void setLength(int length);

  /** Appends {@code obj} (native {@code push}). */
  public native void push(T obj);

  public native JsArray<T> slice();

  public native JsArray<T> slice(int start);

  public native JsArray<T> slice(int start, int end);

  /**
   * Native {@code Array.prototype.splice}.
   *
   * <p>The second argument is the <em>number of elements to remove</em>
   * starting at {@code start}, not an exclusive end index; renamed from the
   * misleading {@code end} to match the JS semantics (see the
   * {@code splice(index, 1)} call in {@link JsArrayList#remove}).
   */
  @SuppressWarnings("unusable-by-js")
  public native JsArray<T> splice(int start, int deleteCount, T... elems);

  /** Returns the element at {@code i} via the JS {@code util.arrayGet} helper. */
  @JsOverlay
  public final T get(int i) {
    return arrayGet(this, i);
  }

  /** Stores {@code value} at {@code i} via the JS {@code util.arraySet} helper. */
  @JsOverlay
  public final void set(int i, T value) {
    arraySet(this, i, value);
  }

  /** Returns a live {@link List} view backed by this array. */
  @JsOverlay
  public final List<T> asList() {
    return new JsArrayList<T>(this);
  }

  /** Creates a new JsArray containing {@code elems}. */
  @JsOverlay
  public static <T> JsArray<T> of(T... elems) {
    return slice(elems);
  }

  /** Copies a Java array into a fresh JsArray via {@code Array.prototype.slice.call}. */
  @SuppressWarnings("unusable-by-js")
  @JsMethod(name = "call", namespace = "Array.prototype.slice")
  public static native <T> JsArray<T> slice(T[] elems);

  /** Copies {@code elems} into a fresh JsArray. */
  @JsOverlay
  public static <T> JsArray<T> copyOf(Iterable<? extends T> elems) {
    JsArray<T> arr = of();
    for (T elem : elems) {
      arr.push(elem);
    }
    return arr;
  }

  /** List implementation for {@link JsArray}. */
  private static class JsArrayList<T> extends AbstractList<T> {
    final JsArray<T> array;

    JsArrayList(JsArray<T> array) {
      this.array = array;
    }

    /** Throws if {@code index} is outside {@code [0, size())}. */
    void checkBounds(int index) {
      if (index < 0 || index >= array.getLength()) {
        // Include the offending index and current size for easier debugging.
        throw new IndexOutOfBoundsException(
            "Index: " + index + ", Size: " + array.getLength());
      }
    }

    @Override public T get(int index) {
      checkBounds(index);
      return array.get(index);
    }

    @Override public T set(int index, T elem) {
      checkBounds(index);
      T prev = array.get(index);
      array.set(index, elem);
      return prev;
    }

    @Override public T remove(int index) {
      checkBounds(index);
      T prev = array.get(index);
      array.splice(index, 1);
      return prev;
    }

    @Override public boolean add(T elem) {
      array.push(elem);
      return true;
    }

    @Override public int size() {
      return array.getLength();
    }
  }
}
/** Wraps native String, to provide static methods. */
@JsType(isNative = true, name = "String", namespace = JsPackage.GLOBAL)
public static class JsString {
public JsString() {}
// Native String.fromCharCode: builds a string from a UTF-16 code unit.
public static native String fromCharCode(int charCode);
}
// PRIVATE UTILITY METHODS
//
// These natives are expected to be implemented by a hand-written JS
// "util" module (not visible in this file); the @JsOverlay accessors on
// JsObject/JsArray delegate to them for raw property/index access.
@JsMethod(namespace = "util")
private static native <T> T arrayGet(JsArray<T> array, int i);
@JsMethod(namespace = "util")
private static native <T> void arraySet(JsArray<T> array, int i, T value);
@JsMethod(namespace = "util")
private static native <T> T objectGet(JsObject<T> array, String key);
@JsMethod(namespace = "util")
private static native <T> void objectSet(JsObject<T> array, String key, T value);
// Non-instantiable utility class.
private Util() {}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: DeleteCommand.proto
package com.alachisoft.tayzgrid.common.protobuf;
public final class DeleteCommandProtocol {
private DeleteCommandProtocol() {}
// DeleteCommand.proto declares no extensions, so there is nothing to
// register; the method exists to satisfy the generated-code contract.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public static final class DeleteCommand extends
com.google.protobuf.GeneratedMessage {
// Use DeleteCommand.newBuilder() to construct.
private DeleteCommand() {
initFields();
}
// No-op variant used by the static initializer to create defaultInstance
// without running initFields().
private DeleteCommand(boolean noInit) {}
// Shared immutable instance carrying every field's proto default.
private static final DeleteCommand defaultInstance;
public static DeleteCommand getDefaultInstance() {
return defaultInstance;
}
public DeleteCommand getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_fieldAccessorTable;
}
// Field storage follows the protobuf 2.x generated pattern: a hasX
// presence flag plus a value field initialized to the proto default.
// optional bytes key = 1;
public static final int KEY_FIELD_NUMBER = 1;
private boolean hasKey;
private com.google.protobuf.ByteString key_ = com.google.protobuf.ByteString.EMPTY;
public boolean hasKey() { return hasKey; }
public com.google.protobuf.ByteString getKey() { return key_; }
// optional int64 requestId = 2;
public static final int REQUESTID_FIELD_NUMBER = 2;
private boolean hasRequestId;
private long requestId_ = 0L;
public boolean hasRequestId() { return hasRequestId; }
public long getRequestId() { return requestId_; }
// optional int32 flag = 3;
public static final int FLAG_FIELD_NUMBER = 3;
private boolean hasFlag;
private int flag_ = 0;
public boolean hasFlag() { return hasFlag; }
public int getFlag() { return flag_; }
// optional bool isAsync = 4;
public static final int ISASYNC_FIELD_NUMBER = 4;
private boolean hasIsAsync;
private boolean isAsync_ = false;
public boolean hasIsAsync() { return hasIsAsync; }
public boolean getIsAsync() { return isAsync_; }
// optional sint32 datasourceItemRemovedCallbackId = 5;
public static final int DATASOURCEITEMREMOVEDCALLBACKID_FIELD_NUMBER = 5;
private boolean hasDatasourceItemRemovedCallbackId;
private int datasourceItemRemovedCallbackId_ = 0;
public boolean hasDatasourceItemRemovedCallbackId() { return hasDatasourceItemRemovedCallbackId; }
public int getDatasourceItemRemovedCallbackId() { return datasourceItemRemovedCallbackId_; }
// optional string lockId = 6;
public static final int LOCKID_FIELD_NUMBER = 6;
private boolean hasLockId;
private java.lang.String lockId_ = "";
public boolean hasLockId() { return hasLockId; }
public java.lang.String getLockId() { return lockId_; }
// optional int32 lockAccessType = 7;
public static final int LOCKACCESSTYPE_FIELD_NUMBER = 7;
private boolean hasLockAccessType;
private int lockAccessType_ = 0;
public boolean hasLockAccessType() { return hasLockAccessType; }
public int getLockAccessType() { return lockAccessType_; }
// optional uint64 version = 8;
public static final int VERSION_FIELD_NUMBER = 8;
private boolean hasVersion;
private long version_ = 0L;
public boolean hasVersion() { return hasVersion; }
public long getVersion() { return version_; }
// optional string providerName = 9;
public static final int PROVIDERNAME_FIELD_NUMBER = 9;
private boolean hasProviderName;
private java.lang.String providerName_ = "";
public boolean hasProviderName() { return hasProviderName; }
public java.lang.String getProviderName() { return providerName_; }
// optional bool compareOld = 10;
public static final int COMPAREOLD_FIELD_NUMBER = 10;
private boolean hasCompareOld;
private boolean compareOld_ = false;
public boolean hasCompareOld() { return hasCompareOld; }
public boolean getCompareOld() { return compareOld_; }
// optional int32 oldValueFlag = 11;
public static final int OLDVALUEFLAG_FIELD_NUMBER = 11;
private boolean hasOldValueFlag;
private int oldValueFlag_ = 0;
public boolean hasOldValueFlag() { return hasOldValueFlag; }
public int getOldValueFlag() { return oldValueFlag_; }
// repeated bytes oldValue = 12;
// Starts as an immutable empty list; the Builder promotes it to an
// ArrayList on first add and buildPartial() wraps it unmodifiable.
public static final int OLDVALUE_FIELD_NUMBER = 12;
private java.util.List<com.google.protobuf.ByteString> oldValue_ =
java.util.Collections.emptyList();
public java.util.List<com.google.protobuf.ByteString> getOldValueList() {
return oldValue_;
}
public int getOldValueCount() { return oldValue_.size(); }
public com.google.protobuf.ByteString getOldValue(int index) {
return oldValue_.get(index);
}
private void initFields() {
}
// Every field is optional, so any instance is structurally valid.
public final boolean isInitialized() {
return true;
}
// Serializes only the fields whose presence flag is set, in field-number
// order, followed by any preserved unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Called for its side effect of computing (and memoizing) the size;
// the return value is intentionally discarded here.
getSerializedSize();
if (hasKey()) {
output.writeBytes(1, getKey());
}
if (hasRequestId()) {
output.writeInt64(2, getRequestId());
}
if (hasFlag()) {
output.writeInt32(3, getFlag());
}
if (hasIsAsync()) {
output.writeBool(4, getIsAsync());
}
if (hasDatasourceItemRemovedCallbackId()) {
output.writeSInt32(5, getDatasourceItemRemovedCallbackId());
}
if (hasLockId()) {
output.writeString(6, getLockId());
}
if (hasLockAccessType()) {
output.writeInt32(7, getLockAccessType());
}
if (hasVersion()) {
output.writeUInt64(8, getVersion());
}
if (hasProviderName()) {
output.writeString(9, getProviderName());
}
if (hasCompareOld()) {
output.writeBool(10, getCompareOld());
}
if (hasOldValueFlag()) {
output.writeInt32(11, getOldValueFlag());
}
for (com.google.protobuf.ByteString element : getOldValueList()) {
output.writeBytes(12, element);
}
getUnknownFields().writeTo(output);
}
// -1 marks "not yet computed"; getSerializedSize() memoizes its result.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasKey()) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getKey());
}
if (hasRequestId()) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(2, getRequestId());
}
if (hasFlag()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(3, getFlag());
}
if (hasIsAsync()) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(4, getIsAsync());
}
if (hasDatasourceItemRemovedCallbackId()) {
size += com.google.protobuf.CodedOutputStream
.computeSInt32Size(5, getDatasourceItemRemovedCallbackId());
}
if (hasLockId()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(6, getLockId());
}
if (hasLockAccessType()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(7, getLockAccessType());
}
if (hasVersion()) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(8, getVersion());
}
if (hasProviderName()) {
size += com.google.protobuf.CodedOutputStream
.computeStringSize(9, getProviderName());
}
if (hasCompareOld()) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(10, getCompareOld());
}
if (hasOldValueFlag()) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(11, getOldValueFlag());
}
{
int dataSize = 0;
for (com.google.protobuf.ByteString element : getOldValueList()) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(element);
}
size += dataSize;
// One tag byte per repeated element (field number 12 fits in one byte).
size += 1 * getOldValueList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
// All parse variants funnel through Builder.mergeFrom(...).buildParsed();
// the parseDelimitedFrom variants return null on clean end-of-stream.
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
// The builder mutates this message in place; buildPartial() hands it out
// and nulls the reference, after which the builder must not be reused.
private com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand result;
// Construct using com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand();
return builder;
}
protected com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand internalGetResult() {
return result;
}
public Builder clear() {
// Guards against reuse after build()/buildPartial() consumed 'result'.
if (result == null) {
throw new IllegalStateException(
"Cannot call clear() after build().");
}
result = new com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand.getDescriptor();
}
public com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand getDefaultInstanceForType() {
return com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
public com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand build() {
if (result != null && !isInitialized()) {
throw newUninitializedMessageException(result);
}
return buildPartial();
}
// Like build(), but reports missing required fields as
// InvalidProtocolBufferException for use by the parseFrom variants.
private com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return buildPartial();
}
public com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand buildPartial() {
if (result == null) {
throw new IllegalStateException(
"build() has already been called on this Builder.");
}
// Freeze the repeated field before handing the message out.
if (result.oldValue_ != java.util.Collections.EMPTY_LIST) {
result.oldValue_ =
java.util.Collections.unmodifiableList(result.oldValue_);
}
com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand returnMe = result;
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand) {
return mergeFrom((com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Standard protobuf merge semantics: singular fields set on 'other'
// overwrite this builder's values; repeated oldValue entries are appended.
public Builder mergeFrom(com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand other) {
if (other == com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand.getDefaultInstance()) return this;
if (other.hasKey()) {
setKey(other.getKey());
}
if (other.hasRequestId()) {
setRequestId(other.getRequestId());
}
if (other.hasFlag()) {
setFlag(other.getFlag());
}
if (other.hasIsAsync()) {
setIsAsync(other.getIsAsync());
}
if (other.hasDatasourceItemRemovedCallbackId()) {
setDatasourceItemRemovedCallbackId(other.getDatasourceItemRemovedCallbackId());
}
if (other.hasLockId()) {
setLockId(other.getLockId());
}
if (other.hasLockAccessType()) {
setLockAccessType(other.getLockAccessType());
}
if (other.hasVersion()) {
setVersion(other.getVersion());
}
if (other.hasProviderName()) {
setProviderName(other.getProviderName());
}
if (other.hasCompareOld()) {
setCompareOld(other.getCompareOld());
}
if (other.hasOldValueFlag()) {
setOldValueFlag(other.getOldValueFlag());
}
if (!other.oldValue_.isEmpty()) {
// Promote the shared immutable empty list to a mutable one on demand.
if (result.oldValue_.isEmpty()) {
result.oldValue_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
}
result.oldValue_.addAll(other.oldValue_);
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// Wire-format parse loop. Tag 0 marks end of input. Note the 'default'
// arm appears before the numbered cases — legal Java, since each arm ends
// in break/return; it routes unrecognized tags into unknownFields.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
// parseUnknownField returns false on an end-group tag.
this.setUnknownFields(unknownFields.build());
return this;
}
break;
}
case 10: {
setKey(input.readBytes());
break;
}
case 16: {
setRequestId(input.readInt64());
break;
}
case 24: {
setFlag(input.readInt32());
break;
}
case 32: {
setIsAsync(input.readBool());
break;
}
case 40: {
setDatasourceItemRemovedCallbackId(input.readSInt32());
break;
}
case 50: {
setLockId(input.readString());
break;
}
case 56: {
setLockAccessType(input.readInt32());
break;
}
case 64: {
setVersion(input.readUInt64());
break;
}
case 74: {
setProviderName(input.readString());
break;
}
case 80: {
setCompareOld(input.readBool());
break;
}
case 88: {
setOldValueFlag(input.readInt32());
break;
}
case 98: {
addOldValue(input.readBytes());
break;
}
}
}
}
// Per-field accessors delegate to the in-progress 'result' message;
// setters flip the presence flag, clearX resets flag and value to default.
// optional bytes key = 1;
public boolean hasKey() {
return result.hasKey();
}
public com.google.protobuf.ByteString getKey() {
return result.getKey();
}
public Builder setKey(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
result.hasKey = true;
result.key_ = value;
return this;
}
public Builder clearKey() {
result.hasKey = false;
result.key_ = getDefaultInstance().getKey();
return this;
}
// optional int64 requestId = 2;
public boolean hasRequestId() {
return result.hasRequestId();
}
public long getRequestId() {
return result.getRequestId();
}
public Builder setRequestId(long value) {
result.hasRequestId = true;
result.requestId_ = value;
return this;
}
public Builder clearRequestId() {
result.hasRequestId = false;
result.requestId_ = 0L;
return this;
}
// optional int32 flag = 3;
public boolean hasFlag() {
return result.hasFlag();
}
public int getFlag() {
return result.getFlag();
}
public Builder setFlag(int value) {
result.hasFlag = true;
result.flag_ = value;
return this;
}
public Builder clearFlag() {
result.hasFlag = false;
result.flag_ = 0;
return this;
}
// optional bool isAsync = 4;
public boolean hasIsAsync() {
return result.hasIsAsync();
}
public boolean getIsAsync() {
return result.getIsAsync();
}
public Builder setIsAsync(boolean value) {
result.hasIsAsync = true;
result.isAsync_ = value;
return this;
}
public Builder clearIsAsync() {
result.hasIsAsync = false;
result.isAsync_ = false;
return this;
}
// optional sint32 datasourceItemRemovedCallbackId = 5;
public boolean hasDatasourceItemRemovedCallbackId() {
return result.hasDatasourceItemRemovedCallbackId();
}
public int getDatasourceItemRemovedCallbackId() {
return result.getDatasourceItemRemovedCallbackId();
}
public Builder setDatasourceItemRemovedCallbackId(int value) {
result.hasDatasourceItemRemovedCallbackId = true;
result.datasourceItemRemovedCallbackId_ = value;
return this;
}
public Builder clearDatasourceItemRemovedCallbackId() {
result.hasDatasourceItemRemovedCallbackId = false;
result.datasourceItemRemovedCallbackId_ = 0;
return this;
}
// optional string lockId = 6;
public boolean hasLockId() {
return result.hasLockId();
}
public java.lang.String getLockId() {
return result.getLockId();
}
public Builder setLockId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasLockId = true;
result.lockId_ = value;
return this;
}
public Builder clearLockId() {
result.hasLockId = false;
result.lockId_ = getDefaultInstance().getLockId();
return this;
}
// optional int32 lockAccessType = 7;
public boolean hasLockAccessType() {
return result.hasLockAccessType();
}
public int getLockAccessType() {
return result.getLockAccessType();
}
public Builder setLockAccessType(int value) {
result.hasLockAccessType = true;
result.lockAccessType_ = value;
return this;
}
public Builder clearLockAccessType() {
result.hasLockAccessType = false;
result.lockAccessType_ = 0;
return this;
}
// optional uint64 version = 8;
public boolean hasVersion() {
return result.hasVersion();
}
public long getVersion() {
return result.getVersion();
}
public Builder setVersion(long value) {
result.hasVersion = true;
result.version_ = value;
return this;
}
public Builder clearVersion() {
result.hasVersion = false;
result.version_ = 0L;
return this;
}
// optional string providerName = 9;
public boolean hasProviderName() {
return result.hasProviderName();
}
public java.lang.String getProviderName() {
return result.getProviderName();
}
public Builder setProviderName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
result.hasProviderName = true;
result.providerName_ = value;
return this;
}
public Builder clearProviderName() {
result.hasProviderName = false;
result.providerName_ = getDefaultInstance().getProviderName();
return this;
}
// optional bool compareOld = 10;
// Generated accessors for the optional 'compareOld' field -- do not edit by hand.
public boolean hasCompareOld() {
return result.hasCompareOld();
}
public boolean getCompareOld() {
return result.getCompareOld();
}
public Builder setCompareOld(boolean value) {
result.hasCompareOld = true;
result.compareOld_ = value;
return this;
}
// Resets the field to its default (false).
public Builder clearCompareOld() {
result.hasCompareOld = false;
result.compareOld_ = false;
return this;
}
// optional int32 oldValueFlag = 11;
// Generated accessors for the optional 'oldValueFlag' field -- do not edit by hand.
public boolean hasOldValueFlag() {
return result.hasOldValueFlag();
}
public int getOldValueFlag() {
return result.getOldValueFlag();
}
public Builder setOldValueFlag(int value) {
result.hasOldValueFlag = true;
result.oldValueFlag_ = value;
return this;
}
// Resets the field to its default (0).
public Builder clearOldValueFlag() {
result.hasOldValueFlag = false;
result.oldValueFlag_ = 0;
return this;
}
// repeated bytes oldValue = 12;
// Generated accessors for the repeated 'oldValue' field -- do not edit by hand.
// Returns a read-only view backed by the message's current list.
public java.util.List<com.google.protobuf.ByteString> getOldValueList() {
return java.util.Collections.unmodifiableList(result.oldValue_);
}
public int getOldValueCount() {
return result.getOldValueCount();
}
public com.google.protobuf.ByteString getOldValue(int index) {
return result.getOldValue(index);
}
public Builder setOldValue(int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
result.oldValue_.set(index, value);
return this;
}
// The field starts out as a shared immutable empty list; a mutable ArrayList
// is swapped in lazily on the first add to avoid allocation for empty messages.
public Builder addOldValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
if (result.oldValue_.isEmpty()) {
result.oldValue_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
}
result.oldValue_.add(value);
return this;
}
public Builder addAllOldValue(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
if (result.oldValue_.isEmpty()) {
result.oldValue_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
}
super.addAll(values, result.oldValue_);
return this;
}
// Resets the field to the shared immutable empty list.
public Builder clearOldValue() {
result.oldValue_ = java.util.Collections.emptyList();
return this;
}
// @@protoc_insertion_point(builder_scope:com.alachisoft.tayzgrid.common.protobuf.DeleteCommand)
}
// Generated static initializer: builds the shared default instance.
// internalForceInit() runs first, presumably to force the outer protocol class's
// descriptor initialization before initFields() is called -- generated ordering,
// do not reorder by hand.
static {
defaultInstance = new DeleteCommand(true);
com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:com.alachisoft.tayzgrid.common.protobuf.DeleteCommand)
}
// Message descriptor and field-accessor table for DeleteCommand; both are
// assigned by the static descriptor-assembly block below.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_fieldAccessorTable;
// Returns the file descriptor for DeleteCommand.proto.
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
// Generated descriptor assembly. 'descriptorData' is the serialized
// FileDescriptorProto for DeleteCommand.proto encoded as escaped bytes --
// never modify the string by hand; regenerate with protoc instead.
static {
java.lang.String[] descriptorData = {
"\n\023DeleteCommand.proto\022\'com.alachisoft.ta" +
"yzgrid.common.protobuf\"\202\002\n\rDeleteCommand" +
"\022\013\n\003key\030\001 \001(\014\022\021\n\trequestId\030\002 \001(\003\022\014\n\004flag" +
"\030\003 \001(\005\022\017\n\007isAsync\030\004 \001(\010\022\'\n\037datasourceIte" +
"mRemovedCallbackId\030\005 \001(\021\022\016\n\006lockId\030\006 \001(\t" +
"\022\026\n\016lockAccessType\030\007 \001(\005\022\017\n\007version\030\010 \001(" +
"\004\022\024\n\014providerName\030\t \001(\t\022\022\n\ncompareOld\030\n " +
"\001(\010\022\024\n\014oldValueFlag\030\013 \001(\005\022\020\n\010oldValue\030\014 " +
"\003(\014B\027B\025DeleteCommandProtocol"
};
// Callback invoked once the file descriptor is built: wires up the message
// descriptor and the reflective field-accessor table. Returns null because
// this proto defines no extensions needing an ExtensionRegistry.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_com_alachisoft_tayzgrid_common_protobuf_DeleteCommand_descriptor,
new java.lang.String[] { "Key", "RequestId", "Flag", "IsAsync", "DatasourceItemRemovedCallbackId", "LockId", "LockAccessType", "Version", "ProviderName", "CompareOld", "OldValueFlag", "OldValue", },
com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand.class,
com.alachisoft.tayzgrid.common.protobuf.DeleteCommandProtocol.DeleteCommand.Builder.class);
return null;
}
};
// No dependencies: DeleteCommand.proto imports no other proto files.
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright 2008 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Created on Apr 26, 2008
*/
package org.drools.rule;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Comparator;
import java.util.Date;
import java.util.PriorityQueue;
import org.drools.common.EventFactHandle;
import org.drools.common.InternalWorkingMemory;
import org.drools.common.PropagationContextImpl;
import org.drools.common.WorkingMemoryAction;
import org.drools.marshalling.MarshallerWriteContext;
import org.drools.reteoo.RightTuple;
import org.drools.spi.PropagationContext;
import org.drools.time.Job;
import org.drools.time.JobContext;
import org.drools.time.JobHandle;
import org.drools.time.TimerService;
import org.drools.time.Trigger;
/**
* @author etirelli
*
*/
public class SlidingTimeWindow
implements
Externalizable,
Behavior {
// Window length, in the time units of the session's TimerService clock
// (presumably milliseconds for a realtime clock -- TODO confirm).
private long size;
// FIXME: THIS IS SO WRONG!!! HOW DID I MADE THAT????
// NOTE(review): 'expiringTuple' is a re-entrancy guard -- retractRightTuple()
// is called back from expireTuples() for the tuple currently being expired,
// and this field lets that callback be ignored. Being a single volatile field
// on a shared behavior instance it cannot distinguish concurrent expirations,
// which appears to be what the FIXME above refers to.
private volatile transient RightTuple expiringTuple;
public SlidingTimeWindow() {
this( 0 );
}
/**
* Creates a sliding time window of the given length.
*
* @param size the window length (timer-clock units)
*/
public SlidingTimeWindow(final long size) {
super();
this.size = size;
}
/**
* @inheritDoc
*
* Only 'size' is serialized; 'expiringTuple' is transient runtime state.
*
* @see java.io.Externalizable#readExternal(java.io.ObjectInput)
*/
public void readExternal(final ObjectInput in) throws IOException,
ClassNotFoundException {
this.size = in.readLong();
}
/**
* @inheritDoc
*
* @see java.io.Externalizable#writeExternal(java.io.ObjectOutput)
*/
public void writeExternal(final ObjectOutput out) throws IOException {
out.writeLong( this.size );
}
public BehaviorType getType() {
return BehaviorType.TIME_WINDOW;
}
/**
* @return the size
*/
public long getSize() {
return size;
}
/**
* @param size the size to set
*/
public void setSize(final long size) {
this.size = size;
}
// Per-node context: a priority queue of the tuples currently inside the
// window, ordered by ascending event start timestamp (soonest to expire first).
public Object createContext() {
return new PriorityQueue<RightTuple>( 16, // arbitrary size... can we improve it?
new SlidingTimeWindowComparator() );
}
/**
* @inheritDoc
*
* Adds the tuple to the window; if it became the new head of the queue it is
* now the earliest event, so the expiration timer is rescheduled for it.
*
* @see org.drools.rule.Behavior#assertRightTuple(java.lang.Object, org.drools.reteoo.RightTuple, org.drools.common.InternalWorkingMemory)
*/
public void assertRightTuple(final Object context,
final RightTuple rightTuple,
final InternalWorkingMemory workingMemory) {
PriorityQueue<RightTuple> queue = (PriorityQueue<RightTuple>) context;
queue.add( rightTuple );
if ( queue.peek() == rightTuple ) {
// update next expiration time
updateNextExpiration( rightTuple,
workingMemory,
queue );
}
}
/**
* @inheritDoc
*
* Removes the tuple from the window unless this call is the re-entrant
* callback for the tuple expireTuples() is currently retracting.
*
* @see org.drools.rule.Behavior#retractRightTuple(java.lang.Object, org.drools.reteoo.RightTuple, org.drools.common.InternalWorkingMemory)
*/
public void retractRightTuple(final Object context,
final RightTuple rightTuple,
final InternalWorkingMemory workingMemory) {
// it may be a call back to expire the tuple that is already being expired
if( this.expiringTuple != rightTuple ) {
PriorityQueue<RightTuple> queue = (PriorityQueue<RightTuple>) context;
if ( queue.peek() == rightTuple ) {
// it was the head of the queue
queue.poll();
// update next expiration time
updateNextExpiration( queue.peek(),
workingMemory,
queue );
} else {
queue.remove( rightTuple );
}
}
}
// Drains every tuple whose window has elapsed, retracting each one from the
// network, then schedules a timer for the new head (if any). Invoked from
// BehaviorExpireWMAction on the working-memory action queue.
public void expireTuples(final Object context,
final InternalWorkingMemory workingMemory) {
TimerService clock = workingMemory.getTimerService();
long currentTime = clock.getCurrentTime();
PriorityQueue<RightTuple> queue = (PriorityQueue<RightTuple>) context;
RightTuple tuple = queue.peek();
while ( tuple != null && isExpired( currentTime,
tuple ) ) {
// guard against the re-entrant retractRightTuple() callback below
this.expiringTuple = tuple;
queue.remove();
final PropagationContext propagationContext = new PropagationContextImpl( workingMemory.getNextPropagationIdCounter(),
PropagationContext.RETRACTION,
null,
null,
tuple.getFactHandle() );
tuple.getRightTupleSink().retractRightTuple( tuple,
propagationContext,
workingMemory );
tuple.unlinkFromRightParent();
this.expiringTuple = null;
tuple = queue.peek();
}
// update next expiration time
updateNextExpiration( tuple,
workingMemory,
queue );
}
// An event expires once its start timestamp plus the window size has been reached.
private boolean isExpired(final long currentTime,
final RightTuple rightTuple) {
return ((EventFactHandle) rightTuple.getFactHandle()).getStartTimestamp() + this.size <= currentTime;
}
/**
* Schedules a one-shot timer job that will expire tuples when the given
* tuple's window elapses. No-op when rightTuple is null (empty window).
*
* NOTE(review): a previously scheduled job is not cancelled here, so stale
* jobs may fire; they appear harmless because expireTuples() re-checks
* timestamps, but confirm against the TimerService contract.
*
* @param rightTuple
* @param workingMemory
*/
private void updateNextExpiration(final RightTuple rightTuple,
final InternalWorkingMemory workingMemory,
final Object context) {
TimerService clock = workingMemory.getTimerService();
if ( rightTuple != null ) {
long nextTimestamp = ((EventFactHandle) rightTuple.getFactHandle()).getStartTimestamp() + this.size;
JobContext jobctx = new BehaviorJobContext( workingMemory, this, context );
BehaviorJob job = new BehaviorJob();
JobHandle handle = clock.scheduleJob( job, jobctx, new PointInTimeTrigger( nextTimestamp ));
jobctx.setJobHandle( handle );
}
}
public String toString() {
return "SlidingTimeWindow( size="+size+" )";
}
/**
* A Comparator<RightTuple> implementation for the fact queue
*
* Orders tuples by ascending event start timestamp, so the queue head is
* always the next tuple to expire.
*
* @author etirelli
*/
private static class SlidingTimeWindowComparator
implements
Comparator<RightTuple> {
public int compare(RightTuple t1,
RightTuple t2) {
final EventFactHandle e1 = (EventFactHandle) t1.getFactHandle();
final EventFactHandle e2 = (EventFactHandle) t2.getFactHandle();
return (e1.getStartTimestamp() < e2.getStartTimestamp()) ? -1 : (e1.getStartTimestamp() == e2.getStartTimestamp() ? 0 : 1);
}
}
// A one-shot trigger that fires exactly once, at the given absolute timestamp.
private static class PointInTimeTrigger implements Trigger {
private Date timestamp;
public PointInTimeTrigger() {}
public PointInTimeTrigger( long timestamp ) {
this.timestamp = new Date( timestamp );
}
public Date getNextFireTime() {
return this.timestamp;
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
this.timestamp = (Date) in.readObject();
}
public void writeExternal(ObjectOutput out) throws IOException {
out.writeObject( this.timestamp );
}
}
// Carries the behavior, its per-node context and the owning working memory
// from the timer job back into expireTuples().
private static class BehaviorJobContext implements JobContext {
public InternalWorkingMemory workingMemory;
public Behavior behavior;
public Object behaviorContext;
public JobHandle handle;
/**
* @param workingMemory
* @param behavior
* @param behaviorContext
*/
public BehaviorJobContext(InternalWorkingMemory workingMemory,
Behavior behavior,
Object behaviorContext) {
super();
this.workingMemory = workingMemory;
this.behavior = behavior;
this.behaviorContext = behaviorContext;
}
public JobHandle getJobHandle() {
return this.handle;
}
public void setJobHandle(JobHandle jobHandle) {
this.handle = jobHandle;
}
}
// Timer callback: defers the actual expiration to the working-memory action
// queue so it runs on the engine thread, not the timer thread.
private static class BehaviorJob implements Job {
public void execute(JobContext ctx) {
BehaviorJobContext context = (BehaviorJobContext) ctx;
context.workingMemory.queueWorkingMemoryAction( new BehaviorExpireWMAction( context.behavior, context.behaviorContext ) );
}
}
// Working-memory action that performs the expiration sweep.
// NOTE(review): the marshalling methods below are unimplemented stubs, so
// this action is lost on session serialization -- confirm whether that is
// acceptable for persisted sessions.
private static class BehaviorExpireWMAction implements WorkingMemoryAction {
private final Behavior behavior;
private final Object context;
/**
* @param behavior
* @param context
*/
public BehaviorExpireWMAction(Behavior behavior,
Object context) {
super();
this.behavior = behavior;
this.context = context;
}
public void execute(InternalWorkingMemory workingMemory) {
this.behavior.expireTuples( context, workingMemory );
}
public void write(MarshallerWriteContext context) throws IOException {
// TODO Auto-generated method stub
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
// TODO Auto-generated method stub
}
public void writeExternal(ObjectOutput out) throws IOException {
// TODO Auto-generated method stub
}
}
}
| |
package org.luaj.vm2.luajc;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Hashtable;
import org.luaj.vm2.Lua;
import org.luaj.vm2.LuaString;
import org.luaj.vm2.Print;
import org.luaj.vm2.Prototype;
import org.luaj.vm2.Upvaldesc;
/**
* Prototype information for static single-assignment analysis
*/
public class ProtoInfo {

    public final String name;            // function name used when generating class names
    public final Prototype prototype;    // the prototype that this info is about
    public final ProtoInfo[] subprotos;  // one per enclosed prototype, or null
    public final BasicBlock[] blocks;    // basic block analysis of code branching
    public final BasicBlock[] blocklist; // blocks in breadth-first order
    public final VarInfo[] params;       // parameters and initial values of stack variables
    public final VarInfo[][] vars;       // variable info per [slot][pc]
    public final UpvalInfo[] upvals;     // upvalues imported from the outer scope
    public final UpvalInfo[][] openups;  // per slot, upvalues allocated by this prototype

    /**
     * Builds analysis info for a main chunk.
     * The outer chunk has a single upvalue: the environment.
     */
    public ProtoInfo(Prototype p, String name) {
        this(p, name, null);
    }

    /**
     * Analyzes a prototype: finds basic blocks, performs SSA-style variable
     * analysis, resolves phi variables, and recursively builds info for
     * nested prototypes.
     *
     * @param p    the prototype to analyze
     * @param name qualified name for this function
     * @param u    upvalues supplied by the enclosing scope, or null for a main chunk
     */
    private ProtoInfo(Prototype p, String name, UpvalInfo[] u) {
        this.name = name;
        this.prototype = p;
        this.upvals = u != null ? u : new UpvalInfo[] { new UpvalInfo(this) };
        this.subprotos = p.p != null && p.p.length > 0 ? new ProtoInfo[p.p.length] : null;

        // find basic blocks
        this.blocks = BasicBlock.findBasicBlocks(p);
        this.blocklist = BasicBlock.findLiveBlocks(blocks);

        // params are inputs to the first block
        this.params = new VarInfo[p.maxstacksize];
        for (int slot = 0; slot < p.maxstacksize; slot++) {
            VarInfo v = VarInfo.PARAM(slot);
            params[slot] = v;
        }

        // find variables and eliminate trivial phi variables
        this.vars = findVariables();
        replaceTrivialPhiVariables();

        // find upvalues, create sub-prototypes
        this.openups = new UpvalInfo[p.maxstacksize][];
        findUpvalues();
    }

    /**
     * Renders a human-readable dump of the analysis: upvalues, basic blocks,
     * per-slot variable states and disassembled opcodes, then nested functions.
     *
     * NOTE(review): temporarily swaps the global Print.ps stream to capture
     * opcode output, so this method is not thread-safe.
     */
    public String toString() {
        StringBuffer sb = new StringBuffer();

        // prototype name
        sb.append( "proto '"+name+"'\n" );

        // upvalues from outer scopes
        for (int i = 0, n = (upvals != null ? upvals.length : 0); i < n; i++)
            sb.append( " up["+i+"]: "+upvals[i]+"\n" );

        // basic blocks
        for (int i = 0; i < blocklist.length; i++) {
            BasicBlock b = blocklist[i];
            int pc0 = b.pc0;
            sb.append( " block "+b.toString() );
            appendOpenUps(sb, -1);

            // instructions
            for (int pc = pc0; pc <= b.pc1; pc++) {

                // open upvalue storage
                appendOpenUps(sb, pc);

                // variable state for each slot, then the disassembled opcode
                sb.append( " " );
                for (int j = 0; j < prototype.maxstacksize; j++) {
                    VarInfo v = vars[j][pc];
                    String u = (v==null? "": v.upvalue!=null? !v.upvalue.rw? "[C] ": (v.allocupvalue&&v.pc==pc? "[*] ": "[] "): " ");
                    String s = v==null? "null ": String.valueOf(v);
                    sb.append( s+u );
                }
                sb.append( " " );

                // capture Print.printOpCode output by redirecting Print.ps
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                PrintStream ops = Print.ps;
                Print.ps = new PrintStream(baos);
                try {
                    Print.printOpCode(prototype, pc);
                } finally {
                    Print.ps.close();
                    Print.ps = ops;
                }
                sb.append( baos.toString() );
                sb.append( "\n" );
            }
        }

        // nested functions
        for (int i = 0, n = subprotos != null ? subprotos.length : 0; i < n; i++)
            sb.append( subprotos[i].toString() );

        return sb.toString();
    }

    // Appends a line for every upvalue allocated at the given pc
    // (pc < 0 means the parameter slots).
    private void appendOpenUps(StringBuffer sb, int pc) {
        for (int j = 0; j < prototype.maxstacksize; j++) {
            VarInfo v = (pc < 0 ? params[j] : vars[j][pc]);
            if (v != null && v.pc == pc && v.allocupvalue) {
                sb.append( " open: "+v.upvalue+"\n" );
            }
        }
    }

    /**
     * Walks every live basic block and every instruction, recording for each
     * stack slot which variable is live at each pc, which variables are
     * referenced, and which slots become invalid (e.g. past a call's results).
     *
     * @return per-slot, per-pc variable table indexed as [slot][pc]
     */
    private VarInfo[][] findVariables() {
        // create storage for variables
        int n = prototype.code.length;
        int m = prototype.maxstacksize;
        VarInfo[][] v = new VarInfo[m][];
        for (int i = 0; i < v.length; i++)
            v[i] = new VarInfo[n];

        // process instructions
        for (int bi = 0; bi < blocklist.length; bi++) {
            BasicBlock b0 = blocklist[bi];

            // input from previous blocks: parameter values at entry, the single
            // predecessor's exit state, or a phi variable when predecessors differ
            int nprev = b0.prev != null ? b0.prev.length : 0;
            for (int slot = 0; slot < m; slot++) {
                VarInfo var = null;
                if (nprev == 0)
                    var = params[slot];
                else if (nprev == 1)
                    var = v[slot][b0.prev[0].pc1];
                else {
                    for (int i = 0; i < nprev; i++) {
                        BasicBlock bp = b0.prev[i];
                        if (v[slot][bp.pc1] == VarInfo.INVALID)
                            var = VarInfo.INVALID;
                    }
                }
                if (var == null)
                    var = VarInfo.PHI(this, slot, b0.pc0);
                v[slot][b0.pc0] = var;
            }

            // process instructions for this basic block
            for (int pc = b0.pc0; pc <= b0.pc1; pc++) {

                // propagate previous values except at block boundaries
                if (pc > b0.pc0)
                    propogateVars(v, pc - 1, pc);

                int a, b, c;
                int ins = prototype.code[pc];
                int op = Lua.GET_OPCODE(ins);

                // account for assignments, references and invalidations
                switch (op) {
                case Lua.OP_LOADK:    /* A Bx R(A) := Kst(Bx) */
                case Lua.OP_LOADBOOL: /* A B C R(A) := (Bool)B; if (C) pc++ */
                case Lua.OP_GETUPVAL: /* A B R(A) := UpValue[B] */
                case Lua.OP_NEWTABLE: /* A B C R(A) := {} (size = B,C) */
                    a = Lua.GETARG_A( ins );
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_MOVE:    /* A B R(A) := R(B) */
                case Lua.OP_UNM:     /* A B R(A) := -R(B) */
                case Lua.OP_NOT:     /* A B R(A) := not R(B) */
                case Lua.OP_LEN:     /* A B R(A) := length of R(B) */
                case Lua.OP_TESTSET: /* A B C if (R(B) <=> C) then R(A) := R(B) else pc++ */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    v[b][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_ADD: /* A B C R(A) := RK(B) + RK(C) */
                case Lua.OP_SUB: /* A B C R(A) := RK(B) - RK(C) */
                case Lua.OP_MUL: /* A B C R(A) := RK(B) * RK(C) */
                case Lua.OP_DIV: /* A B C R(A) := RK(B) / RK(C) */
                case Lua.OP_MOD: /* A B C R(A) := RK(B) % RK(C) */
                case Lua.OP_POW: /* A B C R(A) := RK(B) ^ RK(C) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    // RK operands reference a register only when not a constant index
                    if (!Lua.ISK(b)) v[b][pc].isreferenced = true;
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_SETTABLE: /* A B C R(A)[RK(B)]:= RK(C) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    v[a][pc].isreferenced = true;
                    if (!Lua.ISK(b)) v[b][pc].isreferenced = true;
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    break;

                case Lua.OP_SETTABUP: /* A B C UpValue[A][RK(B)] := RK(C) */
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    if (!Lua.ISK(b)) v[b][pc].isreferenced = true;
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    break;

                case Lua.OP_CONCAT: /* A B C R(A) := R(B).. ... ..R(C) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    for ( ; b <= c; b++)
                        v[b][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_FORPREP: /* A sBx R(A)-=R(A+2); pc+=sBx */
                    a = Lua.GETARG_A( ins );
                    v[a+2][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_GETTABLE: /* A B C R(A) := R(B)[RK(C)] */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    v[b][pc].isreferenced = true;
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_GETTABUP: /* A B C R(A) := UpValue[B][RK(C)] */
                    a = Lua.GETARG_A( ins );
                    c = Lua.GETARG_C( ins );
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_SELF: /* A B C R(A+1) := R(B); R(A) := R(B)[RK(C)] */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    v[b][pc].isreferenced = true;
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    v[a+1][pc] = new VarInfo(a+1, pc);
                    break;

                case Lua.OP_FORLOOP: /* A sBx R(A)+=R(A+2);
                                        if R(A) <?= R(A+1) then { pc+=sBx; R(A+3)=R(A) } */
                    a = Lua.GETARG_A( ins );
                    v[a][pc].isreferenced = true;
                    v[a+2][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    v[a][pc].isreferenced = true;
                    v[a+1][pc].isreferenced = true;
                    v[a+3][pc] = new VarInfo(a+3, pc);
                    break;

                case Lua.OP_LOADNIL: /* A B R(A) := ... := R(A+B) := nil */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    for ( ; b-- >= 0; a++)
                        v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_VARARG: /* A B R(A), R(A+1), ..., R(A+B-1) = vararg */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    for (int j = 1; j < b; j++, a++)
                        v[a][pc] = new VarInfo(a, pc);
                    // B==0: multiple results, remainder of stack is indeterminate
                    if (b == 0)
                        for ( ; a < m; a++)
                            v[a][pc] = VarInfo.INVALID;
                    break;

                case Lua.OP_CALL: /* A B C R(A), ... ,R(A+C-2) := R(A)(R(A+1), ... ,R(A+B-1)) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    v[a][pc].isreferenced = true;
                    for (int i = 1; i <= b-1; i++)
                        v[a+i][pc].isreferenced = true;
                    for (int j = 0; j <= c-2; j++, a++)
                        v[a][pc] = new VarInfo(a, pc);
                    // slots above the results are clobbered by the call
                    for ( ; a < m; a++)
                        v[a][pc] = VarInfo.INVALID;
                    break;

                case Lua.OP_TFORCALL: /* A C R(A+3), ... ,R(A+2+C) := R(A)(R(A+1), R(A+2)); */
                    a = Lua.GETARG_A( ins );
                    c = Lua.GETARG_C( ins );
                    v[a++][pc].isreferenced = true;
                    v[a++][pc].isreferenced = true;
                    v[a++][pc].isreferenced = true;
                    for (int j = 0; j < c; j++, a++)
                        v[a][pc] = new VarInfo(a, pc);
                    for ( ; a < m; a++)
                        v[a][pc] = VarInfo.INVALID;
                    break;

                case Lua.OP_TFORLOOP: /* A sBx if R(A+1) ~= nil then { R(A)=R(A+1); pc += sBx */
                    a = Lua.GETARG_A( ins );
                    v[a+1][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;

                case Lua.OP_TAILCALL: /* A B C return R(A)(R(A+1), ... ,R(A+B-1)) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    v[a][pc].isreferenced = true;
                    for (int i = 1; i <= b-1; i++)
                        v[a+i][pc].isreferenced = true;
                    break;

                case Lua.OP_RETURN: /* A B return R(A), ... ,R(A+B-2) (see note) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    for (int i = 0; i <= b-2; i++)
                        v[a+i][pc].isreferenced = true;
                    break;

                case Lua.OP_CLOSURE: { /* A Bx R(A) := closure(KPROTO[Bx], R(A), ... ,R(A+n)) */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_Bx( ins );
                    // in-stack upvalues of the nested prototype reference our registers
                    Upvaldesc[] upvalues = prototype.p[b].upvalues;
                    for (int k = 0, nups = upvalues.length; k < nups; ++k)
                        if (upvalues[k].instack)
                            v[upvalues[k].idx][pc].isreferenced = true;
                    v[a][pc] = new VarInfo(a, pc);
                    break;
                }

                case Lua.OP_SETLIST: /* A B C R(A)[(C-1)*FPF+i]:= R(A+i), 1 <= i <= B */
                    a = Lua.GETARG_A( ins );
                    b = Lua.GETARG_B( ins );
                    v[a][pc].isreferenced = true;
                    for (int i = 1; i <= b; i++)
                        v[a+i][pc].isreferenced = true;
                    break;

                case Lua.OP_SETUPVAL: /* A B UpValue[B]:= R(A) */
                case Lua.OP_TEST:     /* A C if not (R(A) <=> C) then pc++ */
                    a = Lua.GETARG_A( ins );
                    v[a][pc].isreferenced = true;
                    break;

                case Lua.OP_EQ: /* A B C if ((RK(B) == RK(C)) ~= A) then pc++ */
                case Lua.OP_LT: /* A B C if ((RK(B) < RK(C)) ~= A) then pc++ */
                case Lua.OP_LE: /* A B C if ((RK(B) <= RK(C)) ~= A) then pc++ */
                    b = Lua.GETARG_B( ins );
                    c = Lua.GETARG_C( ins );
                    if (!Lua.ISK(b)) v[b][pc].isreferenced = true;
                    if (!Lua.ISK(c)) v[c][pc].isreferenced = true;
                    break;

                case Lua.OP_JMP: /* sBx pc+=sBx */
                    // A > 0 closes upvalues for slots >= A-1, invalidating them
                    a = Lua.GETARG_A( ins );
                    if (a > 0)
                        for ( --a; a < m; a++)
                            v[a][pc] = VarInfo.INVALID;
                    break;

                default:
                    throw new IllegalStateException("unhandled opcode: "+ins);
                }
            }
        }
        return v;
    }

    // Copies every slot's variable state from one pc to the next.
    private static void propogateVars(VarInfo[][] v, int pcfrom, int pcto) {
        for (int j = 0, m = v.length; j < m; j++)
            v[j][pcto] = v[j][pcfrom];
    }

    // Replaces phi variables that resolve to a single unique value with that value.
    private void replaceTrivialPhiVariables() {
        for (int i = 0; i < blocklist.length; i++) {
            BasicBlock b0 = blocklist[i];
            for (int slot = 0; slot < prototype.maxstacksize; slot++) {
                VarInfo vold = vars[slot][b0.pc0];
                VarInfo vnew = vold.resolvePhiVariableValues();
                if (vnew != null)
                    substituteVariable(slot, vold, vnew);
            }
        }
    }

    // Substitutes vnew for every occurrence of vold in the slot's per-pc table.
    // (A single pass suffices; the previous version re-scanned the whole array
    // once per instruction, which was redundant O(n^2) work.)
    private void substituteVariable(int slot, VarInfo vold, VarInfo vnew) {
        replaceAll( vars[slot], vars[slot].length, vold, vnew );
    }

    private void replaceAll(VarInfo[] v, int n, VarInfo vold, VarInfo vnew) {
        for (int i = 0; i < n; i++)
            if (v[i] == vold)
                v[i] = vnew;
    }

    /**
     * Recursively builds ProtoInfo for each nested prototype created via
     * OP_CLOSURE, resolving each nested upvalue either to an open upvalue in
     * this function's stack or to one of this function's own upvalues, and
     * marks upvalues written by OP_SETUPVAL as read/write.
     */
    private void findUpvalues() {
        int[] code = prototype.code;
        int n = code.length;

        // propagate to inner prototypes
        String[] names = findInnerprotoNames();
        for (int pc = 0; pc < n; pc++) {
            if (Lua.GET_OPCODE(code[pc]) == Lua.OP_CLOSURE) {
                int bx = Lua.GETARG_Bx(code[pc]);
                Prototype newp = prototype.p[bx];
                UpvalInfo[] newu = new UpvalInfo[newp.upvalues.length];
                String newname = name + "$" + names[bx];
                for (int j = 0; j < newp.upvalues.length; ++j) {
                    Upvaldesc u = newp.upvalues[j];
                    newu[j] = u.instack ? findOpenUp(pc, u.idx) : upvals[u.idx];
                }
                subprotos[bx] = new ProtoInfo(newp, newname, newu);
            }
        }

        // mark all upvalues that are written locally as read/write
        for (int pc = 0; pc < n; pc++) {
            if (Lua.GET_OPCODE(code[pc]) == Lua.OP_SETUPVAL)
                upvals[Lua.GETARG_B(code[pc])].rw = true;
        }
    }

    // Returns the open upvalue covering the given slot at the given pc,
    // creating it (and recording it for all pcs sharing the variable) if needed.
    private UpvalInfo findOpenUp(int pc, int slot) {
        if (openups[slot] == null)
            openups[slot] = new UpvalInfo[prototype.code.length];
        if (openups[slot][pc] != null)
            return openups[slot][pc];
        UpvalInfo u = new UpvalInfo(this, pc, slot);
        for (int i = 0, n = prototype.code.length; i < n; ++i)
            if (vars[slot][i] != null && vars[slot][i].upvalue == u)
                openups[slot][i] = u;
        return u;
    }

    /** True when the variable at (pc, slot) is a read/write upvalue assignment. */
    public boolean isUpvalueAssign(int pc, int slot) {
        VarInfo v = pc < 0 ? params[slot] : vars[slot][pc];
        return v != null && v.upvalue != null && v.upvalue.rw;
    }

    /** True when the read/write upvalue at (pc, slot) is allocated at exactly this pc. */
    public boolean isUpvalueCreate(int pc, int slot) {
        VarInfo v = pc < 0 ? params[slot] : vars[slot][pc];
        return v != null && v.upvalue != null && v.upvalue.rw && v.allocupvalue && pc == v.pc;
    }

    /** True when reading (pc, slot) goes through a read/write upvalue. */
    public boolean isUpvalueRefer(int pc, int slot) {
        // special case when both refer and assign in same instruction
        if (pc > 0 && vars[slot][pc] != null && vars[slot][pc].pc == pc && vars[slot][pc-1] != null)
            pc -= 1;
        VarInfo v = pc < 0 ? params[slot] : vars[slot][pc];
        return v != null && v.upvalue != null && v.upvalue.rw;
    }

    /** True when the initial (parameter) value of the slot is ever read. */
    public boolean isInitialValueUsed(int slot) {
        VarInfo v = params[slot];
        return v.isreferenced;
    }

    public boolean isReadWriteUpvalue(UpvalInfo u) {
        return u.rw;
    }

    /**
     * Derives a unique, Java-identifier-safe name for each nested prototype by
     * inspecting the instruction following its OP_CLOSURE (table key, upvalue
     * name, or local variable name), falling back to the prototype index, and
     * de-duplicating with a '$count' suffix.
     *
     * @return one name per nested prototype, or null when there are none
     */
    private String[] findInnerprotoNames() {
        if (prototype.p.length <= 0)
            return null;

        // find all the prototype names
        String[] names = new String[prototype.p.length];
        Hashtable<String, Boolean> used = new Hashtable<String, Boolean>();
        int[] code = prototype.code;
        int n = code.length;
        for (int pc = 0; pc < n; pc++) {
            if (Lua.GET_OPCODE(code[pc]) == Lua.OP_CLOSURE) {
                int bx = Lua.GETARG_Bx(code[pc]);
                String name = null;
                final int i = code[pc+1];
                switch (Lua.GET_OPCODE(i)) {
                case Lua.OP_SETTABLE:
                case Lua.OP_SETTABUP: {
                    // closure stored into a table: use the constant key if any
                    final int b = Lua.GETARG_B(i);
                    if (Lua.ISK(b))
                        name = prototype.k[b&0x0ff].tojstring();
                    break;
                }
                case Lua.OP_SETUPVAL: {
                    // closure stored into an upvalue: use the upvalue's name
                    final int b = Lua.GETARG_B(i);
                    final LuaString s = prototype.upvalues[b].name;
                    if (s != null)
                        name = s.tojstring();
                    break;
                }
                default: // Local variable
                    final int a = Lua.GETARG_A(code[pc]);
                    final LuaString s = prototype.getlocalname(a+1, pc+1);
                    if (s != null)
                        name = s.tojstring();
                    break;
                }
                name = name != null ? toJavaClassPart(name) : String.valueOf(bx);
                // de-duplicate: append $1, $2, ... until unused
                if (used.containsKey(name)) {
                    String basename = name;
                    int count = 1;
                    do {
                        name = basename + '$' + count++;
                    } while (used.containsKey(name));
                }
                used.put(name, Boolean.TRUE);
                names[bx] = name;
            }
        }
        return names;
    }

    // Maps every character that is not a valid Java identifier part to '_'.
    private static String toJavaClassPart(String s) {
        final int n = s.length();
        StringBuffer sb = new StringBuffer(n);
        for (int i = 0; i < n; ++i)
            sb.append( Character.isJavaIdentifierPart(s.charAt(i)) ? s.charAt(i) : '_' );
        return sb.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.mlcontext;
import static org.apache.sysml.api.mlcontext.ScriptFactory.dml;
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.ml.linalg.DenseVector;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.sysml.api.mlcontext.FrameFormat;
import org.apache.sysml.api.mlcontext.FrameMetadata;
import org.apache.sysml.api.mlcontext.Matrix;
import org.apache.sysml.api.mlcontext.Script;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtils;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.util.DataConverter;
import org.apache.sysml.runtime.util.UtilFunctions;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.integration.mlcontext.MLContextTestBase;
import org.apache.sysml.test.utils.TestUtils;
import org.junit.Test;
public class DataFrameVectorScriptTest extends MLContextTestBase
{
private final static String TEST_DIR = "functions/mlcontext/";
private final static String TEST_NAME = "DataFrameConversion";
private final static String TEST_CLASS_DIR = TEST_DIR + DataFrameVectorScriptTest.class.getSimpleName() + "/";
//schema restriction: single vector included
// Each schema below contains exactly one OBJECT column, which carries the vector.
private final static ValueType[] schemaStrings = new ValueType[]{ValueType.OBJECT, ValueType.STRING, ValueType.STRING, ValueType.STRING};
private final static ValueType[] schemaDoubles = new ValueType[]{ValueType.DOUBLE, ValueType.DOUBLE, ValueType.OBJECT, ValueType.DOUBLE};
private final static ValueType[] schemaMixed1 = new ValueType[]{ValueType.OBJECT, ValueType.INT, ValueType.STRING, ValueType.DOUBLE, ValueType.INT};
private final static ValueType[] schemaMixed2 = new ValueType[]{ValueType.STRING, ValueType.OBJECT, ValueType.DOUBLE};
// Test data dimensions and sparsity settings (presumably consumed by the
// testDataFrameScriptInput helper defined further down -- outside this view).
private final static int rows1 = 2245;
private final static int colsVector = 7;
private final static double sparsity1 = 0.9;
private final static double sparsity2 = 0.1;
// Numerical tolerance for result comparisons.
private final static double eps=0.0000000001;
// Registers the test configuration with expected outputs "A" and "B".
@Override
public void setUp() {
addTestConfiguration(TEST_NAME, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME, new String[] {"A", "B"}));
}
// DataFrame with ID column, dense data, unknown dimensions. Argument meaning
// inferred from the test names as (schema, withIDColumn, sparse, unknownDims)
// -- TODO confirm against the testDataFrameScriptInput declaration.
@Test
public void testVectorStringsConversionIDDenseUnknown() {
testDataFrameScriptInput(schemaStrings, true, false, true);
}
@Test
public void testVectorDoublesConversionIDDenseUnknown() {
testDataFrameScriptInput(schemaDoubles, true, false, true);
}
@Test
public void testVectorMixed1ConversionIDDenseUnknown() {
testDataFrameScriptInput(schemaMixed1, true, false, true);
}
@Test
public void testVectorMixed2ConversionIDDenseUnknown() {
testDataFrameScriptInput(schemaMixed2, true, false, true);
}
// DataFrame with ID column, dense data, known dimensions. Argument meaning
// inferred from the test names as (schema, withIDColumn, sparse, unknownDims)
// -- TODO confirm against the testDataFrameScriptInput declaration.
@Test
public void testVectorStringsConversionIDDense() {
testDataFrameScriptInput(schemaStrings, true, false, false);
}
@Test
public void testVectorDoublesConversionIDDense() {
testDataFrameScriptInput(schemaDoubles, true, false, false);
}
@Test
public void testVectorMixed1ConversionIDDense() {
testDataFrameScriptInput(schemaMixed1, true, false, false);
}
@Test
public void testVectorMixed2ConversionIDDense() {
testDataFrameScriptInput(schemaMixed2, true, false, false);
}
// DataFrame with ID column, sparse data, unknown dimensions. Argument meaning
// inferred from the test names as (schema, withIDColumn, sparse, unknownDims)
// -- TODO confirm against the testDataFrameScriptInput declaration.
@Test
public void testVectorStringsConversionIDSparseUnknown() {
testDataFrameScriptInput(schemaStrings, true, true, true);
}
@Test
public void testVectorDoublesConversionIDSparseUnknown() {
testDataFrameScriptInput(schemaDoubles, true, true, true);
}
@Test
public void testVectorMixed1ConversionIDSparseUnknown() {
testDataFrameScriptInput(schemaMixed1, true, true, true);
}
@Test
public void testVectorMixed2ConversionIDSparseUnknown() {
testDataFrameScriptInput(schemaMixed2, true, true, true);
}
@Test
public void testVectorStringsConversionIDSparse() {
testDataFrameScriptInput(schemaStrings, true, true, false);
}
@Test
public void testVectorDoublesConversionIDSparse() {
testDataFrameScriptInput(schemaDoubles, true, true, false);
}
@Test
public void testVectorMixed1ConversionIDSparse() {
testDataFrameScriptInput(schemaMixed1, true, true, false);
}
@Test
public void testVectorMixed2ConversionIDSparse() {
testDataFrameScriptInput(schemaMixed2, true, true, false);
}
@Test
public void testVectorStringsConversionDenseUnknown() {
testDataFrameScriptInput(schemaStrings, false, false, true);
}
@Test
public void testVectorDoublesConversionDenseUnknown() {
testDataFrameScriptInput(schemaDoubles, false, false, true);
}
@Test
public void testVectorMixed1ConversionDenseUnknown() {
testDataFrameScriptInput(schemaMixed1, false, false, true);
}
@Test
public void testVectorMixed2ConversionDenseUnknown() {
testDataFrameScriptInput(schemaMixed2, false, false, true);
}
@Test
public void testVectorStringsConversionDense() {
testDataFrameScriptInput(schemaStrings, false, false, false);
}
@Test
public void testVectorDoublesConversionDense() {
testDataFrameScriptInput(schemaDoubles, false, false, false);
}
@Test
public void testVectorMixed1ConversionDense() {
testDataFrameScriptInput(schemaMixed1, false, false, false);
}
@Test
public void testVectorMixed2ConversionDense() {
testDataFrameScriptInput(schemaMixed2, false, false, false);
}
@Test
public void testVectorStringsConversionSparseUnknown() {
testDataFrameScriptInput(schemaStrings, false, true, true);
}
@Test
public void testVectorDoublesConversionSparseUnknown() {
testDataFrameScriptInput(schemaDoubles, false, true, true);
}
@Test
public void testVectorMixed1ConversionSparseUnknown() {
testDataFrameScriptInput(schemaMixed1, false, true, true);
}
@Test
public void testVectorMixed2ConversionSparseUnknown() {
testDataFrameScriptInput(schemaMixed2, false, true, true);
}
@Test
public void testVectorStringsConversionSparse() {
testDataFrameScriptInput(schemaStrings, false, true, false);
}
@Test
public void testVectorDoublesConversionSparse() {
testDataFrameScriptInput(schemaDoubles, false, true, false);
}
@Test
public void testVectorMixed1ConversionSparse() {
testDataFrameScriptInput(schemaMixed1, false, true, false);
}
@Test
public void testVectorMixed2ConversionSparse() {
testDataFrameScriptInput(schemaMixed2, false, true, false);
}
/**
 * Runs a data-frame-to-matrix conversion test: generates a random matrix, wraps it
 * in a Spark DataFrame with the given per-column schema (OBJECT columns become
 * vector columns), converts it back to a matrix via a DML script (once with full
 * metadata, once with empty metadata), and compares both results to the source data.
 *
 * @param schema      per-column value types; OBJECT marks a vector column spanning colsVector values
 * @param containsID  whether the data frame carries an explicit row-index column
 * @param sparse      true to generate sparse data (sparsity2), false for dense data (sparsity1)
 * @param unknownDims true to derive frame metadata from empty (unknown) matrix characteristics
 */
private void testDataFrameScriptInput(ValueType[] schema, boolean containsID, boolean sparse, boolean unknownDims) {
//TODO fix inconsistency ml context vs jmlc register Xf
try
{
//generate input data and setup metadata
int cols = schema.length + colsVector - 1;
// Fix: the flag was previously named "dense" and selected sparsity1 (0.9) when true,
// which inverted the call sites — the Dense-named tests pass false. Select the sparse
// sparsity when the flag is set so test names match the generated data.
double sparsity = sparse ? sparsity2 : sparsity1;
double[][] A = TestUtils.round(getRandomMatrix(rows1, cols, -10, 1000, sparsity, 2373));
MatrixBlock mbA = DataConverter.convertToMatrixBlock(A);
int blksz = ConfigurationManager.getBlocksize();
MatrixCharacteristics mc1 = new MatrixCharacteristics(rows1, cols, blksz, blksz, mbA.getNonZeros());
MatrixCharacteristics mc2 = unknownDims ? new MatrixCharacteristics() : new MatrixCharacteristics(mc1);
//create input data frame
Dataset<Row> df = createDataFrame(spark, mbA, containsID, schema);
// Create full frame metadata, and empty frame metadata
FrameMetadata meta = new FrameMetadata(containsID ? FrameFormat.DF_WITH_INDEX :
FrameFormat.DF, mc2.getRows(), mc2.getCols());
FrameMetadata metaEmpty = new FrameMetadata();
//run scripts and obtain result
Script script1 = dml(
"Xm = as.matrix(Xf);")
.in("Xf", df, meta).out("Xm");
Script script2 = dml(
"Xm = as.matrix(Xf);")
.in("Xf", df, metaEmpty).out("Xm"); // empty metadata
Matrix Xm1 = ml.execute(script1).getMatrix("Xm");
Matrix Xm2 = ml.execute(script2).getMatrix("Xm");
double[][] B1 = Xm1.to2DDoubleArray();
double[][] B2 = Xm2.to2DDoubleArray();
TestUtils.compareMatrices(A, B1, rows1, cols, eps);
TestUtils.compareMatrices(A, B2, rows1, cols, eps);
}
catch( Exception ex ) {
// Fix: removed the redundant printStackTrace(); wrapping the exception as the
// RuntimeException cause already preserves the full trace for the test runner.
throw new RuntimeException(ex);
}
}
// Builds an in-memory Spark DataFrame mirroring the given matrix block: scalar schema
// columns take one matrix column each, while every OBJECT schema entry consumes
// colsVector consecutive matrix columns packed into a single DenseVector column.
// Optionally prepends a 1-based double-typed row-index column.
@SuppressWarnings("resource")
private static Dataset<Row> createDataFrame(SparkSession sparkSession, MatrixBlock mb, boolean containsID, ValueType[] schema) {
//create in-memory list of rows
List<Row> list = new ArrayList<Row>();
int off = (containsID ? 1 : 0);
// clen = number of data-frame columns: one per schema entry (the single OBJECT entry
// absorbs colsVector matrix columns), plus the optional ID column.
int clen = mb.getNumColumns() + off - colsVector + 1;
for( int i=0; i<mb.getNumRows(); i++ ) {
Object[] row = new Object[clen];
if( containsID )
row[0] = (double)i+1;
// j walks matrix columns, j2 walks schema/data-frame columns; they diverge
// whenever an OBJECT column swallows a colsVector-wide slice.
for( int j=0, j2=0; j<mb.getNumColumns(); j++, j2++ ) {
if( schema[j2] != ValueType.OBJECT ) {
row[j2+off] = UtilFunctions
.doubleToObject(schema[j2], mb.quickGetValue(i, j));
}
else {
// Pack colsVector matrix values into one DenseVector cell, then skip
// the consumed matrix columns (loop header adds the final +1).
double[] tmp = DataConverter.convertToDoubleVector(
mb.slice(i, i, j, j+colsVector-1, new MatrixBlock()), false);
row[j2+off] = new DenseVector(tmp);
j += colsVector-1;
}
}
list.add(RowFactory.create(row));
}
//create data frame schema
List<StructField> fields = new ArrayList<StructField>();
if( containsID )
fields.add(DataTypes.createStructField(RDDConverterUtils.DF_ID_COLUMN,
DataTypes.DoubleType, true));
for( int j=0; j<schema.length; j++ ) {
DataType dt = null;
switch(schema[j]) {
case STRING: dt = DataTypes.StringType; break;
case DOUBLE: dt = DataTypes.DoubleType; break;
case INT: dt = DataTypes.LongType; break;
case OBJECT: dt = new VectorUDT(); break;
default: throw new RuntimeException("Unsupported value type.");
}
fields.add(DataTypes.createStructField("C"+(j+1), dt, true));
}
StructType dfSchema = DataTypes.createStructType(fields);
//create rdd and data frame
JavaSparkContext sc = new JavaSparkContext(sparkSession.sparkContext());
JavaRDD<Row> rowRDD = sc.parallelize(list);
return sparkSession.createDataFrame(rowRDD, dfSchema);
}
}
| |
/*
* Copyright (c) 2014 UP-NEXT. All rights reserved.
* http://www.up-next.com
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package com.upnext.blekit.util.http;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.upnext.blekit.util.L;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.Map;
/**
* Http client class used for creating simple HTTP requests.
*
* @author Roman Wozniak (roman@up-next.com)
*/
public class HttpClient {
private final static String TAG = HttpClient.class.getSimpleName();
private final static boolean LOG_RESPONSE = true;
private String url;
private final ObjectMapper objectMapper;
public HttpClient(String url) {
this.url = url;
this.objectMapper = new ObjectMapper();
this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}
private String urlWithParams(String url, Map<String, String> postParams) throws UnsupportedEncodingException {
final StringBuilder urlBuilder = new StringBuilder();
urlBuilder.append(url);
boolean isFirst = true;
if ( postParams != null ) {
for ( String k: postParams.keySet() ) {
urlBuilder.append(isFirst ? "?" : "&");
isFirst = false;
urlBuilder.append(URLEncoder.encode(k, "UTF-8"));
urlBuilder.append("=");
String v = postParams.get(k);
urlBuilder.append(URLEncoder.encode(v, "UTF-8"));
}
}
return urlBuilder.toString();
}
public <T> Response<T> get(Class<T> clazz, Map<String, String> params) {
return fetchResponse(clazz, null, params, "GET");
}
public <T> Response<T> post(Class<T> clazz, Map<String, String> params) {
return fetchResponse(clazz, null, params, "POST");
}
public <T> Response<T> put(Class<T> clazz, Map<String, String> params) {
return fetchResponse(clazz, null, params, "PUT");
}
public <T> Response<T> get(Class<T> clazz, String path, Map<String, String> params) {
return fetchResponse(clazz, path, params, "GET");
}
public <T> Response<T> post(Class<T> clazz, String path, Map<String, String> params, String payload) {
return fetchResponse(clazz, path, params, "POST", payload);
}
public <T> Response<T> post(Class<T> clazz, String path, Map<String, String> params, Object payload) {
if(payload instanceof String) {
return fetchResponse(clazz, path, params, "POST", (String)payload);
}
try {
String payloadString = objectMapper.writeValueAsString(payload);
return fetchResponse(clazz, path, params, "POST", payloadString, "application/json;charset=UTF-8");
} catch (JsonProcessingException e) {
return new Response<T>(Error.serlizerError(e));
}
}
public <T> Response<T> put(Class<T> clazz, String path, Map<String, String> params) {
return fetchResponse(clazz, path, params, "PUT");
}
public <T> Response<T> delete(Class<T> clazz, String path, Map<String, String> params) {
return fetchResponse(clazz, path, params, "DELETE");
}
public <T> Response<T> fetchResponse(Class<T> clazz, String path, Map<String, String> params, String httpMethod) {
return fetchResponse(clazz, path, params, httpMethod, null);
}
public <T> Response<T> fetchResponse(Class<T> clazz, String path, Map<String, String> params, String httpMethod, String payload) {
return fetchResponse(clazz, path, params, httpMethod, payload, "application/x-www-form-urlencoded;charset=UTF-8");
}
public <T> Response<T> fetchResponse(Class<T> clazz, String path, Map<String, String> params, String httpMethod, String payload, String payloadContentType) {
try {
String fullUrl = urlWithParams(path != null ? url + path : url, params);
L.d("[" + httpMethod + "] " + fullUrl);
final URLConnection connection = new URL(fullUrl).openConnection();
if ( connection instanceof HttpURLConnection) {
final HttpURLConnection httpConnection = (HttpURLConnection)connection;
httpConnection.setDoInput(true);
if ( httpMethod != null ) {
httpConnection.setRequestMethod(httpMethod);
if(httpMethod.equals("POST")) {
connection.setDoOutput(true); // Triggers POST.
connection.setRequestProperty("Accept-Charset", "UTF-8");
connection.setRequestProperty("Content-Type", payloadContentType);
}
} else {
httpConnection.setRequestMethod(params != null ? "POST" : "GET");
}
httpConnection.addRequestProperty("Accept", "application/json");
httpConnection.connect();
if(payload != null) {
OutputStream outputStream = httpConnection.getOutputStream();
try {
if(LOG_RESPONSE) {
L.d("[payload] " + payload);
}
OutputStreamWriter writer = new OutputStreamWriter(outputStream, "UTF-8");
writer.write(payload);
writer.close();
} finally {
outputStream.close();
}
}
InputStream input = null;
try {
input = connection.getInputStream();
} catch ( IOException e ) {
// workaround for Android HttpURLConnection ( IOException is thrown for 40x error codes ).
final int statusCode = httpConnection.getResponseCode();
if ( statusCode == -1 ) throw e;
return new Response<T>(Error.httpError(httpConnection.getResponseCode()) );
}
final int statusCode = httpConnection.getResponseCode();
L.d("statusCode " + statusCode);
if ( statusCode == HttpURLConnection.HTTP_OK ||
statusCode == HttpURLConnection.HTTP_CREATED ) {
try {
T value = null;
if ( clazz != Void.class ) {
if(LOG_RESPONSE || clazz == String.class) {
StringBuilder sb=new StringBuilder();
BufferedReader br = new BufferedReader(new InputStreamReader(input));
String read = br.readLine();
while(read != null) {
sb.append(read);
read = br.readLine();
}
String response = sb.toString();
if( LOG_RESPONSE ) {
L.d("response " + response);
}
if( clazz == String.class ) {
value = (T) response;
} else {
value = (T) objectMapper.readValue(response, clazz);
}
}
else {
value = (T) objectMapper.readValue(input, clazz);
}
}
return new Response<T>(value);
} catch (JsonMappingException e ) {
return new Response<T>(Error.serlizerError(e));
} catch (JsonParseException e) {
return new Response<T>(Error.serlizerError(e));
}
} else if(statusCode == HttpURLConnection.HTTP_NO_CONTENT) {
try {
T def = clazz.newInstance();
if(LOG_RESPONSE) {
L.d("statusCode == HttpURLConnection.HTTP_NO_CONTENT");
}
return new Response<T>(def);
} catch (InstantiationException e) {
return new Response<T>(Error.ioError(e));
} catch (IllegalAccessException e) {
return new Response<T>(Error.ioError(e));
}
} else {
if(LOG_RESPONSE) {
L.d("error, statusCode " + statusCode);
}
return new Response<T>(Error.httpError(statusCode));
}
}
return new Response<T>(Error.ioError(new Exception("Url is not a http link")));
} catch ( IOException e ) {
if(LOG_RESPONSE) {
L.d("error, ioError " + e);
}
return new Response<T>(Error.ioError(e));
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.rest;
import java.io.IOException;
import java.util.*;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.commons.lang3.StringUtils;
import org.apache.zeppelin.annotation.ZeppelinApi;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.notebook.Note;
import org.apache.zeppelin.notebook.Notebook;
import org.apache.zeppelin.notebook.NotebookAuthorization;
import org.apache.zeppelin.notebook.Paragraph;
import org.apache.zeppelin.rest.exception.BadRequestException;
import org.apache.zeppelin.rest.exception.NotFoundException;
import org.apache.zeppelin.rest.exception.ForbiddenException;
import org.apache.zeppelin.rest.message.CronRequest;
import org.apache.zeppelin.rest.message.NewNoteRequest;
import org.apache.zeppelin.rest.message.NewParagraphRequest;
import org.apache.zeppelin.rest.message.RunParagraphWithParametersRequest;
import org.apache.zeppelin.search.SearchService;
import org.apache.zeppelin.server.JsonResponse;
import org.apache.zeppelin.socket.NotebookServer;
import org.apache.zeppelin.types.InterpreterSettingsList;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.apache.zeppelin.utils.InterpreterBindingUtils;
import org.apache.zeppelin.utils.SecurityUtils;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
/**
* Rest api endpoint for the notebook.
*/
@Path("/notebook")
@Produces("application/json")
public class NotebookRestApi {
private static final Logger LOG = LoggerFactory.getLogger(NotebookRestApi.class);
// Shared Gson instance for (de)serializing request bodies.
Gson gson = new Gson();
private Notebook notebook;
private NotebookServer notebookServer;
private SearchService noteSearchService;
private NotebookAuthorization notebookAuthorization;
// Required by the JAX-RS runtime for proxying.
public NotebookRestApi() {
}
// Main constructor wired by the server; authorization is derived from the notebook.
public NotebookRestApi(Notebook notebook, NotebookServer notebookServer, SearchService search) {
this.notebook = notebook;
this.notebookServer = notebookServer;
this.noteSearchService = search;
this.notebookAuthorization = notebook.getNotebookAuthorization();
}
/**
 * Returns the owners/readers/writers permission sets for the given note.
 * Requires an authenticated user with at least read access.
 */
@GET
@Path("{noteId}/permissions")
@ZeppelinApi
public Response getNotePermissions(@PathParam("noteId") String noteId) throws IOException {
    checkIfUserIsAnon(getBlockNotAuthenticatedUserErrorMsg());
    checkIfUserCanRead(noteId,
        "Insufficient privileges you cannot get the list of permissions for this note");
    Map<String, Set<String>> permissions = new HashMap<>();
    permissions.put("owners", notebookAuthorization.getOwners(noteId));
    permissions.put("readers", notebookAuthorization.getReaders(noteId));
    permissions.put("writers", notebookAuthorization.getWriters(noteId));
    return new JsonResponse<>(Status.OK, "", permissions).build();
}
/**
 * Builds the message returned when a non-owner tries to change permissions,
 * logging the requester's and the allowed owner sets.
 */
private String ownerPermissionError(Set<String> current, Set<String> allowed) throws IOException {
    LOG.info("Cannot change permissions. Connection owners {}. Allowed owners {}",
        current.toString(), allowed.toString());
    StringBuilder msg = new StringBuilder();
    msg.append("Insufficient privileges to change permissions.\n\n");
    msg.append("Allowed owners: ").append(allowed.toString()).append("\n\n");
    msg.append("User belongs to: ").append(current.toString());
    return msg.toString();
}
/** Error message shown when an unauthenticated (anonymous) user tries to change permissions. */
private String getBlockNotAuthenticatedUserErrorMsg() {
    // Fix: removed the spurious "throws IOException" — this method performs no I/O,
    // and it is private so no external caller depends on the checked exception.
    return "Only authenticated user can set the permission.";
}
/**
 * Set of utility methods to check whether the current user may perform an action on a note.
 * Since we only have security on the notebook level, for now we keep this logic in this class.
 * In the future we might want to generalize this for the rest of the API endpoints.
 */
/**
 * Rejects the request when the current principal is the anonymous user.
 * Zeppelin reports anonymous access as an authenticated principal named "anonymous".
 */
private void checkIfUserIsAnon(String errorMsg) {
    if (SecurityUtils.isAuthenticated() && SecurityUtils.getPrincipal().equals("anonymous")) {
        LOG.info("Anonymous user cannot set any permissions for this note.");
        throw new ForbiddenException(errorMsg);
    }
}
/**
 * Rejects the request unless the current user (or one of their roles) owns the note.
 */
private void checkIfUserIsOwner(String noteId, String errorMsg) {
    Set<String> entities = new HashSet<>();
    entities.add(SecurityUtils.getPrincipal());
    entities.addAll(SecurityUtils.getRoles());
    if (!notebookAuthorization.isOwner(entities, noteId)) {
        throw new ForbiddenException(errorMsg);
    }
}
/**
 * Rejects the request unless the current user (or one of their roles) has
 * write (owner or writer) access to the note.
 */
private void checkIfUserCanWrite(String noteId, String errorMsg) {
    Set<String> entities = new HashSet<>();
    entities.add(SecurityUtils.getPrincipal());
    entities.addAll(SecurityUtils.getRoles());
    if (!notebookAuthorization.hasWriteAuthorization(entities, noteId)) {
        throw new ForbiddenException(errorMsg);
    }
}
/**
 * Rejects the request unless the current user (or one of their roles) has at
 * least read access to the note.
 */
private void checkIfUserCanRead(String noteId, String errorMsg) {
    Set<String> entities = new HashSet<>();
    entities.add(SecurityUtils.getPrincipal());
    entities.addAll(SecurityUtils.getRoles());
    if (!notebookAuthorization.hasReadAuthorization(entities, noteId)) {
        throw new ForbiddenException(errorMsg);
    }
}
// Maps a missing note to an HTTP 404 via NotFoundException.
private void checkIfNoteIsNotNull(Note note) {
if (note == null) {
throw new NotFoundException("note not found");
}
}
// Maps a missing paragraph to an HTTP 404 via NotFoundException.
private void checkIfParagraphIsNotNull(Paragraph paragraph) {
if (paragraph == null) {
throw new NotFoundException("paragraph not found");
}
}
/**
 * Sets the owners/readers/writers permission lists of a note from a JSON body of the
 * form {"owners":[...],"readers":[...],"writers":[...]}. Only an authenticated owner
 * may change permissions; the updated note is persisted and broadcast.
 *
 * @param noteId ID of the note
 * @param req    JSON request body
 */
@PUT
@Path("{noteId}/permissions")
@ZeppelinApi
public Response putNotePermissions(@PathParam("noteId") String noteId, String req)
    throws IOException {
    String principal = SecurityUtils.getPrincipal();
    HashSet<String> roles = SecurityUtils.getRoles();
    HashSet<String> userAndRoles = new HashSet<>();
    userAndRoles.add(principal);
    userAndRoles.addAll(roles);
    checkIfUserIsAnon(getBlockNotAuthenticatedUserErrorMsg());
    checkIfUserIsOwner(noteId,
        ownerPermissionError(userAndRoles, notebookAuthorization.getOwners(noteId)));
    HashMap<String, HashSet<String>> permMap =
        gson.fromJson(req, new TypeToken<HashMap<String, HashSet<String>>>() {}.getType());
    // Fix: an empty/absent body deserializes to null and previously NPE'd below.
    if (permMap == null) {
        throw new BadRequestException("empty permissions request body");
    }
    Note note = notebook.getNote(noteId);
    // Fix: fail fast with 404 (consistent with the other endpoints) instead of
    // NPE'ing at note.persist() below.
    checkIfNoteIsNotNull(note);
    LOG.info("Set permissions {} {} {} {} {}", noteId, principal, permMap.get("owners"),
        permMap.get("readers"), permMap.get("writers"));
    HashSet<String> readers = permMap.get("readers");
    HashSet<String> owners = permMap.get("owners");
    HashSet<String> writers = permMap.get("writers");
    // If readers/writers are granted but no owners, default ownership to the requester.
    // Fix: the "owners" entry may be missing entirely — guard against null
    // (owners.isEmpty() previously threw NPE in that case).
    if (readers != null && !readers.isEmpty()) {
        if (owners == null || owners.isEmpty()) {
            owners = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
    }
    if (writers != null && !writers.isEmpty()) {
        if (owners == null || owners.isEmpty()) {
            owners = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
    }
    notebookAuthorization.setReaders(noteId, readers);
    notebookAuthorization.setWriters(noteId, writers);
    notebookAuthorization.setOwners(noteId, owners);
    LOG.debug("After set permissions {} {} {}", notebookAuthorization.getOwners(noteId),
        notebookAuthorization.getReaders(noteId), notebookAuthorization.getWriters(noteId));
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    note.persist(subject);
    notebookServer.broadcastNote(note);
    notebookServer.broadcastNoteList(subject, userAndRoles);
    return new JsonResponse<>(Status.OK).build();
}
/**
 * Binds the given interpreter settings (JSON list of setting IDs) to a note.
 * Requires write access.
 *
 * @throws IOException
 */
@PUT
@Path("interpreter/bind/{noteId}")
@ZeppelinApi
public Response bind(@PathParam("noteId") String noteId, String req) throws IOException {
    checkIfUserCanWrite(noteId,
        "Insufficient privileges you cannot bind any interpreters to this note");
    List<String> settingIds = gson.fromJson(req, new TypeToken<List<String>>() {
    }.getType());
    notebook.bindInterpretersToNote(SecurityUtils.getPrincipal(), noteId, settingIds);
    return new JsonResponse<>(Status.OK).build();
}
/**
 * Lists the interpreter settings currently bound to a note and broadcasts them
 * to connected clients. Requires read access.
 */
@GET
@Path("interpreter/bind/{noteId}")
@ZeppelinApi
public Response bind(@PathParam("noteId") String noteId) {
    checkIfUserCanRead(noteId, "Insufficient privileges you cannot get any interpreters settings");
    List<InterpreterSettingsList> bindings =
        InterpreterBindingUtils.getInterpreterBindings(notebook, noteId);
    notebookServer.broadcastInterpreterBindings(noteId, bindings);
    return new JsonResponse<>(Status.OK, "", bindings).build();
}
/** Lists the notes visible to the current user and their roles. */
@GET
@Path("/")
@ZeppelinApi
public Response getNoteList() throws IOException {
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    // NOTE(review): this mutates the set returned by getRoles() — confirm it is a fresh copy.
    HashSet<String> entities = SecurityUtils.getRoles();
    entities.add(subject.getUser());
    List<Map<String, String>> notesInfo =
        notebookServer.generateNotesInfo(false, subject, entities);
    return new JsonResponse<>(Status.OK, "", notesInfo).build();
}
/** Returns the full note (404 if missing; requires read access). */
@GET
@Path("{noteId}")
@ZeppelinApi
public Response getNote(@PathParam("noteId") String noteId) throws IOException {
    Note target = notebook.getNote(noteId);
    checkIfNoteIsNotNull(target);
    checkIfUserCanRead(noteId, "Insufficient privileges you cannot get this note");
    return new JsonResponse<>(Status.OK, "", target).build();
}
/**
 * export note REST API
 *
 * @param noteId ID of Note
 * @return note serialized as JSON, with status.OK
 * @throws IOException
 */
@GET
@Path("export/{noteId}")
@ZeppelinApi
public Response exportNote(@PathParam("noteId") String noteId) throws IOException {
    checkIfUserCanRead(noteId, "Insufficient privileges you cannot export this note");
    return new JsonResponse<>(Status.OK, "", notebook.exportNote(noteId)).build();
}
/**
 * import new note REST API
 *
 * @param req - note JSON
 * @return JSON with new note ID
 * @throws IOException
 */
@POST
@Path("import")
@ZeppelinApi
public Response importNote(String req) throws IOException {
    AuthenticationInfo authInfo = new AuthenticationInfo(SecurityUtils.getPrincipal());
    Note imported = notebook.importNote(req, null, authInfo);
    return new JsonResponse<>(Status.OK, "", imported.getId()).build();
}
/**
 * Create new note REST API
 *
 * @param message - JSON with new note name and optional initial paragraphs
 * @return JSON with new note ID
 * @throws IOException
 */
@POST
@Path("/")
@ZeppelinApi
public Response createNote(String message) throws IOException {
    String user = SecurityUtils.getPrincipal();
    LOG.info("Create new note by JSON {}", message);
    NewNoteRequest request = NewNoteRequest.fromJson(message);
    AuthenticationInfo subject = new AuthenticationInfo(user);
    Note note = notebook.createNote(subject);
    if (request != null) {
        List<NewParagraphRequest> initialParagraphs = request.getParagraphs();
        if (initialParagraphs != null) {
            for (NewParagraphRequest paragraphRequest : initialParagraphs) {
                Paragraph p = note.addNewParagraph(subject);
                initParagraph(p, paragraphRequest, user);
            }
        }
    }
    note.addNewParagraph(subject); // add one paragraph to the last
    // Fix: request may be null (empty body) and getName() may return null — the
    // unconditional request.getName() previously NPE'd; fall back to a generated name.
    String noteName = (request != null) ? request.getName() : null;
    if (noteName == null || noteName.isEmpty()) {
        noteName = "Note " + note.getId();
    }
    note.setName(noteName);
    note.persist(subject);
    notebookServer.broadcastNote(note);
    notebookServer.broadcastNoteList(subject, SecurityUtils.getRoles());
    return new JsonResponse<>(Status.OK, "", note.getId()).build();
}
/**
 * Delete note REST API
 *
 * @param noteId ID of Note
 * @return JSON with status.OK
 * @throws IOException
 */
@DELETE
@Path("{noteId}")
@ZeppelinApi
public Response deleteNote(@PathParam("noteId") String noteId) throws IOException {
    LOG.info("Delete note {} ", noteId);
    checkIfUserIsOwner(noteId, "Insufficient privileges you cannot delete this note");
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    // Silently ignore empty IDs and already-removed notes.
    if (!noteId.isEmpty() && notebook.getNote(noteId) != null) {
        notebook.removeNote(noteId, subject);
    }
    notebookServer.broadcastNoteList(subject, SecurityUtils.getRoles());
    return new JsonResponse<>(Status.OK, "").build();
}
/**
 * Clone note REST API
 *
 * @param noteId ID of Note to clone
 * @param message optional JSON with the new note's name
 * @return JSON with status.OK and the clone's ID
 * @throws IOException, CloneNotSupportedException, IllegalArgumentException
 */
@POST
@Path("{noteId}")
@ZeppelinApi
public Response cloneNote(@PathParam("noteId") String noteId, String message)
    throws IOException, CloneNotSupportedException, IllegalArgumentException {
    LOG.info("clone note by JSON {}", message);
    checkIfUserCanWrite(noteId, "Insufficient privileges you cannot clone this note");
    NewNoteRequest request = NewNoteRequest.fromJson(message);
    String cloneName = (request != null) ? request.getName() : null;
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    Note clone = notebook.cloneNote(noteId, cloneName, subject);
    notebookServer.broadcastNote(clone);
    notebookServer.broadcastNoteList(subject, SecurityUtils.getRoles());
    return new JsonResponse<>(Status.OK, "", clone.getId()).build();
}
/**
 * Insert paragraph REST API
 *
 * @param message - JSON containing the paragraph's information (optional index)
 * @return JSON with status.OK and the new paragraph's ID
 * @throws IOException
 */
@POST
@Path("{noteId}/paragraph")
@ZeppelinApi
public Response insertParagraph(@PathParam("noteId") String noteId, String message)
    throws IOException {
    String user = SecurityUtils.getPrincipal();
    LOG.info("insert paragraph {} {}", noteId, message);
    Note note = notebook.getNote(noteId);
    checkIfNoteIsNotNull(note);
    checkIfUserCanWrite(noteId, "Insufficient privileges you cannot add paragraph to this note");
    NewParagraphRequest request = NewParagraphRequest.fromJson(message);
    AuthenticationInfo subject = new AuthenticationInfo(user);
    // Append when no index was supplied; otherwise insert at the requested position.
    Double requestedIndex = request.getIndex();
    Paragraph p = (requestedIndex == null)
        ? note.addNewParagraph(subject)
        : note.insertNewParagraph(requestedIndex.intValue(), subject);
    initParagraph(p, request, user);
    note.persist(subject);
    notebookServer.broadcastNote(note);
    return new JsonResponse<>(Status.OK, "", p.getId()).build();
}
/**
 * Get paragraph REST API
 *
 * @param noteId ID of Note
 * @return JSON with information of the paragraph
 * @throws IOException
 */
@GET
@Path("{noteId}/paragraph/{paragraphId}")
@ZeppelinApi
public Response getParagraph(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId) throws IOException {
    LOG.info("get paragraph {} {}", noteId, paragraphId);
    Note note = notebook.getNote(noteId);
    checkIfNoteIsNotNull(note);
    checkIfUserCanRead(noteId, "Insufficient privileges you cannot get this paragraph");
    Paragraph paragraph = note.getParagraph(paragraphId);
    checkIfParagraphIsNotNull(paragraph);
    return new JsonResponse<>(Status.OK, "", paragraph).build();
}
/** Updates a paragraph's config map from a JSON body and persists the note. */
@PUT
@Path("{noteId}/paragraph/{paragraphId}/config")
@ZeppelinApi
public Response updateParagraphConfig(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId, String message) throws IOException {
    String principal = SecurityUtils.getPrincipal();
    LOG.info("{} will update paragraph config {} {}", principal, noteId, paragraphId);
    Note note = notebook.getNote(noteId);
    checkIfNoteIsNotNull(note);
    checkIfUserCanWrite(noteId, "Insufficient privileges you cannot update this paragraph config");
    Paragraph paragraph = note.getParagraph(paragraphId);
    checkIfParagraphIsNotNull(paragraph);
    Map<String, Object> newConfig = gson.fromJson(message, HashMap.class);
    configureParagraph(paragraph, newConfig, principal);
    AuthenticationInfo subject = new AuthenticationInfo(principal);
    note.persist(subject);
    return new JsonResponse<>(Status.OK, "", paragraph).build();
}
/**
 * Move paragraph REST API
 *
 * @param newIndex - new index to move the paragraph to (decimal string)
 * @return JSON with status.OK, or BAD_REQUEST on an invalid index
 * @throws IOException
 */
@POST
@Path("{noteId}/paragraph/{paragraphId}/move/{newIndex}")
@ZeppelinApi
public Response moveParagraph(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId, @PathParam("newIndex") String newIndex)
    throws IOException {
    LOG.info("move paragraph {} {} {}", noteId, paragraphId, newIndex);
    Note note = notebook.getNote(noteId);
    checkIfNoteIsNotNull(note);
    checkIfUserCanWrite(noteId, "Insufficient privileges you cannot move paragraph");
    Paragraph p = note.getParagraph(paragraphId);
    checkIfParagraphIsNotNull(p);
    try {
        note.moveParagraph(paragraphId, Integer.parseInt(newIndex), true);
        AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
        note.persist(subject);
        notebookServer.broadcastNote(note);
        return new JsonResponse(Status.OK, "").build();
    } catch (IndexOutOfBoundsException e) {
        LOG.error("Exception in NotebookRestApi while moveParagraph ", e);
        return new JsonResponse(Status.BAD_REQUEST, "paragraph's new index is out of bound").build();
    } catch (NumberFormatException e) {
        // Fix: a non-numeric index previously escaped as an uncaught exception (HTTP 500);
        // report it as a client error instead.
        LOG.error("Exception in NotebookRestApi while moveParagraph ", e);
        return new JsonResponse(Status.BAD_REQUEST, "paragraph's new index is not a number").build();
    }
}
/**
 * Delete paragraph REST API
 *
 * @param noteId ID of Note
 * @return JSON with status.OK
 * @throws IOException
 */
@DELETE
@Path("{noteId}/paragraph/{paragraphId}")
@ZeppelinApi
public Response deleteParagraph(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId) throws IOException {
    LOG.info("delete paragraph {} {}", noteId, paragraphId);
    Note note = notebook.getNote(noteId);
    checkIfNoteIsNotNull(note);
    // Fix: deleting a paragraph mutates the note, so require write permission —
    // previously only read access was checked, letting read-only users delete content
    // (all other mutating endpoints here use checkIfUserCanWrite).
    checkIfUserCanWrite(noteId,
        "Insufficient privileges you cannot remove paragraph from this note");
    Paragraph p = note.getParagraph(paragraphId);
    checkIfParagraphIsNotNull(p);
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    note.removeParagraph(SecurityUtils.getPrincipal(), paragraphId);
    note.persist(subject);
    notebookServer.broadcastNote(note);
    return new JsonResponse(Status.OK, "").build();
}
/**
 * Clear result of all paragraphs REST API.
 *
 * @param noteId ID of Note
 * @return JSON with status.OK
 * @throws IOException
 */
@PUT
@Path("{noteId}/clear")
@ZeppelinApi
public Response clearAllParagraphOutput(@PathParam("noteId") String noteId)
    throws IOException {
  LOG.info("clear all paragraph output of note {}", noteId);
  // Consistency fix: validate that the note exists before the permission
  // check, matching the order used by every other endpoint in this class.
  Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot clear this note");
  note.clearAllParagraphOutput();
  return new JsonResponse(Status.OK, "").build();
}
/**
 * Run note jobs REST API: executes every paragraph of the note as a job.
 *
 * @param noteId ID of Note
 * @return JSON with status.OK, or status.PRECONDITION_FAILED when execution
 *         could not be started (e.g. no interpreter bound)
 * @throws IOException, IllegalArgumentException
 */
@POST
@Path("job/{noteId}")
@ZeppelinApi
public Response runNoteJobs(@PathParam("noteId") String noteId)
    throws IOException, IllegalArgumentException {
  LOG.info("run note jobs {} ", noteId);
  final Note note = notebook.getNote(noteId);
  final AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot run job for this note");
  try {
    note.runAll(subject);
    return new JsonResponse<>(Status.OK).build();
  } catch (Exception ex) {
    LOG.error("Exception from run", ex);
    return new JsonResponse<>(Status.PRECONDITION_FAILED,
        ex.getMessage() + "- Not selected or Invalid Interpreter bind").build();
  }
}
/**
 * Stop(delete) note jobs REST API: aborts every paragraph of the note that
 * has not yet terminated.
 *
 * @param noteId ID of Note
 * @return JSON with status.OK
 * @throws IOException, IllegalArgumentException
 */
@DELETE
@Path("job/{noteId}")
@ZeppelinApi
public Response stopNoteJobs(@PathParam("noteId") String noteId)
    throws IOException, IllegalArgumentException {
  LOG.info("stop note jobs {} ", noteId);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot stop this job for this note");
  for (final Paragraph paragraph : note.getParagraphs()) {
    if (!paragraph.isTerminated()) {
      paragraph.abort();
    }
  }
  return new JsonResponse<>(Status.OK).build();
}
/**
 * Get note job status REST API: reports the job status of every paragraph
 * in the note.
 *
 * @param noteId ID of Note
 * @return JSON with status.OK and the per-paragraph job information
 * @throws IOException, IllegalArgumentException
 */
@GET
@Path("job/{noteId}")
@ZeppelinApi
public Response getNoteJobStatus(@PathParam("noteId") String noteId)
    throws IOException, IllegalArgumentException {
  LOG.info("get note job status.");
  final Note requestedNote = notebook.getNote(noteId);
  checkIfNoteIsNotNull(requestedNote);
  checkIfUserCanRead(noteId, "Insufficient privileges you cannot get job status");
  return new JsonResponse<>(Status.OK, null, requestedNote.generateParagraphsInfo()).build();
}
/**
 * Get note paragraph job status REST API: reports the job status of one
 * paragraph of the note.
 *
 * @param noteId ID of Note
 * @param paragraphId ID of Paragraph
 * @return JSON with status.OK and the paragraph's job information
 * @throws IOException, IllegalArgumentException
 */
@GET
@Path("job/{noteId}/{paragraphId}")
@ZeppelinApi
public Response getNoteParagraphJobStatus(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId)
    throws IOException, IllegalArgumentException {
  LOG.info("get note paragraph job status.");
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanRead(noteId, "Insufficient privileges you cannot get job status");
  checkIfParagraphIsNotNull(note.getParagraph(paragraphId));
  return new JsonResponse<>(Status.OK, null,
      note.generateSingleParagraphInfo(paragraphId)).build();
}
/**
 * Run asynchronously paragraph job REST API: persists any supplied dynamic
 * form parameters, schedules the paragraph and returns immediately.
 *
 * @param noteId ID of Note
 * @param paragraphId ID of Paragraph
 * @param message JSON with params if user wants to update dynamic form's value;
 *                null, empty string, empty json if user doesn't want to update
 * @return JSON with status.OK
 * @throws IOException, IllegalArgumentException
 */
@POST
@Path("job/{noteId}/{paragraphId}")
@ZeppelinApi
public Response runParagraph(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId, String message)
    throws IOException, IllegalArgumentException {
  LOG.info("run paragraph job asynchronously {} {} {}", noteId, paragraphId, message);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot run job for this note");
  final Paragraph p = note.getParagraph(paragraphId);
  checkIfParagraphIsNotNull(p);
  // Apply dynamic form parameters, if any were posted.
  handleParagraphParams(message, note, p);
  final AuthenticationInfo authInfo = new AuthenticationInfo(SecurityUtils.getPrincipal());
  p.setAuthenticationInfo(authInfo);
  note.persist(authInfo);
  note.run(p.getId());
  return new JsonResponse<>(Status.OK).build();
}
/**
 * Run synchronously a paragraph REST API: executes the paragraph, blocks
 * until it finishes, and returns its interpreter result.
 *
 * @param noteId - noteId
 * @param paragraphId - paragraphId
 * @param message - JSON with params if user wants to update dynamic form's value
 *                null, empty string, empty json if user doesn't want to update
 *
 * @return JSON with status.OK and the result, or status.INTERNAL_SERVER_ERROR
 *         when the interpreter reports a failure
 * @throws IOException, IllegalArgumentException
 */
@POST
@Path("run/{noteId}/{paragraphId}")
@ZeppelinApi
public Response runParagraphSynchronously(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId,
    String message) throws
    IOException, IllegalArgumentException {
  LOG.info("run paragraph synchronously {} {} {}", noteId, paragraphId, message);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot run paragraph");
  final Paragraph p = note.getParagraph(paragraphId);
  checkIfParagraphIsNotNull(p);
  // Apply dynamic form parameters, if any were posted.
  handleParagraphParams(message, note, p);
  // Lazily attach a job listener so the paragraph run is observed.
  if (p.getListener() == null) {
    note.initializeJobListenerForParagraph(p);
  }
  p.run();
  final InterpreterResult result = p.getResult();
  final Status status = (result.code() == InterpreterResult.Code.SUCCESS)
      ? Status.OK
      : Status.INTERNAL_SERVER_ERROR;
  return new JsonResponse<>(status, result).build();
}
/**
 * Stop(delete) paragraph job REST API: aborts a single paragraph's running job.
 *
 * @param noteId ID of Note
 * @param paragraphId ID of Paragraph
 * @return JSON with status.OK
 * @throws IOException, IllegalArgumentException
 */
@DELETE
@Path("job/{noteId}/{paragraphId}")
@ZeppelinApi
public Response stopParagraph(@PathParam("noteId") String noteId,
    @PathParam("paragraphId") String paragraphId) throws IOException, IllegalArgumentException {
  LOG.info("stop paragraph job {} ", noteId);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot stop paragraph");
  final Paragraph paragraph = note.getParagraph(paragraphId);
  checkIfParagraphIsNotNull(paragraph);
  paragraph.abort();
  return new JsonResponse<>(Status.OK).build();
}
/**
 * Register cron job REST API: attaches a cron schedule to the note's config
 * and refreshes the scheduler.
 *
 * @param noteId ID of Note
 * @param message - JSON with cron expressions.
 * @return JSON with status.OK, or status.BAD_REQUEST for an invalid expression
 * @throws IOException, IllegalArgumentException
 */
@POST
@Path("cron/{noteId}")
@ZeppelinApi
public Response registerCronJob(@PathParam("noteId") String noteId, String message)
    throws IOException, IllegalArgumentException {
  LOG.info("Register cron job note={} request cron msg={}", noteId, message);
  final CronRequest cronRequest = CronRequest.fromJson(message);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanWrite(noteId, "Insufficient privileges you cannot set a cron job for this note");
  final String cronExpression = cronRequest.getCronString();
  if (!CronExpression.isValidExpression(cronExpression)) {
    return new JsonResponse<>(Status.BAD_REQUEST, "wrong cron expressions.").build();
  }
  final Map<String, Object> noteConfig = note.getConfig();
  noteConfig.put("cron", cronExpression);
  note.setConfig(noteConfig);
  notebook.refreshCron(note.getId());
  return new JsonResponse<>(Status.OK).build();
}
/**
 * Remove cron job REST API: clears the note's cron schedule (owner only)
 * and refreshes the scheduler.
 *
 * @param noteId ID of Note
 * @return JSON with status.OK
 * @throws IOException, IllegalArgumentException
 */
@DELETE
@Path("cron/{noteId}")
@ZeppelinApi
public Response removeCronJob(@PathParam("noteId") String noteId)
    throws IOException, IllegalArgumentException {
  LOG.info("Remove cron job note {}", noteId);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserIsOwner(noteId,
      "Insufficient privileges you cannot remove this cron job from this note");
  // A null "cron" entry disables the schedule for this note.
  final Map<String, Object> noteConfig = note.getConfig();
  noteConfig.put("cron", null);
  note.setConfig(noteConfig);
  notebook.refreshCron(note.getId());
  return new JsonResponse<>(Status.OK).build();
}
/**
 * Get cron job REST API: returns the note's current cron expression, if any.
 *
 * @param noteId ID of Note
 * @return JSON with status.OK and the cron expression (or null)
 * @throws IOException, IllegalArgumentException
 */
@GET
@Path("cron/{noteId}")
@ZeppelinApi
public Response getCronJob(@PathParam("noteId") String noteId)
    throws IOException, IllegalArgumentException {
  LOG.info("Get cron job note {}", noteId);
  final Note note = notebook.getNote(noteId);
  checkIfNoteIsNotNull(note);
  checkIfUserCanRead(noteId, "Insufficient privileges you cannot get cron information");
  final Object cronExpression = note.getConfig().get("cron");
  return new JsonResponse<>(Status.OK, cronExpression).build();
}
/**
 * Get note jobs for job manager.
 *
 * @return JSON with status.OK, the server timestamp and all note jobs
 * @throws IOException, IllegalArgumentException
 */
@GET
@Path("jobmanager/")
@ZeppelinApi
public Response getJobListforNote() throws IOException, IllegalArgumentException {
  LOG.info("Get note jobs for job manager");
  final AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
  final List<Map<String, Object>> jobs = notebook.getJobListByUnixTime(false, 0, subject);
  final Map<String, Object> body = new HashMap<>();
  body.put("lastResponseUnixTime", System.currentTimeMillis());
  body.put("jobs", jobs);
  return new JsonResponse<>(Status.OK, body).build();
}
/**
 * Get updated note jobs for job manager.
 *
 * Return the `Note` change information within the post unix timestamp.
 *
 * @param lastUpdateUnixTime only jobs changed after this time are returned
 * @return JSON with status.OK, the server timestamp and the changed jobs
 * @throws IOException, IllegalArgumentException
 */
@GET
@Path("jobmanager/{lastUpdateUnixtime}/")
@ZeppelinApi
public Response getUpdatedJobListforNote(
    @PathParam("lastUpdateUnixtime") long lastUpdateUnixTime)
    throws IOException, IllegalArgumentException {
  LOG.info("Get updated note jobs lastUpdateTime {}", lastUpdateUnixTime);
  final AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
  final List<Map<String, Object>> changedJobs =
      notebook.getJobListByUnixTime(false, lastUpdateUnixTime, subject);
  final Map<String, Object> body = new HashMap<>();
  body.put("lastResponseUnixTime", System.currentTimeMillis());
  body.put("jobs", changedJobs);
  return new JsonResponse<>(Status.OK, body).build();
}
/**
 * Search for a Notes with permissions: returns only search hits on notes the
 * current user (or one of their roles) may own, read or write.
 *
 * @param queryTerm full-text query string
 * @return JSON with status.OK and the permitted search hits
 */
@GET
@Path("search")
@ZeppelinApi
public Response search(@QueryParam("q") String queryTerm) {
  LOG.info("Searching notes for: {}", queryTerm);
  String principal = SecurityUtils.getPrincipal();
  HashSet<String> roles = SecurityUtils.getRoles();
  HashSet<String> userAndRoles = new HashSet<>();
  userAndRoles.add(principal);
  userAndRoles.addAll(roles);
  List<Map<String, String>> notesFound = noteSearchService.query(queryTerm);
  // Iterate backwards so removals cannot skip elements; this replaces the
  // fragile forward loop that decremented the index after each removal.
  for (int i = notesFound.size() - 1; i >= 0; i--) {
    // Hit ids have the form "<noteId>/..."; the note id is the first segment.
    String noteId = notesFound.get(i).get("id").split("/", 2)[0];
    if (!notebookAuthorization.isOwner(noteId, userAndRoles)
        && !notebookAuthorization.isReader(noteId, userAndRoles)
        && !notebookAuthorization.isWriter(noteId, userAndRoles)) {
      notesFound.remove(i);
    }
  }
  LOG.info("{} notes found", notesFound.size());
  return new JsonResponse<>(Status.OK, notesFound).build();
}
/**
 * Applies dynamic-form parameters posted with a run request to the paragraph,
 * persisting the note when anything changed.
 *
 * @param message request body; may be null/empty when no params were posted
 * @param note note owning the paragraph (persisted on update)
 * @param paragraph paragraph whose form params are updated
 * @throws IOException
 */
private void handleParagraphParams(String message, Note note, Paragraph paragraph)
    throws IOException {
  // Nothing to do when the caller supplied no parameter payload.
  if (StringUtils.isEmpty(message)) {
    return;
  }
  RunParagraphWithParametersRequest request =
      RunParagraphWithParametersRequest.fromJson(message);
  Map<String, Object> paramsForUpdating = request.getParams();
  if (paramsForUpdating == null) {
    return;
  }
  paragraph.settings.getParams().putAll(paramsForUpdating);
  note.persist(new AuthenticationInfo(SecurityUtils.getPrincipal()));
}
/**
 * Initializes a freshly created paragraph from a create-paragraph request:
 * title, text and (optionally) config.
 *
 * @param p paragraph to initialize; must not be null
 * @param request client-supplied paragraph data
 * @param user user performing the operation (for logging)
 * @throws IOException
 */
private void initParagraph(Paragraph p, NewParagraphRequest request, String user)
    throws IOException {
  LOG.info("Init Paragraph for user {}", user);
  checkIfParagraphIsNotNull(p);
  p.setTitle(request.getTitle());
  p.setText(request.getText());
  final Map<String, Object> requestConfig = request.getConfig();
  // Config is optional on creation; only apply it when actually present.
  if (requestConfig == null || requestConfig.isEmpty()) {
    return;
  }
  configureParagraph(p, requestConfig, user);
}
/**
 * Merges a non-empty config map into the paragraph's existing config.
 *
 * @param p paragraph to update
 * @param newConfig entries to merge into the paragraph config
 * @param user user performing the update (for logging)
 * @throws BadRequestException when newConfig is null or empty
 * @throws IOException
 */
private void configureParagraph(Paragraph p, Map<String, Object> newConfig, String user)
    throws IOException {
  LOG.info("Configure Paragraph for user {}", user);
  if (newConfig == null || newConfig.isEmpty()) {
    LOG.warn("{} is trying to update paragraph {} of note {} with empty config",
        user, p.getId(), p.getNote().getId());
    throw new BadRequestException("paragraph config cannot be empty");
  }
  // Map.putAll performs the same entry-by-entry copy as a manual loop.
  final Map<String, Object> mergedConfig = p.getConfig();
  mergedConfig.putAll(newConfig);
  p.setConfig(mergedConfig);
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.api.response;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cloudstack.acl.ControlledEntity;
import com.cloud.api.ApiDBUtils;
import com.cloud.network.security.SecurityGroup;
import com.cloud.network.security.SecurityGroupRules;
import com.cloud.serializer.Param;
import com.cloud.user.Account;
import org.apache.cloudstack.api.InternalIdentity;
/**
 * Plain result object describing a security group and its rules, used when
 * building API responses. Instances are either populated directly or derived
 * from raw {@link SecurityGroupRules} join rows via
 * {@link #transposeNetworkGroups(List)}.
 */
public class SecurityGroupResultObject implements ControlledEntity, InternalIdentity {
// Database id of the group.
@Param(name = "id")
private Long id;
@Param(name = "name")
private String name;
@Param(name = "description")
private String description;
@Param(name = "domainid")
private long domainId;
@Param(name = "accountid")
private long accountId;
@Param(name = "accountname")
private String accountName = null;
// Rules attached to this group; null when the group has no rules.
@Param(name = "securitygrouprules")
private List<SecurityGroupRuleResultObject> securityGroupRules = null;
public SecurityGroupResultObject() {
}
public SecurityGroupResultObject(Long id, String name, String description, long domainId, long accountId, String accountName, List<SecurityGroupRuleResultObject> ingressRules) {
this.id = id;
this.name = name;
this.description = description;
this.domainId = domainId;
this.accountId = accountId;
this.accountName = accountName;
this.securityGroupRules = ingressRules;
}
// NOTE(review): unboxes the Long field; throws NPE when id was never set
// (e.g. after the no-arg constructor) - confirm callers always populate it.
public long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public long getDomainId() {
return domainId;
}
public void setDomainId(Long domainId) {
this.domainId = domainId;
}
public long getAccountId() {
return accountId;
}
public void setAccountId(Long accountId) {
this.accountId = accountId;
}
public String getAccountName() {
return accountName;
}
public void setAccountName(String accountName) {
this.accountName = accountName;
}
public List<SecurityGroupRuleResultObject> getSecurityGroupRules() {
return securityGroupRules;
}
public void setSecurityGroupRules(List<SecurityGroupRuleResultObject> securityGroupRules) {
this.securityGroupRules = securityGroupRules;
}
/**
 * Converts a flat list of group/rule join rows into one result object per
 * security group, with that group's rules collected into a list.
 *
 * <p>Account and allowed-security-group database lookups are memoized in
 * local maps to avoid repeated {@code ApiDBUtils} calls for the same id.
 *
 * <p>NOTE(review): rows belonging to the same group appear to be assumed
 * contiguous in the input; if a previously seen group id reappears later,
 * its rules would be attached to whichever group is current - confirm the
 * caller orders rows by group.
 *
 * @param groups raw join rows; may be null or empty
 * @return list of grouped result objects (never null)
 */
public static List<SecurityGroupResultObject> transposeNetworkGroups(List<? extends SecurityGroupRules> groups) {
List<SecurityGroupResultObject> resultObjects = new ArrayList<SecurityGroupResultObject>();
// Memoization caches for DB lookups keyed by id.
Map<Long, SecurityGroup> allowedSecurityGroups = new HashMap<Long, SecurityGroup>();
Map<Long, Account> accounts = new HashMap<Long, Account>();
if ((groups != null) && !groups.isEmpty()) {
List<SecurityGroupRuleResultObject> securityGroupRuleDataList = new ArrayList<SecurityGroupRuleResultObject>();
SecurityGroupResultObject currentGroup = null;
List<Long> processedGroups = new ArrayList<Long>();
for (SecurityGroupRules netGroupRule : groups) {
Long groupId = netGroupRule.getId();
if (!processedGroups.contains(groupId)) {
processedGroups.add(groupId);
// Flush the previous group (and its accumulated rules) before
// starting a new one.
if (currentGroup != null) {
if (!securityGroupRuleDataList.isEmpty()) {
currentGroup.setSecurityGroupRules(securityGroupRuleDataList);
securityGroupRuleDataList = new ArrayList<SecurityGroupRuleResultObject>();
}
resultObjects.add(currentGroup);
}
// start a new group
SecurityGroupResultObject groupResult = new SecurityGroupResultObject();
groupResult.setId(netGroupRule.getId());
groupResult.setName(netGroupRule.getName());
groupResult.setDescription(netGroupRule.getDescription());
groupResult.setDomainId(netGroupRule.getDomainId());
Account account = accounts.get(netGroupRule.getAccountId());
if (account == null) {
account = ApiDBUtils.findAccountById(netGroupRule.getAccountId());
accounts.put(account.getId(), account);
}
groupResult.setAccountId(account.getId());
groupResult.setAccountName(account.getAccountName());
currentGroup = groupResult;
}
if (netGroupRule.getRuleId() != null) {
// there's at least one securitygroup rule for this network group, add the securitygroup rule data
SecurityGroupRuleResultObject securityGroupRuleData = new SecurityGroupRuleResultObject();
securityGroupRuleData.setEndPort(netGroupRule.getEndPort());
securityGroupRuleData.setStartPort(netGroupRule.getStartPort());
securityGroupRuleData.setId(netGroupRule.getRuleId());
securityGroupRuleData.setProtocol(netGroupRule.getProtocol());
securityGroupRuleData.setRuleType(netGroupRule.getRuleType());
// A rule references either another security group or a source CIDR.
Long allowedSecurityGroupId = netGroupRule.getAllowedNetworkId();
if (allowedSecurityGroupId != null) {
SecurityGroup allowedSecurityGroup = allowedSecurityGroups.get(allowedSecurityGroupId);
if (allowedSecurityGroup == null) {
allowedSecurityGroup = ApiDBUtils.findSecurityGroupById(allowedSecurityGroupId);
allowedSecurityGroups.put(allowedSecurityGroupId, allowedSecurityGroup);
}
securityGroupRuleData.setAllowedSecurityGroup(allowedSecurityGroup.getName());
Account allowedAccount = accounts.get(allowedSecurityGroup.getAccountId());
if (allowedAccount == null) {
allowedAccount = ApiDBUtils.findAccountById(allowedSecurityGroup.getAccountId());
accounts.put(allowedAccount.getId(), allowedAccount);
}
securityGroupRuleData.setAllowedSecGroupAcct(allowedAccount.getAccountName());
} else if (netGroupRule.getAllowedSourceIpCidr() != null) {
securityGroupRuleData.setAllowedSourceIpCidr(netGroupRule.getAllowedSourceIpCidr());
}
securityGroupRuleDataList.add(securityGroupRuleData);
}
}
// all rules have been processed, add the final data into the list
if (currentGroup != null) {
if (!securityGroupRuleDataList.isEmpty()) {
currentGroup.setSecurityGroupRules(securityGroupRuleDataList);
}
resultObjects.add(currentGroup);
}
}
return resultObjects;
}
}
| |
package application;
import javafx.application.Application;
import javafx.geometry.Pos;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.paint.Color;
/**
 * JavaFX application that plots a quadratic function y = a*x^2 + b*x + c on a
 * canvas with a labeled coordinate grid. The top bar holds the three
 * parameter fields plus Draw/Clear buttons.
 */
public class Main extends Application {

  /**
   * Builds the UI (input bar + canvas), draws the initial grid and wires the
   * button handlers.
   */
  @Override
  public void start(Stage primaryStage) {
    try {
      BorderPane root = new BorderPane();
      Scene scene = new Scene(root, 1200, 700);
      primaryStage.setTitle("Quadratic Function Drawer");

      // Top input bar ================
      HBox hbox1 = new HBox(8);
      hbox1.setId("hbox1");
      hbox1.setMinHeight(50);
      Label labelA = new Label("Parameter a:");
      labelA.setId("label");
      Label labelB = new Label("Parameter b:");
      labelB.setId("label");
      Label labelC = new Label("Parameter c:");
      labelC.setId("label");
      TextField txtA = new TextField();
      txtA.setId("txt");
      TextField txtB = new TextField();
      txtB.setId("txt");
      TextField txtC = new TextField();
      txtC.setId("txt");
      Button but1 = new Button();
      but1.setId("button");
      but1.setText("Draw!");
      but1.setMinWidth(90);
      Button but2 = new Button();
      but2.setId("button");
      but2.setText("Clear");
      but2.setMinWidth(90);
      hbox1.getChildren().add(labelA);
      hbox1.getChildren().add(txtA);
      hbox1.getChildren().add(labelB);
      hbox1.getChildren().add(txtB);
      hbox1.getChildren().add(labelC);
      hbox1.getChildren().add(txtC);
      hbox1.getChildren().add(but1);
      hbox1.getChildren().add(but2);
      hbox1.setAlignment(Pos.CENTER);
      root.setTop(hbox1);

      // Canvas with the coordinate system ================
      Canvas can = new Canvas(1196, 645);
      can.setId("can");
      GraphicsContext gc = can.getGraphicsContext2D();
      drawGrid(can, gc);

      // Draw Button Event ==========
      draw(txtA, txtB, txtC, but1, can, gc);
      // Clear Button Event =========
      clear(but2, can, gc);

      // Add Canvas to scene ========
      HBox hbox2 = new HBox(1);
      hbox2.setId("hbox2");
      hbox2.getChildren().add(can);
      root.setCenter(hbox2);
      scene.getStylesheets().add(getClass().getResource("application.css").toExternalForm());
      primaryStage.setScene(scene);
      primaryStage.setResizable(false);
      primaryStage.show();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Draws the gold axes (with arrows and "X"/"Y" labels) and the bisque
   * background grid with axis value labels. Extracted so the initial setup
   * and the Clear handler share one implementation instead of two verbatim
   * copies that could drift apart.
   */
  private void drawGrid(Canvas can, GraphicsContext gc) {
    // Axes ============
    gc.setStroke(Color.GOLD);
    gc.setLineWidth(3);
    gc.strokeLine(can.getWidth() / 2, 0, can.getWidth() / 2, can.getHeight()); // Y axis
    gc.strokeLine(0, can.getHeight() / 2, can.getWidth(), can.getHeight() / 2); // X axis
    // Y arrow and label ===========
    gc.strokeLine(can.getWidth() / 2, 0, can.getWidth() / 2 - 10, 10);
    gc.strokeLine(can.getWidth() / 2, 0, can.getWidth() / 2 + 10, 10);
    gc.fillText("Y", can.getWidth() / 2 + 20, 15);
    // X arrow and label ============
    gc.strokeLine(can.getWidth(), can.getHeight() / 2, can.getWidth() - 10, can.getHeight() / 2 - 10); // X
    gc.strokeLine(can.getWidth(), can.getHeight() / 2, can.getWidth() - 10, can.getHeight() / 2 + 10);
    gc.fillText("X", can.getWidth() - 15, can.getHeight() / 2 - 15);
    // Grid lines every 100 px, with numeric labels ============
    gc.setStroke(Color.BISQUE);
    gc.setLineWidth(1);
    for (int i = 0; i < can.getWidth(); i += 100) {
      gc.strokeLine(0 + i, 0, 0 + i, can.getHeight());
      gc.fillText(String.valueOf((i / 100) - 6), 0 + i - 5, can.getHeight() / 2 - 5); // cyfry
    }
    for (int i = 23; i < can.getHeight(); i += 100) {
      gc.strokeLine(0, 0 + i, can.getWidth(), 0 + i);
      gc.fillText(String.valueOf((-i - 23) / 100 + 3), can.getWidth() / 2 + 5, 0 + i + 3); // cyfry
    }
  }

  /** Clear button handler: wipes the canvas and redraws the empty grid. */
  public void clear(Button but2, Canvas can, GraphicsContext gc) {
    but2.setOnAction(event -> {
      // Clear Canvas
      gc.clearRect(0, 0, can.getWidth(), can.getHeight());
      // Redraw grid
      drawGrid(can, gc);
    });
  }

  /**
   * Draw button handler: reads a, b, c from the text fields and plots the
   * parabola by stroking 10000 random sample points on each side of the Y
   * axis (red for x >= 0, blue for x < 0). The computed y is negated because
   * canvas coordinates grow downward.
   */
  public void draw(TextField txtA, TextField txtB, TextField txtC, Button but1, Canvas can, GraphicsContext gc) {
    but1.setOnAction(event -> {
      String textA = txtA.getText();
      double a = Double.parseDouble(textA);
      String textB = txtB.getText();
      double b = Double.parseDouble(textB);
      String textC = txtC.getText();
      double c = Double.parseDouble(textC);
      for (int j = 0; j < 10000; j++) {
        // Function - random positive X's and computing Y's
        double x1 = Math.random() * 10;
        double y1 = -((x1 * x1) * a + b * x1 + c);
        double x = (x1 * 100) + can.getWidth() / 2;
        double y = ((y1 * 100) + can.getHeight() / 2);
        // Draw positive function points
        gc.setStroke(Color.RED);
        gc.setLineWidth(4);
        gc.strokeLine(x, y, x, y);
        // Function - random negative X's and computing Y's
        double minx1 = -(Math.random() * 10);
        double miny1 = -((minx1 * minx1) * a + b * minx1 + c);
        double minx = (minx1 * 100) + can.getWidth() / 2;
        double miny = ((miny1 * 100) + can.getHeight() / 2);
        // Draw negative function points
        gc.setStroke(Color.BLUE);
        gc.setLineWidth(4);
        gc.strokeLine(minx, miny, minx, miny);
      }
    });
  }

  public static void main(String[] args) {
    launch(args);
  }
}
| |
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.eventbus.EventBus;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Provider;
import java.applet.Applet;
import java.io.File;
import java.util.Locale;
import javax.inject.Singleton;
import joptsimple.ArgumentAcceptingOptionSpec;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.util.EnumConverter;
import lombok.extern.slf4j.Slf4j;
import net.runelite.api.Client;
import net.runelite.client.account.SessionManager;
import net.runelite.client.chat.ChatMessageManager;
import net.runelite.client.chat.CommandManager;
import net.runelite.client.config.ConfigManager;
import net.runelite.client.discord.DiscordService;
import net.runelite.client.game.ClanManager;
import net.runelite.client.game.ItemManager;
import net.runelite.client.menus.MenuManager;
import net.runelite.client.plugins.PluginManager;
import net.runelite.client.ui.ClientUI;
import net.runelite.client.ui.DrawManager;
import net.runelite.client.ui.TitleToolbar;
import net.runelite.client.ui.overlay.OverlayRenderer;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
@Singleton
@Slf4j
public class RuneLite
{
public static final File RUNELITE_DIR = new File(System.getProperty("user.home"), ".runelite");
public static final File PROFILES_DIR = new File(RUNELITE_DIR, "profiles");
public static final File SCREENSHOT_DIR = new File(RUNELITE_DIR, "screenshots");
private static final File LOGS_DIR = new File(RUNELITE_DIR, "logs");
private static final File LOGS_FILE_NAME = new File(LOGS_DIR, "application");
private static Injector injector;
private static OptionSet options;
@Inject
private PluginManager pluginManager;
@Inject
private MenuManager menuManager;
@Inject
private EventBus eventBus;
@Inject
private ConfigManager configManager;
@Inject
private ChatMessageManager chatMessageManager;
@Inject
private CommandManager commandManager;
@Inject
private OverlayRenderer overlayRenderer;
@Inject
private DrawManager drawManager;
@Inject
private SessionManager sessionManager;
@Inject
private DiscordService discordService;
@Inject
private ClientSessionManager clientSessionManager;
@Inject
private ClientUI clientUI;
@Inject
private TitleToolbar titleToolbar;
@Inject
private Provider<ItemManager> itemManager;
@Inject
private ClanManager clanManager;
Client client;
public static void main(String[] args) throws Exception
{
Locale.setDefault(Locale.ENGLISH);
OptionParser parser = new OptionParser();
parser.accepts("developer-mode", "Enable developer tools");
parser.accepts("debug", "Show extra debugging output");
ArgumentAcceptingOptionSpec<UpdateCheckMode> updateMode = parser.accepts("rs", "Select client type")
.withRequiredArg()
.ofType(UpdateCheckMode.class)
.defaultsTo(UpdateCheckMode.AUTO)
.withValuesConvertedBy(new EnumConverter<UpdateCheckMode>(UpdateCheckMode.class)
{
@Override
public UpdateCheckMode convert(String v)
{
return super.convert(v.toUpperCase());
}
});
parser.accepts("help", "Show this text").forHelp();
setOptions(parser.parse(args));
if (getOptions().has("help"))
{
parser.printHelpOn(System.out);
System.exit(0);
}
PROFILES_DIR.mkdirs();
// Setup logger
MDC.put("logFileName", LOGS_FILE_NAME.getAbsolutePath());
if (options.has("debug"))
{
final Logger logger = (Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
logger.setLevel(Level.DEBUG);
}
Thread.setDefaultUncaughtExceptionHandler((thread, throwable) ->
{
log.error("Uncaught exception:", throwable);
if (throwable instanceof AbstractMethodError)
{
log.error("Classes are out of date; Build with maven again.");
}
});
setInjector(Guice.createInjector(new RuneLiteModule()));
injector.getInstance(RuneLite.class).start(getOptions().valueOf(updateMode));
}
/**
 * Starts the client: loads the RuneLite or vanilla applet, initializes the UI and
 * Discord service, registers event listeners, loads configuration and core plugins,
 * starts the client session, and finally shows the window.
 *
 * The ordering below matters: plugins are loaded (so their configuration exists)
 * before the session/saved configuration is loaded, and the UI is shown last.
 *
 * @param updateMode how the client loader should check for updates
 * @throws Exception if any startup step (client load, config, plugin start) fails
 */
public void start(UpdateCheckMode updateMode) throws Exception
{
// Load RuneLite or Vanilla client
final Applet client = new ClientLoader().loadRs(updateMode);
// A vanilla applet does not implement the RuneLite Client API, so it is "outdated"
final boolean isOutdated = !(client instanceof Client);
if (!isOutdated)
{
this.client = (Client) client;
}
// Initialize UI
clientUI.init(client);
// Initialize Discord service
discordService.init();
// Register event listeners
eventBus.register(clientUI);
eventBus.register(overlayRenderer);
eventBus.register(drawManager);
eventBus.register(menuManager);
eventBus.register(chatMessageManager);
eventBus.register(commandManager);
eventBus.register(pluginManager);
eventBus.register(clanManager);
// Item manager only makes sense with a RuneLite client backing it
if (this.client != null)
{
eventBus.register(itemManager.get());
}
// Load user configuration
configManager.load();
// Tell the plugin manager if client is outdated or not
pluginManager.setOutdated(isOutdated);
// Load the plugins, but does not start them yet.
// This will initialize configuration
pluginManager.loadCorePlugins();
// Plugins have provided their config, so set default config
// to main settings
pluginManager.loadDefaultPluginConfiguration();
// Start client session
clientSessionManager.start();
// Load the session, including saved configuration
sessionManager.loadSession();
// Start plugins
pluginManager.startCorePlugins();
// Refresh title toolbar
titleToolbar.refresh();
// Show UI after all plugins are loaded
clientUI.show();
}
/**
 * Shuts down background services.
 *
 * The Discord service is closed in a {@code finally} block so it is released even
 * if shutting down the client session manager throws; previously an exception from
 * {@code clientSessionManager.shutdown()} would have leaked the Discord connection.
 */
public void shutdown()
{
try
{
clientSessionManager.shutdown();
}
finally
{
discordService.close();
}
}
@VisibleForTesting
// Test-only hook to inject a mock client without going through start().
public void setClient(Client client)
{
this.client = client;
}
/** @return the process-wide Guice injector set up in main() */
public static Injector getInjector()
{
return injector;
}
/** Sets the process-wide Guice injector (called once during startup). */
public static void setInjector(Injector injector)
{
RuneLite.injector = injector;
}
/** @return the parsed command-line options */
public static OptionSet getOptions()
{
return options;
}
/** Sets the parsed command-line options (called once during startup). */
public static void setOptions(OptionSet options)
{
RuneLite.options = options;
}
}
| |
//$Id: QueryCacheTest.java 10977 2006-12-12 23:28:04Z steve.ebersole@jboss.com $
package org.hibernate.test.querycache;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import junit.framework.Test;
import org.hibernate.Hibernate;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.testing.junit.functional.FunctionalTestCase;
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
import org.hibernate.stat.EntityStatistics;
import org.hibernate.stat.QueryStatistics;
import org.hibernate.transform.Transformers;
/**
* @author Gavin King
*/
// Functional tests for the Hibernate query cache: hit/miss/put statistics,
// invalidation by entity updates and bulk HQL, and interaction with the
// second-level cache and result transformers.
// NOTE(review): the repeated Thread.sleep(200) calls presumably let the cache
// timestamp granularity elapse so cached results are seen as stale/fresh as
// intended — confirm against the cache region timestamp resolution.
public class QueryCacheTest extends FunctionalTestCase {
public QueryCacheTest(String str) {
super(str);
}
// Mapping for the Item entity used by every test below.
public String[] getMappings() {
return new String[] { "querycache/Item.hbm.xml" };
}
// Enable query cache + second-level cache and statistics so the assertions
// on QueryStatistics/EntityStatistics below are meaningful.
public void configure(Configuration cfg) {
super.configure( cfg );
cfg.setProperty( Environment.USE_QUERY_CACHE, "true" );
cfg.setProperty( Environment.CACHE_REGION_PREFIX, "foo" );
cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "true" );
cfg.setProperty( Environment.GENERATE_STATISTICS, "true" );
}
public static Test suite() {
return new FunctionalTestClassTestSuite( QueryCacheTest.class );
}
// A bulk HQL delete must invalidate previously cached query results.
public void testInvalidationFromBulkHQL() {
// http://opensource.atlassian.com/projects/hibernate/browse/HHH-5426
getSessions().getCache().evictQueryRegions();
getSessions().getStatistics().clear();
Session s = openSession();
List list = new ArrayList();
s.beginTransaction();
for (int i = 0; i < 3; i++) {
Item a = new Item();
a.setName("a" + i);
a.setDescription("a" + i);
list.add(a);
s.persist(a);
}
s.getTransaction().commit();
s.close();
s = openSession();
s.beginTransaction();
String queryString = "select count(*) from Item";
// this query will hit the database and create the cache
Long result = (Long) s.createQuery(queryString).setCacheable(true).uniqueResult();
assertEquals(3, result.intValue());
s.getTransaction().commit();
s.close();
s = openSession();
s.beginTransaction();
String updateString = "delete from Item";
s.createQuery(updateString).executeUpdate();
s.getTransaction().commit();
s.close();
s = openSession();
s.beginTransaction();
// and this one SHOULD not be served by the cache
Number result2 = (Number) s.createQuery(queryString).setCacheable(true).uniqueResult();
assertEquals(0, result2.intValue());
s.getTransaction().commit();
s.close();
}
//https://jira.jboss.org/jira/browse/JBPAPP-4224
// A cached query executed twice within the SAME session must be served from
// the cache on the second execution.
public void testHitCacheInSameSession() {
getSessions().evictQueries();
getSessions().getStatistics().clear();
Session s = openSession();
List list = new ArrayList();
s.beginTransaction();
for ( int i = 0; i < 3; i++ ) {
Item a = new Item();
a.setName( "a" + i );
a.setDescription( "a" + i );
list.add( a );
s.persist( a );
}
s.getTransaction().commit();
// s.close();
// s=openSession();
s.beginTransaction();
String queryString = "from Item";
// this query will hit the database and create the cache
s.createQuery( queryString ).setCacheable( true ).list();
s.getTransaction().commit();
s.beginTransaction();
//and this one SHOULD be served by the cache
s.createQuery( queryString ).setCacheable( true ).list();
s.getTransaction().commit();
QueryStatistics qs = s.getSessionFactory().getStatistics().getQueryStatistics( queryString );
assertEquals( 1, qs.getCacheHitCount() );
assertEquals( 1, qs.getCachePutCount() );
s.close();
// Clean up the persisted items so later tests start from an empty table.
s = openSession();
s.beginTransaction();
for(Object obj:list){
s.delete( obj );
}
s.getTransaction().commit();
s.close();
}
// Updating an entity must invalidate cached query results that reference it;
// the hit/miss/put counters below track each query execution in sequence.
public void testQueryCacheInvalidation() throws Exception {
getSessions().evictQueries();
getSessions().getStatistics().clear();
final String queryString = "from Item i where i.name='widget'";
Session s = openSession();
Transaction t = s.beginTransaction();
s.createQuery( queryString ).setCacheable(true).list();
Item i = new Item();
i.setName("widget");
i.setDescription("A really top-quality, full-featured widget.");
s.save(i);
t.commit();
s.close();
// Statistics objects remain valid after the session is closed; they belong
// to the session factory.
QueryStatistics qs = s.getSessionFactory().getStatistics().getQueryStatistics( queryString );
EntityStatistics es = s.getSessionFactory().getStatistics().getEntityStatistics( Item.class.getName() );
Thread.sleep(200);
s = openSession();
t = s.beginTransaction();
List result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 0 );
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 1 );
assertEquals( s.getSessionFactory().getStatistics().getEntityFetchCount(), 0 );
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
assertTrue( Hibernate.isInitialized( result.get(0) ) );
i = (Item) result.get(0);
// This update invalidates the cached result set for queryString.
i.setName("Widget");
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 2 );
assertEquals( qs.getCacheMissCount(), 2 );
assertEquals( s.getSessionFactory().getStatistics().getEntityFetchCount(), 0 );
Thread.sleep(200);
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
if ( dialectIsCaseSensitive("i.name='widget' should not match on case sensitive database.") ) {
assertEquals( result.size(), 0 );
}
i = (Item) s.get( Item.class, new Long(i.getId()) );
assertEquals( i.getName(), "Widget" );
s.delete(i);
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 2 );
assertEquals( qs.getCacheMissCount(), 3 );
assertEquals( qs.getCachePutCount(), 3 );
assertEquals( qs.getExecutionCount(), 3 );
assertEquals( es.getFetchCount(), 0 ); //check that it was being cached
}
// Entities returned from a cached query must be re-fetchable after being
// evicted from the second-level cache (exactly one entity fetch expected).
public void testQueryCacheFetch() throws Exception {
getSessions().evictQueries();
getSessions().getStatistics().clear();
Session s = openSession();
Transaction t = s.beginTransaction();
Item i = new Item();
i.setName("widget");
i.setDescription("A really top-quality, full-featured widget.");
Item i2 = new Item();
i2.setName("other widget");
i2.setDescription("Another decent widget.");
s.persist(i);
s.persist(i2);
t.commit();
s.close();
final String queryString = "from Item i where i.name like '%widget'";
QueryStatistics qs = s.getSessionFactory().getStatistics().getQueryStatistics( queryString );
Thread.sleep(200);
s = openSession();
t = s.beginTransaction();
List result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 2 );
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 0 );
assertEquals( s.getSessionFactory().getStatistics().getEntityFetchCount(), 0 );
// Evict the entities but keep the cached query result (ids only), forcing a
// fetch when the cached result is materialized.
getSessions().evict(Item.class);
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 2 );
assertTrue( Hibernate.isInitialized( result.get(0) ) );
assertTrue( Hibernate.isInitialized( result.get(1) ) );
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 1 );
assertEquals( s.getSessionFactory().getStatistics().getEntityFetchCount(), 1 );
s = openSession();
t = s.beginTransaction();
s.createQuery("delete Item").executeUpdate();
t.commit();
s.close();
}
// Scalar-projection queries are cached before result transformation, so the
// ALIAS_TO_ENTITY_MAP transformer should still register as cache hits.
public void testProjectionCache() throws Exception {
getSessions().evictQueries();
getSessions().getStatistics().clear();
final String queryString = "select i.description as desc from Item i where i.name='widget'";
Session s = openSession();
Transaction t = s.beginTransaction();
s.createQuery( queryString ).setCacheable(true).list();
Item i = new Item();
i.setName("widget");
i.setDescription("A really top-quality, full-featured widget.");
s.save(i);
t.commit();
s.close();
QueryStatistics qs = s.getSessionFactory().getStatistics().getQueryStatistics( queryString );
EntityStatistics es = s.getSessionFactory().getStatistics().getEntityStatistics( Item.class.getName() );
assertEquals( qs.getCacheHitCount(), 0 );
assertEquals( qs.getCacheMissCount(), 1 );
assertEquals( qs.getCachePutCount(), 1 );
Thread.sleep(200);
s = openSession();
t = s.beginTransaction();
List result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
assertEquals( i.getDescription(), ( ( String ) result.get( 0 ) ) );
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 0 );
assertEquals( qs.getCacheMissCount(), 2 );
assertEquals( qs.getCachePutCount(), 2 );
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
assertEquals( i.getDescription(), result.get( 0 ) );
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 1 );
assertEquals( qs.getCacheMissCount(), 2 );
assertEquals( qs.getCachePutCount(), 2 );
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP).list();
assertEquals( result.size(), 1 );
Map m = (Map) result.get(0);
assertEquals( 1, m.size() );
assertEquals( i.getDescription(), m.get( "desc" ) );
t.commit();
s.close();
assertEquals( "hit count should go up since data is not transformed until after it is cached", qs.getCacheHitCount(), 2 );
assertEquals( qs.getCacheMissCount(), 2 );
assertEquals( qs.getCachePutCount(), 2 );
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).setResultTransformer(Transformers.ALIAS_TO_ENTITY_MAP).list();
assertEquals( result.size(), 1 );
m = (Map) result.get(0);
assertEquals(1, m.size());
assertEquals( i.getDescription(), m.get( "desc" ) );
t.commit();
s.close();
// NOTE(review): "cachedr" below is a typo for "cached" in the assertion
// message; left as-is here since it is a runtime string.
assertEquals( "hit count should go up since data is not transformed until after it is cachedr", qs.getCacheHitCount(), 3 );
assertEquals( qs.getCacheMissCount(), 2 );
assertEquals( qs.getCachePutCount(), 2 );
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
assertTrue( Hibernate.isInitialized( result.get(0) ) );
i = (Item) s.get( Item.class, new Long(i.getId()) );
// This update invalidates the cached projection result.
i.setName("widget");
i.setDescription("A middle-quality widget.");
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 4 );
assertEquals( qs.getCacheMissCount(), 2 );
assertEquals( qs.getCachePutCount(), 2 );
Thread.sleep(200);
s = openSession();
t = s.beginTransaction();
result = s.createQuery( queryString ).setCacheable(true).list();
assertEquals( result.size(), 1 );
i = (Item) s.get( Item.class, new Long(i.getId()) );
assertEquals( (String) result.get(0), "A middle-quality widget." );
assertEquals( qs.getCacheHitCount(), 4 );
assertEquals( qs.getCacheMissCount(), 3 );
assertEquals( qs.getCachePutCount(), 3 );
s.delete(i);
t.commit();
s.close();
assertEquals( qs.getCacheHitCount(), 4 );
assertEquals( qs.getCacheMissCount(), 3 );
assertEquals( qs.getCachePutCount(), 3 );
assertEquals( qs.getExecutionCount(), 3 );
assertEquals( es.getFetchCount(), 0 ); //check that it was being cached
}
}
| |
/**
* Copyright (C) 2014-2016 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.common.utils;
import com.google.common.collect.ImmutableList;
import java.util.Collections;
import java.util.List;
import org.apache.helix.HelixManager;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.IdealState;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
* Test for the service status.
*/
public class ServiceStatusTest {
// Stub callbacks with fixed statuses, used to test status aggregation.
private static final ServiceStatus.ServiceStatusCallback ALWAYS_GOOD = new ServiceStatus.ServiceStatusCallback() {
@Override
public ServiceStatus.Status getServiceStatus() {
return ServiceStatus.Status.GOOD;
}
};
private static final ServiceStatus.ServiceStatusCallback ALWAYS_STARTING = new ServiceStatus.ServiceStatusCallback() {
@Override
public ServiceStatus.Status getServiceStatus() {
return ServiceStatus.Status.STARTING;
}
};
private static final ServiceStatus.ServiceStatusCallback ALWAYS_BAD = new ServiceStatus.ServiceStatusCallback() {
@Override
public ServiceStatus.Status getServiceStatus() {
return ServiceStatus.Status.BAD;
}
};
public static final String TABLE_NAME = "myTable_OFFLINE";
public static final String INSTANCE_NAME = "Server_1.2.3.4_1234";
// Verifies how MultipleCallbackServiceStatusCallback combines the statuses of
// its child callbacks (first non-GOOD status, evaluated left to right, wins).
@Test
public void testMultipleServiceStatusCallback() {
// Only good should return good
ServiceStatus.MultipleCallbackServiceStatusCallback onlyGood = new ServiceStatus.MultipleCallbackServiceStatusCallback(
ImmutableList.of(ALWAYS_GOOD)
);
assertEquals(onlyGood.getServiceStatus(), ServiceStatus.Status.GOOD);
// Only bad should return bad
ServiceStatus.MultipleCallbackServiceStatusCallback onlyBad = new ServiceStatus.MultipleCallbackServiceStatusCallback(
ImmutableList.of(ALWAYS_BAD)
);
assertEquals(onlyBad.getServiceStatus(), ServiceStatus.Status.BAD);
// Only starting should return starting
ServiceStatus.MultipleCallbackServiceStatusCallback onlyStarting = new ServiceStatus.MultipleCallbackServiceStatusCallback(
ImmutableList.of(ALWAYS_STARTING)
);
assertEquals(onlyStarting.getServiceStatus(), ServiceStatus.Status.STARTING);
// Good + starting = starting
ServiceStatus.MultipleCallbackServiceStatusCallback goodAndStarting = new ServiceStatus.MultipleCallbackServiceStatusCallback(
ImmutableList.of(ALWAYS_GOOD, ALWAYS_STARTING)
);
assertEquals(goodAndStarting.getServiceStatus(), ServiceStatus.Status.STARTING);
// Good + starting + bad = starting (check for left-to-right evaluation)
ServiceStatus.MultipleCallbackServiceStatusCallback goodStartingAndBad = new ServiceStatus.MultipleCallbackServiceStatusCallback(
ImmutableList.of(ALWAYS_GOOD, ALWAYS_STARTING, ALWAYS_BAD)
);
assertEquals(goodStartingAndBad.getServiceStatus(), ServiceStatus.Status.STARTING);
}
// Verifies the ideal-state/external-view comparison logic. Note the checks
// are order-sensitive: once a callback reports GOOD it keeps reporting GOOD.
@Test
public void testIdealStateMatch() {
TestIdealStateAndExternalViewMatchServiceStatusCallback callback;
// No ideal state = STARTING
callback = buildTestISEVCallback();
callback.setExternalView(new ExternalView(TABLE_NAME));
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.STARTING);
// No external view = STARTING
callback = buildTestISEVCallback();
callback.setIdealState(new IdealState(TABLE_NAME));
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.STARTING);
// Empty ideal state + empty external view = GOOD
callback = buildTestISEVCallback();
callback.setIdealState(new IdealState(TABLE_NAME));
callback.setExternalView(new ExternalView(TABLE_NAME));
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
// Once the status is GOOD, it should keep on reporting GOOD no matter what
callback.setIdealState(null);
callback.setExternalView(null);
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
// Non empty ideal state + empty external view = STARTING
callback = buildTestISEVCallback();
IdealState idealState = new IdealState(TABLE_NAME);
idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
idealState.setPartitionState("mySegment", INSTANCE_NAME, "ONLINE");
callback.setIdealState(idealState);
callback.setExternalView(new ExternalView(TABLE_NAME));
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.STARTING);
// Should be good if the only ideal state is disabled
callback.getResourceIdealState(TABLE_NAME).enable(false);
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
// Should ignore offline segments in ideal state
callback = buildTestISEVCallback();
idealState = new IdealState(TABLE_NAME);
idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
idealState.setPartitionState("mySegment_1", INSTANCE_NAME, "ONLINE");
idealState.setPartitionState("mySegment_2", INSTANCE_NAME, "OFFLINE");
callback.setIdealState(idealState);
ExternalView externalView = new ExternalView(TABLE_NAME);
externalView.setState("mySegment_1", INSTANCE_NAME, "ONLINE");
callback.setExternalView(externalView);
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
// Should ignore segments in error state in external view
callback = buildTestISEVCallback();
idealState = new IdealState(TABLE_NAME);
idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
idealState.setPartitionState("mySegment_1", INSTANCE_NAME, "ONLINE");
idealState.setPartitionState("mySegment_2", INSTANCE_NAME, "OFFLINE");
callback.setIdealState(idealState);
externalView = new ExternalView(TABLE_NAME);
externalView.setState("mySegment_1", INSTANCE_NAME, "ERROR");
callback.setExternalView(externalView);
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
// Should ignore other instances
callback = buildTestISEVCallback();
idealState = new IdealState(TABLE_NAME);
idealState.setRebalanceMode(IdealState.RebalanceMode.CUSTOMIZED);
idealState.setPartitionState("mySegment_1", INSTANCE_NAME, "ONLINE");
idealState.setPartitionState("mySegment_2", INSTANCE_NAME + "2", "ONLINE");
callback.setIdealState(idealState);
externalView = new ExternalView(TABLE_NAME);
externalView.setState("mySegment_1", INSTANCE_NAME, "ONLINE");
externalView.setState("mySegment_2", INSTANCE_NAME + "2", "OFFLINE");
callback.setExternalView(externalView);
assertEquals(callback.getServiceStatus(), ServiceStatus.Status.GOOD);
}
// The null HelixManager and dummy cluster name are fine here: the test
// subclass overrides all accessors that would touch Helix.
private TestIdealStateAndExternalViewMatchServiceStatusCallback buildTestISEVCallback() {
return new TestIdealStateAndExternalViewMatchServiceStatusCallback(null, "potato", INSTANCE_NAME,
Collections.singletonList(TABLE_NAME));
}
// Test double that serves the ideal state / external view from fields instead
// of querying Helix.
private static class TestIdealStateAndExternalViewMatchServiceStatusCallback extends ServiceStatus.IdealStateAndExternalViewMatchServiceStatusCallback {
private IdealState _idealState;
private ExternalView _externalView;
public TestIdealStateAndExternalViewMatchServiceStatusCallback(HelixManager helixManager, String clusterName,
String instanceName) {
super(helixManager, clusterName, instanceName);
}
public TestIdealStateAndExternalViewMatchServiceStatusCallback(HelixManager helixManager, String clusterName,
String instanceName, List<String> resourcesToMonitor) {
super(helixManager, clusterName, instanceName, resourcesToMonitor);
}
@Override
public IdealState getResourceIdealState(String resourceName) {
return _idealState;
}
@Override
public ExternalView getState(String resourceName) {
return _externalView;
}
public void setIdealState(IdealState idealState) {
_idealState = idealState;
}
public void setExternalView(ExternalView externalView) {
_externalView = externalView;
}
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.fleet.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.index.shard.GlobalCheckpointListeners;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeoutException;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
/**
 * Shard-level action that returns the current global checkpoint of a shard,
 * optionally waiting (up to a timeout) for the checkpoint to advance past a
 * caller-supplied value before responding.
 */
public class GetGlobalCheckpointsShardAction extends ActionType<GetGlobalCheckpointsShardAction.Response> {
public static final GetGlobalCheckpointsShardAction INSTANCE = new GetGlobalCheckpointsShardAction();
public static final String NAME = "indices:monitor/fleet/global_checkpoints[s]";
private GetGlobalCheckpointsShardAction() {
super(NAME, GetGlobalCheckpointsShardAction.Response::new);
}
public static class Response extends ActionResponse {
// The shard's global checkpoint at the time the response was built.
private final long globalCheckpoint;
// True only when the request waited for the checkpoint to advance and the
// wait timed out without it advancing past the requested value.
private final boolean timedOut;
public Response(long globalCheckpoint, boolean timedOut) {
this.globalCheckpoint = globalCheckpoint;
this.timedOut = timedOut;
}
public Response(StreamInput in) throws IOException {
super(in);
globalCheckpoint = in.readLong();
timedOut = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
// Wire order must mirror the StreamInput constructor above.
out.writeLong(globalCheckpoint);
out.writeBoolean(timedOut);
}
public long getGlobalCheckpoint() {
return globalCheckpoint;
}
public boolean timedOut() {
return timedOut;
}
}
public static class Request extends SingleShardRequest<Request> {
private final ShardId shardId;
// If true, block until the global checkpoint exceeds {@code checkpoint}.
private final boolean waitForAdvance;
// Checkpoint the caller has already observed; used as the wait threshold.
private final long checkpoint;
// Maximum time to wait for the checkpoint to advance.
private final TimeValue timeout;
Request(ShardId shardId, boolean waitForAdvance, long checkpoint, TimeValue timeout) {
super(shardId.getIndexName());
this.shardId = shardId;
this.waitForAdvance = waitForAdvance;
this.checkpoint = checkpoint;
this.timeout = timeout;
}
Request(StreamInput in) throws IOException {
super(in);
this.shardId = new ShardId(in);
this.waitForAdvance = in.readBoolean();
this.checkpoint = in.readLong();
this.timeout = in.readTimeValue();
}
@Override
public ActionRequestValidationException validate() {
// No additional validation beyond what SingleShardRequest enforces.
return null;
}
public ShardId getShardId() {
return shardId;
}
public TimeValue timeout() {
return timeout;
}
public boolean waitForAdvance() {
return waitForAdvance;
}
public long checkpoint() {
return checkpoint;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
// Wire order must mirror the StreamInput constructor above.
super.writeTo(out);
shardId.writeTo(out);
out.writeBoolean(waitForAdvance);
out.writeLong(checkpoint);
out.writeTimeValue(timeout);
}
}
public static class TransportAction extends TransportSingleShardAction<Request, Response> {
private final IndicesService indicesService;
@Inject
public TransportAction(
ThreadPool threadPool,
ClusterService clusterService,
TransportService transportService,
ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver,
IndicesService indicesService
) {
super(
NAME,
threadPool,
clusterService,
transportService,
actionFilters,
indexNameExpressionResolver,
Request::new,
ThreadPool.Names.GENERIC
);
this.indicesService = indicesService;
}
// Synchronous path: read and return the current global checkpoint.
@Override
protected Response shardOperation(Request request, ShardId shardId) {
final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
final IndexShard indexShard = indexService.getShard(shardId.id());
final SeqNoStats seqNoStats = indexShard.seqNoStats();
return new Response(seqNoStats.getGlobalCheckpoint(), false);
}
// If the caller asked to wait and the checkpoint has not yet advanced past
// the requested value, register a listener instead of responding immediately.
@Override
protected void asyncShardOperation(Request request, ShardId shardId, ActionListener<Response> listener) throws IOException {
final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
final IndexShard indexShard = indexService.getShard(shardId.id());
final SeqNoStats seqNoStats = indexShard.seqNoStats();
if (request.waitForAdvance() && request.checkpoint() >= seqNoStats.getGlobalCheckpoint()) {
indexShard.addGlobalCheckpointListener(request.checkpoint() + 1, new GlobalCheckpointListeners.GlobalCheckpointListener() {
@Override
public Executor executor() {
return threadPool.executor(ThreadPool.Names.GENERIC);
}
@Override
public void accept(final long g, final Exception e) {
// g == UNASSIGNED_SEQ_NO signals failure (e carries the cause).
if (g != UNASSIGNED_SEQ_NO) {
assert request.checkpoint() < g
: shardId + " only advanced to [" + g + "] while waiting for [" + request.checkpoint() + "]";
globalCheckpointAdvanced(shardId, request, listener);
} else {
assert e != null;
globalCheckpointAdvancementFailure(indexShard, request, e, listener);
}
}
}, request.timeout());
} else {
super.asyncShardOperation(request, shardId, listener);
}
}
// Re-reads the (now advanced) checkpoint via the normal shard operation.
private void globalCheckpointAdvanced(final ShardId shardId, final Request request, final ActionListener<Response> listener) {
try {
super.asyncShardOperation(request, shardId, listener);
} catch (final IOException caught) {
listener.onFailure(caught);
}
}
// On timeout, respond with the current checkpoint (marking timedOut only if
// it still has not advanced); other failures propagate to the listener.
private void globalCheckpointAdvancementFailure(
final IndexShard indexShard,
final Request request,
final Exception e,
final ActionListener<Response> listener
) {
try {
if (e instanceof TimeoutException) {
final long globalCheckpoint = indexShard.seqNoStats().getGlobalCheckpoint();
if (request.checkpoint() >= globalCheckpoint) {
listener.onResponse(new Response(globalCheckpoint, true));
} else {
listener.onResponse(new Response(globalCheckpoint, false));
}
} else {
listener.onFailure(e);
}
} catch (RuntimeException e2) {
listener.onFailure(e2);
}
}
@Override
protected Writeable.Reader<Response> getResponseReader() {
return Response::new;
}
@Override
protected boolean resolveIndex(Request request) {
return true;
}
// Always route to the primary shard, which owns the global checkpoint.
@Override
protected ShardsIterator shards(ClusterState state, InternalRequest request) {
return state.routingTable().shardRoutingTable(request.request().getShardId()).primaryShardIt();
}
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.ui.database.event;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.lang.StringUtils;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.BaseDatabaseMeta;
import org.pentaho.di.core.database.DatabaseConnectionPoolParameter;
import org.pentaho.di.core.database.DatabaseInterface;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.database.GenericDatabaseMeta;
import org.pentaho.di.core.database.MSSQLServerNativeDatabaseMeta;
import org.pentaho.di.core.database.PartitionDatabaseMeta;
import org.pentaho.di.core.database.SAPR3DatabaseMeta;
import org.pentaho.di.core.exception.KettlePluginException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.plugins.DatabasePluginType;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.ui.database.Messages;
import org.pentaho.ui.util.Launch;
import org.pentaho.ui.util.Launch.Status;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulException;
import org.pentaho.ui.xul.components.XulButton;
import org.pentaho.ui.xul.components.XulCheckbox;
import org.pentaho.ui.xul.components.XulLabel;
import org.pentaho.ui.xul.components.XulMessageBox;
import org.pentaho.ui.xul.components.XulTextbox;
import org.pentaho.ui.xul.components.XulTreeCell;
import org.pentaho.ui.xul.containers.XulDeck;
import org.pentaho.ui.xul.containers.XulDialog;
import org.pentaho.ui.xul.containers.XulListbox;
import org.pentaho.ui.xul.containers.XulRoot;
import org.pentaho.ui.xul.containers.XulTree;
import org.pentaho.ui.xul.containers.XulTreeItem;
import org.pentaho.ui.xul.containers.XulTreeRow;
import org.pentaho.ui.xul.containers.XulWindow;
import org.pentaho.ui.xul.impl.AbstractXulEventHandler;
/**
* Handles all manipulation of the DatabaseMeta, data retrieval from XUL DOM and rudimentary validation.
*
* TODO: 2. Needs to be abstracted away from the DatabaseMeta object, so other tools in the platform can use the dialog
* and their preferred database object. 3. Needs exception handling, string resourcing and logging
*
* @author gmoran
* @created Mar 19, 2008
*
*/
public class DataHandler extends AbstractXulEventHandler {
public static final SortedMap<String, DatabaseInterface> connectionMap =
new TreeMap<String, DatabaseInterface>();
public static final Map<String, String> connectionNametoID = new HashMap<String, String>();
// The connectionMap allows us to keep track of the connection
// type we are working with and the correlating database interface
// Populates connectionMap/connectionNametoID from the registered database
// plugins. Fix: Throwable.getCause() may return null (e.g. when the plugin
// exception was raised directly, not wrapped), so the previous
// cnfe.getCause().getClass().getName() could throw an NPE inside the catch
// block; the cause class name is now resolved null-safely, and the duplicated
// message string is built once.
static {
PluginRegistry registry = PluginRegistry.getInstance();
List<PluginInterface> plugins = registry.getPlugins( DatabasePluginType.class );
for ( PluginInterface plugin : plugins ) {
try {
DatabaseInterface databaseInterface = (DatabaseInterface) registry.loadClass( plugin );
databaseInterface.setPluginId( plugin.getIds()[0] );
databaseInterface.setName( plugin.getName() );
connectionMap.put( plugin.getName(), databaseInterface );
connectionNametoID.put( plugin.getName(), plugin.getIds()[0] );
} catch ( KettlePluginException cnfe ) {
Throwable cause = cnfe.getCause();
String causeName = ( cause != null ? cause : cnfe ).getClass().getName();
String message = "Could not create connection entry for " + plugin.getName() + ". " + causeName;
System.out.println( message );
LogChannel.GENERAL.logError( message );
} catch ( Exception e ) {
throw new RuntimeException( "Error creating class for: " + plugin, e );
}
}
}
  // The DatabaseMeta being edited by this dialog; null until setData()/getData().
  protected DatabaseMeta databaseMeta = null;
  // Scratch DatabaseMeta used by pushCache()/popCache() to preserve connection-specific
  // field values across database-type switches.
  private DatabaseMeta cache = new DatabaseMeta();
  // ==== General dialog chrome (resolved lazily in getControls()) ==== //
  private XulDeck dialogDeck;
  private XulListbox deckOptionsBox;
  private XulListbox connectionBox;
  private XulListbox accessBox;
  private XulTextbox connectionNameBox;
  protected XulTextbox hostNameBox;
  protected XulTextbox databaseNameBox;
  protected XulTextbox portNumberBox;
  protected XulTextbox userNameBox;
  protected XulTextbox passwordBox;
  // Generic database specific
  protected XulTextbox customDriverClassBox;
  // Generic database specific
  protected XulTextbox customUrlBox;
  // Oracle specific
  protected XulTextbox dataTablespaceBox;
  // Oracle specific
  protected XulTextbox indexTablespaceBox;
  // MS SQL Server specific
  private XulTextbox serverInstanceBox;
  // Informix specific
  private XulTextbox serverNameBox;
  // SAP R/3 specific
  protected XulTextbox languageBox;
  // SAP R/3 specific
  protected XulTextbox systemNumberBox;
  // SAP R/3 specific
  protected XulTextbox clientBox;
  // MS SQL Server specific
  private XulCheckbox doubleDecimalSeparatorCheck;
  // private XulCheckbox mssqlIntegratedSecurity;
  // MySQL specific
  private XulCheckbox resultStreamingCursorCheck;
  // ==== Options Panel ==== //
  protected XulTree optionsParameterTree;
  // ==== Clustering Panel ==== //
  private XulCheckbox clusteringCheck;
  protected XulTree clusterParameterTree;
  private XulLabel clusterParameterDescriptionLabel;
  // ==== Advanced Panel ==== //
  XulCheckbox supportBooleanDataType;
  XulCheckbox supportTimestampDataType;
  XulCheckbox quoteIdentifiersCheck;
  XulCheckbox lowerCaseIdentifiersCheck;
  XulCheckbox upperCaseIdentifiersCheck;
  XulCheckbox preserveReservedCaseCheck;
  XulCheckbox useIntegratedSecurityCheck;
  XulTextbox preferredSchemaName;
  XulTextbox sqlBox;
  // ==== Pooling Panel ==== //
  private XulLabel poolSizeLabel;
  private XulLabel maxPoolSizeLabel;
  private XulCheckbox poolingCheck;
  protected XulTextbox poolSizeBox;
  protected XulTextbox maxPoolSizeBox;
  private XulTextbox poolingDescription;
  private XulLabel poolingParameterDescriptionLabel;
  private XulLabel poolingDescriptionLabel;
  protected XulTree poolParameterTree;
  // ==== Dialog buttons and notice area ==== //
  protected XulButton acceptButton;
  private XulButton cancelButton;
  private XulButton testButton;
  private XulLabel noticeLabel;
  // No-arg constructor required by the XUL framework; the controls are resolved
  // lazily via getControls() once the XUL DOM is available.
  public DataHandler() {
  }
  /**
   * One-time initialization of the dialog on load: fills the connection-type listbox
   * from {@code connectionMap}, selects a default type, forces the first deck panel,
   * and seeds the pooling parameter table with defaults.
   */
  public void loadConnectionData() {
    // HACK: need to check if onload event was already fired.
    // It is called from XulDatabaseDialog from dcDialog.getSwtInstance(shell); AND dialog.show();
    // Multiple calls lead to multiple numbers of database types.
    // Therefore we check if the connectionBox was already filled.
    if ( connectionBox != null ) {
      return;
    }
    getControls();
    // Add sorted types to the listbox now.
    for ( String key : connectionMap.keySet() ) {
      connectionBox.addItem( key );
    }
    // HACK: Need to force height of list control, as it does not behave
    // well when using relative layouting
    connectionBox.setRows( connectionBox.getRows() );
    Object key = connectionBox.getSelectedItem();
    // Nothing selected yet...select first item.
    // TODO Implement a connection type preference,
    // and use that type as the default for
    // new databases.
    if ( key == null ) {
      key = connectionMap.firstKey();
      connectionBox.setSelectedItem( key );
    }
    // HACK: Need to force selection of first panel
    if ( dialogDeck != null ) {
      setDeckChildIndex();
    }
    setDefaultPoolParameters();
    // HACK: reDim the pooling table
    if ( poolParameterTree != null ) {
      poolParameterTree.setRows( poolParameterTree.getRows() );
    }
  }
  // On Database type change
  /**
   * Refreshes the access-type listbox for the currently selected database type,
   * preserving the previous access selection when still applicable, then reloads
   * the options and clustering tables and restores cached connection-specific values.
   */
  public void loadAccessData() {
    getControls();
    // Save current connection-specific field values before the type switch wipes them.
    pushCache();
    Object key = connectionBox.getSelectedItem();
    // Nothing selected yet...
    if ( key == null ) {
      key = connectionMap.firstKey();
      connectionBox.setSelectedItem( key );
      return;
    }
    DatabaseInterface database = connectionMap.get( key );
    int[] acc = database.getAccessTypeList();
    Object accessKey = accessBox.getSelectedItem();
    accessBox.removeItems();
    // Add those access types applicable to this connection type
    for ( int value : acc ) {
      accessBox.addItem( DatabaseMeta.getAccessTypeDescLong( value ) );
    }
    // HACK: Need to force height of list control, as it does not behave
    // well when using relative layouting
    accessBox.setRows( accessBox.getRows() );
    // May not exist for this connection type.
    if ( accessKey != null ) { // This check keeps the SwtListbox from complaining about a null value
      accessBox.setSelectedItem( accessKey );
    }
    // Last resort, set first as default
    // NOTE(review): assumes every database type has at least one access type (acc non-empty) — verify
    if ( accessBox.getSelectedItem() == null ) {
      accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( acc[0] ) );
    }
    Map<String, String> options = null;
    if ( this.databaseMeta != null ) {
      options = this.databaseMeta.getExtraOptions();
    }
    setOptionsData( options );
    PartitionDatabaseMeta[] clusterInfo = null;
    if ( this.databaseMeta != null ) {
      clusterInfo = this.databaseMeta.getPartitioningInformation();
    }
    setClusterData( clusterInfo );
    // Restore the connection-specific values saved above.
    popCache();
  }
public void editOptions( int index ) {
if ( index + 1 == optionsParameterTree.getRows() ) {
// editing last row add a new one below
Object[][] values = optionsParameterTree.getValues();
Object[] row = values[values.length - 1];
if ( row != null && ( !StringUtils.isEmpty( (String) row[0] ) || !StringUtils.isEmpty( (String) row[1] ) ) ) {
// acutally have something in current last row
XulTreeRow newRow = optionsParameterTree.getRootChildren().addNewRow();
newRow.addCellText( 0, "" );
newRow.addCellText( 1, "" );
}
}
}
  /**
   * Opens the extra-options help page for the currently entered database type in a
   * browser, or shows a message dialog when no help URL exists or the browser
   * cannot be launched.
   */
  public void getOptionHelp() {
    String message = null;
    // Build a throwaway DatabaseMeta from the current dialog state to resolve the URL.
    DatabaseMeta database = new DatabaseMeta();
    getInfo( database );
    String url = database.getExtraOptionsHelpText();
    if ( ( url == null ) || ( url.trim().length() == 0 ) ) {
      message = Messages.getString( "DataHandler.USER_NO_HELP_AVAILABLE" );
      showMessage( message, false );
      return;
    }
    Status status = Launch.openURL( url );
    if ( status.equals( Status.Failed ) ) {
      message = Messages.getString( "DataHandler.USER_UNABLE_TO_LAUNCH_BROWSER", url );
      showMessage( message, false );
    }
  }
  /**
   * Switches the visible deck panel to the one selected in the options listbox.
   * When leaving the pooling panel (index 3), the pooling parameters are validated
   * first; on failure the selection is forced back to the pooling panel.
   */
  public void setDeckChildIndex() {
    getControls();
    // if pooling selected, check the parameter validity before allowing
    // a deck panel switch...
    int originalSelection = dialogDeck.getSelectedIndex();
    boolean passed = true;
    if ( originalSelection == 3 ) {
      passed = checkPoolingParameters();
    }
    if ( passed ) {
      int selected = deckOptionsBox.getSelectedIndex();
      if ( selected < 0 ) {
        // Nothing selected in the listbox: default to the first panel.
        selected = 0;
        deckOptionsBox.setSelectedIndex( 0 );
      }
      dialogDeck.setSelectedIndex( selected );
    } else {
      // Validation failed: stay on (and re-select) the original panel.
      dialogDeck.setSelectedIndex( originalSelection );
      deckOptionsBox.setSelectedIndex( originalSelection );
    }
  }
public void onPoolingCheck() {
if ( poolingCheck != null ) {
boolean dis = !poolingCheck.isChecked();
if ( poolSizeBox != null ) {
poolSizeBox.setDisabled( dis );
}
if ( maxPoolSizeBox != null ) {
maxPoolSizeBox.setDisabled( dis );
}
if ( poolSizeLabel != null ) {
poolSizeLabel.setDisabled( dis );
}
if ( maxPoolSizeLabel != null ) {
maxPoolSizeLabel.setDisabled( dis );
}
if ( poolParameterTree != null ) {
poolParameterTree.setDisabled( dis );
}
if ( poolingParameterDescriptionLabel != null ) {
poolingParameterDescriptionLabel.setDisabled( dis );
}
if ( poolingDescriptionLabel != null ) {
poolingDescriptionLabel.setDisabled( dis );
}
if ( poolingDescription != null ) {
poolingDescription.setDisabled( dis );
}
}
}
public void onClusterCheck() {
if ( clusteringCheck != null ) {
boolean dis = !clusteringCheck.isChecked();
if ( clusterParameterTree != null ) {
clusterParameterTree.setDisabled( dis );
}
if ( clusterParameterDescriptionLabel != null ) {
clusterParameterDescriptionLabel.setDisabled( dis );
}
}
}
  /**
   * Returns the DatabaseMeta being edited, first syncing it with the current
   * dialog state (unless the window has already been closed, in which case the
   * controls can no longer be read).
   *
   * @return the (possibly freshly created) DatabaseMeta
   */
  public Object getData() {
    if ( databaseMeta == null ) {
      databaseMeta = new DatabaseMeta();
    }
    if ( !windowClosed() ) {
      this.getInfo( databaseMeta );
    }
    return databaseMeta;
  }
  /**
   * Sets the DatabaseMeta to edit and pushes its values into the dialog controls.
   * Non-DatabaseMeta arguments leave the current databaseMeta unchanged but still
   * trigger a control refresh.
   *
   * @param data expected to be a DatabaseMeta
   */
  public void setData( Object data ) {
    if ( data instanceof DatabaseMeta ) {
      databaseMeta = (DatabaseMeta) data;
    }
    setInfo( databaseMeta );
  }
  // Saves the connection-specific control values into the scratch cache
  // so they survive a database-type switch (see loadAccessData()).
  public void pushCache() {
    getConnectionSpecificInfo( cache );
  }
  // Restores the connection-specific control values previously saved by pushCache().
  public void popCache() {
    setConnectionSpecificInfo( cache );
  }
  // Cancel button handler: dismisses the dialog without saving.
  public void onCancel() {
    close();
  }
  /**
   * Dismisses the dialog: hides it when hosted in a XulDialog, closes it when
   * hosted in a XulWindow. Falls back to the document root when the well-known
   * window element is absent.
   */
  private void close() {
    XulComponent window = document.getElementById( "general-datasource-window" );
    if ( window == null ) { // window must be root
      window = document.getRootElement();
    }
    if ( window instanceof XulDialog ) {
      ( (XulDialog) window ).hide();
    } else if ( window instanceof XulWindow ) {
      ( (XulWindow) window ).close();
    }
  }
private boolean windowClosed() {
boolean closedWindow = true;
XulComponent window = document.getElementById( "general-datasource-window" );
if ( window == null ) { // window must be root
window = document.getRootElement();
}
if ( window instanceof XulWindow ) {
closedWindow = ( (XulWindow) window ).isClosed();
}
return closedWindow;
}
public void onOK() {
DatabaseMeta database = new DatabaseMeta();
this.getInfo( database );
boolean passed = checkPoolingParameters();
if ( !passed ) {
return;
}
String[] remarks = database.checkParameters();
String message = "";
if ( remarks.length != 0 ) {
for ( int i = 0; i < remarks.length; i++ ) {
message = message.concat( "* " ).concat( remarks[i] ).concat( System.getProperty( "line.separator" ) );
}
showMessage( message, false );
} else {
if ( databaseMeta == null ) {
databaseMeta = new DatabaseMeta();
}
this.getInfo( databaseMeta );
databaseMeta.setChanged();
close();
}
}
public void testDatabaseConnection() {
DatabaseMeta database = new DatabaseMeta();
getInfo( database );
String[] remarks = database.checkParameters();
String message = "";
if ( remarks.length != 0 ) {
for ( int i = 0; i < remarks.length; i++ ) {
message = message.concat( "* " ).concat( remarks[i] ).concat( System.getProperty( "line.separator" ) );
}
} else {
message = database.testConnection();
}
showMessage( message, message.length() > 300 );
}
  /**
   * Copies the complete dialog state into the given DatabaseMeta: name, type,
   * access, connection-specific fields, options, advanced flags, clustering and
   * pooling settings. Variables are inherited from the dialog's own databaseMeta
   * when copying into a different instance.
   *
   * @param meta the DatabaseMeta to populate from the dialog controls
   */
  protected void getInfo( DatabaseMeta meta ) {
    getControls();
    if ( this.databaseMeta != null && this.databaseMeta != meta ) {
      meta.initializeVariablesFrom( this.databaseMeta );
    }
    // Let's not remove any (default) options or attributes
    // We just need to display the correct ones for the database type below...
    //
    // In fact, let's just clear the database port...
    //
    // TODO: what about the port number?
    // Name:
    meta.setName( connectionNameBox.getValue() );
    // Display Name: (PDI-12292)
    meta.setDisplayName( connectionNameBox.getValue() );
    // Connection type:
    Object connection = connectionBox.getSelectedItem();
    if ( connection != null ) {
      meta.setDatabaseType( (String) connection );
    }
    // Access type:
    Object access = accessBox.getSelectedItem();
    if ( access != null ) {
      meta.setAccessType( DatabaseMeta.getAccessType( (String) access ) );
    }
    getConnectionSpecificInfo( meta );
    // Port number:
    if ( portNumberBox != null ) {
      meta.setDBPort( portNumberBox.getValue() );
    }
    // Option parameters:
    if ( optionsParameterTree != null ) {
      Object[][] values = optionsParameterTree.getValues();
      for ( int i = 0; i < values.length; i++ ) {
        String parameter = (String) values[i][0];
        String value = (String) values[i][1];
        if ( value == null ) {
          value = "";
        }
        String dbType = meta.getPluginId();
        // Only if parameter are supplied, we will add to the map...
        if ( ( parameter != null ) && ( parameter.trim().length() > 0 ) ) {
          if ( value.trim().length() <= 0 ) {
            // Blank values are stored as the well-known empty-option marker.
            value = DatabaseMeta.EMPTY_OPTIONS_STRING;
          }
          meta.addExtraOption( dbType, parameter, value );
        }
      }
    }
    // Advanced panel settings:
    if ( supportBooleanDataType != null ) {
      meta.setSupportsBooleanDataType( supportBooleanDataType.isChecked() );
    }
    if ( supportTimestampDataType != null ) {
      meta.setSupportsTimestampDataType( supportTimestampDataType.isChecked() );
    }
    if ( quoteIdentifiersCheck != null ) {
      meta.setQuoteAllFields( quoteIdentifiersCheck.isChecked() );
    }
    if ( lowerCaseIdentifiersCheck != null ) {
      meta.setForcingIdentifiersToLowerCase( lowerCaseIdentifiersCheck.isChecked() );
    }
    if ( upperCaseIdentifiersCheck != null ) {
      meta.setForcingIdentifiersToUpperCase( upperCaseIdentifiersCheck.isChecked() );
    }
    if ( preserveReservedCaseCheck != null ) {
      meta.setPreserveReservedCase( preserveReservedCaseCheck.isChecked() );
    }
    if ( preferredSchemaName != null ) {
      meta.setPreferredSchemaName( preferredSchemaName.getValue() );
    }
    if ( sqlBox != null ) {
      meta.setConnectSQL( sqlBox.getValue() );
    }
    // Cluster panel settings
    if ( clusteringCheck != null ) {
      meta.setPartitioned( clusteringCheck.isChecked() );
    }
    if ( ( clusterParameterTree != null ) && ( meta.isPartitioned() ) ) {
      Object[][] values = clusterParameterTree.getValues();
      List<PartitionDatabaseMeta> pdms = new ArrayList<PartitionDatabaseMeta>();
      for ( int i = 0; i < values.length; i++ ) {
        String partitionId = (String) values[i][0];
        // Rows without a partition id are placeholder blanks; skip them.
        if ( ( partitionId == null ) || ( partitionId.trim().length() <= 0 ) ) {
          continue;
        }
        String hostname = (String) values[i][1];
        String port = (String) values[i][2];
        String dbName = (String) values[i][3];
        String username = (String) values[i][4];
        String password = (String) values[i][5];
        PartitionDatabaseMeta pdm = new PartitionDatabaseMeta( partitionId, hostname, port, dbName );
        pdm.setUsername( username );
        pdm.setPassword( password );
        pdms.add( pdm );
      }
      PartitionDatabaseMeta[] pdmArray = new PartitionDatabaseMeta[pdms.size()];
      meta.setPartitioningInformation( pdms.toArray( pdmArray ) );
    }
    if ( poolingCheck != null ) {
      meta.setUsingConnectionPool( poolingCheck.isChecked() );
    }
    if ( meta.isUsingConnectionPool() ) {
      if ( poolSizeBox != null ) {
        try {
          int initialPoolSize = Integer.parseInt( poolSizeBox.getValue() );
          meta.setInitialPoolSize( initialPoolSize );
        } catch ( NumberFormatException e ) {
          // TODO log exception and move on ...
        }
      }
      if ( maxPoolSizeBox != null ) {
        try {
          int maxPoolSize = Integer.parseInt( maxPoolSizeBox.getValue() );
          meta.setMaximumPoolSize( maxPoolSize );
        } catch ( NumberFormatException e ) {
          // TODO log exception and move on ...
        }
      }
      if ( poolParameterTree != null ) {
        Object[][] values = poolParameterTree.getValues();
        Properties properties = new Properties();
        for ( int i = 0; i < values.length; i++ ) {
          // Column 0 is the enabled checkbox; depending on the XUL impl it may come
          // back as a Boolean or as its String form.
          boolean isChecked = false;
          if ( values[i][0] instanceof Boolean ) {
            isChecked = ( (Boolean) values[i][0] ).booleanValue();
          } else {
            isChecked = Boolean.valueOf( (String) values[i][0] );
          }
          if ( !isChecked ) {
            continue;
          }
          String parameter = (String) values[i][1];
          String value = (String) values[i][2];
          if ( ( parameter != null )
            && ( parameter.trim().length() > 0 ) && ( value != null ) && ( value.trim().length() > 0 ) ) {
            properties.setProperty( parameter, value );
          }
        }
        meta.setConnectionPoolingProperties( properties );
      }
    }
  }
  /**
   * Pushes the given DatabaseMeta's state into the dialog controls: name, type,
   * access, connection-specific fields, options, advanced flags, clustering and
   * pooling settings, then re-syncs the dependent panels.
   *
   * @param meta the DatabaseMeta to display; no-op when null
   */
  private void setInfo( DatabaseMeta meta ) {
    if ( meta == null ) {
      return;
    }
    getControls();
    // Name:
    connectionNameBox.setValue( meta.getDisplayName() );
    PluginRegistry registry = PluginRegistry.getInstance();
    PluginInterface dInterface = registry.getPlugin( DatabasePluginType.class, meta.getPluginId() );
    // Connection type:
    // connectionMap is sorted, so the index into its key list matches the listbox order.
    int index = new ArrayList<String>( connectionMap.keySet() ).indexOf( dInterface.getName() );
    if ( index >= 0 ) {
      connectionBox.setSelectedIndex( index );
    } else {
      LogChannel.GENERAL.logError( "Unable to find database type "
        + dInterface.getName() + " in our connection map" );
    }
    // Access type:
    accessBox.setSelectedItem( DatabaseMeta.getAccessTypeDescLong( meta.getAccessType() ) );
    // this is broken out so we can set the cache information only when caching
    // connection values
    setConnectionSpecificInfo( meta );
    loadAccessData();
    // Port number:
    if ( portNumberBox != null ) {
      portNumberBox.setValue( meta.getDatabasePortNumberString() );
    }
    // Options Parameters:
    setOptionsData( meta.getExtraOptions() );
    // Advanced panel settings:
    if ( supportBooleanDataType != null ) {
      supportBooleanDataType.setChecked( meta.supportsBooleanDataType() );
    }
    if ( supportTimestampDataType != null ) {
      supportTimestampDataType.setChecked( meta.supportsTimestampDataType() );
    }
    if ( quoteIdentifiersCheck != null ) {
      quoteIdentifiersCheck.setChecked( meta.isQuoteAllFields() );
    }
    if ( lowerCaseIdentifiersCheck != null ) {
      lowerCaseIdentifiersCheck.setChecked( meta.isForcingIdentifiersToLowerCase() );
    }
    if ( upperCaseIdentifiersCheck != null ) {
      upperCaseIdentifiersCheck.setChecked( meta.isForcingIdentifiersToUpperCase() );
    }
    if ( preserveReservedCaseCheck != null ) {
      preserveReservedCaseCheck.setChecked( meta.preserveReservedCase() );
    }
    if ( preferredSchemaName != null ) {
      preferredSchemaName.setValue( Const.NVL( meta.getPreferredSchemaName(), "" ) );
    }
    if ( sqlBox != null ) {
      sqlBox.setValue( meta.getConnectSQL() == null ? "" : meta.getConnectSQL() );
    }
    // Clustering panel settings
    if ( clusteringCheck != null ) {
      clusteringCheck.setChecked( meta.isPartitioned() );
    }
    setClusterData( meta.getPartitioningInformation() );
    // Pooling panel settings
    if ( poolingCheck != null ) {
      poolingCheck.setChecked( meta.isUsingConnectionPool() );
    }
    if ( meta.isUsingConnectionPool() ) {
      if ( poolSizeBox != null ) {
        poolSizeBox.setValue( Integer.toString( meta.getInitialPoolSize() ) );
      }
      if ( maxPoolSizeBox != null ) {
        maxPoolSizeBox.setValue( Integer.toString( meta.getMaximumPoolSize() ) );
      }
      setPoolProperties( meta.getConnectionPoolingProperties() );
    }
    setReadOnly( meta.isReadOnly() );
    // Re-sync dependent panels with the freshly pushed state.
    setDeckChildIndex();
    onPoolingCheck();
    onClusterCheck();
  }
private void traverseDomSetReadOnly( XulComponent component, boolean readonly ) {
component.setDisabled( readonly );
List<XulComponent> children = component.getChildNodes();
if ( children != null && children.size() > 0 ) {
for ( XulComponent child : children ) {
child.setDisabled( readonly );
traverseDomSetReadOnly( child, readonly );
}
}
}
  /**
   * Toggles read-only mode for the whole dialog: disables every control in the DOM,
   * shows/hides the read-only notice, and (when read-only) re-enables just the
   * Cancel and Test buttons so the user can still leave or probe the connection.
   *
   * @param readonly true to put the dialog into read-only mode
   */
  private void setReadOnly( boolean readonly ) {
    // set the readonly status of EVERYTHING!
    traverseDomSetReadOnly( document.getRootElement(), readonly );
    noticeLabel.setVisible( readonly );
    if ( readonly ) {
      // now turn back on the cancel and test buttons
      if ( cancelButton != null ) {
        cancelButton.setDisabled( false );
      }
      if ( testButton != null ) {
        testButton.setDisabled( false );
      }
      noticeLabel.setValue( Messages.getString( "DatabaseDialog.label.ConnectionIsReadOnly" ) );
    }
  }
  /**
   * Validates the pooling parameter table: every enabled (checked) parameter must
   * have a non-blank value. When one or more enabled parameters have missing
   * values, a message dialog listing them is shown.
   *
   * @return true if all enabled pooling parameters have values, false otherwise
   */
  private boolean checkPoolingParameters() {
    List<String> returnList = new ArrayList<String>();
    if ( poolParameterTree != null ) {
      Object[][] values = poolParameterTree.getValues();
      for ( int i = 0; i < values.length; i++ ) {
        // Column 0 is the enabled checkbox; it may surface as Boolean or String.
        boolean isChecked = false;
        if ( values[i][0] instanceof Boolean ) {
          isChecked = ( (Boolean) values[i][0] ).booleanValue();
        } else {
          isChecked = Boolean.valueOf( (String) values[i][0] );
        }
        if ( !isChecked ) {
          continue;
        }
        String parameter = (String) values[i][1];
        String value = (String) values[i][2];
        if ( ( value == null ) || ( value.trim().length() <= 0 ) ) {
          returnList.add( parameter );
        }
      }
      if ( returnList.size() > 0 ) {
        String parameters = System.getProperty( "line.separator" );
        for ( String parameter : returnList ) {
          parameters = parameters.concat( parameter ).concat( System.getProperty( "line.separator" ) );
        }
        String message = Messages.getString( "DataHandler.USER_INVALID_PARAMETERS" ).concat( parameters );
        showMessage( message, false );
      }
    }
    return returnList.size() <= 0;
  }
private void setPoolProperties( Properties properties ) {
if ( poolParameterTree != null ) {
Object[][] values = poolParameterTree.getValues();
for ( int i = 0; i < values.length; i++ ) {
String parameter = (String) values[i][1];
boolean isChecked = properties.containsKey( parameter );
if ( !isChecked ) {
continue;
}
XulTreeItem item = poolParameterTree.getRootChildren().getItem( i );
item.getRow().addCellText( 0, "true" ); // checks the checkbox
String value = properties.getProperty( parameter );
item.getRow().addCellText( 2, value );
}
}
}
  /**
   * Resets every pooling parameter row's value cell to the parameter's default
   * value (rows whose parameter has no default are left untouched).
   */
  public void restoreDefaults() {
    if ( poolParameterTree != null ) {
      for ( int i = 0; i < poolParameterTree.getRootChildren().getItemCount(); i++ ) {
        XulTreeItem item = poolParameterTree.getRootChildren().getItem( i );
        String parameterName = item.getRow().getCell( 1 ).getLabel();
        String defaultValue =
          DatabaseConnectionPoolParameter
            .findParameter( parameterName, BaseDatabaseMeta.poolingParameters ).getDefaultValue();
        if ( ( defaultValue == null ) || ( defaultValue.trim().length() <= 0 ) ) {
          continue;
        }
        item.getRow().addCellText( 2, defaultValue );
      }
    }
  }
private void setDefaultPoolParameters() {
if ( poolParameterTree != null ) {
for ( DatabaseConnectionPoolParameter parameter : BaseDatabaseMeta.poolingParameters ) {
XulTreeRow row = poolParameterTree.getRootChildren().addNewRow();
row.addCellText( 0, "false" );
row.addCellText( 1, parameter.getParameter() );
row.addCellText( 2, parameter.getDefaultValue() );
}
}
}
  /**
   * Removes rows from the options table that either duplicate an option already
   * present in the given extra-options map (matched on the part after the
   * "DbType." prefix) or are blank, so setOptionsData() can re-add them cleanly.
   *
   * @param extraOptions options keyed as "DatabaseType.parameterName"
   */
  private void removeTypedOptions( Map<String, String> extraOptions ) {
    List<Integer> removeList = new ArrayList<Integer>();
    Object[][] values = optionsParameterTree.getValues();
    for ( int i = 0; i < values.length; i++ ) {
      // NOTE(review): assumes column 0 is never null (blank rows hold "") — verify
      String parameter = (String) values[i][0];
      // See if it's defined
      Iterator<String> keys = extraOptions.keySet().iterator();
      if ( extraOptions.keySet().size() > 0 ) {
        while ( keys.hasNext() ) {
          String param = keys.next();
          // Strip the "DatabaseType." prefix from the option key before comparing.
          String parameterKey = param.substring( param.indexOf( '.' ) + 1 );
          if ( parameter.equals( parameterKey ) || "".equals( parameter ) ) {
            // match, remove it if not already in the list
            if ( !removeList.contains( i ) ) {
              removeList.add( i );
            }
          }
        }
      } else if ( "".equals( parameter ) ) {
        if ( !removeList.contains( i ) ) {
          removeList.add( i );
        }
      }
    }
    // Remove from the bottom up so earlier indices stay valid.
    for ( int i = removeList.size() - 1; i >= 0; i-- ) {
      optionsParameterTree.getRootChildren().removeItem( removeList.get( i ) );
    }
  }
  /**
   * Populates the options table from the given extra-options map, showing only
   * options whose "DbType." prefix matches the currently selected database type,
   * then appends blank rows (five when the table was empty, one otherwise).
   *
   * @param extraOptions options keyed as "DatabaseType.parameterName"; may be null
   */
  private void setOptionsData( Map<String, String> extraOptions ) {
    if ( optionsParameterTree == null ) {
      return;
    }
    if ( extraOptions != null ) {
      removeTypedOptions( extraOptions );
      Iterator<String> keys = extraOptions.keySet().iterator();
      Object connection = connectionBox.getSelectedItem();
      String currentType = null;
      if ( connection != null ) {
        currentType = connectionMap.get( connection.toString() ).getPluginId();
      }
      while ( keys.hasNext() ) {
        String parameter = keys.next();
        String value = extraOptions.get( parameter );
        // The empty-option marker is displayed as a blank value.
        if ( ( value == null )
          || ( value.trim().length() <= 0 ) || ( value.equals( DatabaseMeta.EMPTY_OPTIONS_STRING ) ) ) {
          value = "";
        }
        // If the parameter starts with a database type code we show it in the options, otherwise we don't.
        // For example MySQL.defaultFetchSize
        //
        int dotIndex = parameter.indexOf( '.' );
        if ( dotIndex >= 0 ) {
          String parameterOption = parameter.substring( dotIndex + 1 );
          String databaseTypeString = parameter.substring( 0, dotIndex );
          String databaseType = databaseTypeString;
          if ( currentType != null && currentType.equals( databaseType ) ) {
            XulTreeRow row = optionsParameterTree.getRootChildren().addNewRow();
            row.addCellText( 0, parameterOption );
            row.addCellText( 1, value );
          }
        }
      }
    }
    // Add 5 blank rows if none are already there, otherwise, just add one.
    int numToAdd = 5;
    if ( extraOptions != null && extraOptions.keySet().size() > 0 ) {
      numToAdd = 1;
    }
    while ( numToAdd-- > 0 ) {
      XulTreeRow row = optionsParameterTree.getRootChildren().addNewRow();
      row.addCellText( 0, "" ); // easy way of putting new cells in the row
      row.addCellText( 1, "" );
    }
  }
private void setClusterData( PartitionDatabaseMeta[] clusterInformation ) {
if ( clusterParameterTree == null ) {
// there's nothing to do
return;
}
clusterParameterTree.getRootChildren().removeAll();
if ( ( clusterInformation != null ) && ( clusterParameterTree != null ) ) {
for ( int i = 0; i < clusterInformation.length; i++ ) {
PartitionDatabaseMeta meta = clusterInformation[i];
XulTreeRow row = clusterParameterTree.getRootChildren().addNewRow();
row.addCellText( 0, Const.NVL( meta.getPartitionId(), "" ) );
row.addCellText( 1, Const.NVL( meta.getHostname(), "" ) );
row.addCellText( 2, Const.NVL( meta.getPort(), "" ) );
row.addCellText( 3, Const.NVL( meta.getDatabaseName(), "" ) );
row.addCellText( 4, Const.NVL( meta.getUsername(), "" ) );
row.addCellText( 5, Const.NVL( meta.getPassword(), "" ) );
}
}
// Add 5 blank rows if none are already there, otherwise, just add one.
int numToAdd = 5;
/*
* if(clusterInformation != null && clusterInformation.length > 0){ numToAdd = 1; }
*/
while ( numToAdd-- > 0 ) {
XulTreeRow row = clusterParameterTree.getRootChildren().addNewRow();
row.addCellText( 0, "" ); // easy way of putting new cells in the row
row.addCellText( 1, "" );
row.addCellText( 2, "" );
row.addCellText( 3, "" );
row.addCellText( 4, "" );
row.addCellText( 5, "" );
}
}
  /**
   * Row-selection handler for the pooling parameter tree: shows the selected
   * parameter's description, and auto-checks the row's enabled checkbox when the
   * user is editing the value column.
   *
   * @param idx selected row index; -1 means no selection and is ignored
   */
  public void poolingRowChange( int idx ) {
    if ( idx != -1 ) {
      // Clamp the index to the known parameter range.
      if ( idx >= BaseDatabaseMeta.poolingParameters.length ) {
        idx = BaseDatabaseMeta.poolingParameters.length - 1;
      }
      if ( idx < 0 ) {
        idx = 0;
      }
      poolingDescription.setValue( BaseDatabaseMeta.poolingParameters[idx].getDescription() );
      XulTreeRow row = poolParameterTree.getRootChildren().getItem( idx ).getRow();
      if ( row.getSelectedColumnIndex() == 2 ) {
        // Editing the value column implies the parameter should be enabled.
        row.addCellText( 0, "true" );
      }
    }
  }
  /**
   * Copies the connection-type-specific controls (host, database, credentials,
   * tablespaces, SAP/Generic/MSSQL/Informix attributes, etc.) into the given
   * DatabaseMeta. Controls absent from the current layout are skipped.
   *
   * @param meta the DatabaseMeta to populate
   */
  private void getConnectionSpecificInfo( DatabaseMeta meta ) {
    // Hostname:
    if ( hostNameBox != null ) {
      meta.setHostname( hostNameBox.getValue() );
    }
    // Database name:
    if ( databaseNameBox != null ) {
      meta.setDBName( databaseNameBox.getValue() );
    }
    // Username:
    if ( userNameBox != null ) {
      meta.setUsername( userNameBox.getValue() );
    }
    // Password:
    if ( passwordBox != null ) {
      meta.setPassword( passwordBox.getValue() );
    }
    // if(this.portNumberBox != null){
    // meta.setDBPort(portNumberBox.getValue());
    // }
    // Streaming result cursor:
    if ( resultStreamingCursorCheck != null ) {
      meta.setStreamingResults( resultStreamingCursorCheck.isChecked() );
    }
    // Data tablespace:
    if ( dataTablespaceBox != null ) {
      meta.setDataTablespace( dataTablespaceBox.getValue() );
    }
    // Index tablespace
    if ( indexTablespaceBox != null ) {
      meta.setIndexTablespace( indexTablespaceBox.getValue() );
    }
    // The SQL Server instance name overrides the option.
    // Empty doesn't clear the option, we have mercy.
    if ( serverInstanceBox != null ) {
      meta.setSQLServerInstance( serverInstanceBox.getValue() );
      // Keep the "instance" row of the options table in sync with the instance textbox.
      if ( optionsParameterTree != null && optionsParameterTree.getRootChildren() != null ) {
        for ( int i = 0; i < optionsParameterTree.getRootChildren().getItemCount(); i++ ) {
          XulTreeItem potRow = optionsParameterTree.getRootChildren().getItem( i );
          if ( potRow != null && potRow.getRow() != null ) {
            XulTreeCell cell = potRow.getRow().getCell( 0 );
            XulTreeCell cell2 = potRow.getRow().getCell( 1 );
            if ( cell != null && cell.getLabel() != null && cell.getLabel().equals( "instance" ) ) {
              cell2.setLabel( serverInstanceBox.getValue() );
              if ( serverInstanceBox.getValue().trim().length() == 0 ) {
                // Blank instance: clear the parameter name so the row is dropped later.
                cell.setLabel( "" );
              }
            }
          }
        }
      }
    }
    // SQL Server double decimal separator
    if ( doubleDecimalSeparatorCheck != null ) {
      meta.setUsingDoubleDecimalAsSchemaTableSeparator( doubleDecimalSeparatorCheck.isChecked() );
    }
    // SAP Attributes...
    if ( languageBox != null ) {
      meta.getAttributes().put( SAPR3DatabaseMeta.ATTRIBUTE_SAP_LANGUAGE, languageBox.getValue() );
    }
    if ( systemNumberBox != null ) {
      meta.getAttributes().put( SAPR3DatabaseMeta.ATTRIBUTE_SAP_SYSTEM_NUMBER, systemNumberBox.getValue() );
    }
    if ( clientBox != null ) {
      meta.getAttributes().put( SAPR3DatabaseMeta.ATTRIBUTE_SAP_CLIENT, clientBox.getValue() );
    }
    // Generic settings...
    if ( customUrlBox != null ) {
      meta.getAttributes().put( GenericDatabaseMeta.ATRRIBUTE_CUSTOM_URL, customUrlBox.getValue() );
    }
    if ( customDriverClassBox != null ) {
      meta
        .getAttributes()
        .put( GenericDatabaseMeta.ATRRIBUTE_CUSTOM_DRIVER_CLASS, customDriverClassBox.getValue() );
    }
    // Server Name: (Informix)
    if ( serverNameBox != null ) {
      meta.setServername( serverNameBox.getValue() );
    }
    // Microsoft SQL Server Use Integrated Security
    if ( useIntegratedSecurityCheck != null ) {
      Boolean useIntegratedSecurity = useIntegratedSecurityCheck.isChecked();
      meta.getAttributes().put(
        MSSQLServerNativeDatabaseMeta.ATTRIBUTE_USE_INTEGRATED_SECURITY,
        useIntegratedSecurity != null ? useIntegratedSecurity.toString() : "false" );
    }
  }
private void setConnectionSpecificInfo( DatabaseMeta meta ) {
getControls();
if ( hostNameBox != null ) {
hostNameBox.setValue( meta.getHostname() );
}
// Database name:
if ( databaseNameBox != null ) {
databaseNameBox.setValue( meta.getDatabaseName() );
}
// Username:
if ( userNameBox != null ) {
userNameBox.setValue( meta.getUsername() );
}
// Password:
if ( passwordBox != null ) {
passwordBox.setValue( meta.getPassword() );
}
// if(this.portNumberBox != null){
// this.portNumberBox.setValue(meta.getDatabasePortNumberString());
// }
// Streaming result cursor:
if ( resultStreamingCursorCheck != null ) {
resultStreamingCursorCheck.setChecked( meta.isStreamingResults() );
}
// Data tablespace:
if ( dataTablespaceBox != null ) {
dataTablespaceBox.setValue( meta.getDataTablespace() );
}
// Index tablespace
if ( indexTablespaceBox != null ) {
indexTablespaceBox.setValue( meta.getIndexTablespace() );
}
if ( serverInstanceBox != null ) {
serverInstanceBox.setValue( meta.getSQLServerInstance() );
}
// SQL Server double decimal separator
if ( doubleDecimalSeparatorCheck != null ) {
doubleDecimalSeparatorCheck.setChecked( meta.isUsingDoubleDecimalAsSchemaTableSeparator() );
}
// SAP Attributes...
if ( languageBox != null ) {
languageBox.setValue( meta.getAttributes().getProperty( SAPR3DatabaseMeta.ATTRIBUTE_SAP_LANGUAGE ) );
}
if ( systemNumberBox != null ) {
systemNumberBox.setValue( meta.getAttributes().getProperty( SAPR3DatabaseMeta.ATTRIBUTE_SAP_SYSTEM_NUMBER ) );
}
if ( clientBox != null ) {
clientBox.setValue( meta.getAttributes().getProperty( SAPR3DatabaseMeta.ATTRIBUTE_SAP_CLIENT ) );
}
// Generic settings...
if ( customUrlBox != null ) {
customUrlBox.setValue( meta.getAttributes().getProperty( GenericDatabaseMeta.ATRRIBUTE_CUSTOM_URL ) );
}
if ( customDriverClassBox != null ) {
customDriverClassBox.setValue( meta.getAttributes().getProperty(
GenericDatabaseMeta.ATRRIBUTE_CUSTOM_DRIVER_CLASS ) );
}
// Server Name: (Informix)
if ( serverNameBox != null ) {
serverNameBox.setValue( meta.getServername() );
}
// Microsoft SQL Server Use Integrated Security
if ( useIntegratedSecurityCheck != null ) {
Object value = meta.getAttributes().get( MSSQLServerNativeDatabaseMeta.ATTRIBUTE_USE_INTEGRATED_SECURITY );
if ( value != null && value instanceof String ) {
String useIntegratedSecurity = (String) value;
useIntegratedSecurityCheck.setChecked( Boolean.parseBoolean( useIntegratedSecurity ) );
} else {
useIntegratedSecurityCheck.setChecked( false );
}
}
}
/**
 * Looks up every widget this controller may need from the XUL document and
 * caches the references in fields. Controls that do not exist in the current
 * dialog variant come back as null, so all downstream code must null-check.
 * Also wires a listener that disables the port field when a SQL Server
 * instance name is entered (opt-in via the "shouldDisablePortIfPopulated"
 * attribute on the instance textbox).
 */
protected void getControls() {
    // Not all of these controls are created at the same time.. that's OK, for now, just check
    // each one for null before using.
    dialogDeck = (XulDeck) document.getElementById( "dialog-panel-deck" );
    deckOptionsBox = (XulListbox) document.getElementById( "deck-options-list" );
    connectionBox = (XulListbox) document.getElementById( "connection-type-list" );
    accessBox = (XulListbox) document.getElementById( "access-type-list" );
    connectionNameBox = (XulTextbox) document.getElementById( "connection-name-text" );
    hostNameBox = (XulTextbox) document.getElementById( "server-host-name-text" );
    databaseNameBox = (XulTextbox) document.getElementById( "database-name-text" );
    portNumberBox = (XulTextbox) document.getElementById( "port-number-text" );
    userNameBox = (XulTextbox) document.getElementById( "username-text" );
    passwordBox = (XulTextbox) document.getElementById( "password-text" );
    dataTablespaceBox = (XulTextbox) document.getElementById( "data-tablespace-text" );
    indexTablespaceBox = (XulTextbox) document.getElementById( "index-tablespace-text" );
    serverInstanceBox = (XulTextbox) document.getElementById( "instance-text" );
    serverNameBox = (XulTextbox) document.getElementById( "server-name-text" );
    // Generic (custom URL/driver) and SAP-specific attribute fields.
    customUrlBox = (XulTextbox) document.getElementById( "custom-url-text" );
    customDriverClassBox = (XulTextbox) document.getElementById( "custom-driver-class-text" );
    languageBox = (XulTextbox) document.getElementById( "language-text" );
    systemNumberBox = (XulTextbox) document.getElementById( "system-number-text" );
    clientBox = (XulTextbox) document.getElementById( "client-text" );
    doubleDecimalSeparatorCheck = (XulCheckbox) document.getElementById( "decimal-separator-check" );
    resultStreamingCursorCheck = (XulCheckbox) document.getElementById( "result-streaming-check" );
    // Pooling / clustering panels.
    poolingCheck = (XulCheckbox) document.getElementById( "use-pool-check" );
    clusteringCheck = (XulCheckbox) document.getElementById( "use-cluster-check" );
    clusterParameterDescriptionLabel = (XulLabel) document.getElementById( "cluster-parameter-description-label" );
    poolSizeLabel = (XulLabel) document.getElementById( "pool-size-label" );
    poolSizeBox = (XulTextbox) document.getElementById( "pool-size-text" );
    maxPoolSizeLabel = (XulLabel) document.getElementById( "max-pool-size-label" );
    maxPoolSizeBox = (XulTextbox) document.getElementById( "max-pool-size-text" );
    poolParameterTree = (XulTree) document.getElementById( "pool-parameter-tree" );
    clusterParameterTree = (XulTree) document.getElementById( "cluster-parameter-tree" );
    optionsParameterTree = (XulTree) document.getElementById( "options-parameter-tree" );
    poolingDescription = (XulTextbox) document.getElementById( "pooling-description" );
    poolingParameterDescriptionLabel = (XulLabel) document.getElementById( "pool-parameter-description-label" );
    poolingDescriptionLabel = (XulLabel) document.getElementById( "pooling-description-label" );
    // Advanced options.
    supportBooleanDataType = (XulCheckbox) document.getElementById( "supports-boolean-data-type" );
    supportTimestampDataType = (XulCheckbox) document.getElementById( "supports-timestamp-data-type" );
    quoteIdentifiersCheck = (XulCheckbox) document.getElementById( "quote-identifiers-check" );
    lowerCaseIdentifiersCheck = (XulCheckbox) document.getElementById( "force-lower-case-check" );
    upperCaseIdentifiersCheck = (XulCheckbox) document.getElementById( "force-upper-case-check" );
    preserveReservedCaseCheck = (XulCheckbox) document.getElementById( "preserve-reserved-case" );
    preferredSchemaName = (XulTextbox) document.getElementById( "preferred-schema-name-text" );
    sqlBox = (XulTextbox) document.getElementById( "sql-text" );
    useIntegratedSecurityCheck = (XulCheckbox) document.getElementById( "use-integrated-security-check" );
    // Dialog chrome.
    acceptButton = (XulButton) document.getElementById( "general-datasource-window_accept" );
    cancelButton = (XulButton) document.getElementById( "general-datasource-window_cancel" );
    testButton = (XulButton) document.getElementById( "test-button" );
    noticeLabel = (XulLabel) document.getElementById( "notice-label" );
    if ( portNumberBox != null && serverInstanceBox != null ) {
        // Only wire the listener when the XUL attribute opts in.
        if ( Boolean.parseBoolean( serverInstanceBox.getAttributeValue( "shouldDisablePortIfPopulated" ) ) ) {
            serverInstanceBox.addPropertyChangeListener( new PropertyChangeListener() {
                @Override
                public void propertyChange( PropertyChangeEvent evt ) {
                    // React only to value changes, not other property events.
                    if ( "value".equals( evt.getPropertyName() ) ) {
                        disablePortIfInstancePopulated();
                    }
                }
            } );
        }
    }
}
/**
 * Disables the port-number field whenever a SQL Server instance name is
 * present, and re-enables it when the instance name is cleared.
 */
public void disablePortIfInstancePopulated() {
    String instanceName = serverInstanceBox.getValue();
    boolean instancePopulated = instanceName != null && instanceName.length() > 0;
    portNumberBox.setDisabled( instancePopulated );
}
/**
 * Pops up a modal XUL message box over the datasource dialog.
 *
 * @param message the text to display
 * @param scroll  when true, makes the box scrollable and sizes it to 500x400
 *                so long messages stay readable
 */
protected void showMessage( String message, boolean scroll ) {
    try {
        XulMessageBox messageBox = (XulMessageBox) document.createElement( "messagebox" );
        messageBox.setMessage( message );
        XulRoot dialogRoot = (XulRoot) document.getElementById( "general-datasource-window" );
        messageBox.setModalParent( dialogRoot.getRootObject() );
        if ( scroll ) {
            messageBox.setScrollable( true );
            messageBox.setWidth( 500 );
            messageBox.setHeight( 400 );
        }
        messageBox.open();
    } catch ( XulException e ) {
        System.out.println( "Error creating messagebox " + e.getMessage() );
    }
}
/**
 * Toggles the credential fields: when "use integrated security" is checked,
 * username and password are not used, so both boxes are disabled.
 */
public void handleUseSecurityCheckbox() {
    if ( useIntegratedSecurityCheck != null ) {
        boolean integrated = useIntegratedSecurityCheck.isChecked();
        userNameBox.setDisabled( integrated );
        passwordBox.setDisabled( integrated );
    }
}
}
| |
package heaney.lebold.bagsandweights;
import heaney.lebold.bagsandweights.constraints.BagFilledConstraint;
import heaney.lebold.bagsandweights.constraints.IConstraint;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Scanner;
public class BagsAndWeights {
//private int stepsTaken;
private List<Weight> weights;
private List<Bag> bags;
private List<IConstraint> constraints;
private List<List<Bag>> memoizationList;
private boolean topLevelValid;
private boolean subLevelValid;
/**
 * Creates a solver over the given problem definition.
 *
 * @param weights     weights still to be placed (mutated during the search)
 * @param bags        bags available for placement (mutated during the search)
 * @param constraints constraints every assignment must satisfy
 */
public BagsAndWeights(List<Weight> weights, List<Bag> bags, List<IConstraint> constraints){
    this.weights = weights;
    this.bags = bags;
    this.constraints = constraints;
    // Scratch flags shared with the heuristics' lambdas (a lambda cannot
    // assign to a local variable, so instance fields are used instead).
    this.topLevelValid = true;
    this.subLevelValid = true;
    //this.stepsTaken = 0;
    this.memoizationList = new ArrayList<List<Bag>>();
}
/**
 * Runs the backtracking solver and prints either the resulting assignment
 * (one block per bag: its items, item count, used/maximum weight and wasted
 * capacity) or a message stating that no solution exists.
 */
public void init(){
    //Call backtracking method
    if(this.solve()){ //true if solved
        //Print solution
        for(Bag bag: this.bags){
            System.out.print(bag.getID());
            bag.forEach((w) -> System.out.print(" " + w.getID()));
            System.out.println();
            System.out.println("number of items: " + bag.size());
            System.out.println("total weight: " + bag.getTotalWeight() + "/" + bag.getMaxWeight());
            System.out.println("wasted capacity: " + (bag.getMaxWeight() - bag.getTotalWeight()));
            System.out.println();
        }
        //System.out.println("\n\nTotal Steps Taken: " + this.stepsTaken);
    }
    else{
        System.out.println("There are no solutions with the given constraints.");
    }
}
/**
 * Recursive backtracking search: tries to place every remaining weight into
 * some bag without violating any constraint.
 *
 * <p>NOTE(review): the former "memoization" block was removed as dead code.
 * It only ever appended snapshots to {@code memoizationList} from inside a
 * loop over that same (initially empty) list, so the list stayed empty
 * forever and the loop never executed. Had it ever run it would have thrown
 * {@code ConcurrentModificationException} (adding while iterating) and,
 * because the snapshots held live {@code Bag} references, it would have
 * mis-reported every revisited state as a duplicate. A correct memo would
 * need deep copies of each bag's contents.</p>
 *
 * @return true if a complete, constraint-satisfying assignment was reached
 */
private boolean solve(){
    //this.stepsTaken++;
    // allFinal remains true only if every constraint is fully satisfied.
    boolean allFinal = true;
    for(IConstraint constraint: this.constraints){
        // A directly violated constraint prunes this branch immediately.
        if(!constraint.isValid(this.bags)){
            return false;
        }
        // Valid but not yet "final" means this cannot be a complete solution.
        if(!constraint.isFinal(this.bags)){
            allFinal = false;
        }
    }
    // All constraints satisfied and all weights placed. This is a solution!
    if(allFinal && this.weights.isEmpty()){
        return true;
    }
    // Copy of the weights still available at this depth of the search.
    List<Weight> weightsToTestBase = new ArrayList<Weight>(this.weights);
    for(Bag bag: this.bags){
        // Local candidate list for this bag.
        List<Weight> weightsToTest = new ArrayList<Weight>(weightsToTestBase);
        // Drop weights too heavy for this bag's remaining capacity.
        applyMRVHeuristic(bag, weightsToTest);
        // Order remaining candidates best -> worst via forward checking.
        applyLCVHeuristic(bag, weightsToTest);
        for(Weight weight: weightsToTest){
            if(bag.canFit(weight)){
                /* Take action (add weight to bag) */
                this.weights.remove(weight);
                weightsToTestBase.remove(weight);
                bag.addWeight(weight);
                // If the partial solution extends to a full one, escape.
                if(solve()){
                    return true;
                }
                /* Revoke action (pull weight from bag) */
                bag.removeWeight(weight);
                weightsToTestBase.add(weight);
                this.weights.add(weight);
            }
        }
    }
    // This partial state yields no solutions.
    return false;
}
/**
 * MRV-style pruning: sorts the candidate weights heaviest-first, then drops
 * every leading weight that can no longer fit in the bag's remaining
 * capacity. The list is modified in place.
 *
 * @param bag             the bag the weights are candidates for
 * @param unsortedWeights candidate weights; sorted and possibly shrunk in place
 */
private void applyMRVHeuristic(Bag bag, List<Weight> unsortedWeights){
    // Integer.compare avoids the overflow risk of a subtraction comparator.
    unsortedWeights.sort((w1,w2) -> Integer.compare(w2.getWeight(), w1.getWeight()));
    // Heaviest is at index 0; keep discarding until the head fits (or list is empty).
    while(!unsortedWeights.isEmpty() && bag.getTotalWeight() + unsortedWeights.get(0).getWeight() > bag.getMaxWeight()){
        unsortedWeights.remove(0);
    }
}
/**
 * LCV heuristic with one-step forward checking: ranks each candidate weight
 * by how many valid follow-up placements remain after tentatively putting it
 * in {@code bag}, then sorts {@code unsortedWeightsBase} best-first in place.
 * Candidates whose placement immediately violates a constraint are removed.
 *
 * @param bag                 the bag a weight is about to be placed into
 * @param unsortedWeightsBase candidate weights; reordered (and possibly
 *                            shrunk) in place
 */
private void applyLCVHeuristic(Bag bag, List<Weight> unsortedWeightsBase){
    // Local copy so we can iterate while removing from the base list.
    List<Weight> unsortedWeights = new ArrayList<Weight>(unsortedWeightsBase);
    // Ranking map: weight -> number of valid one-step continuations.
    HashMap<Weight,Integer> weightValues = new HashMap<Weight,Integer>();
    for(int n = unsortedWeights.size()-1; n >= 0; n--){
        Weight weight = unsortedWeights.get(n);
        if(!weightValues.containsKey(weight)){
            weightValues.put(weight, 0);
        }
        // Tentatively place the weight; if the state turns invalid, drop it
        // from the base list entirely.
        bag.addWeight(weight);
        // Instance field is used because a lambda cannot assign a local.
        this.topLevelValid = true;
        this.constraints.forEach((c) -> {
            if(!c.isValid(this.bags))
                this.topLevelValid = false;
        });
        if(!topLevelValid){
            unsortedWeightsBase.remove(weight);
        }
        // subList -> weights that could be placed after this one
        List<Weight> subList = new ArrayList<Weight>(unsortedWeights);
        subList.remove(weight);
        // validCount -> number of valid moves after last weight placed
        int validCount = 0;
        for(Bag b: this.bags){
            for(Weight subWeight: subList){
                // Probe subWeight in bag b; count the state if still valid.
                b.addWeight(subWeight);
                this.subLevelValid = true;
                this.constraints.forEach((c) -> {
                    if(!c.isValid(this.bags))
                        this.subLevelValid = false;
                });
                b.removeWeight(subWeight);
                if(this.subLevelValid){
                    validCount++;
                }
            }
        }
        // Record the score for this weight.
        weightValues.put(weight, validCount);
        // Undo the tentative placement.
        bag.removeWeight(weight);
    }
    // Sort best-first; Integer.compare avoids subtraction overflow.
    unsortedWeightsBase.sort((w1,w2) -> Integer.compare(weightValues.get(w2), weightValues.get(w1)));
}
/**
 * Entry point: parses the problem description from the given input file,
 * builds the weights, bags and constraints, and runs the solver.
 *
 * @param args exactly one argument: the path of the input data file
 */
public static void main(String[] args){
    //Handle arguments
    if(args.length != 1){
        System.out.println("Invalid syntax: $ java BagsAndWeights.jar <inputdata.txt>");
        return;
    }
    File inputFile = new File(args[0]);
    // try-with-resources guarantees the scanner is closed even when parsing
    // throws (the old code only closed it on the success path).
    try (Scanner scanner = new Scanner(inputFile)) {
        // Sections appear in the file in this fixed order, separated by "#####".
        InputParser[] sections = {InputParser.VARIABLES,
            InputParser.VALUES,InputParser.FITTING_LIMITS,
            InputParser.UNARY_INCLUSIVE,InputParser.UNARY_EXCLUSIVE,
            InputParser.BINARY_EQUALS,InputParser.BINARY_NOT_EQUALS,
            InputParser.MUTUAL_INCLUSIVE};
        scanner.nextLine(); //Bump past first "#####"
        /*List of sub-lists of objects (object Type different depending on section) */
        ArrayList<ArrayList<Object>> data = new ArrayList<ArrayList<Object>>();
        for(InputParser section:sections){
            /* List of objects to be loaded from input in this section */
            ArrayList<Object> objects = new ArrayList<Object>();
            data.add(objects);
            // Load in each line of data for this section
            while(scanner.hasNextLine()){
                String line = scanner.nextLine();
                if(line.startsWith("#####")) //If true, advance to next section
                    break;
                //Obtain object from InputParser
                objects.add(InputParser.getData(section, line));
            }
        }
        //Cast weights from Object sub-list to new List
        ArrayList<Weight> weights = new ArrayList<Weight>();
        data.get(0).forEach((w) -> weights.add((Weight)w));
        //Cast bags from Object sub-list to new List
        ArrayList<Bag> bags = new ArrayList<Bag>();
        data.get(1).forEach((b) -> bags.add((Bag)b));
        //Cast all constraints from all other sub-lists to new List
        List<IConstraint> constraints = new ArrayList<IConstraint>();
        for(int n=2; n < data.size(); n++)
            data.get(n).forEach( (c) -> constraints.add((IConstraint)c));
        //Add additional constraints not directly specified in input
        for(Bag bag: bags){
            constraints.add(new BagFilledConstraint(bag));
        }
        //Solve problem
        BagsAndWeights solver = new BagsAndWeights(weights,bags,constraints);
        solver.init();
    } catch (FileNotFoundException e) {
        System.out.println(" File \"" + args[0] + "\" not found.");
    }
}
}
| |
package edu.hm.hafner.util;
import javax.annotation.CheckForNull;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Formatter;
import java.util.List;

import org.eclipse.collections.impl.factory.Lists;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* Provides several helper methods to validate method arguments and class invariants thus supporting the design by
* contract concept (DBC). <p> Note: the static methods provided by this class use a fluent interface, i.e., in order to
* verify an assertion a method sequence needs to be called. </p> Available checks: <ul> <li>Boolean assertions, e.g.,
* {@code Ensure.that(condition).isTrue(); } </li> <li>String assertions, e.g., {@code Ensure.that(string).isNotEmpty();
* } </li> <li>Object assertions, e.g., {@code Ensure.that(element).isNotNull(); } </li> <li>Array assertions, e.g.,
* {@code Ensure.that(array).isNotEmpty(); } </li> <li>Iterable assertions, e.g., {@code
* Ensure.that(collection).isNotNull(); } </li> </ul>
*
* @author Ullrich Hafner
* @see <a href="http://en.wikipedia.org/wiki/Design_by_contract"> Design by Contract (Wikipedia)</a>
*/
@SuppressWarnings({"NonBooleanMethodNameMayNotStartWithQuestion", "ConstantConditions", "CyclicClassDependency"})
public final class Ensure {
/**
* Returns a boolean condition.
*
* @param value the value to check
* @return a boolean condition
*/
@SuppressWarnings("BooleanParameter")
public static BooleanCondition that(final boolean value) {
return new BooleanCondition(value);
}
/**
* Returns an object condition.
*
* @param value the value to check
* @param additionalValues the additional values to check
* @return an object condition
*/
public static ObjectCondition that(@CheckForNull final Object value, @CheckForNull final Object... additionalValues) {
return new ObjectCondition(value, additionalValues);
}
/**
* Returns an iterable condition.
*
* @param value the value to check
* @return an iterable condition
*/
public static IterableCondition that(@CheckForNull final Iterable<?> value) {
return new IterableCondition(value);
}
/**
* Returns a collection condition.
*
* @param value the value to check
* @return a collection condition
*/
public static CollectionCondition that(@CheckForNull final Collection<?> value) {
return new CollectionCondition(value);
}
/**
* Returns an array condition.
*
* @param value the value to check
* @return an array condition
*/
public static ArrayCondition that(@CheckForNull final Object[] value) {
return new ArrayCondition(value);
}
/**
* Returns a string condition.
*
* @param value the value to check
* @return a string condition
*/
public static StringCondition that(@CheckForNull final String value) {
return new StringCondition(value);
}
/**
* Returns an exception condition.
*
* @param value the value to check
* @return an exception condition
*/
public static ExceptionCondition that(@CheckForNull final Throwable value) {
return new ExceptionCondition(value);
}
/**
* Always throws an {@link AssertionError}.
*/
public static void thatStatementIsNeverReached() {
throwException("This statement should never be reached.");
}
/**
* Always throws an {@link AssertionError}.
*
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are more
* arguments than format specifiers, the extra arguments are ignored. The number of arguments is
* variable and may be zero.
*/
public static void thatStatementIsNeverReached(final String explanation, final Object... args) {
throwException(explanation, args);
}
/**
* Throws an {@link AssertionError} with the specified detail message.
*
* @param message a {@link Formatter formatted message} with the description of the error
* @param args Arguments referenced by the format specifiers in the formatted message. If there are more
* arguments than format specifiers, the extra arguments are ignored. The number of arguments is
* variable and may be zero.
* @throws AssertionError always thrown
*/
private static void throwException(final String message, final Object... args) {
throw new AssertionError(String.format(message, args));
}
/**
 * Throws a {@link NullPointerException} with the specified detail message.
 *
 * @param message a {@link Formatter formatted message} with the description of the error
 * @param args    Arguments referenced by the format specifiers in the formatted message. If there are more
 *                arguments than format specifiers, the extra arguments are ignored. The number of arguments is
 *                variable and may be zero.
 * @throws NullPointerException always thrown
 */
private static void throwNullPointerException(final String message, final Object... args) {
    throw new NullPointerException(String.format(message, args)); // NOPMD
}
private Ensure() {
// prevents instantiation
}
/**
* Assertions for iterables.
*/
public static class IterableCondition extends ObjectCondition {
private final Iterable<?> value;
/**
* Creates a new instance of {@code IterableCondition}.
*
* @param value value of the condition
*/
public IterableCondition(@CheckForNull final Iterable<?> value) {
super(value);
this.value = value;
}
/**
* Ensures that the given iterable is not {@code null} and contains at least one element. Additionally, ensures
* that each element of the iterable is not {@code null}.
*
* @throws AssertionError if the iterable is empty (or {@code null}), or at least one iterable element is {@code
* null}.
*/
public void isNotEmpty() {
isNotEmpty("Iterable is empty or NULL");
}
/**
 * Ensures that the given iterable is not {@code null} and contains at least one element. Additionally, ensures
 * that each element of the iterable is not {@code null}.
 *
 * @param explanation a {@link Formatter formatted message} explaining the assertion
 * @param args        Arguments referenced by the format specifiers in the formatted explanation. If there are
 *                    more arguments than format specifiers, the extra arguments are ignored. The number of
 *                    arguments is variable and may be zero.
 * @throws AssertionError if the iterable is empty (or {@code null}), or at least one iterable element is {@code
 *                        null}.
 */
public void isNotEmpty(final String explanation, final Object... args) {
    // Forward args so format specifiers in the explanation resolve correctly
    // on the null path too (was: isNotNull(explanation), dropping the args).
    isNotNull(explanation, args);
    if (value.iterator().hasNext()) {
        for (Object object : value) {
            if (object == null) {
                throwException(explanation, args);
            }
        }
    }
    else {
        throwException(explanation, args);
    }
}
}
/**
* Assertions for iterables.
*/
public static class CollectionCondition extends IterableCondition {
private final Collection<?> value;
/**
* Creates a new instance of {@code CollectionCondition}.
*
* @param value value of the condition
*/
@SuppressWarnings("AssignmentToCollectionOrArrayFieldFromParameter")
public CollectionCondition(@CheckForNull final Collection<?> value) {
super(value);
this.value = value;
}
/**
* Ensures that the given collection is not {@code null} and contains the specified element.
*
* @param element the element to find
* @throws AssertionError if the collection is {@code null} or if the specified element is not found
*/
public void contains(final Object element) {
contains(element, "Collection %s does not contain element '%s'", value, element);
}
/**
* Ensures that the given collection is not {@code null} and contains the specified element.
*
* @param element the element to find
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError if the collection is {@code null} or if the specified element is not found
*/
public void contains(final Object element, final String explanation, final Object... args) {
isNotNull(explanation, args);
if (!value.contains(element)) {
throwException(explanation, args);
}
}
/**
* Ensures that the given collection is not {@code null} and does not contain the specified element.
*
* @param element the element that must not be in the collection
* @throws AssertionError if the collection is {@code null} or if the specified element is part of the
* collection
*/
public void doesNotContain(final Object element) {
doesNotContain(element, "Collection '%s' contains element '%s'", value, element);
}
/**
* Ensures that the given collection is not {@code null} and does not contain the specified element.
*
* @param element the element that must not be in the collection
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError if the collection is {@code null} or if the specified element is part of the
* collection
*/
public void doesNotContain(final Object element, final String explanation, final Object... args) {
isNotNull(explanation, args);
if (value.contains(element)) {
throwException(explanation, args);
}
}
}
/**
* Assertions for arrays.
*/
public static class ArrayCondition extends ObjectCondition {
private final Object[] values;
/**
 * Creates a new instance of {@code ArrayCondition}.
 *
 * @param values value of the condition
 */
@SuppressWarnings({"AssignmentToCollectionOrArrayFieldFromParameter", "PMD.ArrayIsStoredDirectly"})
@SuppressFBWarnings("EI2")
public ArrayCondition(@CheckForNull final Object[] values) {
    super(values);
    this.values = values;
}
/**
* Ensures that the given array is not {@code null} and contains at least one element. Additionally, ensures
* that each element of the array is not {@code null}.
*
* @throws AssertionError if the array is empty (or {@code null}), or at least one array element is {@code
* null}.
*/
public void isNotEmpty() {
isNotEmpty("Array is empty or NULL");
}
/**
 * Ensures that the given array is not {@code null} and contains at least one element. Additionally, ensures
 * that each element of the array is not {@code null}.
 *
 * @param explanation a {@link Formatter formatted message} explaining the assertion
 * @param args        Arguments referenced by the format specifiers in the formatted explanation. If there are
 *                    more arguments than format specifiers, the extra arguments are ignored. The number of
 *                    arguments is variable and may be zero.
 * @throws AssertionError if the array is empty (or {@code null}), or at least one array element is {@code
 *                        null}.
 */
public void isNotEmpty(final String explanation, final Object... args) {
    // Forward args so format specifiers in the explanation resolve correctly
    // on the null path too (was: isNotNull(explanation), dropping the args).
    isNotNull(explanation, args);
    if (values.length == 0) {
        throwException(explanation, args);
    }
    else {
        for (Object object : values) {
            if (object == null) {
                throwException(explanation, args);
            }
        }
    }
}
}
/**
* Assertions for strings.
*/
public static class StringCondition extends ObjectCondition {
private final String value;
/**
* Creates a new instance of {@code StringCondition}.
*
* @param value value of the condition
*/
public StringCondition(@CheckForNull final String value) {
super(value);
this.value = value;
}
/**
* Ensures that the given string is not {@code null} and contains at least one character.
*
* @throws AssertionError if the string is empty (or {@code null})
*/
public void isNotEmpty() {
isNotEmpty("The string is empty or NULL");
}
/**
 * Ensures that the given string is not {@code null} and contains at least one character.
 *
 * @param explanation a {@link Formatter formatted message} explaining the assertion
 * @param args        Arguments referenced by the format specifiers in the formatted explanation. If there are
 *                    more arguments than format specifiers, the extra arguments are ignored. The number of
 *                    arguments is variable and may be zero.
 * @throws AssertionError if the string is empty (or {@code null})
 */
public void isNotEmpty(final String explanation, final Object... args) {
    // Forward args so format specifiers in the explanation resolve correctly
    // on the null path too (was: isNotNull(explanation), dropping the args).
    isNotNull(explanation, args);
    if (value.isEmpty()) {
        throwException(explanation, args);
    }
}
/**
* Ensures that the given string is not {@code null} and contains at least one non-whitespace character.
*
* @throws AssertionError if the string is empty (or {@code null})
*/
public void isNotBlank() {
isNotBlank("The string is blank");
}
/**
 * Ensures that the given string is not {@code null} and contains at least one non-whitespace character.
 *
 * @param explanation a {@link Formatter formatted message} explaining the assertion
 * @param args        Arguments referenced by the format specifiers in the formatted explanation. If there are
 *                    more arguments than format specifiers, the extra arguments are ignored. The number of
 *                    arguments is variable and may be zero.
 * @throws AssertionError if the string is empty (or {@code null})
 */
public void isNotBlank(final String explanation, final Object... args) {
    // Use the caller-supplied explanation for the null check as well, matching
    // every sibling assertion in this class (was: bare isNotNull(), which
    // discarded the explanation entirely on the null path).
    isNotNull(explanation, args);
    if (isBlank()) {
        throwException(explanation, args);
    }
}
/**
 * Returns whether the string consists solely of whitespace characters
 * (an empty string counts as blank).
 */
private boolean isBlank() {
    for (char c : value.toCharArray()) {
        if (!Character.isWhitespace(c)) {
            return false;
        }
    }
    return true;
}
}
/**
* Assertions for objects.
*/
public static class ObjectCondition {
private final Object value;
private final Object[] additionalValues;
/**
* Creates a new instance of {@code ObjectCondition}.
*
* @param value value of the condition
*/
public ObjectCondition(@CheckForNull final Object value) {
this(value, new Object[0]);
}
/**
* Creates a new instance of {@code ObjectCondition}.
*
* @param value value of the condition
* @param additionalValues additional values of the condition
*/
@SuppressFBWarnings("EI2")
@SuppressWarnings({"AssignmentToCollectionOrArrayFieldFromParameter", "PMD.ArrayIsStoredDirectly"})
public ObjectCondition(@CheckForNull final Object value, @CheckForNull final Object[] additionalValues) {
this.value = value;
this.additionalValues = additionalValues;
}
/**
* Ensures that the given object is not {@code null}.
*
* @throws AssertionError if the object is {@code null}
*/
public void isNotNull() {
isNotNull("Object is NULL");
}
/**
* Ensures that the given object is not {@code null}.
*
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError if the object is {@code null}
*/
public void isNotNull(final String explanation, final Object... args) {
if (value == null) {
throwNullPointerException(explanation, args);
}
for (Object additionalValue : additionalValues) {
if (additionalValue == null) {
throwNullPointerException(explanation, args);
}
}
}
/**
* Ensures that the given object is {@code null}.
*
* @throws AssertionError if the object is not {@code null}
*/
public void isNull() {
isNull("Object is not NULL");
}
/**
* Ensures that the given object is {@code null}.
*
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError if the object is not {@code null}
*/
@SuppressWarnings("VariableNotUsedInsideIf")
public void isNull(final String explanation, final Object... args) {
if (value != null) {
throwException(explanation, args);
}
}
/**
 * Ensures that the given object is an instance of one of the specified types.
 *
 * @param type            the type to check the specified object for
 * @param additionalTypes the additional types to check the specified object for
 * @throws AssertionError the specified object is not an instance of the given type (or {@code null})
 */
public void isInstanceOf(final Class<?> type, final Class<?>... additionalTypes) {
    isNotNull();
    // Plain JDK collections suffice here; avoids the Eclipse Collections
    // dependency for a trivial mutable-list build.
    List<Class<?>> types = new ArrayList<Class<?>>(Arrays.asList(additionalTypes));
    types.add(type);
    for (Class<?> clazz : types) {
        if (clazz.isInstance(value)) {
            return;
        }
    }
    throwException("Object is of wrong type. Actual: %s. Expected one of: %s", value, types);
}
/**
 * Ensures that the given object is an instance of the specified type.
 *
 * @param type        the type to check the specified object for
 * @param explanation a {@link Formatter formatted message} explaining the assertion
 * @param args        Arguments referenced by the format specifiers in the formatted explanation. If there are
 *                    more arguments than format specifiers, the extra arguments are ignored. The number of
 *                    arguments is variable and may be zero.
 * @throws AssertionError the specified object is not an instance of the given type (or {@code null})
 */
public void isInstanceOf(final Class<?> type, final String explanation, final Object... args) {
    // Forward args so format specifiers in the explanation resolve correctly
    // on the null path too (was: isNotNull(explanation), dropping the args).
    isNotNull(explanation, args);
    if (!type.isInstance(value)) {
        throwException(explanation, args);
    }
}
}
/**
* Assertions for booleans.
*/
public static class BooleanCondition {
/** The value of the condition. */
private final boolean value;
/**
* Creates a new instance of {@code BooleanCondition}.
*
* @param value value of the condition
*/
@SuppressWarnings("BooleanParameter")
public BooleanCondition(final boolean value) {
this.value = value;
}
/**
* Ensures that the given condition is {@code false}.
*
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError if the condition is {@code true}
*/
public void isFalse(final String explanation, final Object... args) {
if (value) {
throwException(explanation, args);
}
}
/**
* Ensures that the given condition is {@code false}.
*
* @throws AssertionError if the condition is {@code true}
*/
public void isFalse() {
isFalse("Value is not FALSE");
}
/**
* Ensures that the given condition is {@code true}.
*
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError if the condition is {@code false}
*/
public void isTrue(final String explanation, final Object... args) {
if (!value) {
throwException(explanation, args);
}
}
/**
* Ensures that the given condition is {@code true}.
*
* @throws AssertionError if the condition is {@code false}
*/
public void isTrue() {
isTrue("Value is not TRUE");
}
}
/**
* Assertions for exceptions.
*/
public static class ExceptionCondition {
/** The value of the condition. */
private final Throwable value;
/**
 * Creates a new instance of {@code ExceptionCondition}.
 *
 * @param value value of the condition
 */
public ExceptionCondition(@CheckForNull final Throwable value) {
    this.value = value;
}
/**
* Ensures that the exception is never thrown. I.e., this method will always throw an {@link AssertionError}.
*
* @param explanation a {@link Formatter formatted message} explaining the assertion
* @param args Arguments referenced by the format specifiers in the formatted explanation. If there are
* more arguments than format specifiers, the extra arguments are ignored. The number of
* arguments is variable and may be zero.
* @throws AssertionError always thrown
*/
public void isNeverThrown(final String explanation, final Object... args) {
throw new AssertionError(String.format(explanation, args), value);
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.